Parallelize compiling of individual extension modules

Kovid Goyal, 2014-07-12 18:19:32 +05:30
commit f49cd34288 (parent 6a5acd3fe7)
3 changed files with 61 additions and 14 deletions

setup/build_environment.py

@@ -41,6 +41,16 @@ QMAKE = os.environ.get('QMAKE', QMAKE)
 PKGCONFIG = find_executable('pkg-config')
 PKGCONFIG = os.environ.get('PKG_CONFIG', PKGCONFIG)
+if iswindows:
+    import win32api
+    cpu_count = win32api.GetSystemInfo()[5]
+else:
+    from multiprocessing import cpu_count
+    try:
+        cpu_count = cpu_count()
+    except NotImplementedError:
+        cpu_count = 1
 def run_pkgconfig(name, envvar, default, flag, prefix):
     ans = []
     if envvar:

@@ -8,16 +8,15 @@ __docformat__ = 'restructuredtext en'
 import textwrap, os, shlex, subprocess, glob, shutil, re, sys
 from distutils import sysconfig
-from multiprocessing import cpu_count
 from setup import Command, islinux, isbsd, isosx, SRC, iswindows, __version__
 from setup.build_environment import (chmlib_inc_dirs,
     podofo_inc, podofo_lib, podofo_error, pyqt, NMAKE, QMAKE,
-    msvc, MT, win_inc, win_lib, magick_inc_dirs, magick_lib_dirs,
+    msvc, win_inc, win_lib, magick_inc_dirs, magick_lib_dirs,
     magick_libs, chmlib_lib_dirs, sqlite_inc_dirs, icu_inc_dirs,
-    icu_lib_dirs, ft_libs, ft_lib_dirs, ft_inc_dirs,
+    icu_lib_dirs, ft_libs, ft_lib_dirs, ft_inc_dirs, cpu_count,
     zlib_libs, zlib_lib_dirs, zlib_inc_dirs, is64bit, glib_flags, fontconfig_flags)
+from setup.parallel_build import create_job, parallel_build
 isunix = islinux or isosx or isbsd
 make = 'make' if isunix else NMAKE
@@ -439,6 +438,8 @@ class Build(Command):
         einc = self.inc_dirs_to_cflags(ext.inc_dirs)
         if not os.path.exists(obj_dir):
             os.makedirs(obj_dir)
+        jobs = []
         for src in ext.sources:
             obj = self.j(obj_dir, os.path.splitext(self.b(src))[0]+'.o')
             objects.append(obj)
@@ -447,14 +448,17 @@
                 sinc = [inf+src] if iswindows else ['-c', src]
                 oinc = ['/Fo'+obj] if iswindows else ['-o', obj]
                 cmd = [compiler] + cflags + ext.cflags + einc + sinc + oinc
-                self.info(' '.join(cmd))
-                self.check_call(cmd)
+                jobs.append(create_job(cmd))
+        if jobs:
+            self.info('Compiling', ext.name)
+            if not parallel_build(jobs, self.info):
+                raise SystemExit(1)
         dest = self.dest(ext)
         elib = self.lib_dirs_to_ldflags(ext.lib_dirs)
         xlib = self.libraries_to_ldflags(ext.libraries)
         if self.newer(dest, objects+ext.extra_objs):
-            print 'Linking', ext.name
+            self.info('Linking', ext.name)
             cmd = [linker]
             if iswindows:
                 cmd += ldflags + ext.ldflags + elib + xlib + \
@@ -464,11 +468,6 @@
             self.info('\n\n', ' '.join(cmd), '\n\n')
             self.check_call(cmd)
             if iswindows:
-                # manifest = dest+'.manifest'
-                # cmd = [MT, '-manifest', manifest, '-outputresource:%s;2'%dest]
-                # self.info(*cmd)
-                # self.check_call(cmd)
-                # os.remove(manifest)
                 for x in ('.exp', '.lib'):
                     x = os.path.splitext(dest)[0]+x
                     if os.path.exists(x):
@@ -533,7 +532,7 @@ class Build(Command):
         os.chdir(bdir)
         try:
             self.check_call([QMAKE] + [self.b(pf)])
-            self.check_call([make] + ['-j%d'%(cpu_count() or 1)])
+            self.check_call([make] + ['-j%d'%(cpu_count or 1)])
         finally:
             os.chdir(cwd)
@@ -607,7 +606,7 @@ class Build(Command):
         os.chdir(src_dir)
         if self.newer(dest, sip['headers'] + sip['sources'] + ext.sources + ext.headers):
             self.check_call([QMAKE] + qmc + [proname])
-            self.check_call([make]+([] if iswindows else ['-j%d'%(cpu_count() or 1)]))
+            self.check_call([make]+([] if iswindows else ['-j%d'%(cpu_count or 1)]))
             shutil.copy2(os.path.realpath(name), dest)
             if iswindows:
                 shutil.copy2(name + '.manifest', dest + '.manifest')

setup/parallel_build.py (new file)

@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+# vim:fileencoding=utf-8
+from __future__ import (unicode_literals, division, absolute_import,
+                        print_function)
+
+__license__ = 'GPL v3'
+__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
+
+import subprocess
+from multiprocessing.dummy import Pool
+
+from setup.build_environment import cpu_count
+
+def run_worker(job):
+    cmd, human_text = job
+    human_text = human_text or b' '.join(cmd)
+    try:
+        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    except Exception as err:
+        return False, human_text, unicode(err)
+    stdout, stderr = p.communicate()
+    stdout = human_text + b'\n' + (stdout or b'')
+    ok = p.returncode == 0
+    return ok, stdout, (stderr or b'')
+
+def create_job(cmd, human_text=None):
+    return (cmd, human_text)
+
+def parallel_build(jobs, log):
+    p = Pool(cpu_count)
+    for ok, stdout, stderr in p.imap(run_worker, jobs):
+        log(stdout)
+        if stderr:
+            log(stderr)
+        if not ok:
+            return False
+    return True
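
For reference, a minimal usage sketch (not part of the commit) of how create_job() and parallel_build() are meant to be driven, mirroring the changes to Build.build() above; the compile_all() wrapper, the gcc command line and the log callable are illustrative assumptions, not calibre code:

from setup.parallel_build import create_job, parallel_build

def compile_all(sources, log):
    # One job per source file; parallel_build() runs the jobs on a thread pool
    # of cpu_count workers (multiprocessing.dummy.Pool) and returns False as
    # soon as a command exits with a non-zero status.
    jobs = []
    for src in sources:
        obj = src.rsplit('.', 1)[0] + '.o'
        cmd = ['gcc', '-c', src, '-o', obj]  # hypothetical compiler invocation
        jobs.append(create_job(cmd))
    if jobs and not parallel_build(jobs, log):
        raise SystemExit(1)

Build.build() follows the same pattern with the real per-extension compiler command and self.info as the logger.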