Commit e181ba75 authored by Piotr Maślanka

add `monkey_patch_parallel_compilation`

parent f81298c8
@@ -3,3 +3,4 @@
 * added `write_json_to_file`
 * added `read_json_from_file`
 * added `write_json_to_file_if_different`
+* added `satella.distutils`
Distutils extensions
====================
.. autofunction:: satella.distutils.monkey_patch_parallel_compilation
@@ -38,6 +38,7 @@ Visit the project's page at GitHub_!
 processes
 cassandra
 opentracing
+distutils
 Indices and tables
-__version__ = '2.14.23_a2'
+__version__ = '2.14.23_a3'
import typing as tp
import multiprocessing
import multiprocessing.pool

__all__ = ['monkey_patch_parallel_compilation']


def monkey_patch_parallel_compilation(cores: tp.Optional[int] = None) -> None:
    """
    Monkey-patch distutils to provide parallel compilation, even if you have
    a single extension built from multiple .c files.

    Invoke this in your setup.py file.

    :param cores: number of cores to use. Leave at the default (None) for autodetection.
    """
    if cores is None:
        cores = multiprocessing.cpu_count()

    # monkey-patch for parallel compilation
    def parallelCCompile(self, sources, output_dir=None, macros=None, include_dirs=None,
                         debug=0, extra_preargs=None, extra_postargs=None, depends=None):
        # these lines are copied from distutils.ccompiler.CCompiler directly
        macros, objects, extra_postargs, pp_opts, build = self._setup_compile(
            output_dir, macros, include_dirs, sources, depends, extra_postargs)
        cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)

        # parallel code: compile each object file in its own worker thread
        def _single_compile(obj):
            try:
                src, ext = build[obj]
            except KeyError:
                return
            self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)

        # convert to list, since imap is evaluated on demand
        with multiprocessing.pool.ThreadPool(cores) as pool:
            list(pool.imap(_single_compile, objects))
        return objects

    import distutils.ccompiler
    distutils.ccompiler.CCompiler.compile = parallelCCompile
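
For reference, a minimal setup.py sketch showing how the new helper is meant to be invoked; the project name, extension name and source files below are hypothetical:

# setup.py -- usage sketch; 'myproject', 'myext' and the .c sources are hypothetical
from setuptools import setup, Extension

from satella.distutils import monkey_patch_parallel_compilation

# Patch distutils before calling setup(), so the .c files of a single
# extension are compiled concurrently on all detected cores.
monkey_patch_parallel_compilation()

setup(
    name='myproject',
    version='0.1.0',
    ext_modules=[
        Extension('myext', sources=['src/a.c', 'src/b.c', 'src/c.c']),
    ],
)

A ThreadPool (rather than a process pool) is enough here because each _compile call spends its time waiting on an external compiler process, so the GIL is not a bottleneck.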