diff --git a/CHANGELOG.md b/CHANGELOG.md
index 011325eb50886ce799636f01f8c5b8b1a426d9dd..ca66895de681ccdda35d0d914b6c85e5af89e464 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1 +1,3 @@
 # v2.14.25
+
+* moved `distutils` to [snakehouse](https://pypi.org/project/snakehouse/)
diff --git a/docs/distutils.rst b/docs/distutils.rst
deleted file mode 100644
index b809deea272c3be1e1686015eadfd0a0c8df890d..0000000000000000000000000000000000000000
--- a/docs/distutils.rst
+++ /dev/null
@@ -1,13 +0,0 @@
-Distutils extensions
-====================
-
-Below was shamelessly ripped from StackOverflow_, with some changes by me.
-
-.. _StackOverflow: https://stackoverflow.com/questions/11013851/speeding-up-build-process-with-distutils
-
-.. autofunction:: satella.distutils.monkey_patch_parallel_compilation
-
-.. warning:: This function remains experimental and is quite likely to be
-   dropped from satella and moved into snakehouse_.
-
-.. _snakehouse: https://pypi.org/project/snakehouse/
diff --git a/docs/index.rst b/docs/index.rst
index a7eaa31a73716c897f97c2722df5b4f56d493a7d..7a13933cb0d8dd74e3500d4710d4380590bec94c 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -38,7 +38,6 @@ Visit the project's page at GitHub_!
    processes
    cassandra
    opentracing
-   distutils
 
 Indices and tables
diff --git a/satella/__init__.py b/satella/__init__.py
index dde696200cf142ab73be2bf676be60761d2780f3..bf864c1d6c0384b2c9f971ca92be727afcafc34f 100644
--- a/satella/__init__.py
+++ b/satella/__init__.py
@@ -1 +1 @@
-__version__ = '2.14.25a1'
+__version__ = '2.14.25a2'
diff --git a/satella/distutils.py b/satella/distutils.py
deleted file mode 100644
index ea1f87ef5bfbb837389534a25e1122aaee7f901d..0000000000000000000000000000000000000000
--- a/satella/distutils.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import typing as tp
-import multiprocessing
-import warnings
-
-__all__ = ['monkey_patch_parallel_compilation']
-
-# shamelessly ripped from
-# https://stackoverflow.com/questions/11013851/speeding-up-build-process-with-distutils
-# with some changes introduced by me
-
-
-def monkey_patch_parallel_compilation(cores: tp.Optional[int] = None) -> None:
-    """
-    This monkey-patches distutils to provide parallel compilation, even if you have
-    a single extension built from multiple .c files.
-
-    Invoke in your setup.py file
-
-    :param cores: amount of cores. Leave at default (None) for autodetection.
-    """
-    if cores is None:
-        cores = multiprocessing.cpu_count()
-
-    # monkey-patch for parallel compilation
-    def parallelCCompile(self, sources, output_dir=None, macros=None, include_dirs=None, debug=0,
-                         extra_preargs=None, extra_postargs=None, depends=None):
-        # those lines are copied from distutils.ccompiler.CCompiler directly
-        macros, objects, extra_postargs, pp_opts, build = self._setup_compile(output_dir, macros,
-                                                                              include_dirs, sources,
-                                                                              depends,
-                                                                              extra_postargs)
-        cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
-        # parallel code
-        import multiprocessing.pool
-
-        def single_compile(obj):
-            try:
-                src, ext = build[obj]
-            except KeyError:
-                return
-            self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
-
-        # evaluate everything
-        for _ in multiprocessing.pool.ThreadPool(cores).imap(single_compile, objects):
-            pass
-        return objects
-
-    import distutils.ccompiler
-    distutils.ccompiler.CCompiler.compile = parallelCCompile
-    warnings.warn('This function remains experimental and is likely to be moved into snakehouse',
-                  UserWarning)
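
For context on what this change removes: the deleted docstring says the function is meant to be invoked from a project's `setup.py`, where it monkey-patches `distutils.ccompiler.CCompiler.compile` to build a single extension's `.c` files in parallel. Below is a minimal sketch of such a `setup.py`, assuming the function is now importable from snakehouse (the import path, project name, extension name and source files are illustrative assumptions, not part of this diff).

```python
# Minimal sketch of a setup.py using the parallel-compilation monkey patch.
# Assumption: snakehouse exposes monkey_patch_parallel_compilation; fall back
# to a plain serial build if it does not.
from setuptools import setup, Extension

try:
    from snakehouse import monkey_patch_parallel_compilation  # assumed new home
except ImportError:
    monkey_patch_parallel_compilation = None

if monkey_patch_parallel_compilation is not None:
    # Patches distutils so the .c files of one extension are compiled on a
    # thread pool; passing no argument autodetects the number of cores.
    monkey_patch_parallel_compilation()

setup(
    name='myproject',                     # hypothetical project name
    ext_modules=[
        Extension('myproject._speedups',  # hypothetical extension
                  sources=['src/a.c', 'src/b.c', 'src/c.c']),
    ],
)
```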