      1 """
      2 Highly experimental script that compiles the CPython standard library using Cython.
      3 
      4 Execute the script either in the CPython 'Lib' directory or pass the
      5 option '--current-python' to compile the standard library of the running
      6 Python interpreter.
      7 
      8 Pass '-j N' to get a parallel build with N processes.
      9 
     10 Usage example::
     11 
     12     $ python cystdlib.py --current-python build_ext -i
     13 """

import os
import sys
from distutils.core import setup
from Cython.Build import cythonize
from Cython.Compiler import Options

# improve Python compatibility by allowing some broken code
Options.error_on_unknown_names = False
Options.error_on_uninitialized = False
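# (With both options disabled, names that Cython cannot resolve and reads of
# possibly uninitialised locals fail at runtime (NameError/UnboundLocalError),
# as in plain Python, instead of being rejected at compile time.)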

exclude_patterns = ['**/test/**/*.py', '**/tests/**/*.py', '**/__init__.py']
broken = [
    'idlelib/MultiCall.py',
    'email/utils.py',
    'multiprocessing/reduction.py',
    'multiprocessing/util.py',
    'threading.py',      # interrupt handling
    'lib2to3/fixes/fix_sys_exc.py',
    'traceback.py',
    'types.py',
    'enum.py',
    'importlib/_bootstrap',
]

default_directives = dict(
    auto_cpdef=False,   # enable when it's safe, see long list of failures below
    binding=True,
    set_initial_path='SOURCEFILE')
default_directives['optimize.inline_defnode_calls'] = True

special_directives = [
    (['pkgutil.py',
      'decimal.py',
      'datetime.py',
      'optparse.py',
      'sndhdr.py',
      'opcode.py',
      'ntpath.py',
      'urllib/request.py',
      'plat-*/TYPES.py',
      'plat-*/IN.py',
      'tkinter/_fix.py',
      'lib2to3/refactor.py',
      'webbrowser.py',
      'shutil.py',
      'multiprocessing/forking.py',
      'xml/sax/expatreader.py',
      'xmlrpc/client.py',
      'pydoc.py',
      'xml/etree/ElementTree.py',
      'posixpath.py',
      'inspect.py',
      'ctypes/util.py',
      'urllib/parse.py',
      'warnings.py',
      'tempfile.py',
      'trace.py',
      'heapq.py',
      'pickletools.py',
      'multiprocessing/connection.py',
      'hashlib.py',
      'getopt.py',
      'os.py',
      'types.py',
     ], dict(auto_cpdef=False)),
]
del special_directives[:]  # currently unused
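# A sketch of how per-module overrides could be re-enabled (the module name and
# directive below are only an example): drop the 'del' above and append
# (file list, directive dict) groups, e.g.
#
#     special_directives.append((['difflib.py'], dict(auto_cpdef=True)))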

def build_extensions(includes='**/*.py',
                     excludes=None,
                     special_directives=special_directives,
                     language_level=sys.version_info[0],
                     parallel=None):
    """Cythonize the selected stdlib modules and return a list of Extensions.

    Each group in 'special_directives' is compiled with its own directive
    overrides on top of 'default_directives'.  The modules in 'broken' are
    always excluded, in addition to 'excludes' (default: 'exclude_patterns').
    """
    if isinstance(includes, str):
        includes = [includes]
    excludes = list(excludes or exclude_patterns) + broken

    all_groups = (special_directives or []) + [(includes, {})]
    extensions = []
    for modules, directives in all_groups:
        exclude_now = excludes[:]
        for other_modules, _ in special_directives:
            if other_modules != modules:
                exclude_now.extend(other_modules)

        d = dict(default_directives)
        d.update(directives)

        extensions.extend(
            cythonize(
                modules,
                exclude=exclude_now,
                exclude_failures=True,
                language_level=language_level,
                compiler_directives=d,
                nthreads=parallel,
            ))
    return extensions
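
# Minimal programmatic usage sketch (paths and module selection are only an
# illustration, not part of the command-line interface):
#
#     os.chdir('/path/to/cpython/Lib')    # an assumed stdlib checkout
#     exts = build_extensions(includes='json/*.py', parallel=2)
#     sys.argv[1:] = ['build_ext', '-i']
#     build(exts)                         # compiles the modules in place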


def build(extensions):
    """Run a distutils build for 'extensions'; sys.argv[1:] selects the command.

    Returns the extensions together with a success flag.
    """
    try:
        setup(ext_modules=extensions)
        result = True
    except:
        # bare except: distutils reports some build failures via SystemExit
        import traceback
        print('error building extensions %s' % (
            [ext.name for ext in extensions],))
        traceback.print_exc()
        result = False
    return extensions, result


def _build(args):
    # worker for the multiprocessing pool: build one extension in this process
    sys_args, ext = args
    sys.argv[1:] = sys_args
    return build([ext])


def parse_args():
    from optparse import OptionParser
    parser = OptionParser('%prog [options] [LIB_DIR (default: ./Lib)]')
    parser.add_option(
        '--current-python', dest='current_python', action='store_true',
        help='compile the stdlib of the running Python')
    parser.add_option(
        '-j', '--jobs', dest='parallel_jobs', metavar='N',
        type=int, default=1,
        help='run builds in N parallel jobs (default: 1)')
    parser.add_option(
        '-x', '--exclude', dest='excludes', metavar='PATTERN',
        action="append", help='exclude modules/packages matching PATTERN')
    options, args = parser.parse_args()
    if not args:
        args = ['./Lib']
    elif len(args) > 1:
        parser.error('only one argument expected, got %d' % len(args))
    return options, args
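
# Example invocations accepted by parse_args() (the distutils command
# 'build_ext -i' is appended automatically in the __main__ block below):
#
#     $ python cystdlib.py --current-python
#     $ python cystdlib.py -j 8 -x 'tkinter/*' /path/to/cpython/Lib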


if __name__ == '__main__':
    options, args = parse_args()
    if options.current_python:
        # assume that the stdlib is where the "os" module lives
        os.chdir(os.path.dirname(os.__file__))
    else:
        os.chdir(args[0])

    pool = None
    parallel_jobs = options.parallel_jobs
    if options.parallel_jobs:
        try:
            import multiprocessing
            pool = multiprocessing.Pool(parallel_jobs)
            print("Building in %d parallel processes" % parallel_jobs)
        except (ImportError, OSError):
            print("Not building in parallel")
            parallel_jobs = 0

    extensions = build_extensions(
        parallel=parallel_jobs,
        excludes=options.excludes)
    sys_args = ['build_ext', '-i']
    if pool is not None:
        results = pool.map(_build, [(sys_args, ext) for ext in extensions])
        pool.close()
        pool.join()
        for built_exts, result in results:
            if not result:
                print("building extension %s failed" % (built_exts[0].name,))
    else:
        sys.argv[1:] = sys_args
        build(extensions)