# Home | History | Annotate | Download | only in util
      1 # Copyright 2013 The Chromium Authors. All rights reserved.
      2 # Use of this source code is governed by a BSD-style license that can be
      3 # found in the LICENSE file.
      4 
      5 import ast
      6 import contextlib
      7 import fnmatch
      8 import json
      9 import os
     10 import pipes
     11 import re
     12 import shlex
     13 import shutil
     14 import subprocess
     15 import sys
     16 import tempfile
     17 import zipfile
     18 
     19 
# Absolute, normalized path to the root of the Chromium checkout: four
# directory levels up from this file's own directory.
CHROMIUM_SRC = os.path.normpath(
    os.path.join(os.path.dirname(__file__),
                 os.pardir, os.pardir, os.pardir, os.pardir))
# Location of the bundled colorama (terminal color) library sources.
COLORAMA_ROOT = os.path.join(CHROMIUM_SRC,
                             'third_party', 'colorama', 'src')
     25 
     26 
     27 @contextlib.contextmanager
     28 def TempDir():
     29   dirname = tempfile.mkdtemp()
     30   try:
     31     yield dirname
     32   finally:
     33     shutil.rmtree(dirname)
     34 
     35 
     36 def MakeDirectory(dir_path):
     37   try:
     38     os.makedirs(dir_path)
     39   except OSError:
     40     pass
     41 
     42 
     43 def DeleteDirectory(dir_path):
     44   if os.path.exists(dir_path):
     45     shutil.rmtree(dir_path)
     46 
     47 
     48 def Touch(path, fail_if_missing=False):
     49   if fail_if_missing and not os.path.exists(path):
     50     raise Exception(path + ' doesn\'t exist.')
     51 
     52   MakeDirectory(os.path.dirname(path))
     53   with open(path, 'a'):
     54     os.utime(path, None)
     55 
     56 
     57 def FindInDirectory(directory, filename_filter):
     58   files = []
     59   for root, _dirnames, filenames in os.walk(directory):
     60     matched_files = fnmatch.filter(filenames, filename_filter)
     61     files.extend((os.path.join(root, f) for f in matched_files))
     62   return files
     63 
     64 
     65 def FindInDirectories(directories, filename_filter):
     66   all_files = []
     67   for directory in directories:
     68     all_files.extend(FindInDirectory(directory, filename_filter))
     69   return all_files
     70 
     71 
def ParseGnList(gn_string):
  # GN list literals (e.g. '[ "a", "b" ]') are also valid Python literals,
  # so ast.literal_eval parses them safely (no arbitrary code execution).
  return ast.literal_eval(gn_string)
     74 
     75 
     76 def ParseGypList(gyp_string):
     77   # The ninja generator doesn't support $ in strings, so use ## to
     78   # represent $.
     79   # TODO(cjhopman): Remove when
     80   # https://code.google.com/p/gyp/issues/detail?id=327
     81   # is addressed.
     82   gyp_string = gyp_string.replace('##', '$')
     83 
     84   if gyp_string.startswith('['):
     85     return ParseGnList(gyp_string)
     86   return shlex.split(gyp_string)
     87 
     88 
     89 def CheckOptions(options, parser, required=None):
     90   if not required:
     91     return
     92   for option_name in required:
     93     if getattr(options, option_name) is None:
     94       parser.error('--%s is required' % option_name.replace('_', '-'))
     95 
     96 
     97 def WriteJson(obj, path, only_if_changed=False):
     98   old_dump = None
     99   if os.path.exists(path):
    100     with open(path, 'r') as oldfile:
    101       old_dump = oldfile.read()
    102 
    103   new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))
    104 
    105   if not only_if_changed or old_dump != new_dump:
    106     with open(path, 'w') as outfile:
    107       outfile.write(new_dump)
    108 
    109 
    110 def ReadJson(path):
    111   with open(path, 'r') as jsonfile:
    112     return json.load(jsonfile)
    113 
    114 
    115 class CalledProcessError(Exception):
    116   """This exception is raised when the process run by CheckOutput
    117   exits with a non-zero exit code."""
    118 
    119   def __init__(self, cwd, args, output):
    120     super(CalledProcessError, self).__init__()
    121     self.cwd = cwd
    122     self.args = args
    123     self.output = output
    124 
    125   def __str__(self):
    126     # A user should be able to simply copy and paste the command that failed
    127     # into their shell.
    128     copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
    129         ' '.join(map(pipes.quote, self.args)))
    130     return 'Command failed: {}\n{}'.format(copyable_command, self.output)
    131 
    132 
    133 # This can be used in most cases like subprocess.check_output(). The output,
    134 # particularly when the command fails, better highlights the command's failure.
    135 # If the command fails, raises a build_utils.CalledProcessError.
    136 def CheckOutput(args, cwd=None,
    137                 print_stdout=False, print_stderr=True,
    138                 stdout_filter=None,
    139                 stderr_filter=None,
    140                 fail_func=lambda returncode, stderr: returncode != 0):
    141   if not cwd:
    142     cwd = os.getcwd()
    143 
    144   child = subprocess.Popen(args,
    145       stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
    146   stdout, stderr = child.communicate()
    147 
    148   if stdout_filter is not None:
    149     stdout = stdout_filter(stdout)
    150 
    151   if stderr_filter is not None:
    152     stderr = stderr_filter(stderr)
    153 
    154   if fail_func(child.returncode, stderr):
    155     raise CalledProcessError(cwd, args, stdout + stderr)
    156 
    157   if print_stdout:
    158     sys.stdout.write(stdout)
    159   if print_stderr:
    160     sys.stderr.write(stderr)
    161 
    162   return stdout
    163 
    164 
    165 def GetModifiedTime(path):
    166   # For a symlink, the modified time should be the greater of the link's
    167   # modified time and the modified time of the target.
    168   return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)
    169 
    170 
    171 def IsTimeStale(output, inputs):
    172   if not os.path.exists(output):
    173     return True
    174 
    175   output_time = GetModifiedTime(output)
    176   for i in inputs:
    177     if GetModifiedTime(i) > output_time:
    178       return True
    179   return False
    180 
    181 
    182 def IsDeviceReady():
    183   device_state = CheckOutput(['adb', 'get-state'])
    184   return device_state.strip() == 'device'
    185 
    186 
    187 def CheckZipPath(name):
    188   if os.path.normpath(name) != name:
    189     raise Exception('Non-canonical zip path: %s' % name)
    190   if os.path.isabs(name):
    191     raise Exception('Absolute zip path: %s' % name)
    192 
    193 
    194 def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None):
    195   if path is None:
    196     path = os.getcwd()
    197   elif not os.path.exists(path):
    198     MakeDirectory(path)
    199 
    200   with zipfile.ZipFile(zip_path) as z:
    201     for name in z.namelist():
    202       if name.endswith('/'):
    203         continue
    204       if pattern is not None:
    205         if not fnmatch.fnmatch(name, pattern):
    206           continue
    207       CheckZipPath(name)
    208       if no_clobber:
    209         output_path = os.path.join(path, name)
    210         if os.path.exists(output_path):
    211           raise Exception(
    212               'Path already exists from zip: %s %s %s'
    213               % (zip_path, name, output_path))
    214 
    215     z.extractall(path=path)
    216 
    217 
    218 def DoZip(inputs, output, base_dir):
    219   with zipfile.ZipFile(output, 'w') as outfile:
    220     for f in inputs:
    221       CheckZipPath(os.path.relpath(f, base_dir))
    222       outfile.write(f, os.path.relpath(f, base_dir))
    223 
    224 
    225 def ZipDir(output, base_dir):
    226   with zipfile.ZipFile(output, 'w') as outfile:
    227     for root, _, files in os.walk(base_dir):
    228       for f in files:
    229         path = os.path.join(root, f)
    230         archive_path = os.path.relpath(path, base_dir)
    231         CheckZipPath(archive_path)
    232         outfile.write(path, archive_path)
    233 
    234 
    235 def MergeZips(output, inputs, exclude_patterns=None):
    236   def Allow(name):
    237     if exclude_patterns is not None:
    238       for p in exclude_patterns:
    239         if fnmatch.fnmatch(name, p):
    240           return False
    241     return True
    242 
    243   with zipfile.ZipFile(output, 'w') as out_zip:
    244     for in_file in inputs:
    245       with zipfile.ZipFile(in_file, 'r') as in_zip:
    246         for name in in_zip.namelist():
    247           if Allow(name):
    248             out_zip.writestr(name, in_zip.read(name))
    249 
    250 
    251 def PrintWarning(message):
    252   print 'WARNING: ' + message
    253 
    254 
    255 def PrintBigWarning(message):
    256   print '*****     ' * 8
    257   PrintWarning(message)
    258   print '*****     ' * 8
    259 
    260 
    261 def GetSortedTransitiveDependencies(top, deps_func):
    262   """Gets the list of all transitive dependencies in sorted order.
    263 
    264   There should be no cycles in the dependency graph.
    265 
    266   Args:
    267     top: a list of the top level nodes
    268     deps_func: A function that takes a node and returns its direct dependencies.
    269   Returns:
    270     A list of all transitive dependencies of nodes in top, in order (a node will
    271     appear in the list at a higher index than all of its dependencies).
    272   """
    273   def Node(dep):
    274     return (dep, deps_func(dep))
    275 
    276   # First: find all deps
    277   unchecked_deps = list(top)
    278   all_deps = set(top)
    279   while unchecked_deps:
    280     dep = unchecked_deps.pop()
    281     new_deps = deps_func(dep).difference(all_deps)
    282     unchecked_deps.extend(new_deps)
    283     all_deps = all_deps.union(new_deps)
    284 
    285   # Then: simple, slow topological sort.
    286   sorted_deps = []
    287   unsorted_deps = dict(map(Node, all_deps))
    288   while unsorted_deps:
    289     for library, dependencies in unsorted_deps.items():
    290       if not dependencies.intersection(unsorted_deps.keys()):
    291         sorted_deps.append(library)
    292         del unsorted_deps[library]
    293 
    294   return sorted_deps
    295 
    296 
    297 def GetPythonDependencies():
    298   """Gets the paths of imported non-system python modules.
    299 
    300   A path is assumed to be a "system" import if it is outside of chromium's
    301   src/. The paths will be relative to the current directory.
    302   """
    303   module_paths = (m.__file__ for m in sys.modules.itervalues()
    304                   if m is not None and hasattr(m, '__file__'))
    305 
    306   abs_module_paths = map(os.path.abspath, module_paths)
    307 
    308   non_system_module_paths = [
    309       p for p in abs_module_paths if p.startswith(CHROMIUM_SRC)]
    310   def ConvertPycToPy(s):
    311     if s.endswith('.pyc'):
    312       return s[:-1]
    313     return s
    314 
    315   non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
    316   non_system_module_paths = map(os.path.relpath, non_system_module_paths)
    317   return sorted(set(non_system_module_paths))
    318 
    319 
    320 def AddDepfileOption(parser):
    321   parser.add_option('--depfile',
    322                     help='Path to depfile. This must be specified as the '
    323                     'action\'s first output.')
    324 
    325 
    326 def WriteDepfile(path, dependencies):
    327   with open(path, 'w') as depfile:
    328     depfile.write(path)
    329     depfile.write(': ')
    330     depfile.write(' '.join(dependencies))
    331     depfile.write('\n')
    332 
    333 
    334 def ExpandFileArgs(args):
    335   """Replaces file-arg placeholders in args.
    336 
    337   These placeholders have the form:
    338     @FileArg(filename:key1:key2:...:keyn)
    339 
    340   The value of such a placeholder is calculated by reading 'filename' as json.
    341   And then extracting the value at [key1][key2]...[keyn].
    342 
    343   Note: This intentionally does not return the list of files that appear in such
    344   placeholders. An action that uses file-args *must* know the paths of those
    345   files prior to the parsing of the arguments (typically by explicitly listing
    346   them in the action's inputs in build files).
    347   """
    348   new_args = list(args)
    349   file_jsons = dict()
    350   r = re.compile('@FileArg\((.*?)\)')
    351   for i, arg in enumerate(args):
    352     match = r.search(arg)
    353     if not match:
    354       continue
    355 
    356     if match.end() != len(arg):
    357       raise Exception('Unexpected characters after FileArg: ' + arg)
    358 
    359     lookup_path = match.group(1).split(':')
    360     file_path = lookup_path[0]
    361     if not file_path in file_jsons:
    362       file_jsons[file_path] = ReadJson(file_path)
    363 
    364     expansion = file_jsons[file_path]
    365     for k in lookup_path[1:]:
    366       expansion = expansion[k]
    367 
    368     new_args[i] = arg[:match.start()] + str(expansion)
    369 
    370   return new_args
    371 
    372