      1 # Copyright (c) 2010 Google Inc. All rights reserved.
      2 #
      3 # Redistribution and use in source and binary forms, with or without
      4 # modification, are permitted provided that the following conditions are
      5 # met:
      6 #
      7 #     * Redistributions of source code must retain the above copyright
      8 # notice, this list of conditions and the following disclaimer.
      9 #     * Redistributions in binary form must reproduce the above
     10 # copyright notice, this list of conditions and the following disclaimer
     11 # in the documentation and/or other materials provided with the
     12 # distribution.
     13 #     * Neither the name of Google Inc. nor the names of its
     14 # contributors may be used to endorse or promote products derived from
     15 # this software without specific prior written permission.
     16 #
     17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     21 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     28 
     29 import json
     30 import logging
     31 import optparse
     32 import re
     33 import sys
     34 import time
     35 import traceback
     36 import urllib
     37 import urllib2
     38 
     39 from webkitpy.common.checkout.baselineoptimizer import BaselineOptimizer
     40 from webkitpy.common.memoized import memoized
     41 from webkitpy.common.system.executive import ScriptError
     42 from webkitpy.layout_tests.controllers.test_result_writer import TestResultWriter
     43 from webkitpy.layout_tests.models import test_failures
     44 from webkitpy.layout_tests.models.test_expectations import TestExpectations, BASELINE_SUFFIX_LIST, SKIP
     45 from webkitpy.layout_tests.port import builders
     46 from webkitpy.layout_tests.port import factory
     47 from webkitpy.tool.multicommandtool import AbstractDeclarativeCommand
     48 
     49 
     50 _log = logging.getLogger(__name__)
     51 
     52 
     53 # FIXME: Should TestResultWriter know how to compute this string?
     54 def _baseline_name(fs, test_name, suffix):
     55     return fs.splitext(test_name)[0] + TestResultWriter.FILENAME_SUFFIX_EXPECTED + "." + suffix
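            # Illustrative example, assuming TestResultWriter.FILENAME_SUFFIX_EXPECTED is "-expected":
            #   _baseline_name(fs, "fast/dom/prototype.html", "txt") -> "fast/dom/prototype-expected.txt"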
     56 
     57 
     58 class AbstractRebaseliningCommand(AbstractDeclarativeCommand):
     59     # not overriding execute() - pylint: disable=W0223
     60 
     61     no_optimize_option = optparse.make_option('--no-optimize', dest='optimize', action='store_false', default=True,
     62         help=('Do not optimize/de-dup the expectations after rebaselining (default is to de-dup automatically). '
     63               'You can use "webkit-patch optimize-baselines" to optimize separately.'))
     64 
     65     platform_options = factory.platform_options(use_globs=True)
     66 
     67     results_directory_option = optparse.make_option("--results-directory", help="Local results directory to use")
     68 
     69     suffixes_option = optparse.make_option("--suffixes", default=','.join(BASELINE_SUFFIX_LIST), action="store",
     70         help="Comma-separated-list of file types to rebaseline")
     71 
     72     def __init__(self, options=None):
     73         super(AbstractRebaseliningCommand, self).__init__(options=options)
     74         self._baseline_suffix_list = BASELINE_SUFFIX_LIST
     75         self._scm_changes = {'add': [], 'delete': [], 'remove-lines': []}
     76 
     77     def _add_to_scm_later(self, path):
     78         self._scm_changes['add'].append(path)
     79 
     80     def _delete_from_scm_later(self, path):
     81         self._scm_changes['delete'].append(path)
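            # The queued changes are eventually dumped as one JSON object on stdout, e.g. (illustrative):
            #   {"add": ["/abs/path/foo-expected.txt"], "delete": [], "remove-lines": [{"builder": "WebKit Linux", "test": "fast/dom/foo.html"}]}
            # Parent commands parse these lines in AbstractParallelRebaselineCommand._serial_commands().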
     82 
     83 
     84 class BaseInternalRebaselineCommand(AbstractRebaseliningCommand):
     85     def __init__(self):
     86         super(BaseInternalRebaselineCommand, self).__init__(options=[
     87             self.results_directory_option,
     88             self.suffixes_option,
     89             optparse.make_option("--builder", help="Builder to pull new baselines from"),
     90             optparse.make_option("--test", help="Test to rebaseline"),
     91             ])
     92 
     93     def _baseline_directory(self, builder_name):
     94         port = self._tool.port_factory.get_from_builder_name(builder_name)
     95         override_dir = builders.rebaseline_override_dir(builder_name)
     96         if override_dir:
     97             return self._tool.filesystem.join(port.layout_tests_dir(), 'platform', override_dir)
     98         return port.baseline_version_dir()
     99 
    100     def _test_root(self, test_name):
    101         return self._tool.filesystem.splitext(test_name)[0]
    102 
    103     def _file_name_for_actual_result(self, test_name, suffix):
    104         return "%s-actual.%s" % (self._test_root(test_name), suffix)
    105 
    106     def _file_name_for_expected_result(self, test_name, suffix):
    107         return "%s-expected.%s" % (self._test_root(test_name), suffix)
    108 
    109 
    110 class CopyExistingBaselinesInternal(BaseInternalRebaselineCommand):
    111     name = "copy-existing-baselines-internal"
    112     help_text = "Copy existing baselines down one level in the baseline order to ensure new baselines don't break existing passing platforms."
    113 
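            # Illustrative example (hypothetical paths): if a port's baseline_search_path() is
            # [".../platform/linux", ".../platform/win", ".../platform/mac"], then the immediate
            # predecessor of ".../platform/win" in the fallback order is "linux".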
    114     @memoized
    115     def _immediate_predecessors_in_fallback(self, path_to_rebaseline):
    116         port_names = self._tool.port_factory.all_port_names()
    117         immediate_predecessors_in_fallback = []
    118         for port_name in port_names:
    119             port = self._tool.port_factory.get(port_name)
    120             if not port.buildbot_archives_baselines():
    121                 continue
    122             baseline_search_path = port.baseline_search_path()
    123             try:
    124                 index = baseline_search_path.index(path_to_rebaseline)
    125                 if index:
    126                     immediate_predecessors_in_fallback.append(self._tool.filesystem.basename(baseline_search_path[index - 1]))
    127             except ValueError:
    128                 # index() raises a ValueError if the item isn't in the list.
    129                 pass
    130         return immediate_predecessors_in_fallback
    131 
    132     def _port_for_primary_baseline(self, baseline):
    133         for port in [self._tool.port_factory.get(port_name) for port_name in self._tool.port_factory.all_port_names()]:
    134             if self._tool.filesystem.basename(port.baseline_version_dir()) == baseline:
    135                 return port
    136         raise Exception("Failed to find port for primary baseline %s." % baseline)
    137 
    138     def _copy_existing_baseline(self, builder_name, test_name, suffix):
    139         baseline_directory = self._baseline_directory(builder_name)
    140         ports = [self._port_for_primary_baseline(baseline) for baseline in self._immediate_predecessors_in_fallback(baseline_directory)]
    141 
    142         old_baselines = []
    143         new_baselines = []
    144 
    145         # Need to gather all the baseline paths before modifying the filesystem since
    146         # the modifications can affect the results of port.expected_filename.
    147         for port in ports:
    148             old_baseline = port.expected_filename(test_name, "." + suffix)
    149             if not self._tool.filesystem.exists(old_baseline):
    150                 _log.debug("No existing baseline for %s." % test_name)
    151                 continue
    152 
    153             new_baseline = self._tool.filesystem.join(port.baseline_path(), self._file_name_for_expected_result(test_name, suffix))
    154             if self._tool.filesystem.exists(new_baseline):
    155                 _log.debug("Existing baseline at %s, not copying over it." % new_baseline)
    156                 continue
    157 
    158             expectations = TestExpectations(port, [test_name])
    159             if SKIP in expectations.get_expectations(test_name):
    160                 _log.debug("%s is skipped on %s." % (test_name, port.name()))
    161                 continue
    162 
    163             old_baselines.append(old_baseline)
    164             new_baselines.append(new_baseline)
    165 
    166         for i in range(len(old_baselines)):
    167             old_baseline = old_baselines[i]
    168             new_baseline = new_baselines[i]
    169 
    170             _log.debug("Copying baseline from %s to %s." % (old_baseline, new_baseline))
    171             self._tool.filesystem.maybe_make_directory(self._tool.filesystem.dirname(new_baseline))
    172             self._tool.filesystem.copyfile(old_baseline, new_baseline)
    173             if not self._tool.scm().exists(new_baseline):
    174                 self._add_to_scm_later(new_baseline)
    175 
    176     def execute(self, options, args, tool):
    177         for suffix in options.suffixes.split(','):
    178             self._copy_existing_baseline(options.builder, options.test, suffix)
    179         print json.dumps(self._scm_changes)
    180 
    181 
    182 class RebaselineTest(BaseInternalRebaselineCommand):
    183     name = "rebaseline-test-internal"
    184     help_text = "Rebaseline a single test from a buildbot. Only intended for use by other webkit-patch commands."
    185 
    186     def _results_url(self, builder_name):
    187         return self._tool.buildbot_for_builder_name(builder_name).builder_with_name(builder_name).latest_layout_test_results_url()
    188 
    189     def _save_baseline(self, data, target_baseline, baseline_directory, test_name, suffix):
    190         if not data:
    191             _log.debug("No baseline data to save.")
    192             return
    193 
    194         filesystem = self._tool.filesystem
    195         filesystem.maybe_make_directory(filesystem.dirname(target_baseline))
    196         filesystem.write_binary_file(target_baseline, data)
    197         if not self._tool.scm().exists(target_baseline):
    198             self._add_to_scm_later(target_baseline)
    199 
    200     def _rebaseline_test(self, builder_name, test_name, suffix, results_url):
    201         baseline_directory = self._baseline_directory(builder_name)
    202 
    203         source_baseline = "%s/%s" % (results_url, self._file_name_for_actual_result(test_name, suffix))
    204         target_baseline = self._tool.filesystem.join(baseline_directory, self._file_name_for_expected_result(test_name, suffix))
    205 
    206         _log.debug("Retrieving %s." % source_baseline)
    207         self._save_baseline(self._tool.web.get_binary(source_baseline, convert_404_to_None=True), target_baseline, baseline_directory, test_name, suffix)
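                # Illustrative example (names hypothetical): for test "fast/dom/foo.html" and suffix "png",
                # source_baseline is "<results_url>/fast/dom/foo-actual.png" and
                # target_baseline is "<baseline_directory>/fast/dom/foo-expected.png".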
    208 
    209     def _rebaseline_test_and_update_expectations(self, options):
    210         port = self._tool.port_factory.get_from_builder_name(options.builder)
    211         if port.reference_files(options.test):
    212             _log.warning("Cannot rebaseline reftest: %s", options.test)
    213             return
    214 
    215         if options.results_directory:
    216             results_url = 'file://' + options.results_directory
    217         else:
    218             results_url = self._results_url(options.builder)
    219         self._baseline_suffix_list = options.suffixes.split(',')
    220 
    221         for suffix in self._baseline_suffix_list:
    222             self._rebaseline_test(options.builder, options.test, suffix, results_url)
    223         self._scm_changes['remove-lines'].append({'builder': options.builder, 'test': options.test})
    224 
    225     def execute(self, options, args, tool):
    226         self._rebaseline_test_and_update_expectations(options)
    227         print json.dumps(self._scm_changes)
    228 
    229 
    230 class OptimizeBaselines(AbstractRebaseliningCommand):
    231     name = "optimize-baselines"
    232     help_text = "Reshuffles the baselines for the given tests to use as little space on disk as possible."
    233     show_in_main_help = True
    234     argument_names = "TEST_NAMES"
    235 
    236     def __init__(self):
    237         super(OptimizeBaselines, self).__init__(options=[
    238             self.suffixes_option,
    239             optparse.make_option('--no-modify-scm', action='store_true', default=False, help='Dump SCM commands as JSON instead of running them.'),
    240             ] + self.platform_options)
    241 
    242     def _optimize_baseline(self, optimizer, test_name):
        # Collect the results for every suffix before returning.
        files_to_delete = []
        files_to_add = []
    243         for suffix in self._baseline_suffix_list:
    244             baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix)
    245             succeeded, more_files_to_delete, more_files_to_add = optimizer.optimize(baseline_name)
    246             if not succeeded:
    247                 print "Heuristics failed to optimize %s" % baseline_name
            files_to_delete.extend(more_files_to_delete)
            files_to_add.extend(more_files_to_add)
    248         return files_to_delete, files_to_add
    249 
    250     def execute(self, options, args, tool):
    251         self._baseline_suffix_list = options.suffixes.split(',')
    252         port_names = tool.port_factory.all_port_names(options.platform)
    253         if not port_names:
    254             print "No port names match '%s'" % options.platform
    255             return
    256 
    257         optimizer = BaselineOptimizer(tool, port_names, skip_scm_commands=options.no_modify_scm)
    258         port = tool.port_factory.get(port_names[0])
    259         for test_name in port.tests(args):
    260             _log.info("Optimizing %s" % test_name)
    261             files_to_delete, files_to_add = self._optimize_baseline(optimizer, test_name)
    262             for path in files_to_delete:
    263                 self._delete_from_scm_later(path)
    264             for path in files_to_add:
    265                 self._add_to_scm_later(path)
    266 
    267         print json.dumps(self._scm_changes)
    268 
    269 
    270 class AnalyzeBaselines(AbstractRebaseliningCommand):
    271     name = "analyze-baselines"
    272     help_text = "Analyzes the baselines for the given tests and prints results that are identical."
    273     show_in_main_help = True
    274     argument_names = "TEST_NAMES"
    275 
    276     def __init__(self):
    277         super(AnalyzeBaselines, self).__init__(options=[
    278             self.suffixes_option,
    279             optparse.make_option('--missing', action='store_true', default=False, help='show missing baselines as well'),
    280             ] + self.platform_options)
    281         self._optimizer_class = BaselineOptimizer  # overridable for testing
    282         self._baseline_optimizer = None
    283         self._port = None
    284 
    285     def _write(self, msg):
    286         print msg
    287 
    288     def _analyze_baseline(self, options, test_name):
    289         for suffix in self._baseline_suffix_list:
    290             baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix)
    291             results_by_directory = self._baseline_optimizer.read_results_by_directory(baseline_name)
    292             if results_by_directory:
    293                 self._write("%s:" % baseline_name)
    294                 self._baseline_optimizer.write_by_directory(results_by_directory, self._write, "  ")
    295             elif options.missing:
    296                 self._write("%s: (no baselines found)" % baseline_name)
    297 
    298     def execute(self, options, args, tool):
    299         self._baseline_suffix_list = options.suffixes.split(',')
    300         port_names = tool.port_factory.all_port_names(options.platform)
    301         if not port_names:
    302             print "No port names match '%s'" % options.platform
    303             return
    304 
    305         self._baseline_optimizer = self._optimizer_class(tool, port_names, skip_scm_commands=False)
    306         self._port = tool.port_factory.get(port_names[0])
    307         for test_name in self._port.tests(args):
    308             self._analyze_baseline(options, test_name)
    309 
    310 
    311 class AbstractParallelRebaselineCommand(AbstractRebaseliningCommand):
    312     # not overriding execute() - pylint: disable=W0223
    313 
    314     def __init__(self, options=None):
    315         super(AbstractParallelRebaselineCommand, self).__init__(options=options)
    316         self._builder_data = {}
    317 
    318     def builder_data(self):
    319         if not self._builder_data:
    320             for builder_name in self._release_builders():
    321                 builder = self._tool.buildbot_for_builder_name(builder_name).builder_with_name(builder_name)
    322                 self._builder_data[builder_name] = builder.latest_layout_test_results()
    323         return self._builder_data
    324 
    325     # The release builders cycle much faster than the debug ones and cover all the platforms.
    326     def _release_builders(self):
    327         release_builders = []
    328         for builder_name in builders.all_builder_names():
    329             if 'ASAN' in builder_name:
    330                 continue
    331             port = self._tool.port_factory.get_from_builder_name(builder_name)
    332             if port.test_configuration().build_type == 'release':
    333                 release_builders.append(builder_name)
    334         return release_builders
    335 
    336     def _run_webkit_patch(self, args, verbose):
    337         try:
    338             verbose_args = ['--verbose'] if verbose else []
    339             stderr = self._tool.executive.run_command([self._tool.path()] + verbose_args + args, cwd=self._tool.scm().checkout_root, return_stderr=True)
    340             for line in stderr.splitlines():
    341                 _log.warning(line)
    342         except ScriptError, e:
    343             _log.error(e)
    344 
    345     def _builders_to_fetch_from(self, builders_to_check):
    346         # This routine returns the subset of builders that will cover all of the baseline search paths
    347         # used in the input list. In particular, if the input list contains both Release and Debug
    348         # versions of a configuration, we *only* return the Release version (since we don't save
    349         # debug versions of baselines).
    350         release_builders = set()
    351         debug_builders = set()
    352         builders_to_fallback_paths = {}
    353         for builder in builders_to_check:
    354             port = self._tool.port_factory.get_from_builder_name(builder)
    355             if port.test_configuration().build_type == 'release':
    356                 release_builders.add(builder)
    357             else:
    358                 debug_builders.add(builder)
    359         for builder in list(release_builders) + list(debug_builders):
    360             port = self._tool.port_factory.get_from_builder_name(builder)
    361             fallback_path = port.baseline_search_path()
    362             if fallback_path not in builders_to_fallback_paths.values():
    363                 builders_to_fallback_paths[builder] = fallback_path
    364         return builders_to_fallback_paths.keys()
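                # Illustrative example (builder names hypothetical): given ["WebKit Linux", "WebKit Linux (dbg)"],
                # only "WebKit Linux" is returned, since both configurations share a baseline search path
                # and we only save Release baselines.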
    365 
    366     def _rebaseline_commands(self, test_prefix_list, options):
    367         path_to_webkit_patch = self._tool.path()
    368         cwd = self._tool.scm().checkout_root
    369         copy_baseline_commands = []
    370         rebaseline_commands = []
    371         lines_to_remove = {}
    372         port = self._tool.port_factory.get()
    373 
    374         for test_prefix in test_prefix_list:
    375             for test in port.tests([test_prefix]):
    376                 for builder in self._builders_to_fetch_from(test_prefix_list[test_prefix]):
    377                     actual_failures_suffixes = self._suffixes_for_actual_failures(test, builder, test_prefix_list[test_prefix][builder])
    378                     if not actual_failures_suffixes:
    379                         # If we're not going to rebaseline the test because it's passing on this
    380                         # builder, we still want to remove the line from TestExpectations.
    381                         if test not in lines_to_remove:
    382                             lines_to_remove[test] = []
    383                         lines_to_remove[test].append(builder)
    384                         continue
    385 
    386                     suffixes = ','.join(actual_failures_suffixes)
    387                     cmd_line = ['--suffixes', suffixes, '--builder', builder, '--test', test]
    388                     if options.results_directory:
    389                         cmd_line.extend(['--results-directory', options.results_directory])
    390                     if options.verbose:
    391                         cmd_line.append('--verbose')
    392                     copy_baseline_commands.append(tuple([[path_to_webkit_patch, 'copy-existing-baselines-internal'] + cmd_line, cwd]))
    393                     rebaseline_commands.append(tuple([[path_to_webkit_patch, 'rebaseline-test-internal'] + cmd_line, cwd]))
    394         return copy_baseline_commands, rebaseline_commands, lines_to_remove
    395 
    396     def _serial_commands(self, command_results):
    397         files_to_add = set()
    398         files_to_delete = set()
    399         lines_to_remove = {}
    400         for output in [result[1].split('\n') for result in command_results]:
    401             file_added = False
    402             for line in output:
    403                 try:
    404                     if line:
    405                         parsed_line = json.loads(line)
    406                         if 'add' in parsed_line:
    407                             files_to_add.update(parsed_line['add'])
    408                         if 'delete' in parsed_line:
    409                             files_to_delete.update(parsed_line['delete'])
    410                         if 'remove-lines' in parsed_line:
    411                             for line_to_remove in parsed_line['remove-lines']:
    412                                 test = line_to_remove['test']
    413                                 builder = line_to_remove['builder']
    414                                 if test not in lines_to_remove:
    415                                     lines_to_remove[test] = []
    416                                 lines_to_remove[test].append(builder)
    417                         file_added = True
    418                 except ValueError:
    419                     _log.debug('"%s" is not a JSON object, ignoring' % line)
    420 
    421             if not file_added:
    422                 _log.debug('Could not add file based on output "%s"' % output)
    423 
    424         return list(files_to_add), list(files_to_delete), lines_to_remove
    425 
    426     def _optimize_baselines(self, test_prefix_list, verbose=False):
    427         optimize_commands = []
    428         for test in test_prefix_list:
    429             all_suffixes = set()
    430             for builder in self._builders_to_fetch_from(test_prefix_list[test]):
    431                 all_suffixes.update(self._suffixes_for_actual_failures(test, builder, test_prefix_list[test][builder]))
    432 
    433             # FIXME: We should propagate the platform options as well.
    434             cmd_line = ['--no-modify-scm', '--suffixes', ','.join(all_suffixes), test]
    435             if verbose:
    436                 cmd_line.append('--verbose')
    437 
    438             path_to_webkit_patch = self._tool.path()
    439             cwd = self._tool.scm().checkout_root
    440             optimize_commands.append(tuple([[path_to_webkit_patch, 'optimize-baselines'] + cmd_line, cwd]))
    441         return optimize_commands
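                # Each entry is an (argv, cwd) tuple, e.g. (illustrative):
                #   (["/path/to/webkit-patch", "optimize-baselines", "--no-modify-scm", "--suffixes", "txt,png", "fast/dom/foo.html"], cwd)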
    442 
    443     def _update_expectations_files(self, lines_to_remove):
    444         # FIXME: This routine is way too expensive. We're creating O(n ports) TestExpectations objects.
    445         # This is slow and uses a lot of memory.
    446         tests = lines_to_remove.keys()
    447         to_remove = []
    448 
    449         # This is so we remove lines for builders that skip this test, e.g. Android skips most
    450         # tests and we don't want to leave stray [ Android ] lines in TestExpectations.
    451         # This is only necessary for "webkit-patch rebaseline" and for rebaselining expected
    452         # failures from garden-o-matic. rebaseline-expectations and auto-rebaseline will always
    453         # pass the exact set of ports to rebaseline.
    454         for port_name in self._tool.port_factory.all_port_names():
    455             port = self._tool.port_factory.get(port_name)
    456             generic_expectations = TestExpectations(port, tests=tests, include_overrides=False)
    457             full_expectations = TestExpectations(port, tests=tests, include_overrides=True)
    458             for test in tests:
    459                 if self._port_skips_test(port, test, generic_expectations, full_expectations):
    460                     for test_configuration in port.all_test_configurations():
    461                         if test_configuration.version == port.test_configuration().version:
    462                             to_remove.append((test, test_configuration))
    463 
    464         for test in lines_to_remove:
    465             for builder in lines_to_remove[test]:
    466                 port = self._tool.port_factory.get_from_builder_name(builder)
    467                 for test_configuration in port.all_test_configurations():
    468                     if test_configuration.version == port.test_configuration().version:
    469                         to_remove.append((test, test_configuration))
    470 
    471         port = self._tool.port_factory.get()
    472         expectations = TestExpectations(port, include_overrides=False)
    473         expectations_string = expectations.remove_configurations(to_remove)
    474         path = port.path_to_generic_test_expectations_file()
    475         self._tool.filesystem.write_text_file(path, expectations_string)
    476 
    477     def _port_skips_test(self, port, test, generic_expectations, full_expectations):
    478         fs = port.host.filesystem
    479         if port.default_smoke_test_only():
    480             smoke_test_filename = fs.join(port.layout_tests_dir(), 'SmokeTests')
    481             if fs.exists(smoke_test_filename) and test not in fs.read_text_file(smoke_test_filename):
    482                 return True
    483 
    484         return (SKIP in full_expectations.get_expectations(test) and
    485                 SKIP not in generic_expectations.get_expectations(test))
    486 
    487     def _run_in_parallel_and_update_scm(self, commands):
    488         command_results = self._tool.executive.run_in_parallel(commands)
    489         log_output = '\n'.join(result[2] for result in command_results).replace('\n\n', '\n')
    490         for line in log_output.split('\n'):
    491             if line:
    492                 print >> sys.stderr, line  # FIXME: Figure out how to log properly.
    493 
    494         files_to_add, files_to_delete, lines_to_remove = self._serial_commands(command_results)
    495         if files_to_delete:
    496             self._tool.scm().delete_list(files_to_delete)
    497         if files_to_add:
    498             self._tool.scm().add_list(files_to_add)
    499         return lines_to_remove
    500 
    501     def _rebaseline(self, options, test_prefix_list):
    502         for test, builders_to_check in sorted(test_prefix_list.items()):
    503             _log.info("Rebaselining %s" % test)
    504             for builder, suffixes in sorted(builders_to_check.items()):
    505                 _log.debug("  %s: %s" % (builder, ",".join(suffixes)))
    506 
    507         copy_baseline_commands, rebaseline_commands, extra_lines_to_remove = self._rebaseline_commands(test_prefix_list, options)
    508         lines_to_remove = {}
    509 
    510         if copy_baseline_commands:
    511             self._run_in_parallel_and_update_scm(copy_baseline_commands)
    512         if rebaseline_commands:
    513             lines_to_remove = self._run_in_parallel_and_update_scm(rebaseline_commands)
    514 
    515         for test in extra_lines_to_remove:
    516             if test in lines_to_remove:
    517                 lines_to_remove[test] = lines_to_remove[test] + extra_lines_to_remove[test]
    518             else:
    519                 lines_to_remove[test] = extra_lines_to_remove[test]
    520 
    521         if lines_to_remove:
    522             self._update_expectations_files(lines_to_remove)
    523 
    524         if options.optimize:
    525             self._run_in_parallel_and_update_scm(self._optimize_baselines(test_prefix_list, options.verbose))
    526 
    527     def _suffixes_for_actual_failures(self, test, builder_name, existing_suffixes):
    528         actual_results = self.builder_data()[builder_name].actual_results(test)
    529         if not actual_results:
    530             return set()
    531         return set(existing_suffixes) & TestExpectations.suffixes_for_actual_expectations_string(actual_results)
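                # Illustrative example: if the caller passed {"txt", "png"} but the builder only reported
                # an image failure for this test, the intersection returned here is {"png"}.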
    532 
    533 
    534 class RebaselineJson(AbstractParallelRebaselineCommand):
    535     name = "rebaseline-json"
    536     help_text = "Rebaseline based on JSON passed to stdin. Intended to be called only from other scripts."
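            # The stdin JSON maps test names to builders to baseline suffixes, e.g. (illustrative):
            #   {"fast/dom/foo.html": {"WebKit Linux": ["txt", "png"], "WebKit Mac10.7": ["txt"]}}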
    537 
    538     def __init__(self):
    539         super(RebaselineJson, self).__init__(options=[
    540             self.no_optimize_option,
    541             self.results_directory_option,
    542             ])
    543 
    544     def execute(self, options, args, tool):
    545         self._rebaseline(options, json.loads(sys.stdin.read()))
    546 
    547 
    548 class RebaselineExpectations(AbstractParallelRebaselineCommand):
    549     name = "rebaseline-expectations"
    550     help_text = "Rebaselines the tests indicated in TestExpectations."
    551     show_in_main_help = True
    552 
    553     def __init__(self):
    554         super(RebaselineExpectations, self).__init__(options=[
    555             self.no_optimize_option,
    556             ] + self.platform_options)
    557         self._test_prefix_list = None
    558 
    559     def _tests_to_rebaseline(self, port):
    560         tests_to_rebaseline = {}
    561         for path, value in port.expectations_dict().items():
    562             expectations = TestExpectations(port, include_overrides=False, expectations_dict={path: value})
    563             for test in expectations.get_rebaselining_failures():
    564                 suffixes = TestExpectations.suffixes_for_expectations(expectations.get_expectations(test))
    565                 tests_to_rebaseline[test] = suffixes or BASELINE_SUFFIX_LIST
    566         return tests_to_rebaseline
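                # Illustrative example: a TestExpectations line such as
                #   crbug.com/123 [ Linux ] fast/dom/foo.html [ Rebaseline ]
                # produces an entry mapping "fast/dom/foo.html" to the suffixes to fetch,
                # falling back to BASELINE_SUFFIX_LIST when none can be derived.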
    567 
    568     def _add_tests_to_rebaseline_for_port(self, port_name):
    569         builder_name = builders.builder_name_for_port_name(port_name)
    570         if not builder_name:
    571             return
    572         tests = self._tests_to_rebaseline(self._tool.port_factory.get(port_name)).items()
    573 
    574         if tests:
    575             _log.info("Retrieving results for %s from %s." % (port_name, builder_name))
    576 
    577         for test_name, suffixes in tests:
    578             _log.info("    %s (%s)" % (test_name, ','.join(suffixes)))
    579             if test_name not in self._test_prefix_list:
    580                 self._test_prefix_list[test_name] = {}
    581             self._test_prefix_list[test_name][builder_name] = suffixes
    582 
    583     def execute(self, options, args, tool):
    584         options.results_directory = None
    585         self._test_prefix_list = {}
    586         port_names = tool.port_factory.all_port_names(options.platform)
    587         for port_name in port_names:
    588             self._add_tests_to_rebaseline_for_port(port_name)
    589         if not self._test_prefix_list:
    590             _log.warning("Did not find any tests marked Rebaseline.")
    591             return
    592 
    593         self._rebaseline(options, self._test_prefix_list)
    594 
    595 
    596 class Rebaseline(AbstractParallelRebaselineCommand):
    597     name = "rebaseline"
    598     help_text = "Rebaseline tests with results from the build bots. Shows the list of failing tests on the builders if no test names are provided."
    599     show_in_main_help = True
    600     argument_names = "[TEST_NAMES]"
    601 
    602     def __init__(self):
    603         super(Rebaseline, self).__init__(options=[
    604             self.no_optimize_option,
    605             # FIXME: should we support the platform options in addition to (or instead of) --builders?
    606             self.suffixes_option,
    607             self.results_directory_option,
    608             optparse.make_option("--builders", default=None, action="append", help="Comma-separated-list of builders to pull new baselines from (can also be provided multiple times)"),
    609             ])
    610 
    611     def _builders_to_pull_from(self):
    612         chosen_names = self._tool.user.prompt_with_list("Which builder to pull results from:", self._release_builders(), can_choose_multiple=True)
    613         return [self._builder_with_name(name) for name in chosen_names]
    614 
    615     def _builder_with_name(self, name):
    616         return self._tool.buildbot_for_builder_name(name).builder_with_name(name)
    617 
    618     def execute(self, options, args, tool):
    619         if not args:
    620             _log.error("Must list tests to rebaseline.")
    621             return
    622 
    623         if options.builders:
    624             builders_to_check = []
    625             for builder_names in options.builders:
    626                 builders_to_check += [self._builder_with_name(name) for name in builder_names.split(",")]
    627         else:
    628             builders_to_check = self._builders_to_pull_from()
    629 
    630         test_prefix_list = {}
    631         suffixes_to_update = options.suffixes.split(",")
    632 
    633         for builder in builders_to_check:
    634             for test in args:
    635                 if test not in test_prefix_list:
    636                     test_prefix_list[test] = {}
    637                 test_prefix_list[test][builder.name()] = suffixes_to_update
    638 
    639         if options.verbose:
    640             _log.debug("rebaseline-json: " + str(test_prefix_list))
    641 
    642         self._rebaseline(options, test_prefix_list)
    643 
    644 
    645 class AutoRebaseline(AbstractParallelRebaselineCommand):
    646     name = "auto-rebaseline"
    647     help_text = "Rebaselines any NeedsRebaseline lines in TestExpectations that have cycled through all the bots."
    648     AUTO_REBASELINE_BRANCH_NAME = "auto-rebaseline-temporary-branch"
    649 
    650     # Rietveld uploader stinks. Limit the number of rebaselines in a given patch to keep upload from failing.
    651     # FIXME: http://crbug.com/263676 Obviously we should fix the uploader here.
    652     MAX_LINES_TO_REBASELINE = 200
    653 
    654     SECONDS_BEFORE_GIVING_UP = 300
    655 
    656     def __init__(self):
    657         super(AutoRebaseline, self).__init__(options=[
    658             # FIXME: Remove this option.
    659             self.no_optimize_option,
    660             # FIXME: Remove this option.
    661             self.results_directory_option,
    662             ])
    663 
    664     def bot_revision_data(self):
    665         revisions = []
    666         for result in self.builder_data().values():
    667             if result.run_was_interrupted():
    668                 _log.error("Can't rebaseline because the latest run on %s exited early." % result.builder_name())
    669                 return []
    670             revisions.append({
    671                 "builder": result.builder_name(),
    672                 "revision": result.blink_revision(),
    673             })
    674         return revisions
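                # Illustrative return value: [{"builder": "WebKit Linux", "revision": "171234"}, ...]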
    675 
    676     def tests_to_rebaseline(self, tool, min_revision, print_revisions):
    677         port = tool.port_factory.get()
    678         expectations_file_path = port.path_to_generic_test_expectations_file()
    679 
    680         tests = set()
    681         revision = None
    682         author = None
    683         bugs = set()
    684         has_any_needs_rebaseline_lines = False
    685 
    686         for line in tool.scm().blame(expectations_file_path).split("\n"):
    687             comment_index = line.find("#")
    688             if comment_index == -1:
    689                 comment_index = len(line)
    690             line_without_comments = re.sub(r"\s+", " ", line[:comment_index].strip())
    691 
    692             if "NeedsRebaseline" not in line_without_comments:
    693                 continue
    694 
    695             has_any_needs_rebaseline_lines = True
    696 
    697             parsed_line = re.match("^(\S*)[^(]*\((\S*).*?([^ ]*)\ \[[^[]*$", line_without_comments)
    698 
    699             commit_hash = parsed_line.group(1)
    700             svn_revision = tool.scm().svn_revision_from_git_commit(commit_hash)
    701 
    702             test = parsed_line.group(3)
    703             if print_revisions:
    704                 _log.info("%s is waiting for r%s" % (test, svn_revision))
    705 
    706             if not svn_revision or svn_revision > min_revision:
    707                 continue
    708 
    709             if revision and svn_revision != revision:
    710                 continue
    711 
    712             if not revision:
    713                 revision = svn_revision
    714                 author = parsed_line.group(2)
    715 
    716             bugs.update(re.findall("crbug\.com\/(\d+)", line_without_comments))
    717             tests.add(test)
    718 
    719             if len(tests) >= self.MAX_LINES_TO_REBASELINE:
    720                 _log.info("Too many tests to rebaseline in one patch. Doing the first %d." % self.MAX_LINES_TO_REBASELINE)
    721                 break
    722 
    723         return tests, revision, author, bugs, has_any_needs_rebaseline_lines
    724 
    725     def link_to_patch(self, revision):
    726         return "http://src.chromium.org/viewvc/blink?view=revision&revision=" + str(revision)
    727 
    728     def commit_message(self, author, revision, bugs):
    729         bug_string = ""
    730         if bugs:
    731             bug_string = "BUG=%s\n" % ",".join(bugs)
    732 
    733         return """Auto-rebaseline for r%s
    734 
    735 %s
    736 
    737 %sTBR=%s
    738 """ % (revision, self.link_to_patch(revision), bug_string, author)
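                # Illustrative output for revision 171234, bugs {"123"} and author "foo@chromium.org" (hypothetical):
                #   Auto-rebaseline for r171234
                #
                #   http://src.chromium.org/viewvc/blink?view=revision&revision=171234
                #
                #   BUG=123
                #   TBR=foo@chromium.org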
    739 
    740     def get_test_prefix_list(self, tests):
    741         test_prefix_list = {}
    742         lines_to_remove = {}
    743 
    744         for builder_name in self._release_builders():
    745             port_name = builders.port_name_for_builder_name(builder_name)
    746             port = self._tool.port_factory.get(port_name)
    747             expectations = TestExpectations(port, include_overrides=True)
    748             for test in expectations.get_needs_rebaseline_failures():
    749                 if test not in tests:
    750                     continue
    751 
    752                 if test not in test_prefix_list:
    753                     lines_to_remove[test] = []
    754                     test_prefix_list[test] = {}
    755                 lines_to_remove[test].append(builder_name)
    756                 test_prefix_list[test][builder_name] = BASELINE_SUFFIX_LIST
    757 
    758         return test_prefix_list, lines_to_remove
    759 
    760     def _run_git_cl_command(self, options, command):
    761         subprocess_command = ['git', 'cl'] + command
    762         if options.verbose:
    763             subprocess_command.append('--verbose')
    764 
    765         process = self._tool.executive.popen(subprocess_command, stdout=self._tool.executive.PIPE, stderr=self._tool.executive.STDOUT)
    766         last_output_time = time.time()
    767 
    768         # git cl sometimes completely hangs. Bail if we haven't gotten any output to stdout/stderr in a while.
    769         while process.poll() is None and time.time() < last_output_time + self.SECONDS_BEFORE_GIVING_UP:
    770             # FIXME: This doesn't make any sense. readline blocks, so all this code to
    771             # try and bail is useless. Instead, we should do the readline calls on a
    772             # subthread. Then the rest of this code would make sense.
    773             out = process.stdout.readline().rstrip('\n')
    774             if out:
    775                 last_output_time = time.time()
    776                 _log.info(out)
    777 
    778         if process.poll() is None:
    779             _log.error('Command hung: %s' % subprocess_command)
    780             return False
    781         return True
    782 
    783     # FIXME: Move this somewhere more general.
    784     def tree_status(self):
    785         blink_tree_status_url = "http://blink-status.appspot.com/status"
    786         status = urllib2.urlopen(blink_tree_status_url).read().lower()
    787         if status.find('closed') != -1 or status == "0":
    788             return 'closed'
    789         elif status.find('open') != -1 or status == "1":
    790             return 'open'
    791         return 'unknown'
    792 
    793     def execute(self, options, args, tool):
    794         if tool.scm().executable_name == "svn":
    795             _log.error("Auto rebaseline only works with a git checkout.")
    796             return
    797 
    798         if tool.scm().has_working_directory_changes():
    799             _log.error("Cannot proceed with working directory changes. Clean working directory first.")
    800             return
    801 
    802         revision_data = self.bot_revision_data()
    803         if not revision_data:
    804             return
    805 
    806         min_revision = int(min([item["revision"] for item in revision_data]))
    807         tests, revision, author, bugs, has_any_needs_rebaseline_lines = self.tests_to_rebaseline(tool, min_revision, print_revisions=options.verbose)
    808 
    809         if options.verbose:
    810             _log.info("Min revision across all bots is %s." % min_revision)
    811             for item in revision_data:
    812                 _log.info("%s: r%s" % (item["builder"], item["revision"]))
    813 
    814         if not tests:
    815             _log.debug('No tests to rebaseline.')
    816             return
    817 
    818         if self.tree_status() == 'closed':
    819             _log.info('Cannot proceed. Tree is closed.')
    820             return
    821 
    822         _log.info('Rebaselining %s for r%s by %s.' % (list(tests), revision, author))
    823 
    824         test_prefix_list, lines_to_remove = self.get_test_prefix_list(tests)
    825 
    826         try:
    827             old_branch_name = tool.scm().current_branch()
    828             tool.scm().delete_branch(self.AUTO_REBASELINE_BRANCH_NAME)
    829             tool.scm().create_clean_branch(self.AUTO_REBASELINE_BRANCH_NAME)
    830 
    831             # If the tests are passing everywhere, then this list will be empty. We don't need
    832             # to rebaseline, but we'll still need to update TestExpectations.
    833             if test_prefix_list:
    834                 self._rebaseline(options, test_prefix_list)
    835 
    836             tool.scm().commit_locally_with_message(self.commit_message(author, revision, bugs))
    837 
    838             # FIXME: It would be nice if we could dcommit the patch without uploading, but still
    839             # go through all the precommit hooks. For rebaselines with lots of files, uploading
    840             # takes a long time and sometimes fails, but we don't want to commit if, e.g. the
    841             # tree is closed.
    842             did_finish = self._run_git_cl_command(options, ['upload', '-f'])
    843 
    844             if did_finish:
    845                 # Uploading can take a very long time. Do another pull to make sure TestExpectations is up to date,
    846                 # so the dcommit can go through.
    847                 # FIXME: Log the pull and dcommit stdout/stderr to the log-server.
    848                 tool.executive.run_command(['git', 'pull'])
    849 
    850                 self._run_git_cl_command(options, ['dcommit', '-f'])
    851         finally:
    852             self._run_git_cl_command(options, ['set_close'])
    853             tool.scm().ensure_cleanly_tracking_remote_master()
    854             tool.scm().checkout_branch(old_branch_name)
    855             tool.scm().delete_branch(self.AUTO_REBASELINE_BRANCH_NAME)
    856 
    857 
    858 class RebaselineOMatic(AbstractDeclarativeCommand):
    859     name = "rebaseline-o-matic"
    860     help_text = "Calls webkit-patch auto-rebaseline in a loop."
    861     show_in_main_help = True
    862 
    863     SLEEP_TIME_IN_SECONDS = 30
    864     LOG_SERVER = 'blinkrebaseline.appspot.com'
    865 
    866     # Uploaded log entries are appended to the existing entry unless the
    867     # newentry flag is set, in which case a new entry is started and
    868     # subsequent uploads append to it.
    869     def _log_to_server(self, log='', is_new_entry=False):
    870         query = {
    871             'log': log,
    872         }
    873         if is_new_entry:
    874             query['newentry'] = 'on'
    875         urllib2.urlopen("http://" + self.LOG_SERVER + "/updatelog", data=urllib.urlencode(query))
    876 
    877     def _log_line(self, handle):
    878         out = handle.readline().rstrip('\n')
    879         if out:
    880             if self._verbose:
    881                 print out
    882             self._log_to_server(out)
    883         return out
    884 
    885     def _run_logged_command(self, command):
    886         process = self._tool.executive.popen(command, stdout=self._tool.executive.PIPE, stderr=self._tool.executive.STDOUT)
    887 
    888         out = self._log_line(process.stdout)
    889         while out:
    890             # FIXME: This should probably batch up lines if they're available and log to the server once.
    891             out = self._log_line(process.stdout)
    892 
    893     def _do_one_rebaseline(self):
    894         try:
    895             old_branch_name = self._tool.scm().current_branch()
    896             self._log_to_server(is_new_entry=True)
    897             self._run_logged_command(['git', 'pull'])
    898             rebaseline_command = [self._tool.filesystem.join(self._tool.scm().checkout_root, 'Tools', 'Scripts', 'webkit-patch'), 'auto-rebaseline']
    899             if self._verbose:
    900                 rebaseline_command.append('--verbose')
    901             self._run_logged_command(rebaseline_command)
    902         except:
    903             traceback.print_exc(file=sys.stderr)
    904             # Sometimes git crashes and leaves us on a detached head.
    905             self._tool.scm().checkout_branch(old_branch_name)
    906 
    907     def execute(self, options, args, tool):
    908         self._verbose = options.verbose
    909         while True:
    910             self._do_one_rebaseline()
    911             time.sleep(self.SLEEP_TIME_IN_SECONDS)
    912