# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import os
import stat

from autotest_lib.client.common_lib import log
from autotest_lib.client.common_lib import error, utils, global_config
from autotest_lib.client.bin import base_sysinfo, utils
from autotest_lib.client.cros import constants

get_value = global_config.global_config.get_config_value
collect_corefiles = get_value('CLIENT', 'collect_corefiles',
                              type=bool, default=False)
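
# Illustrative only: collect_corefiles above is read from the autotest global
# configuration (global_config.ini, optionally overridden by shadow_config.ini).
# A hypothetical entry enabling core file collection would look like:
#
#     [CLIENT]
#     collect_corefiles: True
#
# When the option is absent, core files are not collected (default=False).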


logfile = base_sysinfo.logfile
command = base_sysinfo.command


class logdir(base_sysinfo.loggable):
    """Represents a log directory."""

    DEFAULT_EXCLUDES = ("**autoserv*", "**.journal",)

    def __init__(self, directory, excludes=DEFAULT_EXCLUDES):
        super(logdir, self).__init__(directory, log_in_keyval=False)
        self.dir = directory
        self._excludes = excludes
        self._infer_old_attributes()


    def __setstate__(self, state):
        """Unpickle handler

        When client tests are run without SSP, we pickle this object on the
        server-side (using the version of the class deployed in the lab) and
        unpickle it on the DUT (using the version of the class from the build).
        This means that when adding a new attribute to this class, for a while
        the server-side code does not populate that attribute. So, deal with
        missing attributes in a sane way.
        """
        self.__dict__ = state
        if '_excludes' not in state:
            self._excludes = self.DEFAULT_EXCLUDES
            if self.additional_exclude:
                # additional_exclude is a single pattern string; append it as
                # one element rather than splitting it into characters.
                self._excludes += (self.additional_exclude,)
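
    # Illustrative only (hypothetical values): a logdir pickled by an older
    # server-side build may arrive with a state dict that lacks '_excludes',
    # for example:
    #     {'dir': '/var/log', 'additional_exclude': 'chrome_2*', ...}
    # __setstate__ above falls back to DEFAULT_EXCLUDES and re-appends the
    # legacy additional_exclude value, so such pickles still load correctly.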


    def __repr__(self):
        return "site_sysinfo.logdir(%r, %s)" % (self.dir,
                                                self._excludes)


    def __eq__(self, other):
        if isinstance(other, logdir):
            return (self.dir == other.dir and self._excludes == other._excludes)
        elif isinstance(other, base_sysinfo.loggable):
            return False
        return NotImplemented


    def __ne__(self, other):
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result


    def __hash__(self):
        return hash(self.dir) + hash(self._excludes)


    def run(self, log_dir):
        """Copies this log directory to the specified directory.

        @param log_dir: The destination log directory.
        """
        from_dir = os.path.realpath(self.dir)
        if os.path.exists(from_dir):
            parent_dir = os.path.dirname(from_dir)
            utils.system("mkdir -p %s%s" % (log_dir, parent_dir))

            excludes = [
                    "--exclude=%s" % self._anchored_exclude_pattern(from_dir, x)
                    for x in self._excludes]
            # Take source permissions and add ugo+r so files are accessible via
            # archive server.
            utils.system(
                    "rsync --no-perms --chmod=ugo+r -a --safe-links %s %s %s%s"
                    % (" ".join(excludes), from_dir, log_dir, parent_dir))
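
        # Illustrative only (hypothetical paths): for self.dir='/var/log' and
        # log_dir='/tmp/sysinfo', the command built above is roughly
        #
        #     rsync --no-perms --chmod=ugo+r -a --safe-links \
        #         --exclude=/log/**autoserv* --exclude=/log/**.journal \
        #         /var/log /tmp/sysinfo/var
        #
        # i.e. the source tree is mirrored under the destination with its
        # parent path preserved.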


    def _anchored_exclude_pattern(self, from_dir, pattern):
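        """Return an rsync exclude pattern anchored at the source directory.

        For example, from_dir='/var/log' and pattern='**autoserv*' yield
        '/log/**autoserv*', so the exclude only matches inside the copied tree.
        """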
        return '/%s/%s' % (os.path.basename(from_dir), pattern)


    def _infer_old_attributes(self):
        """Backwards compatibility attributes.

        YOU MUST NEVER DROP / REINTERPRET THESE.
        A logdir object is pickled on the server-side and unpickled on the
        client-side. This means that, when running against client-side code
        from an older build, we need to be able to unpickle an instance of
        logdir pickled from a newer version of the class.

        Some old attributes are not sanely handled via __setstate__, so we can't
        drop them without breaking compatibility.
        """
        additional_excludes = list(set(self._excludes) -
                                   set(self.DEFAULT_EXCLUDES))
        if additional_excludes:
            # The old API only allowed a single additional exclude.
            # Best effort: keep the first one, discard the rest.
            self.additional_exclude = additional_excludes[0]
        else:
            self.additional_exclude = None


class file_stat(object):
    """Stores the file size and inode, used for retrieving new data in a file."""
    def __init__(self, file_path):
        """Collect the size and inode information of a file.

        @param file_path: full path to the file.

        """
        stat_result = os.stat(file_path)
        # Starting size of the file; skip that many bytes when doing the diff.
        self.st_size = stat_result.st_size
        # inode of the file. If the inode has changed, treat this as a new
        # file and copy the whole file.
        self.st_ino = stat_result.st_ino


class diffable_logdir(logdir):
    """Represents a log directory from which only new content is copied.

    An instance of this class should be added to both
    before_iteration_loggables and after_iteration_loggables. This
    guarantees that file status information is collected when the run
    method is called in before_iteration_loggables, and that the diff is
    executed when the run method is called in after_iteration_loggables.

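    Example (illustrative only; mirrors how site_sysinfo below uses this
    class):

        diffable_log = diffable_logdir(constants.LOG_DIR)
        # Before the test: record current file sizes/inodes, copy nothing.
        diffable_log.run(log_dir=None, collect_init_status=True)
        # After the test: copy only the bytes appended since the first call.
        diffable_log.run(log_dir=test_sysinfodir, collect_init_status=False)
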
    """
    def __init__(self, directory, excludes=logdir.DEFAULT_EXCLUDES,
                 keep_file_hierarchy=True, append_diff_in_name=True):
        """
        Constructor of a diffable_logdir instance.

        @param directory: directory to be diffed after an iteration finishes.
        @param excludes: path patterns to exclude for rsync.
        @param keep_file_hierarchy: True to preserve the full path, e.g.,
            sysinfo/var/log/sysstat, vs. sysinfo/sysstat when it is False.
        @param append_diff_in_name: True to append '_diff' to the folder
            name to indicate it is a diff, e.g., var/log_diff. Option
            keep_file_hierarchy must be True for this to take effect.

        """
        super(diffable_logdir, self).__init__(directory, excludes)
        self.keep_file_hierarchy = keep_file_hierarchy
        self.append_diff_in_name = append_diff_in_name
        # Dictionary storing the status of all files in the directory.
        self._log_stats = {}


    def _get_init_status_of_src_dir(self, src_dir):
        """Get the initial status of files in the src_dir folder.

        @param src_dir: directory to be diff-ed.

        """
        # Record the initial status of every file in src_dir.
        for file_path in self._get_all_files(src_dir):
            self._log_stats[file_path] = file_stat(file_path)
        self.file_stats_collected = True


    def _get_all_files(self, path):
        """Iterate through files in the given path, including subdirectories.

        @param path: root directory.
        @return: an iterator over all files in the given path, including
            subdirectories.

        """
        if not os.path.exists(path):
            # Nothing to walk; yielding an empty list here would hand a bogus
            # "path" to callers, so just stop the generator.
            return
        for root, dirs, files in os.walk(path):
            for f in files:
                if f.startswith('autoserv'):
                    continue
                if f.endswith('.journal'):
                    continue
                full_path = os.path.join(root, f)
                # Only list regular files or symlinks to them (os.stat follows
                # symlinks).
                if stat.S_ISREG(os.stat(full_path).st_mode):
                    yield full_path


    def _copy_new_data_in_file(self, file_path, src_dir, dest_dir):
        """Copy all new data in a file to the target directory.

        @param file_path: full path to the file to be copied.
        @param src_dir: source directory to do the diff.
        @param dest_dir: target directory to store new data of src_dir.

        """
        bytes_to_skip = 0
        if file_path in self._log_stats:
            prev_stat = self._log_stats[file_path]
            new_stat = os.stat(file_path)
            if new_stat.st_ino == prev_stat.st_ino:
                bytes_to_skip = prev_stat.st_size
            if new_stat.st_size == bytes_to_skip:
                # Nothing was appended since the initial status was taken.
                return
            elif new_stat.st_size < prev_stat.st_size:
                # File was modified to a smaller size; copy the whole file.
                bytes_to_skip = 0
        try:
            with open(file_path, 'r') as in_log:
                if bytes_to_skip > 0:
                    in_log.seek(bytes_to_skip)
                # Skip src_dir in the path, e.g., src_dir/[sub_dir]/file_name.
                target_path = os.path.join(dest_dir,
                                           os.path.relpath(file_path, src_dir))
                target_dir = os.path.dirname(target_path)
                if not os.path.exists(target_dir):
                    os.makedirs(target_dir)
                with open(target_path, "w") as out_log:
                    out_log.write(in_log.read())
        except IOError as e:
            logging.error('Diff %s failed with error: %s', file_path, e)


    def _log_diff(self, src_dir, dest_dir):
        """Log all of the new data in src_dir to dest_dir.

        @param src_dir: source directory to do the diff.
        @param dest_dir: target directory to store new data of src_dir.

        """
        if self.keep_file_hierarchy:
            dir_name = src_dir.lstrip('/')
            if self.append_diff_in_name:
                dir_name = dir_name.rstrip('/') + '_diff'
            dest_dir = os.path.join(dest_dir, dir_name)

        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        for src_file in self._get_all_files(src_dir):
            self._copy_new_data_in_file(src_file, src_dir, dest_dir)


    def run(self, log_dir, collect_init_status=True, collect_all=False):
        """Copies new content from self.dir to the destination log_dir.

        @param log_dir: The destination log directory.
        @param collect_init_status: Set to True if the run method is called to
            collect the initial status of files.
        @param collect_all: Set to True to force collection of all files.

        """
        if collect_init_status:
            self._get_init_status_of_src_dir(self.dir)
        elif os.path.exists(self.dir):
            # Always create a copy of the new logs to help debugging.
            self._log_diff(self.dir, log_dir)
            if collect_all:
                logdir_temp = logdir(self.dir)
                logdir_temp.run(log_dir)


class purgeable_logdir(logdir):
    """Represents a log directory whose contents are purged after copying."""
    def __init__(self, directory, excludes=logdir.DEFAULT_EXCLUDES):
        super(purgeable_logdir, self).__init__(directory, excludes)

    def run(self, log_dir):
        """Copies this log dir to the destination dir, then purges the source.

        @param log_dir: The destination log directory.
        """
        super(purgeable_logdir, self).run(log_dir)

        if os.path.exists(self.dir):
            utils.system("rm -rf %s/*" % (self.dir))


class site_sysinfo(base_sysinfo.base_sysinfo):
    """Represents site system info."""
    def __init__(self, job_resultsdir):
        super(site_sysinfo, self).__init__(job_resultsdir)
        crash_excludes = logdir.DEFAULT_EXCLUDES
        if not collect_corefiles:
            # Skip core files unless explicitly requested via the
            # collect_corefiles config option.
            crash_excludes += ("*.core",)

        # This is added to both before and after_iteration_loggables. When run
        # is called in before_iteration_loggables, it collects file status in
        # the directory. When run is called in after_iteration_loggables, the
        # diff is executed.
        # self.diffable_loggables is only initialized if the instance does not
        # have this attribute yet. The sysinfo instance could be loaded
        # from an earlier pickle dump, which has already initialized the
        # attribute self.diffable_loggables.
        if not hasattr(self, 'diffable_loggables'):
            diffable_log = diffable_logdir(constants.LOG_DIR)
            self.diffable_loggables = set()
            self.diffable_loggables.add(diffable_log)

        # Add in some extra command logging.
        self.boot_loggables.add(command("ls -l /boot",
                                        "boot_file_list"))
        self.before_iteration_loggables.add(
            command(constants.CHROME_VERSION_COMMAND, "chrome_version"))
        self.boot_loggables.add(command("crossystem", "crossystem"))
        self.test_loggables.add(
            purgeable_logdir(
                os.path.join(constants.CRYPTOHOME_MOUNT_PT, "log")))
        # We only want to gather and purge crash reports after the client test
        # runs, in case a client test is checking that a crash found at boot
        # (such as a kernel crash) is handled.
        self.after_iteration_loggables.add(
            purgeable_logdir(
                os.path.join(constants.CRYPTOHOME_MOUNT_PT, "crash"),
                excludes=crash_excludes))
        self.after_iteration_loggables.add(
            purgeable_logdir(
                constants.CRASH_DIR,
                excludes=crash_excludes))
        self.test_loggables.add(
            logfile(os.path.join(constants.USER_DATA_DIR,
                                 ".Google/Google Talk Plugin/gtbplugin.log")))
        self.test_loggables.add(purgeable_logdir(
                constants.CRASH_DIR,
                excludes=crash_excludes))
        # Collect files under /tmp/crash_reporter, which contain the procfs
        # copy of those crashed processes whose core file didn't get converted
        # into a minidump. We need these additional files for post-mortem
        # analysis of the conversion failure.
        self.test_loggables.add(
            purgeable_logdir(constants.CRASH_REPORTER_RESIDUE_DIR))


    @log.log_and_ignore_errors("pre-test sysinfo error:")
    def log_before_each_test(self, test):
        """Logging hook called before a test starts.

        @param test: A test object.
        """
        super(site_sysinfo, self).log_before_each_test(test)

        for loggable in self.diffable_loggables:
            loggable.run(log_dir=None, collect_init_status=True)


    @log.log_and_ignore_errors("post-test sysinfo error:")
    def log_after_each_test(self, test):
        """Logging hook called after a test finishes.

        @param test: A test object.
        """
        super(site_sysinfo, self).log_after_each_test(test)

        test_sysinfodir = self._get_sysinfodir(test.outputdir)

        for loggable in self.diffable_loggables:
            loggable.run(log_dir=test_sysinfodir, collect_init_status=False,
                         collect_all=not test.success)


    def _get_chrome_version(self):
        """Gets the Chrome version number and milestone as strings.

        Invokes "chrome --version" to get the version number and milestone.

        @return A tuple (chrome_ver, milestone) where "chrome_ver" is the
            current Chrome version number as a string (in the form "W.X.Y.Z")
            and "milestone" is the first component of the version number
            (the "W" from "W.X.Y.Z").  If the version number cannot be parsed
            in the "W.X.Y.Z" format, the "chrome_ver" will be the full output
            of "chrome --version" and the milestone will be the empty string.

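        Example (hypothetical output): a version string such as
        "Google Chrome 51.0.2704.103" is parsed into ("51.0.2704.103", "51").
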
        """
        version_string = utils.system_output(constants.CHROME_VERSION_COMMAND,
                                             ignore_status=True)
        return utils.parse_chrome_version(version_string)


    def log_test_keyvals(self, test_sysinfodir):
        """Generate keyvals for the sysinfo.

        Collects keyval entries to be written in the test keyval.

        @param test_sysinfodir: The test's system info directory.
        """
        keyval = super(site_sysinfo, self).log_test_keyvals(test_sysinfodir)

        lsb_lines = utils.system_output(
            "cat /etc/lsb-release",
            ignore_status=True).splitlines()
        lsb_dict = dict(item.split("=") for item in lsb_lines)

        for lsb_key in lsb_dict.keys():
            # Special handling for the build number.
            if lsb_key == "CHROMEOS_RELEASE_DESCRIPTION":
                keyval["CHROMEOS_BUILD"] = (
                    lsb_dict[lsb_key].rstrip(")").split(" ")[3])
            keyval[lsb_key] = lsb_dict[lsb_key]

        # Get the hwid (hardware ID), if applicable.
        try:
            keyval["hwid"] = utils.system_output('crossystem hwid')
        except error.CmdError:
            # The hwid may not be available (e.g., when running on a VM).
            # If the output of 'crossystem mainfw_type' is 'nonchrome', then
            # we expect the hwid to not be available, and we can proceed in
            # this case.  Otherwise, the hwid is missing unexpectedly.
            mainfw_type = utils.system_output('crossystem mainfw_type')
            if mainfw_type == 'nonchrome':
                logging.info(
                    'HWID not available; not logging it as a test keyval.')
            else:
                logging.exception('HWID expected but could not be identified; '
                                  'output of "crossystem mainfw_type" is "%s"',
                                  mainfw_type)
                raise

        # Get the chrome version and milestone numbers.
        keyval["CHROME_VERSION"], keyval["MILESTONE"] = (
                self._get_chrome_version())

        # TODO(kinaba): crbug.com/707448 Import at the head of this file.
        # Currently a server-side script, server/server_job.py, indirectly
        # imports this file, so we cannot globally import cryptohome, which
        # depends on a client-only library.
        from autotest_lib.client.cros import cryptohome
        # Get the dictionary attack counter.
        keyval["TPM_DICTIONARY_ATTACK_COUNTER"] = (
                cryptohome.get_tpm_more_status().get(
                    'dictionary_attack_counter', 'Failed to query cryptohome'))

        # Return the updated keyvals.
        return keyval


    def add_logdir(self, loggable):
        """Collect files under a given path into the sysinfo folder.

        This method can be called from a test's control file to collect the
        files in a specified folder. Autotest creates a [test result
        dir]/sysinfo folder mirroring the full path of the logged directory
        and copies all of its files there.

        @param loggable: A logdir instance corresponding to the logs to collect.
        """
        self.test_loggables.add(loggable)

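
# Illustrative only: a client-side control file could collect one extra
# directory into the test's sysinfo output. How the sysinfo object is reached
# from a control file (job.sysinfo below) is an assumption; verify it against
# the job API of your autotest checkout:
#
#     from autotest_lib.client.bin import site_sysinfo
#     job.sysinfo.add_logdir(site_sysinfo.logdir('/var/spool/crash'))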