# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import abc, logging, os, re, time
import subprocess

import common
from autotest_lib.client.bin import utils
from autotest_lib.client.common_lib import error
from constants import CLEANUP_LOGS_PAUSED_FILE


def strip_timestamp(msg):
    """
    Strips timestamps and PIDs from a syslog message to facilitate
    failure reason aggregation when this message is used as an autotest
    exception text.
    """
    kernel = re.search(r' kernel: \[ *\d+\.\d+\] (.*)$', msg)
    if kernel:
        return 'kernel: ' + kernel.group(1)

    user = re.match(r'[^a-z]+ [^ ]* ([^\[ ]+)(?:\[.*\])?(: .*)$', msg)
    if user:
        return user.group(1) + user.group(2)

    logging.warning('Could not parse syslog message: ' + msg)
    return msg

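# Illustrative examples of strip_timestamp; the log lines below are made-up
# samples in a typical rsyslog format, not taken from a real device:
#   strip_timestamp('2012-02-03T09:32:10.416212-08:00 localhost kernel: '
#                   '[    2.749064] i2c i2c-2: Added multiplexed i2c bus 4')
#   -> 'kernel: i2c i2c-2: Added multiplexed i2c bus 4'
#   strip_timestamp('2012-02-03T09:32:11.000000-08:00 localhost shill[1018]: '
#                   'Connecting to service')
#   -> 'shill: Connecting to service'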

def extract_kernel_timestamp(msg):
    """Extract a timestamp that appears in kernel log messages and looks
    like this:
    ... kernel: [78791.721832] ...

    Returns:
        The timestamp as float in seconds since last boot.
    """

    match = re.search(r'\[\s*([0-9]+\.[0-9]+)\] ', msg)
    if match:
        return float(match.group(1))
    raise error.TestError('Could not extract timestamp from message: ' + msg)

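# Illustrative example of extract_kernel_timestamp, using the made-up message
# format shown in the docstring above:
#   extract_kernel_timestamp('... kernel: [78791.721832] usb 1-2: new device')
#   -> 78791.721832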

class AbstractLogReader(object):

    # Declare the metaclass so @abc.abstractmethod is enforced under Python 2.
    __metaclass__ = abc.ABCMeta

    def __init__(self):
        self._start_line = 1

    @abc.abstractmethod
    def read_all_logs(self):
        """Read all content from log files.

        Generator function; yields the content of the log files line by
        line.

        This generator can peek a line once (and only once!) by using
        .send(offset). You must iterate past the peeked line before you
        can peek again.
        """
        pass

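    # Illustrative use of the peek protocol described above, assuming a
    # concrete subclass such as LogReader below:
    #   gen = reader.read_all_logs()
    #   first = gen.next()      # first log line
    #   ahead = gen.send(3)     # peek at the third line past 'first'
    #   second = gen.next()     # iteration resumes after 'first'; the peeked
    #                           # line comes around again in order
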
    def set_start_by_regexp(self, index, regexp):
        """Set the start of logs based on a regular expression.

        @param index: line matching regexp to start at, earliest log at 0.
                Negative numbers indicate matches since end of log.
        @param regexp: regular expression to match at the start of a log line.
        """
        regexp_compiled = re.compile(regexp)
        starts = []
        line_number = 1
        self._start_line = 1
        for line in self.read_all_logs():
            if regexp_compiled.match(line):
                starts.append(line_number)
            line_number += 1
        if index < -len(starts):
            self._start_line = 1
        elif index >= len(starts):
            self._start_line = line_number
        else:
            self._start_line = starts[index]

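    # Illustrative example (the marker text is hypothetical): start reading at
    # the last line announcing a test marker.
    #   reader.set_start_by_regexp(-1, r'.*Starting my_test')
    #   recent_logs = reader.get_logs()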

    def set_start_by_reboot(self, index):
        """ Set the start of logs (must be system log) based on reboot.

        @param index: reboot to start at, earliest log at 0.  Negative
                numbers indicate reboots since end of log.
        """
        return self.set_start_by_regexp(index,
                                        r'.*000\] Linux version \d')


    def set_start_by_current(self, relative=0):
        """ Set start of logs based on current last line.

        @param relative: line relative to current to start at.  1 means
                to start the log after this line.
        """
        count = self._start_line + relative
        for line in self.read_all_logs():
            count += 1
        self._start_line = count

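    # Illustrative example: remember the current end of the log, trigger an
    # event, then fetch only the lines logged afterwards.
    #   reader.set_start_by_current()
    #   ...  # perform the action under test
    #   new_lines = reader.get_logs()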

    def get_logs(self):
        """ Get logs since the start line.

        Start line is set by the set_start_* functions, or defaults to
        the start of the file if none were called.

        @return string of contents of file since start line.
        """
        logs = []
        for line in self.read_all_logs():
            logs.append(line)
        return ''.join(logs)


    def can_find(self, string):
        """ Try to find string in the logs.

        @param string: string to search for.
        @return boolean indicating if we found the string.
        """
        return string in self.get_logs()


    def get_last_msg(self, patterns, retries=0, sleep_seconds=0.2):
        """Search the logs and return the latest occurrence of a message
        matching one of the patterns.

        Args:
            patterns: A regexp or a list of regexps to search the logs with.
                The function returns as soon as it finds any match to one of
                the patterns; it will not search for the other patterns.
            retries: Number of times to retry if none of the patterns were
                found. Default is one attempt.
            sleep_seconds: Time to sleep between retries.

        Returns:
            The last occurrence of the first matching pattern. "None" if none
            of the patterns matched.
        """

        if not isinstance(patterns, (list, tuple)):
            patterns = [patterns]

        for retry in xrange(retries + 1):
            for pattern in patterns:
                regexp_compiled = re.compile(pattern)
                last_match = None
                for line in self.read_all_logs():
                    if regexp_compiled.search(line):
                        last_match = line
                if last_match:
                    return last_match
            time.sleep(sleep_seconds)

        return None

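    # Illustrative example (the patterns are hypothetical): wait up to ~5
    # seconds for either daemon to log a start-up message.
    #   msg = reader.get_last_msg([r'cryptohomed\[', r'chapsd\['],
    #                             retries=25, sleep_seconds=0.2)
    #   if msg is None:
    #       ...  # neither pattern appeared in the logs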

class LogRotationPauser(object):
    """
    Class to control when logs are rotated from either server or client.

    Assumes all setting of CLEANUP_LOGS_PAUSED_FILE is done by this class
    and that all calls to begin and end are properly
    nested.  For instance, [ a.begin(), b.begin(), b.end(), a.end() ] is
    supported, but [ a.begin(), b.begin(), a.end(), b.end() ]  is not.
    We do support redundant calls to the same object, such as
    [ a.begin(), a.begin(), a.end() ].
    """
    def __init__(self, host=None):
        self._host = host
        self._begun = False
        self._is_nested = True


    def _run(self, command, *args, **dargs):
        if self._host:
            return self._host.run(command, *args, **dargs).exit_status
        else:
            return utils.system(command, *args, **dargs)


    def begin(self):
        """Make sure that log rotation is disabled."""
        if self._begun:
            return
        self._is_nested = (self._run(('[ -r %s ]' %
                                      CLEANUP_LOGS_PAUSED_FILE),
                                     ignore_status=True) == 0)
        if self._is_nested:
            logging.info('File %s was already present',
                         CLEANUP_LOGS_PAUSED_FILE)
        else:
            self._run('touch ' + CLEANUP_LOGS_PAUSED_FILE)
        self._begun = True


    def end(self):
        """Re-enable log rotation unless it was already paused by another."""
        assert self._begun
        if not self._is_nested:
            self._run('rm -f ' + CLEANUP_LOGS_PAUSED_FILE)
        else:
            logging.info('Leaving existing %s file', CLEANUP_LOGS_PAUSED_FILE)
        self._begun = False

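# Illustrative use of LogRotationPauser; the work done between begin() and
# end() is left as a placeholder:
#   pauser = LogRotationPauser()
#   pauser.begin()
#   try:
#       ...  # do work that must not race with log rotation
#   finally:
#       pauser.end()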

class LogReader(AbstractLogReader):
    """Class to read traditional text log files.

    By default reads all logs from /var/log/messages.
    """

    def __init__(self, filename='/var/log/messages', include_rotated_logs=True):
        AbstractLogReader.__init__(self)
        self._filename = filename
        self._include_rotated_logs = include_rotated_logs
        if not os.path.exists(CLEANUP_LOGS_PAUSED_FILE):
            raise error.TestError('LogReader created without ' +
                                  CLEANUP_LOGS_PAUSED_FILE)

    def read_all_logs(self):
        log_files = []
        line_number = 0
        if self._include_rotated_logs:
            log_files.extend(utils.system_output(
                'ls -tr1 %s.*' % self._filename,
                ignore_status=True).splitlines())
        log_files.append(self._filename)
        for log_file in log_files:
            f = open(log_file)
            for line in f:
                line_number += 1
                if line_number < self._start_line:
                    continue
                peek = yield line
                if peek:
                    # The caller peeked via .send(peek): read ahead that many
                    # lines, hand back the last one, then replay the buffered
                    # lines in order on subsequent iterations.
                    buf = [f.next() for _ in xrange(peek)]
                    yield buf[-1]
                    while buf:
                        yield buf.pop(0)
            f.close()

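# Illustrative use of LogReader (the message text is hypothetical):
#   reader = LogReader()
#   reader.set_start_by_reboot(-1)
#   if reader.can_find('Some expected message'):
#       ...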

class JournalLogReader(AbstractLogReader):
    """A class to read logs stored by systemd-journald."""

    def read_all_logs(self):
        proc = subprocess.Popen(['journalctl'], stdout=subprocess.PIPE)
        line_number = 0
        for line in proc.stdout:
            line_number += 1
            if line_number < self._start_line:
                continue
            yield line
        proc.terminate()


class ContinuousLogReader(AbstractLogReader):
    """Reads a log file from where it left off last time.

    Useful when reading a growing log file.
    """
    def __init__(self, filename):
        AbstractLogReader.__init__(self)
        self._filename = filename
        # Keeps file offset of last read.
        self._last_pos = 0

    def read_all_logs(self):
        try:
            with open(self._filename, 'r') as f:
                f.seek(self._last_pos)
                for line in f:
                    yield line
                # Note that _last_pos is recorded only if all lines in the
                # file have been read. Maybe put this into a context manager
                # so it's always recorded if needed.
                self._last_pos = f.tell()
        except IOError:
            logging.error('Could not read log file %s', self._filename)


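# Illustrative use of ContinuousLogReader (the file name is hypothetical):
#   reader = ContinuousLogReader('/var/log/some_daemon.log')
#   for line in reader.read_all_logs():
#       ...  # only lines added since the previous call are yielded
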
def make_system_log_reader():
    """Create a system log reader.

    This will create JournalLogReader() or LogReader() depending on
    whether the system is configured with systemd.
    """
    if os.path.exists("/var/log/journal"):
        return JournalLogReader()
    else:
        return LogReader()
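
# Illustrative end-to-end use (the expected message is hypothetical):
#   reader = make_system_log_reader()
#   reader.set_start_by_current()
#   ...  # trigger the event under test
#   if not reader.can_find('expected message'):
#       raise error.TestFail('Expected message not found in system log')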