Home | History | Annotate | Download | only in common_lib
      1 import os, copy, logging, errno, fcntl, time, re, weakref, traceback
      2 import tarfile
      3 import cPickle as pickle
      4 import tempfile
      5 from autotest_lib.client.common_lib import autotemp, error, log
      6 
      7 
class job_directory(object):
    """Represents a job.*dir directory.

    Wraps a single directory path used by a job, validating at
    construction time that the directory exists (and, if requested, is
    writable), creating it or a temporary stand-in when necessary.
    """


    class JobDirectoryException(error.AutotestError):
        """Generic job_directory exception superclass."""


    class MissingDirectoryException(JobDirectoryException):
        """Raised when a directory required by the job does not exist."""
        def __init__(self, path):
            Exception.__init__(self, 'Directory %s does not exist' % path)


    class UncreatableDirectoryException(JobDirectoryException):
        """Raised when a directory required by the job is missing and cannot
        be created."""
        def __init__(self, path, error):
            # note: the 'error' parameter shadows the imported error module
            # within this method, which is harmless here
            msg = 'Creation of directory %s failed with exception %s'
            msg %= (path, error)
            Exception.__init__(self, msg)


    class UnwritableDirectoryException(JobDirectoryException):
        """Raised when a writable directory required by the job exists
        but is not writable."""
        def __init__(self, path):
            msg = 'Directory %s exists but is not writable' % path
            Exception.__init__(self, msg)


    def __init__(self, path, is_writable=False):
        """
        Instantiate a job directory.

        @param path: The path of the directory. If None a temporary directory
            will be created instead.
        @param is_writable: If True, expect the directory to be writable.

        @raise MissingDirectoryException: raised if is_writable=False and the
            directory does not exist (including when path is None).
        @raise UnwritableDirectoryException: raised if is_writable=True and
            the directory exists but is not writable.
        @raise UncreatableDirectoryException: raised if is_writable=True, the
            directory does not exist and it cannot be created.
        """
        if path is None:
            if is_writable:
                # hold a reference to the tempdir object so the temporary
                # directory lives as long as this job_directory does
                self._tempdir = autotemp.tempdir(unique_id='autotest')
                self.path = self._tempdir.name
            else:
                raise self.MissingDirectoryException(path)
        else:
            self._tempdir = None
            self.path = path
        self._ensure_valid(is_writable)


    def _ensure_valid(self, is_writable):
        """
        Ensure that self.path is a valid directory.

        Checks that the directory exists, optionally enforcing that it be
        writable and creating it if necessary. Creation will still fail if
        the path is rooted in a non-writable directory, or if a file
        already exists at the given location.

        @param is_writable: A boolean indicating that the directory should
            not only exist, but also be writable.

        @raises MissingDirectoryException: raised if is_writable=False and
            the directory does not exist.
        @raises UnwritableDirectoryException: raised if is_writable=True and
            the directory is not writable.
        @raises UncreatableDirectoryException: raised if is_writable=True,
            the directory does not exist and it cannot be created.
        """
        # ensure the directory exists
        if is_writable:
            try:
                os.makedirs(self.path)
            except OSError, e:
                # EEXIST on an existing directory is fine; anything else
                # (or EEXIST on a non-directory) is a creation failure
                if e.errno != errno.EEXIST or not os.path.isdir(self.path):
                    raise self.UncreatableDirectoryException(self.path, e)
        elif not os.path.isdir(self.path):
            raise self.MissingDirectoryException(self.path)

        # if is_writable=True, also check that the directory is writable
        if is_writable and not os.access(self.path, os.W_OK):
            raise self.UnwritableDirectoryException(self.path)


    @staticmethod
    def property_factory(attribute):
        """
        Create a job.*dir -> job._*dir.path property accessor.

        @param attribute A string with the name of the attribute this is
            exposed as. '_'+attribute must then be attribute that holds
            either None or a job_directory-like object.

        @returns A read-only property object that exposes a job_directory path
        """
        @property
        def dir_property(self):
            underlying_attribute = getattr(self, '_' + attribute)
            if underlying_attribute is None:
                return None
            else:
                return underlying_attribute.path
        return dir_property
    120 
    121 
    122 # decorator for use with job_state methods
    123 def with_backing_lock(method):
    124     """A decorator to perform a lock-*-unlock cycle.
    125 
    126     When applied to a method, this decorator will automatically wrap
    127     calls to the method in a backing file lock and before the call
    128     followed by a backing file unlock.
    129     """
    130     def wrapped_method(self, *args, **dargs):
    131         already_have_lock = self._backing_file_lock is not None
    132         if not already_have_lock:
    133             self._lock_backing_file()
    134         try:
    135             return method(self, *args, **dargs)
    136         finally:
    137             if not already_have_lock:
    138                 self._unlock_backing_file()
    139     wrapped_method.__name__ = method.__name__
    140     wrapped_method.__doc__ = method.__doc__
    141     return wrapped_method
    142 
    143 
    144 # decorator for use with job_state methods
    145 def with_backing_file(method):
    146     """A decorator to perform a lock-read-*-write-unlock cycle.
    147 
    148     When applied to a method, this decorator will automatically wrap
    149     calls to the method in a lock-and-read before the call followed by a
    150     write-and-unlock. Any operation that is reading or writing state
    151     should be decorated with this method to ensure that backing file
    152     state is consistently maintained.
    153     """
    154     @with_backing_lock
    155     def wrapped_method(self, *args, **dargs):
    156         self._read_from_backing_file()
    157         try:
    158             return method(self, *args, **dargs)
    159         finally:
    160             self._write_to_backing_file()
    161     wrapped_method.__name__ = method.__name__
    162     wrapped_method.__doc__ = method.__doc__
    163     return wrapped_method
    164 
    165 
    166 
class job_state(object):
    """A class for managing explicit job and user state, optionally persistent.

    The class allows you to save state by name (like a dictionary). Any state
    stored in this class should be picklable and deep copyable. While this is
    not enforced it is recommended that only valid python identifiers be used
    as names. Additionally, the namespace 'stateful_property' is used for
    storing the values associated with properties constructed using the
    property_factory method.
    """

    # sentinel used by get() so that None can be passed as a real default
    NO_DEFAULT = object()
    PICKLE_PROTOCOL = 2  # highest protocol available in python 2.4


    def __init__(self):
        """Initialize the job state."""
        # in-memory state, shaped as {namespace: {name: value}}
        self._state = {}
        # path of the on-disk backing file, or None when syncing is disabled
        self._backing_file = None
        # True once the backing file has been merged in at least once
        self._backing_file_initialized = False
        # open file object holding the flock, or None when unlocked
        self._backing_file_lock = None


    def _lock_backing_file(self):
        """Acquire a lock on the backing file."""
        if self._backing_file:
            # open in append mode so the file is created if it is missing
            self._backing_file_lock = open(self._backing_file, 'a')
            fcntl.flock(self._backing_file_lock, fcntl.LOCK_EX)


    def _unlock_backing_file(self):
        """Release a lock on the backing file."""
        if self._backing_file_lock:
            fcntl.flock(self._backing_file_lock, fcntl.LOCK_UN)
            self._backing_file_lock.close()
            self._backing_file_lock = None


    def read_from_file(self, file_path, merge=True):
        """Read in any state from the file at file_path.

        When merge=True, any state specified only in-memory will be preserved.
        Any state specified on-disk will be set in-memory, even if an in-memory
        setting already exists.

        @param file_path: The path where the state should be read from. It must
            exist but it can be empty.
        @param merge: If true, merge the on-disk state with the in-memory
            state. If false, replace the in-memory state with the on-disk
            state.

        @warning: This method is intentionally concurrency-unsafe. It makes no
            attempt to control concurrent access to the file at file_path.
        """

        # we can assume that the file exists
        if os.path.getsize(file_path) == 0:
            # an empty file is treated as empty state, not a pickle error
            on_disk_state = {}
        else:
            on_disk_state = pickle.load(open(file_path))

        if merge:
            # merge the on-disk state with the in-memory state; on conflict
            # the on-disk value wins
            for namespace, namespace_dict in on_disk_state.iteritems():
                in_memory_namespace = self._state.setdefault(namespace, {})
                for name, value in namespace_dict.iteritems():
                    if name in in_memory_namespace:
                        if in_memory_namespace[name] != value:
                            logging.info('Persistent value of %s.%s from %s '
                                         'overridding existing in-memory '
                                         'value', namespace, name, file_path)
                            in_memory_namespace[name] = value
                        else:
                            logging.debug('Value of %s.%s is unchanged, '
                                          'skipping import', namespace, name)
                    else:
                        logging.debug('Importing %s.%s from state file %s',
                                      namespace, name, file_path)
                        in_memory_namespace[name] = value
        else:
            # just replace the in-memory state with the on-disk state
            self._state = on_disk_state

        # Flush the merged result back out to the backing file under the
        # backing file lock. This method itself is not decorated with
        # @with_backing_file, so the lock must be taken explicitly here.
        with_backing_lock(self.__class__._write_to_backing_file)(self)


    def write_to_file(self, file_path):
        """Write out the current state to the given path.

        @param file_path: The path where the state should be written out to.
            Must be writable.

        @warning: This method is intentionally concurrency-unsafe. It makes no
            attempt to control concurrent access to the file at file_path.
        """
        # NOTE(review): opened in text mode ('w') while protocol 2 pickles
        # are binary; presumably only used on POSIX where this is equivalent
        # to 'wb' -- confirm before running on other platforms.
        outfile = open(file_path, 'w')
        try:
            pickle.dump(self._state, outfile, self.PICKLE_PROTOCOL)
        finally:
            outfile.close()


    def _read_from_backing_file(self):
        """Refresh the current state from the backing file.

        If the backing file has never been read before (indicated by checking
        self._backing_file_initialized) it will merge the file with the
        in-memory state, rather than overwriting it.
        """
        if self._backing_file:
            merge_backing_file = not self._backing_file_initialized
            self.read_from_file(self._backing_file, merge=merge_backing_file)
            self._backing_file_initialized = True


    def _write_to_backing_file(self):
        """Flush the current state to the backing file."""
        if self._backing_file:
            self.write_to_file(self._backing_file)


    @with_backing_file
    def _synchronize_backing_file(self):
        """Synchronizes the contents of the in-memory and on-disk state."""
        # state is implicitly synchronized in _with_backing_file methods
        pass


    def set_backing_file(self, file_path):
        """Change the path used as the backing file for the persistent state.

        When a new backing file is specified if a file already exists then
        its contents will be added into the current state, with conflicts
        between the file and memory being resolved in favor of the file
        contents. The file will then be kept in sync with the (combined)
        in-memory state. The syncing can be disabled by setting this to None.

        @param file_path: A path on the filesystem that can be read from and
            written to, or None to turn off the backing store.
        """
        # flush the current state out to the old backing file (if any)
        self._synchronize_backing_file()
        self._backing_file = file_path
        self._backing_file_initialized = False
        # merge in the new file's contents and write the result back out
        self._synchronize_backing_file()


    @with_backing_file
    def get(self, namespace, name, default=NO_DEFAULT):
        """Returns the value associated with a particular name.

        @param namespace: The namespace that the property should be stored in.
        @param name: The name the value was saved with.
        @param default: A default value to return if no state is currently
            associated with var.

        @return: A deep copy of the value associated with name. Note that this
            explicitly returns a deep copy to avoid problems with mutable
            values; mutations are not persisted or shared.
        @raise KeyError: raised when no state is associated with var and a
            default value is not provided.
        """
        if self.has(namespace, name):
            return copy.deepcopy(self._state[namespace][name])
        elif default is self.NO_DEFAULT:
            raise KeyError('No key %s in namespace %s' % (name, namespace))
        else:
            return default


    @with_backing_file
    def set(self, namespace, name, value):
        """Saves the value given with the provided name.

        @param namespace: The namespace that the property should be stored in.
        @param name: The name the value should be saved with.
        @param value: The value to save.
        """
        namespace_dict = self._state.setdefault(namespace, {})
        # store a deep copy so later caller-side mutations are not persisted
        namespace_dict[name] = copy.deepcopy(value)
        logging.debug('Persistent state %s.%s now set to %r', namespace,
                      name, value)


    @with_backing_file
    def has(self, namespace, name):
        """Return a boolean indicating if namespace.name is defined.

        @param namespace: The namespace to check for a definition.
        @param name: The name to check for a definition.

        @return: True if the given name is defined in the given namespace and
            False otherwise.
        """
        return namespace in self._state and name in self._state[namespace]


    @with_backing_file
    def discard(self, namespace, name):
        """If namespace.name is a defined value, deletes it.

        @param namespace: The namespace that the property is stored in.
        @param name: The name the value is saved with.
        """
        if self.has(namespace, name):
            del self._state[namespace][name]
            # drop the namespace entirely once its last entry is removed
            if len(self._state[namespace]) == 0:
                del self._state[namespace]
            logging.debug('Persistent state %s.%s deleted', namespace, name)
        else:
            logging.debug(
                'Persistent state %s.%s not defined so nothing is discarded',
                namespace, name)


    @with_backing_file
    def discard_namespace(self, namespace):
        """Delete all defined namespace.* names.

        @param namespace: The namespace to be cleared.
        """
        if namespace in self._state:
            del self._state[namespace]
        logging.debug('Persistent state %s.* deleted', namespace)


    @staticmethod
    def property_factory(state_attribute, property_attribute, default,
                         namespace='global_properties'):
        """
        Create a property object for an attribute using self.get and self.set.

        @param state_attribute: A string with the name of the attribute on
            job that contains the job_state instance.
        @param property_attribute: A string with the name of the attribute
            this property is exposed as.
        @param default: A default value that should be used for this property
            if it is not set.
        @param namespace: The namespace to store the attribute value in.

        @return: A read-write property object that performs self.get calls
            to read the value and self.set calls to set it.
        """
        def getter(job):
            state = getattr(job, state_attribute)
            return state.get(namespace, property_attribute, default)
        def setter(job, value):
            state = getattr(job, state_attribute)
            state.set(namespace, property_attribute, value)
        return property(getter, setter)
    417 
    418 
class status_log_entry(object):
    """Represents a single status log entry."""

    # placeholder rendered for a missing subdir/operation field
    RENDERED_NONE_VALUE = '----'
    TIMESTAMP_FIELD = 'timestamp'
    LOCALTIME_FIELD = 'localtime'

    # non-space whitespace is forbidden in any fields
    BAD_CHAR_REGEX = re.compile(r'[\t\n\r\v\f]')

    def __init__(self, status_code, subdir, operation, message, fields,
                 timestamp=None):
        """Construct a status.log entry.

        @param status_code: A message status code. Must match the codes
            accepted by autotest_lib.common_lib.log.is_valid_status.
        @param subdir: A valid job subdirectory, or None.
        @param operation: Description of the operation, or None.
        @param message: A printable string describing event to be recorded.
        @param fields: A dictionary of arbitrary alphanumeric key=value pairs
            to be included in the log, or None.
        @param timestamp: An optional integer timestamp, in the same format
            as a time.time() timestamp. If unspecified, the current time is
            used.

        @raise ValueError: if any of the parameters are invalid
        """

        if not log.is_valid_status(status_code):
            raise ValueError('status code %r is not valid' % status_code)
        self.status_code = status_code

        if subdir and self.BAD_CHAR_REGEX.search(subdir):
            raise ValueError('Invalid character in subdir string')
        self.subdir = subdir

        if operation and self.BAD_CHAR_REGEX.search(operation):
            raise ValueError('Invalid character in operation string')
        self.operation = operation

        # break the message line into a single-line message that goes into the
        # database, and a block of additional lines that goes into the status
        # log but will never be parsed
        message_lines = message.split('\n')
        # tabs are the field delimiter, so expand them in the parsed line
        self.message = message_lines[0].replace('\t', ' ' * 8)
        self.extra_message_lines = message_lines[1:]
        if self.BAD_CHAR_REGEX.search(self.message):
            raise ValueError('Invalid character in message %r' % self.message)

        if not fields:
            self.fields = {}
        else:
            # copy so the caller's dict is not mutated by the timestamp
            # fields added below
            self.fields = fields.copy()
        for key, value in self.fields.iteritems():
            if type(value) is int:
                # NOTE(review): the str() conversion only feeds the
                # validation below; int values stay ints in self.fields
                # (render() formats them with %s regardless) -- confirm
                # this asymmetry with parse() is intended.
                value = str(value)
            if self.BAD_CHAR_REGEX.search(key + value):
                raise ValueError('Invalid character in %r=%r field'
                                 % (key, value))

        # build up the timestamp
        if timestamp is None:
            timestamp = int(time.time())
        self.fields[self.TIMESTAMP_FIELD] = str(timestamp)
        self.fields[self.LOCALTIME_FIELD] = time.strftime(
            '%b %d %H:%M:%S', time.localtime(timestamp))


    def is_start(self):
        """Indicates if this status log is the start of a new nested block.

        @return: A boolean indicating if this entry starts a new nested block.
        """
        return self.status_code == 'START'


    def is_end(self):
        """Indicates if this status log is the end of a nested block.

        @return: A boolean indicating if this entry ends a nested block.
        """
        return self.status_code.startswith('END ')


    def render(self):
        """Render the status log entry into a text string.

        @return: A text string suitable for writing into a status log file.
        """
        # combine all the log line data into a tab-delimited string
        subdir = self.subdir or self.RENDERED_NONE_VALUE
        operation = self.operation or self.RENDERED_NONE_VALUE
        extra_fields = ['%s=%s' % field for field in self.fields.iteritems()]
        line_items = [self.status_code, subdir, operation]
        line_items += extra_fields + [self.message]
        first_line = '\t'.join(line_items)

        # append the extra unparsable lines, two-space indented
        all_lines = [first_line]
        all_lines += ['  ' + line for line in self.extra_message_lines]
        return '\n'.join(all_lines)


    @classmethod
    def parse(cls, line):
        """Parse a status log entry from a text string.

        This method is the inverse of render; it should always be true that
        parse(entry.render()) produces a new status_log_entry equivalent to
        entry.

        @return: A new status_log_entry instance with fields extracted from the
            given status line. If the line is an extra message line then None
            is returned.
        """
        # extra message lines are always prepended with two spaces
        if line.startswith('  '):
            return None

        line = line.lstrip('\t')  # ignore indentation
        entry_parts = line.split('\t')
        # a valid line has at least status, subdir, operation and message
        if len(entry_parts) < 4:
            raise ValueError('%r is not a valid status line' % line)
        status_code, subdir, operation = entry_parts[:3]
        if subdir == cls.RENDERED_NONE_VALUE:
            subdir = None
        if operation == cls.RENDERED_NONE_VALUE:
            operation = None
        message = entry_parts[-1]
        # everything between operation and message is key=value fields
        fields = dict(part.split('=', 1) for part in entry_parts[3:-1])
        if cls.TIMESTAMP_FIELD in fields:
            timestamp = int(fields[cls.TIMESTAMP_FIELD])
        else:
            timestamp = None
        return cls(status_code, subdir, operation, message, fields, timestamp)
    554 
    555 
    556 class status_indenter(object):
    557     """Abstract interface that a status log indenter should use."""
    558 
    559     @property
    560     def indent(self):
    561         raise NotImplementedError
    562 
    563 
    564     def increment(self):
    565         """Increase indentation by one level."""
    566         raise NotImplementedError
    567 
    568 
    569     def decrement(self):
    570         """Decrease indentation by one level."""
    571 
    572 
class status_logger(object):
    """Represents a status log file. Responsible for translating messages
    into on-disk status log lines.

    @property global_filename: The filename to write top-level logs to.
    @property subdir_filename: The filename to write subdir-level logs to.
    """
    def __init__(self, job, indenter, global_filename='status',
                 subdir_filename='status', record_hook=None,
                 tap_writer=None):
        """Construct a logger instance.

        @param job: A reference to the job object this is logging for. Only a
            weak reference to the job is held, to avoid a
            status_logger <-> job circular reference.
        @param indenter: A status_indenter instance, for tracking the
            indentation level.
        @param global_filename: An optional filename to initialize the
            self.global_filename attribute.
        @param subdir_filename: An optional filename to initialize the
            self.subdir_filename attribute.
        @param record_hook: An optional function to be called before an entry
            is logged. The function should expect a single parameter, a
            copy of the status_log_entry object.
        @param tap_writer: An instance of the class TAPReport for additionally
            writing TAP files
        """
        self._jobref = weakref.ref(job)
        self._indenter = indenter
        self.global_filename = global_filename
        self.subdir_filename = subdir_filename
        self._record_hook = record_hook
        # default to a disabled TAPReport so record_entry can always check
        # self._tap_writer.do_tap_report unconditionally
        if tap_writer is None:
            self._tap_writer = TAPReport(None)
        else:
            self._tap_writer = tap_writer


    def render_entry(self, log_entry):
        """Render a status_log_entry as it would be written to a log file.

        @param log_entry: A status_log_entry instance to be rendered.

        @return: The status log entry, rendered as it would be written to the
            logs (including indentation).
        """
        # END entries are rendered one level shallower, at the depth of the
        # matching START entry
        if log_entry.is_end():
            indent = self._indenter.indent - 1
        else:
            indent = self._indenter.indent
        return '\t' * indent + log_entry.render().rstrip('\n')


    def record_entry(self, log_entry, log_in_subdir=True):
        """Record a status_log_entry into the appropriate status log files.

        @param log_entry: A status_log_entry instance to be recorded into the
                status logs.
        @param log_in_subdir: A boolean that indicates (when true) that subdir
                logs should be written into the subdirectory status log file.
        """
        # acquire a strong reference for the duration of the method
        job = self._jobref()
        if job is None:
            logging.warning('Something attempted to write a status log entry '
                            'after its job terminated, ignoring the attempt.')
            logging.warning(traceback.format_stack())
            return

        # call the record hook if one was given
        if self._record_hook:
            self._record_hook(log_entry)

        # figure out where we need to log to: always the global log, plus
        # the per-subdir log when requested and the entry has a subdir
        log_files = [os.path.join(job.resultdir, self.global_filename)]
        if log_in_subdir and log_entry.subdir:
            log_files.append(os.path.join(job.resultdir, log_entry.subdir,
                                          self.subdir_filename))

        # write the entry out to the log files
        log_text = self.render_entry(log_entry)
        for log_file in log_files:
            fileobj = open(log_file, 'a')
            try:
                print >> fileobj, log_text
            finally:
                fileobj.close()

        # write to TAPRecord instance
        if log_entry.is_end() and self._tap_writer.do_tap_report:
            self._tap_writer.record(log_entry, self._indenter.indent, log_files)

        # adjust the indentation if this was a START or END entry
        if log_entry.is_start():
            self._indenter.increment()
        elif log_entry.is_end():
            self._indenter.decrement()
    670 
    671 
class TAPReport(object):
    """
    Deal with TAP reporting for the Autotest client.
    """

    # Maps autotest status codes to a TAP success flag
    # (True renders as "ok", False as "not ok").
    job_statuses = {
        "TEST_NA": False,
        "ABORT": False,
        "ERROR": False,
        "FAIL": False,
        "WARN": False,
        "GOOD": True,
        "START": True,
        "END GOOD": True,
        "ALERT": False,
        "RUNNING": False,
        "NOSTATUS": False
    }
    690 
    691 
    692     def __init__(self, enable, resultdir=None, global_filename='status'):
    693         """
    694         @param enable: Set self.do_tap_report to trigger TAP reporting.
    695         @param resultdir: Path where the TAP report files will be written.
    696         @param global_filename: File name of the status files .tap extensions
    697                 will be appended.
    698         """
    699         self.do_tap_report = enable
    700         if resultdir is not None:
    701             self.resultdir = os.path.abspath(resultdir)
    702         self._reports_container = {}
    703         self._keyval_container = {} # {'path1': [entries],}
    704         self.global_filename = global_filename
    705 
    706 
    707     @classmethod
    708     def tap_ok(self, success, counter, message):
    709         """
    710         return a TAP message string.
    711 
    712         @param success: True for positive message string.
    713         @param counter: number of TAP line in plan.
    714         @param message: additional message to report in TAP line.
    715         """
    716         if success:
    717             message = "ok %s - %s" % (counter, message)
    718         else:
    719             message = "not ok %s - %s" % (counter, message)
    720         return message
    721 
    722 
    723     def record(self, log_entry, indent, log_files):
    724         """
    725         Append a job-level status event to self._reports_container. All
    726         events will be written to TAP log files at the end of the test run.
    727         Otherwise, it's impossilble to determine the TAP plan.
    728 
    729         @param log_entry: A string status code describing the type of status
    730                 entry being recorded. It must pass log.is_valid_status to be
    731                 considered valid.
    732         @param indent: Level of the log_entry to determine the operation if
    733                 log_entry.operation is not given.
    734         @param log_files: List of full path of files the TAP report will be
    735                 written to at the end of the test.
    736         """
    737         for log_file in log_files:
    738             log_file_path = os.path.dirname(log_file)
    739             key = log_file_path.split(self.resultdir, 1)[1].strip(os.sep)
    740             if not key:
    741                 key = 'root'
    742 
    743             if not self._reports_container.has_key(key):
    744                 self._reports_container[key] = []
    745 
    746             if log_entry.operation:
    747                 operation = log_entry.operation
    748             elif indent == 1:
    749                 operation = "job"
    750             else:
    751                 operation = "unknown"
    752             entry = self.tap_ok(
    753                 self.job_statuses.get(log_entry.status_code, False),
    754                 len(self._reports_container[key]) + 1, operation + "\n"
    755             )
    756             self._reports_container[key].append(entry)
    757 
    758 
    759     def record_keyval(self, path, dictionary, type_tag=None):
    760         """
    761         Append a key-value pairs of dictionary to self._keyval_container in
    762         TAP format. Once finished write out the keyval.tap file to the file
    763         system.
    764 
    765         If type_tag is None, then the key must be composed of alphanumeric
    766         characters (or dashes + underscores). However, if type-tag is not
    767         null then the keys must also have "{type_tag}" as a suffix. At
    768         the moment the only valid values of type_tag are "attr" and "perf".
    769 
    770         @param path: The full path of the keyval.tap file to be created
    771         @param dictionary: The keys and values.
    772         @param type_tag: The type of the values
    773         """
    774         self._keyval_container.setdefault(path, [0, []])
    775         self._keyval_container[path][0] += 1
    776 
    777         if type_tag is None:
    778             key_regex = re.compile(r'^[-\.\w]+$')
    779         else:
    780             if type_tag not in ('attr', 'perf'):
    781                 raise ValueError('Invalid type tag: %s' % type_tag)
    782             escaped_tag = re.escape(type_tag)
    783             key_regex = re.compile(r'^[-\.\w]+\{%s\}$' % escaped_tag)
    784         self._keyval_container[path][1].extend([
    785             self.tap_ok(True, self._keyval_container[path][0], "results"),
    786             "\n  ---\n",
    787         ])
    788         try:
    789             for key in sorted(dictionary.keys()):
    790                 if not key_regex.search(key):
    791                     raise ValueError('Invalid key: %s' % key)
    792                 self._keyval_container[path][1].append(
    793                     '  %s: %s\n' % (key.replace('{', '_').rstrip('}'),
    794                                     dictionary[key])
    795                 )
    796         finally:
    797             self._keyval_container[path][1].append("  ...\n")
    798         self._write_keyval()
    799 
    800 
    801     def _write_reports(self):
    802         """
    803         Write TAP reports to file.
    804         """
    805         for key in self._reports_container.keys():
    806             if key == 'root':
    807                 sub_dir = ''
    808             else:
    809                 sub_dir = key
    810             tap_fh = open(os.sep.join(
    811                 [self.resultdir, sub_dir, self.global_filename]
    812             ) + ".tap", 'w')
    813             tap_fh.write('1..' + str(len(self._reports_container[key])) + '\n')
    814             tap_fh.writelines(self._reports_container[key])
    815             tap_fh.close()
    816 
    817 
    818     def _write_keyval(self):
    819         """
    820         Write the self._keyval_container key values to a file.
    821         """
    822         for path in self._keyval_container.keys():
    823             tap_fh = open(path + ".tap", 'w')
    824             tap_fh.write('1..' + str(self._keyval_container[path][0]) + '\n')
    825             tap_fh.writelines(self._keyval_container[path][1])
    826             tap_fh.close()
    827 
    828 
    829     def write(self):
    830         """
    831         Write the TAP reports to files.
    832         """
    833         self._write_reports()
    834 
    835 
    836     def _write_tap_archive(self):
    837         """
    838         Write a tar archive containing all the TAP files and
    839         a meta.yml containing the file names.
    840         """
    841         os.chdir(self.resultdir)
    842         tap_files = []
    843         for rel_path, d, files in os.walk('.'):
    844             tap_files.extend(["/".join(
    845                 [rel_path, f]) for f in files if f.endswith('.tap')])
    846         meta_yaml = open('meta.yml', 'w')
    847         meta_yaml.write('file_order:\n')
    848         tap_tar = tarfile.open(self.resultdir + '/tap.tar.gz', 'w:gz')
    849         for f in tap_files:
    850             meta_yaml.write("  - " + f.lstrip('./') + "\n")
    851             tap_tar.add(f)
    852         meta_yaml.close()
    853         tap_tar.add('meta.yml')
    854         tap_tar.close()
    855 
    856 
class base_job(object):
    """An abstract base class for the various autotest job classes.

    @property autodir: The top level autotest directory.
    @property clientdir: The autotest client directory.
    @property serverdir: The autotest server directory. [OPTIONAL]
    @property resultdir: The directory where results should be written out.
        [WRITABLE]

    @property pkgdir: The job packages directory. [WRITABLE]
    @property tmpdir: The job temporary directory. [WRITABLE]
    @property testdir: The job test directory. [WRITABLE]
    @property site_testdir: The job site test directory. [WRITABLE]

    @property bindir: The client bin/ directory.
    @property profdir: The client profilers/ directory.
    @property toolsdir: The client tools/ directory.

    @property control: A path to the control file to be executed. [OPTIONAL]
    @property hosts: A set of all live Host objects currently in use by the
        job. Code running in the context of a local client can safely assume
        that this set contains only a single entry.
    @property machines: A list of the machine names associated with the job.
    @property user: The user executing the job.
    @property tag: A tag identifying the job. Often used by the scheduler to
        give a name of the form NUMBER-USERNAME/HOSTNAME.
    @property test_retry: The number of times to retry a test if the test did
        not complete successfully.
    @property args: A list of additional miscellaneous command-line arguments
        provided when starting the job.

    @property last_boot_tag: The label of the kernel from the last reboot.
        [OPTIONAL,PERSISTENT]
    @property automatic_test_tag: A string which, if set, will be automatically
        added to the test name when running tests.

    @property default_profile_only: A boolean indicating the default value of
        profile_only used by test.execute. [PERSISTENT]
    @property drop_caches: A boolean indicating if caches should be dropped
        before each test is executed.
    @property drop_caches_between_iterations: A boolean indicating if caches
        should be dropped before each test iteration is executed.
    @property run_test_cleanup: A boolean indicating if test.cleanup should be
        run by default after a test completes, if the run_cleanup argument is
        not specified. [PERSISTENT]

    @property num_tests_run: The number of tests run during the job. [OPTIONAL]
    @property num_tests_failed: The number of tests failed during the job.
        [OPTIONAL]

    @property bootloader: An instance of the boottool class. May not be
        available on job instances where access to the bootloader is not
        available (e.g. on the server running a server job). [OPTIONAL]
    @property harness: An instance of the client test harness. Only available
        in contexts where client test execution happens. [OPTIONAL]
    @property logging: An instance of the logging manager associated with the
        job.
    @property profilers: An instance of the profiler manager associated with
        the job.
    @property sysinfo: An instance of the sysinfo object. Only available in
        contexts where it's possible to collect sysinfo.
    @property warning_manager: A class for managing which types of WARN
        messages should be logged and which should be suppressed. [OPTIONAL]
    @property warning_loggers: A set of readable streams that will be monitored
        for WARN messages to be logged. [OPTIONAL]

    Abstract methods:
        _find_base_directories [CLASSMETHOD]
            Returns the location of autodir, clientdir and serverdir

        _find_resultdir
            Returns the location of resultdir. Gets a copy of any parameters
            passed into base_job.__init__. Can return None to indicate that
            no resultdir is to be used.

        _get_status_logger
            Returns a status_logger instance for recording job status logs.
    """

    # capture the dependency on several helper classes with factories, so
    # that subclasses (and tests) can substitute their own implementations
    _job_directory = job_directory
    _job_state = job_state


    # all the job directory attributes
    autodir = _job_directory.property_factory('autodir')
    clientdir = _job_directory.property_factory('clientdir')
    serverdir = _job_directory.property_factory('serverdir')
    resultdir = _job_directory.property_factory('resultdir')
    pkgdir = _job_directory.property_factory('pkgdir')
    tmpdir = _job_directory.property_factory('tmpdir')
    testdir = _job_directory.property_factory('testdir')
    site_testdir = _job_directory.property_factory('site_testdir')
    bindir = _job_directory.property_factory('bindir')
    profdir = _job_directory.property_factory('profdir')
    toolsdir = _job_directory.property_factory('toolsdir')


    # all the generic persistent properties
    tag = _job_state.property_factory('_state', 'tag', '')
    test_retry = _job_state.property_factory('_state', 'test_retry', 0)
    default_profile_only = _job_state.property_factory(
        '_state', 'default_profile_only', False)
    run_test_cleanup = _job_state.property_factory(
        '_state', 'run_test_cleanup', True)
    last_boot_tag = _job_state.property_factory(
        '_state', 'last_boot_tag', None)
    automatic_test_tag = _job_state.property_factory(
        '_state', 'automatic_test_tag', None)

    # the use_sequence_number property; it is exposed as a boolean view over
    # the persistent _sequence_number counter (None means "disabled")
    _sequence_number = _job_state.property_factory(
        '_state', '_sequence_number', None)
    def _get_use_sequence_number(self):
        return bool(self._sequence_number)
    def _set_use_sequence_number(self, value):
        # enabling starts the counter at 1; disabling clears it entirely
        if value:
            self._sequence_number = 1
        else:
            self._sequence_number = None
    use_sequence_number = property(_get_use_sequence_number,
                                   _set_use_sequence_number)

    # parent job id is passed in from autoserv command line. It's only used in
    # server job. The property is added here for unittest
    # (base_job_unittest.py) to be consistent on validating public properties of
    # a base_job object.
    parent_job_id = None
    985 
    def __init__(self, *args, **dargs):
        """Initialize the job's directories, state and TAP reporting.

        All positional and keyword arguments are forwarded to the subclass
        hook _find_resultdir. If dargs contains an 'options' entry, its
        tap_report attribute controls whether TAP reporting is enabled.
        """
        # initialize the base directories, all others are relative to these
        autodir, clientdir, serverdir = self._find_base_directories()
        self._autodir = self._job_directory(autodir)
        self._clientdir = self._job_directory(clientdir)
        # TODO(scottz): crosbug.com/38259, needed to pass unittests for now.
        self.label = None
        if serverdir:
            self._serverdir = self._job_directory(serverdir)
        else:
            self._serverdir = None

        # initialize all the other directories relative to the base ones
        self._initialize_dir_properties()
        self._resultdir = self._job_directory(
            self._find_resultdir(*args, **dargs), True)
        self._execution_contexts = []

        # initialize all the job state
        self._state = self._job_state()

        # initialize tap reporting
        # ('in' replaces dict.has_key, which is deprecated and gone in Py3)
        if 'options' in dargs:
            self._tap = self._tap_init(dargs['options'].tap_report)
        else:
            self._tap = self._tap_init(False)
   1012 
    @classmethod
    def _find_base_directories(cls):
        """Locate the base autotest directories.

        Abstract hook: subclasses must return an (autodir, clientdir,
        serverdir) tuple, where serverdir may be None (see __init__ and the
        class docstring).

        @raise NotImplementedError: always, in this base class.
        """
        raise NotImplementedError()
   1016 
   1017 
   1018     def _initialize_dir_properties(self):
   1019         """
   1020         Initializes all the secondary self.*dir properties. Requires autodir,
   1021         clientdir and serverdir to already be initialized.
   1022         """
   1023         # create some stubs for use as shortcuts
   1024         def readonly_dir(*args):
   1025             return self._job_directory(os.path.join(*args))
   1026         def readwrite_dir(*args):
   1027             return self._job_directory(os.path.join(*args), True)
   1028 
   1029         # various client-specific directories
   1030         self._bindir = readonly_dir(self.clientdir, 'bin')
   1031         self._profdir = readonly_dir(self.clientdir, 'profilers')
   1032         self._pkgdir = readwrite_dir(self.clientdir, 'packages')
   1033         self._toolsdir = readonly_dir(self.clientdir, 'tools')
   1034 
   1035         # directories which are in serverdir on a server, clientdir on a client
   1036         # tmp tests, and site_tests need to be read_write for client, but only
   1037         # read for server.
   1038         if self.serverdir:
   1039             root = self.serverdir
   1040             r_or_rw_dir = readonly_dir
   1041         else:
   1042             root = self.clientdir
   1043             r_or_rw_dir = readwrite_dir
   1044         self._testdir = r_or_rw_dir(root, 'tests')
   1045         self._site_testdir = r_or_rw_dir(root, 'site_tests')
   1046 
   1047         # various server-specific directories
   1048         if self.serverdir:
   1049             self._tmpdir = readwrite_dir(tempfile.gettempdir())
   1050         else:
   1051             self._tmpdir = readwrite_dir(root, 'tmp')
   1052 
   1053 
    def _find_resultdir(self, *args, **dargs):
        """Locate the results directory for this job.

        Abstract hook: subclasses must return the resultdir path, or None to
        indicate that no resultdir is to be used (see the class docstring).
        Receives whatever arguments were passed to base_job.__init__.

        @raise NotImplementedError: always, in this base class.
        """
        raise NotImplementedError()
   1056 
   1057 
   1058     def push_execution_context(self, resultdir):
   1059         """
   1060         Save off the current context of the job and change to the given one.
   1061 
   1062         In practice method just changes the resultdir, but it may become more
   1063         extensive in the future. The expected use case is for when a child
   1064         job needs to be executed in some sort of nested context (for example
   1065         the way parallel_simple does). The original context can be restored
   1066         with a pop_execution_context call.
   1067 
   1068         @param resultdir: The new resultdir, relative to the current one.
   1069         """
   1070         new_dir = self._job_directory(
   1071             os.path.join(self.resultdir, resultdir), True)
   1072         self._execution_contexts.append(self._resultdir)
   1073         self._resultdir = new_dir
   1074 
   1075 
   1076     def pop_execution_context(self):
   1077         """
   1078         Reverse the effects of the previous push_execution_context call.
   1079 
   1080         @raise IndexError: raised when the stack of contexts is empty.
   1081         """
   1082         if not self._execution_contexts:
   1083             raise IndexError('No old execution context to restore')
   1084         self._resultdir = self._execution_contexts.pop()
   1085 
   1086 
   1087     def get_state(self, name, default=_job_state.NO_DEFAULT):
   1088         """Returns the value associated with a particular name.
   1089 
   1090         @param name: The name the value was saved with.
   1091         @param default: A default value to return if no state is currently
   1092             associated with var.
   1093 
   1094         @return: A deep copy of the value associated with name. Note that this
   1095             explicitly returns a deep copy to avoid problems with mutable
   1096             values; mutations are not persisted or shared.
   1097         @raise KeyError: raised when no state is associated with var and a
   1098             default value is not provided.
   1099         """
   1100         try:
   1101             return self._state.get('public', name, default=default)
   1102         except KeyError:
   1103             raise KeyError(name)
   1104 
   1105 
    def set_state(self, name, value):
        """Saves the value given with the provided name.

        The value is persisted in the job's 'public' state namespace and can
        be retrieved later with get_state.

        @param name: The name the value should be saved with.
        @param value: The value to save.
        """
        self._state.set('public', name, value)
   1113 
   1114 
   1115     def _build_tagged_test_name(self, testname, dargs):
   1116         """Builds the fully tagged testname and subdirectory for job.run_test.
   1117 
   1118         @param testname: The base name of the test
   1119         @param dargs: The ** arguments passed to run_test. And arguments
   1120             consumed by this method will be removed from the dictionary.
   1121 
   1122         @return: A 3-tuple of the full name of the test, the subdirectory it
   1123             should be stored in, and the full tag of the subdir.
   1124         """
   1125         tag_parts = []
   1126 
   1127         # build up the parts of the tag used for the test name
   1128         master_testpath = dargs.get('master_testpath', "")
   1129         base_tag = dargs.pop('tag', None)
   1130         if base_tag:
   1131             tag_parts.append(str(base_tag))
   1132         if self.use_sequence_number:
   1133             tag_parts.append('_%02d_' % self._sequence_number)
   1134             self._sequence_number += 1
   1135         if self.automatic_test_tag:
   1136             tag_parts.append(self.automatic_test_tag)
   1137         full_testname = '.'.join([testname] + tag_parts)
   1138 
   1139         # build up the subdir and tag as well
   1140         subdir_tag = dargs.pop('subdir_tag', None)
   1141         if subdir_tag:
   1142             tag_parts.append(subdir_tag)
   1143         subdir = '.'.join([testname] + tag_parts)
   1144         subdir = os.path.join(master_testpath, subdir)
   1145         tag = '.'.join(tag_parts)
   1146 
   1147         return full_testname, subdir, tag
   1148 
   1149 
   1150     def _make_test_outputdir(self, subdir):
   1151         """Creates an output directory for a test to run it.
   1152 
   1153         @param subdir: The subdirectory of the test. Generally computed by
   1154             _build_tagged_test_name.
   1155 
   1156         @return: A job_directory instance corresponding to the outputdir of
   1157             the test.
   1158         @raise TestError: If the output directory is invalid.
   1159         """
   1160         # explicitly check that this subdirectory is new
   1161         path = os.path.join(self.resultdir, subdir)
   1162         if os.path.exists(path):
   1163             msg = ('%s already exists; multiple tests cannot run with the '
   1164                    'same subdirectory' % subdir)
   1165             raise error.TestError(msg)
   1166 
   1167         # create the outputdir and raise a TestError if it isn't valid
   1168         try:
   1169             outputdir = self._job_directory(path, True)
   1170             return outputdir
   1171         except self._job_directory.JobDirectoryException, e:
   1172             logging.exception('%s directory creation failed with %s',
   1173                               subdir, e)
   1174             raise error.TestError('%s directory creation failed' % subdir)
   1175 
    def _tap_init(self, enable):
        """Initialize TAP reporting.

        @param enable: Forwarded to TAPReport; truthy enables TAP reporting.

        @return: A TAPReport instance bound to this job's resultdir.
        """
        return TAPReport(enable, resultdir=self.resultdir)
   1180 
   1181 
   1182     def record(self, status_code, subdir, operation, status='',
   1183                optional_fields=None):
   1184         """Record a job-level status event.
   1185 
   1186         Logs an event noteworthy to the Autotest job as a whole. Messages will
   1187         be written into a global status log file, as well as a subdir-local
   1188         status log file (if subdir is specified).
   1189 
   1190         @param status_code: A string status code describing the type of status
   1191             entry being recorded. It must pass log.is_valid_status to be
   1192             considered valid.
   1193         @param subdir: A specific results subdirectory this also applies to, or
   1194             None. If not None the subdirectory must exist.
   1195         @param operation: A string describing the operation that was run.
   1196         @param status: An optional human-readable message describing the status
   1197             entry, for example an error message or "completed successfully".
   1198         @param optional_fields: An optional dictionary of addtional named fields
   1199             to be included with the status message. Every time timestamp and
   1200             localtime entries are generated with the current time and added
   1201             to this dictionary.
   1202         """
   1203         entry = status_log_entry(status_code, subdir, operation, status,
   1204                                  optional_fields)
   1205         self.record_entry(entry)
   1206 
   1207 
    def record_entry(self, entry, log_in_subdir=True):
        """Record a job-level status event, using a status_log_entry.

        This is the same as self.record but using an existing status log
        entry object rather than constructing one for you.

        Delegates to the status_logger returned by the subclass-provided
        _get_status_logger hook (see the class docstring).

        @param entry: A status_log_entry object
        @param log_in_subdir: A boolean that indicates (when true) that subdir
                logs should be written into the subdirectory status log file.
        """
        self._get_status_logger().record_entry(entry, log_in_subdir)
   1219