#! /usr/bin/python
# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# This module provides functions for callers to retrieve a job's history,
# including the special tasks executed before and after the job, and each
# step's start/end time.
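#
# Example usage (a minimal sketch; the module name and job ID 123 are
# illustrative, and the AFE/TKO databases must be reachable):
#   import job_history
#   history = job_history.get_job_info(123).get_history()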

import argparse
import datetime as datetime_base

import common
from autotest_lib.client.common_lib import global_config
from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.afe import models
from autotest_lib.frontend.tko import models as tko_models

CONFIG = global_config.global_config
AUTOTEST_SERVER = CONFIG.get_config_value('SERVER', 'hostname', type=str)

LOG_BASE_URL = 'http://%s/tko/retrieve_logs.cgi?job=/results/' % AUTOTEST_SERVER
JOB_URL = LOG_BASE_URL + '%(job_id)s-%(owner)s/%(hostname)s'
LOG_PATH_FMT = 'hosts/%(hostname)s/%(task_id)d-%(task_name)s'
TASK_URL = LOG_BASE_URL + LOG_PATH_FMT
AUTOSERV_DEBUG_LOG = 'debug/autoserv.DEBUG'
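# For illustration only (hypothetical values, not from the database): with
# job_id=123, owner='someone' and hostname='host1', JOB_URL renders to
#   http://<AUTOTEST_SERVER>/tko/retrieve_logs.cgi?job=/results/123-someone/host1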

# Add some buffer before and after the job start/end time when searching for
# special tasks. This guarantees that the reset that runs before the job starts
# and the repair and cleanup that run after the job finishes are included.
TIME_BUFFER = datetime_base.timedelta(hours=2)
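# For example, a job that ran from 10:00 to 12:00 is matched against special
# tasks that started between 08:00 and 14:00 (illustrative times only).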


class JobHistoryObject(object):
    """A common interface that provides get_history, which returns a list of
    dictionaries of the object's history records, e.g., start/end time.
    """

    def build_history_entry(self):
        """Build a history entry.

        This function expects the object to have the required attributes. Any
        missing attribute will lead to a failure.

        @return: A dictionary as the history entry of the given job/task.
        """
        return {'id': self.id,
                'name': self.name,
                'hostname': self.hostname,
                'status': self.status,
                'log_url': self.log_url,
                'autoserv_log_url': self.autoserv_log_url,
                'start_time': self.start_time,
                'end_time': self.end_time,
                'time_used': self.time_used,
                }
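    # For illustration, an entry might look like (hypothetical values):
    #   {'id': 10, 'name': 'Reset', 'hostname': 'host1', 'status': 'Completed',
    #    'log_url': '...', 'autoserv_log_url': '...',
    #    'start_time': ..., 'end_time': ..., 'time_used': 120.0}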


    def get_history(self):
        """Return a list of dictionaries of the selected job/task's history.
        """
        raise NotImplementedError('You must override this method in child '
                                  'class.')

class SpecialTaskInfo(JobHistoryObject):
    """Information of a special task.

    Its properties include:
        id: Special task ID.
        task: An AFE models.SpecialTask object.
        hostname: hostname of the DUT that runs the special task.
        log_url: URL to the debug log.
        autoserv_log_url: URL to the autoserv log.
    """

    def __init__(self, task):
        """Constructor

        @param task: An AFE models.SpecialTask object, which has the information
                     of the special task from the database.
        """
        # Special task ID
        self.id = task.id
        # AFE special_task model
        self.task = task
        self.name = task.task
        self.hostname = task.host.hostname
        self.status = task.status

        # Link to log
        task_info = {'task_id': task.id, 'task_name': task.task.lower(),
                     'hostname': self.hostname}
        self.log_url = TASK_URL % task_info
        self.autoserv_log_url = '%s/%s' % (self.log_url, AUTOSERV_DEBUG_LOG)

        self.start_time = self.task.time_started
        self.end_time = self.task.time_finished
        if self.start_time and self.end_time:
            self.time_used = (self.end_time - self.start_time).total_seconds()
        else:
            self.time_used = None


    def __str__(self):
        """Get a formatted string of the details of the task info.
        """
        return ('Task %d: %s from %s to %s, for %s seconds.\n' %
                (self.id, self.task.task, self.start_time, self.end_time,
                 self.time_used))

    def get_history(self):
        """Return a list containing a single history entry of the task.
        """
        return [self.build_history_entry()]


class TaskCacheCollection(dict):
    """A cache to hold tasks for multiple hosts.

    It's a dictionary of host_id: TaskCache.
    """

    def try_get(self, host_id, job_id, start_time, end_time):
        """Try to get tasks from the cache.

        @param host_id: ID of the host.
        @param job_id: ID of the test job that's related to the special task.
        @param start_time: Start time to search for special task.
        @param end_time: End time to search for special task.
        @return: The list of special tasks that are related to the given host
                 and job id. Note that None means the cache is not available,
                 while [] means no special tasks were found in the cache.
        """
        if host_id not in self:
            return None
        return self[host_id].try_get(job_id, start_time, end_time)


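    # Note: update() below overrides the inherited dict.update() with a
    # task-specific signature.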
    def update(self, host_id, start_time, end_time):
        """Update the cache of the given host by searching the database.

        @param host_id: ID of the host.
        @param start_time: Start time to search for special task.
        @param end_time: End time to search for special task.
        """
        search_start_time = start_time - TIME_BUFFER
        search_end_time = end_time + TIME_BUFFER
        tasks = models.SpecialTask.objects.filter(
                host_id=host_id,
                time_started__gte=search_start_time,
                time_started__lte=search_end_time)
        self[host_id] = TaskCache(tasks, search_start_time, search_end_time)
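
    # Typical call pattern (a sketch of how TestJobInfo uses this cache below):
    #   tasks = task_caches.try_get(host_id, job_id, start_time, end_time)
    #   if tasks is None:  # The cached window does not cover the search range.
    #       task_caches.update(host_id, search_start_time, search_end_time)
    #       tasks = task_caches.try_get(host_id, job_id, start_time, end_time)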


class TaskCache(object):
    """A cache that holds tasks for a host.
    """

    def __init__(self, tasks=None, start_time=None, end_time=None):
        """Constructor
        """
        self.tasks = tasks if tasks is not None else []
        self.start_time = start_time
        self.end_time = end_time

    def try_get(self, job_id, start_time, end_time):
        """Try to get tasks from the cache.

        @param job_id: ID of the test job that's related to the special task.
        @param start_time: Start time to search for special task.
        @param end_time: End time to search for special task.
        @return: The list of special tasks that are related to the job id.
                 Note that None means the cache is not available, while []
                 means no special tasks were found in the cache.
        """
        if start_time < self.start_time or end_time > self.end_time:
            return None
        return [task for task in self.tasks if task.queue_entry and
                task.queue_entry.job.id == job_id]


class TestJobInfo(JobHistoryObject):
    """Information of a test job
    """

    def __init__(self, hqe, task_caches=None, suite_start_time=None,
                 suite_end_time=None):
        """Constructor

        @param hqe: HostQueueEntry of the job.
        @param task_caches: A TaskCacheCollection of special tasks from a
                previous query.
        @param suite_start_time: Start time of the suite job, default is
                None. Used to build special task search cache.
        @param suite_end_time: End time of the suite job, default is
                None. Used to build special task search cache.
        """
        # AFE job ID
        self.id = hqe.job.id
        # AFE job model
        self.job = hqe.job
        # Name of the job, strip all build and suite info.
        self.name = hqe.job.name.split('/')[-1]
        self.status = hqe.status if hqe else None

        try:
            self.tko_job = tko_models.Job.objects.filter(afe_job_id=self.id)[0]
            self.host = models.Host.objects.filter(
                    hostname=self.tko_job.machine.hostname)[0]
            self.hostname = self.tko_job.machine.hostname
            self.start_time = self.tko_job.started_time
            self.end_time = self.tko_job.finished_time
        except IndexError:
            # The test job was never started.
            self.tko_job = None
            self.host = None
            self.hostname = None
            self.start_time = None
            self.end_time = None

        if self.end_time and self.start_time:
            self.time_used = (self.end_time - self.start_time).total_seconds()
        else:
            self.time_used = None

        # Link to log
        self.log_url = JOB_URL % {'job_id': hqe.job.id, 'owner': hqe.job.owner,
                                  'hostname': self.hostname}
        self.autoserv_log_url = '%s/%s' % (self.log_url, AUTOSERV_DEBUG_LOG)

        self._get_special_tasks(hqe, task_caches, suite_start_time,
                                suite_end_time)


    def _get_special_tasks(self, hqe, task_caches=None, suite_start_time=None,
                           suite_end_time=None):
        """Get special tasks run before and after the test job.

        @param hqe: HostQueueEntry of the job.
        @param task_caches: A TaskCacheCollection of special tasks from a
                previous query.
        @param suite_start_time: Start time of the suite job, default is
                None. Used to build special task search cache.
        @param suite_end_time: End time of the suite job, default is
                None. Used to build special task search cache.
        """
        # Special tasks run before the job starts.
        self.tasks_before = []
        # Special tasks run after the job finished.
        self.tasks_after = []

        # Skip locating special tasks if hqe is None or not started yet, as
        # that indicates the test job may not have started.
        if not hqe or not hqe.started_on:
            return

        # Assume special tasks for the test job all start within 2 hours
        # before the test job starts or 2 hours after the test finishes. In
        # most cases, a special task won't start more than 2 hours before the
        # test job starts or after it finishes.
        search_start_time = hqe.started_on - TIME_BUFFER
        search_end_time = (hqe.finished_on + TIME_BUFFER if hqe.finished_on else
                           hqe.started_on + TIME_BUFFER)

        if task_caches is not None and suite_start_time and suite_end_time:
            tasks = task_caches.try_get(self.host.id, self.id,
                                        suite_start_time, suite_end_time)
            if tasks is None:
                task_caches.update(self.host.id, search_start_time,
                                   search_end_time)
                tasks = task_caches.try_get(self.host.id, self.id,
                                            suite_start_time, suite_end_time)
        else:
            tasks = models.SpecialTask.objects.filter(
                        host_id=self.host.id,
                        time_started__gte=search_start_time,
                        time_started__lte=search_end_time)
            tasks = [task for task in tasks if task.queue_entry and
                     task.queue_entry.job.id == self.id]

        for task in tasks:
            task_info = SpecialTaskInfo(task)
            if task.time_started < self.start_time:
                self.tasks_before.append(task_info)
            else:
                self.tasks_after.append(task_info)


    def get_history(self):
        """Get the history of a test job.

        @return: A list of special tasks and test job information.
        """
        history = []
        history.extend([task.build_history_entry() for task in
                        self.tasks_before])
        history.append(self.build_history_entry())
        history.extend([task.build_history_entry() for task in
                        self.tasks_after])
        return history


    def __str__(self):
        """Get a formatted string of the details of the job info.
        """
        result = '%d: %s\n' % (self.id, self.name)
        for task in self.tasks_before:
            result += str(task)

        result += ('Test from %s to %s, for %s seconds.\n' %
                   (self.start_time, self.end_time, self.time_used))

        for task in self.tasks_after:
            result += str(task)

        return result


class SuiteJobInfo(JobHistoryObject):
    """Information of a suite job
    """

    def __init__(self, hqe):
        """Constructor

        @param hqe: HostQueueEntry of the job.
        """
        # AFE job ID
        self.id = hqe.job.id
        # AFE job model
        self.job = hqe.job
        # Name of the job, strip all build and suite info.
        self.name = hqe.job.name.split('/')[-1]
        self.status = hqe.status if hqe else None

        self.log_url = JOB_URL % {'job_id': hqe.job.id, 'owner': hqe.job.owner,
                                  'hostname': 'hostless'}

        hqe = models.HostQueueEntry.objects.filter(job_id=hqe.job.id)[0]
        self.start_time = hqe.started_on
        self.end_time = hqe.finished_on
        if self.start_time and self.end_time:
            self.time_used = (self.end_time - self.start_time).total_seconds()
        else:
            self.time_used = None

        # Cache of special tasks, a dictionary of host_id: TaskCache.
        task_caches = TaskCacheCollection()
        self.test_jobs = []
        for job in models.Job.objects.filter(parent_job_id=self.id):
            try:
                job_hqe = models.HostQueueEntry.objects.filter(job_id=job.id)[0]
            except IndexError:
                continue
            self.test_jobs.append(TestJobInfo(job_hqe, task_caches,
                                              self.start_time, self.end_time))


    def get_history(self):
        """Get the history of a suite job.

        @return: A list of history entries of the special tasks and test jobs
                 that have the suite job as their parent job.
        """
        history = []
        for job in sorted(self.test_jobs,
                          key=lambda j: (j.hostname, j.start_time)):
            history.extend(job.get_history())
        return history


    def __str__(self):
        """Get a formatted string of the details of the job info.
        """
        result = '%d: %s\n' % (self.id, self.name)
        for job in self.test_jobs:
            result += str(job)
            result += '-' * 80 + '\n'
        return result


def get_job_info(job_id):
    """Get the history of a job.

    @param job_id: ID of the job.
    @return: A TestJobInfo object that contains the test job and its special
             tasks' start/end time if the job is a test job, or a SuiteJobInfo
             object if the job is a suite job.
    @raise Exception: If the job can't be found in the database.
    """
    try:
        hqe = models.HostQueueEntry.objects.filter(job_id=job_id)[0]
    except IndexError:
        raise Exception('No HQE found for job ID %d' % job_id)

    if hqe and hqe.execution_subdir != 'hostless':
        return TestJobInfo(hqe)
    else:
        return SuiteJobInfo(hqe)

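# Example (a sketch; 123 is a hypothetical job ID):
#   info = get_job_info(123)
#   for entry in info.get_history():
#       print entry['name'], entry['status'], entry['time_used']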

def main():
    """Main script.

    The script accepts a job ID and prints out the test job and its special
    tasks' start/end time.
    """
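    # Example invocation (a sketch; the script's path/name may differ):
    #   ./job_history.py --job_id 123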
    parser = argparse.ArgumentParser()
    parser.add_argument('--job_id', type=int, dest='job_id', required=True)
    options = parser.parse_args()

    job_info = get_job_info(options.job_id)

    print job_info


if __name__ == '__main__':
    main()