#!/usr/bin/python
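"""Functional tests for the Autotest scheduler (monitor_db).

These tests drive a real Dispatcher against the frontend test database while
the drone manager, email manager and global config are replaced with mocks,
so complete scheduling cycles can be exercised without launching processes.
"""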

import logging, os, signal, unittest
import common
from autotest_lib.client.common_lib import enum, global_config, host_protections
from autotest_lib.database import database_connection
from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.afe import frontend_test_utils, models
from autotest_lib.frontend.afe import model_attributes
from autotest_lib.scheduler import drone_manager, email_manager
from autotest_lib.scheduler import monitor_db, scheduler_models
from autotest_lib.scheduler import scheduler_config
from autotest_lib.scheduler import scheduler_lib

HqeStatus = models.HostQueueEntry.Status
HostStatus = models.Host.Status

class NullMethodObject(object):
    _NULL_METHODS = ()

    def __init__(self):
        def null_method(*args, **kwargs):
            pass

        for method_name in self._NULL_METHODS:
            setattr(self, method_name, null_method)

# the SpecialTask names here must match the suffixes used on the SpecialTask
# results directories
_PidfileType = enum.Enum('verify', 'cleanup', 'repair', 'job', 'gather',
                         'parse', 'archive', 'reset', 'provision')


_PIDFILE_TO_PIDFILE_TYPE = {
        drone_manager.AUTOSERV_PID_FILE: _PidfileType.JOB,
        drone_manager.CRASHINFO_PID_FILE: _PidfileType.GATHER,
        drone_manager.PARSER_PID_FILE: _PidfileType.PARSE,
        drone_manager.ARCHIVER_PID_FILE: _PidfileType.ARCHIVE,
        }


_PIDFILE_TYPE_TO_PIDFILE = dict((value, key) for key, value
                                in _PIDFILE_TO_PIDFILE_TYPE.iteritems())


class MockConnectionManager(object):
    """Connection manager stub that hands out the shared test database."""

    db = None

    def __init__(self):
        super(MockConnectionManager, self).__init__()

    def get_connection(self):
        assert MockConnectionManager.db
        return MockConnectionManager.db


class MockDroneManager(NullMethodObject):
    """
    Public attributes:
    process_capacity: total process capacity used by max_runnable_processes().
            Tests can lower this to activate throttling.
    """
    _NULL_METHODS = ('reinitialize_drones', 'copy_to_results_repository',
                     'copy_results_on_drone', 'trigger_refresh', 'sync_refresh')

    class _DummyPidfileId(object):
        """
        Object representing a pidfile ID, opaque to the scheduler code but
        still debugging-friendly for us.
        """
        def __init__(self, working_directory, pidfile_name, num_processes=None):
            self._working_directory = working_directory
            self._pidfile_name = pidfile_name
            self._num_processes = num_processes
            self._paired_with_pidfile = None


        def key(self):
            """Key for MockDroneManager._pidfile_index"""
            return (self._working_directory, self._pidfile_name)


        def __str__(self):
            return os.path.join(self._working_directory, self._pidfile_name)


        def __repr__(self):
            return '<_DummyPidfileId: %s>' % str(self)


    def __init__(self):
        super(MockDroneManager, self).__init__()
        self.process_capacity = 100

        # maps result_dir to set of tuples (file_path, file_contents)
        self._attached_files = {}
        # maps pidfile IDs to PidfileContents
        self._pidfiles = {}
        # pidfile IDs that haven't been created yet
        self._future_pidfiles = []
        # maps _PidfileType to the most recently created pidfile ID of that type
        self._last_pidfile_id = {}
        # maps (working_directory, pidfile_name) to pidfile IDs
        self._pidfile_index = {}
        # maps process to pidfile IDs
        self._process_index = {}
        # maps pidfile IDs to the set of signals received by their processes
        self._pids_to_signals_received = {}
        # pidfile IDs that have just been unregistered (so will disappear on the
        # next cycle)
        self._unregistered_pidfiles = set()
        # Pids to write exit status for at end of tick
        self._set_pidfile_exit_status_queue = []

    # utility APIs for use by the test

    def finish_process(self, pidfile_type, exit_status=0):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        self._set_pidfile_exit_status(pidfile_id, exit_status)


    def finish_specific_process(self, working_directory, pidfile_name):
        pidfile_id = self.pidfile_from_path(working_directory, pidfile_name)
        self._set_pidfile_exit_status(pidfile_id, 0)

    def finish_active_process_on_host(self, host_id):
        match = 'hosts/host%d/' % host_id
        for pidfile_id in self.nonfinished_pidfile_ids():
            if pidfile_id._working_directory.startswith(match):
                self._set_pidfile_exit_status(pidfile_id, 0)
                break
        else:
            raise KeyError('No active process matched %s' % match)

    def _set_pidfile_exit_status(self, pidfile_id, exit_status):
        assert pidfile_id is not None
        contents = self._pidfiles[pidfile_id]
        contents.exit_status = exit_status
        contents.num_tests_failed = 0


    def was_last_process_killed(self, pidfile_type, sigs):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        return sigs == self._pids_to_signals_received[pidfile_id]


    def nonfinished_pidfile_ids(self):
        return [pidfile_id for pidfile_id, pidfile_contents
                in self._pidfiles.iteritems()
                if pidfile_contents.exit_status is None]


    def running_pidfile_ids(self):
        return [pidfile_id for pidfile_id in self.nonfinished_pidfile_ids()
                if self._pidfiles[pidfile_id].process is not None]


    def pidfile_from_path(self, working_directory, pidfile_name):
        return self._pidfile_index[(working_directory, pidfile_name)]


    def attached_files(self, working_directory):
        """
        Return dict mapping path to contents for attached files with specified
        paths.
        """
        return dict((path, contents) for path, contents
                    in self._attached_files.get(working_directory, [])
                    if path is not None)


    # DroneManager emulation APIs for use by monitor_db

    def get_orphaned_autoserv_processes(self):
        return set()


    def total_running_processes(self):
        return sum(pidfile_id._num_processes
                   for pidfile_id in self.nonfinished_pidfile_ids())


    def max_runnable_processes(self, username, drone_hostnames_allowed):
        return self.process_capacity - self.total_running_processes()


    def refresh(self):
        for pidfile_id in self._unregistered_pidfiles:
            # intentionally handle non-registered pidfiles silently
            self._pidfiles.pop(pidfile_id, None)
        self._unregistered_pidfiles = set()


    def execute_actions(self):
        # executing an "execute_command" causes a pidfile to be created
        for pidfile_id in self._future_pidfiles:
            # Process objects are opaque to monitor_db
            process = object()
            self._pidfiles[pidfile_id].process = process
            self._process_index[process] = pidfile_id
        self._future_pidfiles = []

        for pidfile_id in self._set_pidfile_exit_status_queue:
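            # killed processes are reported with exit status 271, which the
            # tests treat as "ended due to a signal"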
            self._set_pidfile_exit_status(pidfile_id, 271)
        self._set_pidfile_exit_status_queue = []


    def attach_file_to_execution(self, result_dir, file_contents,
                                 file_path=None):
        self._attached_files.setdefault(result_dir, set()).add((file_path,
                                                                file_contents))
        return 'attach_path'


    def _initialize_pidfile(self, pidfile_id):
        if pidfile_id not in self._pidfiles:
            assert pidfile_id.key() not in self._pidfile_index
            self._pidfiles[pidfile_id] = drone_manager.PidfileContents()
            self._pidfile_index[pidfile_id.key()] = pidfile_id


    def _set_last_pidfile(self, pidfile_id, working_directory, pidfile_name):
        if working_directory.startswith('hosts/'):
            # such paths look like hosts/host1/1-verify, we'll grab the end
            type_string = working_directory.rsplit('-', 1)[1]
            pidfile_type = _PidfileType.get_value(type_string)
        else:
            pidfile_type = _PIDFILE_TO_PIDFILE_TYPE[pidfile_name]
        self._last_pidfile_id[pidfile_type] = pidfile_id


    def execute_command(self, command, working_directory, pidfile_name,
                        num_processes, log_file=None, paired_with_pidfile=None,
                        username=None, drone_hostnames_allowed=None):
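        """Record a command execution request without running anything.

        Creates (or reuses) a _DummyPidfileId for (working_directory,
        pidfile_name) and queues it; the next execute_actions() call marks
        it as having a running process.
        """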
        logging.debug('Executing %s in %s', command, working_directory)
        pidfile_id = self._DummyPidfileId(working_directory, pidfile_name)
        if pidfile_id.key() in self._pidfile_index:
            pidfile_id = self._pidfile_index[pidfile_id.key()]
        pidfile_id._num_processes = num_processes
        pidfile_id._paired_with_pidfile = paired_with_pidfile

        self._future_pidfiles.append(pidfile_id)
        self._initialize_pidfile(pidfile_id)
        self._pidfile_index[(working_directory, pidfile_name)] = pidfile_id
        self._set_last_pidfile(pidfile_id, working_directory, pidfile_name)
        return pidfile_id


    def get_pidfile_contents(self, pidfile_id, use_second_read=False):
        if pidfile_id not in self._pidfiles:
            logging.debug('Request for nonexistent pidfile %s', pidfile_id)
        return self._pidfiles.get(pidfile_id, drone_manager.PidfileContents())


    def is_process_running(self, process):
        return True


    def register_pidfile(self, pidfile_id):
        self._initialize_pidfile(pidfile_id)


    def unregister_pidfile(self, pidfile_id):
        self._unregistered_pidfiles.add(pidfile_id)


    def declare_process_count(self, pidfile_id, num_processes):
        # keep the same private attribute that execute_command() sets
        pidfile_id._num_processes = num_processes


    def absolute_path(self, path):
        return 'absolute/' + path


    def write_lines_to_file(self, file_path, lines, paired_with_process=None):
        # TODO: record this
        pass


    def get_pidfile_id_from(self, execution_tag, pidfile_name):
        default_pidfile = self._DummyPidfileId(execution_tag, pidfile_name,
                                               num_processes=0)
        return self._pidfile_index.get((execution_tag, pidfile_name),
                                       default_pidfile)


    def kill_process(self, process, sig=signal.SIGKILL):
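        """Record the signal delivered to a mock process.

        A SIGKILL also queues the pidfile so that exit status 271 gets
        written for it at the end of the current tick (execute_actions()).
        """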
        pidfile_id = self._process_index[process]

        if pidfile_id not in self._pids_to_signals_received:
            self._pids_to_signals_received[pidfile_id] = set()
        self._pids_to_signals_received[pidfile_id].add(sig)

        if signal.SIGKILL == sig:
            self._set_pidfile_exit_status_queue.append(pidfile_id)


class MockEmailManager(NullMethodObject):
    _NULL_METHODS = ('send_queued_emails', 'send_email')

    def enqueue_notify_email(self, subject, message):
        logging.warning('enqueue_notify_email: %s', subject)
        logging.warning(message)


class SchedulerFunctionalTest(unittest.TestCase,
                              frontend_test_utils.FrontendTestMixin):
    # some number of ticks after which the scheduler is presumed to have
    # stabilized, given no external changes
    _A_LOT_OF_TICKS = 10

    def setUp(self):
        self._frontend_common_setup()
        self._set_stubs()
        self._set_global_config_values()
        self._create_dispatcher()

        logging.basicConfig(level=logging.DEBUG)


    def _create_dispatcher(self):
        self.dispatcher = monitor_db.Dispatcher()


    def tearDown(self):
        self._database.disconnect()
        self._frontend_common_teardown()


    def _set_stubs(self):
        self.mock_config = global_config.FakeGlobalConfig()
        self.god.stub_with(global_config, 'global_config', self.mock_config)

        self.mock_drone_manager = MockDroneManager()
        drone_manager._set_instance(self.mock_drone_manager)

        self.mock_email_manager = MockEmailManager()
        self.god.stub_with(email_manager, 'manager', self.mock_email_manager)

        self._database = (
            database_connection.TranslatingDatabase.get_test_database(
                translators=scheduler_lib._DB_TRANSLATORS))
        self._database.connect(db_type='django')
        self.god.stub_with(monitor_db, '_db', self._database)
        self.god.stub_with(scheduler_models, '_db', self._database)

        MockConnectionManager.db = self._database
        scheduler_lib.ConnectionManager = MockConnectionManager

        monitor_db.initialize_globals()
        scheduler_models.initialize_globals()


    def _set_global_config_values(self):
        self.mock_config.set_config_value('SCHEDULER', 'pidfile_timeout_mins',
                                          1)
        self.mock_config.set_config_value('SCHEDULER', 'gc_stats_interval_mins',
                                          999999)
        self.mock_config.set_config_value('SCHEDULER', 'enable_archiving', True)
        self.mock_config.set_config_value('SCHEDULER',
                                          'max_parse_processes', 50)
        self.mock_config.set_config_value('SCHEDULER',
                                          'max_transfer_processes', 50)
        self.mock_config.set_config_value('SCHEDULER',
                                          'clean_interval_minutes', 50)
        self.mock_config.set_config_value('SCHEDULER',
                                          'max_provision_retries', 1)
        self.mock_config.set_config_value('SCHEDULER', 'max_repair_limit', 1)
        self.mock_config.set_config_value(
                'SCHEDULER', 'secs_to_wait_for_atomic_group_hosts', 600)
        self.mock_config.set_config_value(
                'SCHEDULER', 'inline_host_acquisition', True)
        scheduler_config.config.read_config()


    def _initialize_test(self):
        self.dispatcher.initialize()


    def _run_dispatcher(self):
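        """Tick the dispatcher enough times for it to reach a steady state."""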
        for _ in xrange(self._A_LOT_OF_TICKS):
            self.dispatcher.tick()


    def test_idle(self):
        self._initialize_test()
        self._run_dispatcher()


    def _assert_process_executed(self, working_directory, pidfile_name):
        process_was_executed = self.mock_drone_manager.was_process_executed(
                working_directory, pidfile_name)
        self.assert_(process_was_executed,
                     '%s/%s not executed' % (working_directory, pidfile_name))


    def _update_instance(self, model_instance):
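        """Re-fetch a model instance from the test database by primary key."""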
        return type(model_instance).objects.get(pk=model_instance.pk)


    def _check_statuses(self, queue_entry, queue_entry_status,
                        host_status=None):
        self._check_entry_status(queue_entry, queue_entry_status)
        if host_status:
            self._check_host_status(queue_entry.host, host_status)


    def _check_entry_status(self, queue_entry, status):
        # update from DB
        queue_entry = self._update_instance(queue_entry)
        self.assertEquals(queue_entry.status, status)


    def _check_host_status(self, host, status):
        # update from DB
        host = self._update_instance(host)
        self.assertEquals(host.status, status)


    def _run_pre_job_verify(self, queue_entry):
        self._run_dispatcher() # launches verify
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)


    def test_simple_job(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # launches job
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._assert_nothing_is_running()


    def _setup_for_pre_job_reset(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_before = model_attributes.RebootBefore.ALWAYS
        job.save()
        return queue_entry


    def _run_pre_job_reset_job(self, queue_entry):
        self._run_dispatcher() # reset
        self._check_statuses(queue_entry, HqeStatus.RESETTING,
                             HostStatus.RESETTING)
        self.mock_drone_manager.finish_process(_PidfileType.RESET)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_pre_job_reset(self):
        queue_entry = self._setup_for_pre_job_reset()
        self._run_pre_job_reset_job(queue_entry)


    def _run_pre_job_reset_one_failure(self):
        queue_entry = self._setup_for_pre_job_reset()
        self._run_dispatcher() # reset
        self.mock_drone_manager.finish_process(_PidfileType.RESET,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_statuses(queue_entry, HqeStatus.QUEUED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        return queue_entry


    def test_pre_job_reset_failure(self):
        queue_entry = self._run_pre_job_reset_one_failure()
        # from here the job should run as normal
        self._run_pre_job_reset_job(queue_entry)


    def test_pre_job_reset_double_failure(self):
        # TODO (showard): this test isn't perfect.  in reality, when the second
        # reset fails, it copies its results over to the job directory using
        # copy_results_on_drone() and then parses them.  since we don't handle
        # that, there appear to be no results at the job directory.  the
        # scheduler handles this gracefully, parsing gets effectively skipped,
        # and this test passes as is.  but we ought to properly test that
        # behavior.
        queue_entry = self._run_pre_job_reset_one_failure()
        self._run_dispatcher() # second reset
        self.mock_drone_manager.finish_process(_PidfileType.RESET,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.REPAIR_FAILED)
        # nothing else should run
        self._assert_nothing_is_running()


    def _assert_nothing_is_running(self):
        self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])


    def _setup_for_post_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_after = model_attributes.RebootAfter.ALWAYS
        job.save()
        return queue_entry


    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                                   include_verify=True):
        if include_verify:
            self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # parsing + cleanup
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair, HQE unaffected
        return queue_entry


    def test_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)


    def test_post_job_cleanup_failure_repair_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIR_FAILED)


    def _ensure_post_job_process_is_paired(self, queue_entry, pidfile_type):
        pidfile_name = _PIDFILE_TYPE_TO_PIDFILE[pidfile_type]
        queue_entry = self._update_instance(queue_entry)
        pidfile_id = self.mock_drone_manager.pidfile_from_path(
                queue_entry.execution_path(), pidfile_name)
        self.assert_(pidfile_id._paired_with_pidfile)


    def _finish_job(self, queue_entry):
        self._check_statuses(queue_entry, HqeStatus.RUNNING)
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing
        self._check_statuses(queue_entry, HqeStatus.PARSING)
        self._ensure_post_job_process_is_paired(queue_entry, _PidfileType.PARSE)
        self._finish_parsing()


    def _finish_parsing(self):
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()


    def _create_reverify_request(self):
        host = self.hosts[0]
        models.SpecialTask.schedule_special_task(
                host=host, task=models.SpecialTask.Task.VERIFY)
        return host


    def test_requested_reverify(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def test_requested_reverify_failure(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_host_status(host, HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def _setup_for_do_not_verify(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY
        queue_entry.host.save()
        return queue_entry


    def test_do_not_verify_job(self):
        queue_entry = self._setup_for_do_not_verify()
        self._run_dispatcher() # runs job directly
        self._finish_job(queue_entry)


    def test_do_not_verify_job_with_cleanup(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_do_not_verify_pre_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # failure ignored; job runs
        self._finish_job(queue_entry)


    def test_do_not_verify_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_after = model_attributes.RebootAfter.ALWAYS
        queue_entry.job.save()

        self._run_post_job_cleanup_failure_up_to_repair(queue_entry,
                                                        include_verify=False)
        # failure ignored, host still set to Ready
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._run_dispatcher() # nothing else runs
        self._assert_nothing_is_running()


    def test_do_not_verify_requested_reverify_failure(self):
        host = self._create_reverify_request()
        host.protection = host_protections.Protection.DO_NOT_VERIFY
        host.save()

        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY) # ignore failure
        self._assert_nothing_is_running()


    def test_job_abort_in_verify(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        queue_entries = list(job.hostqueueentry_set.all())
        self._run_dispatcher() # launches verify
        self._check_statuses(queue_entries[0], HqeStatus.VERIFYING)
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills verify, launches cleanup
        self.assert_(self.mock_drone_manager.was_last_process_killed(
                _PidfileType.VERIFY, set([signal.SIGKILL])))
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()


    def test_job_abort(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        job.run_reset = False
        job.save()
        queue_entries = list(job.hostqueueentry_set.all())

        self._run_dispatcher() # launches job

        self._check_statuses(queue_entries[0], HqeStatus.RUNNING)

        job.hostqueueentry_set.update(aborted=True)

        self._run_dispatcher() # kills job, launches gathering

        self._check_statuses(queue_entries[0], HqeStatus.GATHERING)
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # launches parsing + cleanup
        queue_entry = job.hostqueueentry_set.all()[0]
        self._finish_parsing()
        # The abort will cause gathering to launch a cleanup.
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()


    def test_job_abort_queued_synchronous(self):
        self._initialize_test()
        job = self._create_job(hosts=[1,2])
        job.synch_count = 2
        job.save()

        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher()
        for host_queue_entry in job.hostqueueentry_set.all():
            self.assertEqual(host_queue_entry.status,
                             HqeStatus.ABORTED)


    def test_no_pidfile_leaking(self):
        self._initialize_test()

        self.test_simple_job()
        self.mock_drone_manager.refresh()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort_in_verify()
        self.mock_drone_manager.refresh()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort()
        self.mock_drone_manager.refresh()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})


    def _make_job_and_queue_entry(self):
        job = self._create_job(hosts=[1])
        queue_entry = job.hostqueueentry_set.all()[0]
        return job, queue_entry


    def test_recover_running_no_process(self):
        # recovery should re-execute a Running HQE if no process is found
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.RUNNING
        queue_entry.execution_subdir = '1-myuser/host1'
        queue_entry.save()
        queue_entry.host.status = HostStatus.RUNNING
        queue_entry.host.save()

        self._initialize_test()
        self._run_dispatcher()
        self._finish_job(queue_entry)


    def test_recover_verifying_hqe_no_special_task(self):
        # recovery should move a Resetting HQE with no corresponding
        # Verify or Reset SpecialTask back to Queued.
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.RESETTING
        queue_entry.save()

        # make some dummy SpecialTasks that shouldn't count
        models.SpecialTask.objects.create(
                host=queue_entry.host,
                task=models.SpecialTask.Task.RESET,
                requested_by=models.User.current_user())
        models.SpecialTask.objects.create(
                host=queue_entry.host,
                task=models.SpecialTask.Task.CLEANUP,
                queue_entry=queue_entry,
                is_complete=True,
                requested_by=models.User.current_user())

        self._initialize_test()
        self._check_statuses(queue_entry, HqeStatus.QUEUED)


    def _test_recover_verifying_hqe_helper(self, task, pidfile_type):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.VERIFYING
        queue_entry.save()

        special_task = models.SpecialTask.objects.create(
                host=queue_entry.host, task=task, queue_entry=queue_entry)

        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(pidfile_type)
        self._run_dispatcher()
        # don't bother checking the rest of the job execution, as long as the
        # SpecialTask ran


    def test_recover_verifying_hqe_with_cleanup(self):
        # recover an HQE that was in pre-job cleanup
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,
                                                _PidfileType.CLEANUP)


    def test_recover_verifying_hqe_with_verify(self):
        # recover an HQE that was in pre-job verify
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,
                                                _PidfileType.VERIFY)


    def test_recover_parsing(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.run_verify = False
        job.run_reset = False
        job.reboot_after = model_attributes.RebootAfter.NEVER
        job.save()

        self._run_dispatcher() # launches job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing

        # now "restart" the scheduler
        self._create_dispatcher()
        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()


    def test_recover_parsing__no_process_already_aborted(self):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.execution_subdir = 'host1'
        queue_entry.status = HqeStatus.PARSING
        queue_entry.aborted = True
        queue_entry.save()

        self._initialize_test()
        self._run_dispatcher()


    def test_job_scheduled_just_after_abort(self):
        # test a pretty obscure corner case where a job is aborted while queued,
        # another job is ready to run, and throttling is active. the post-abort
        # cleanup must not be pre-empted by the second job.
        # This test kind of doesn't make sense anymore after verify+cleanup
        # were merged into reset.  It should maybe just be removed.
        job1, queue_entry1 = self._make_job_and_queue_entry()
        queue_entry1.save()
        job2, queue_entry2 = self._make_job_and_queue_entry()
        job2.reboot_before = model_attributes.RebootBefore.IF_DIRTY
        job2.save()

        self.mock_drone_manager.process_capacity = 0
        self._run_dispatcher() # schedule job1, but won't start verify
        job1.hostqueueentry_set.update(aborted=True)
        self.mock_drone_manager.process_capacity = 100
        self._run_dispatcher() # reset must run here, not verify for job2
        self._check_statuses(queue_entry1, HqeStatus.ABORTED,
                             HostStatus.RESETTING)
        self.mock_drone_manager.finish_process(_PidfileType.RESET)
        self._run_dispatcher() # now verify starts for job2
        self._check_statuses(queue_entry2, HqeStatus.RUNNING,
                             HostStatus.RUNNING)


    def test_reverify_interrupting_pre_job(self):
        # ensure things behave sanely if a reverify is scheduled in the middle
        # of pre-job actions
        _, queue_entry = self._make_job_and_queue_entry()

        self._run_dispatcher() # pre-job verify
        self._create_reverify_request()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher() # reverify runs now
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # pre-job verify
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # and job runs...
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry) # reverify has been deleted
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.READY)
        self._assert_nothing_is_running()


    def test_reverify_while_job_running(self):
        # once a job is running, a reverify must not be allowed to preempt
        # Gathering
        _, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job runs
        self._create_reverify_request()
        # make job end with a signal, so gathering will run
        self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                               exit_status=271)
        self._run_dispatcher() # gathering must start
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # parsing and cleanup
        self._finish_parsing()
        self._run_dispatcher() # now reverify runs
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)


    def test_reverify_while_host_pending(self):
        # ensure that if a reverify is scheduled while a host is in Pending, it
        # won't run until the host is actually free
        job = self._create_job(hosts=[1,2])
        queue_entry = job.hostqueueentry_set.get(host__hostname='host1')
        job.synch_count = 2
        job.save()

        host2 = self.hosts[1]
        host2.locked = True
        host2.save()

        self._run_dispatcher() # verify host1
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # host1 Pending
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
        self._create_reverify_request()
        self._run_dispatcher() # nothing should happen here
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)

        # now let the job run
        host2.locked = False
        host2.save()
        self._run_dispatcher() # verify host2
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # run job
        self._finish_job(queue_entry)
        # the reverify should now be running
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)


    def test_throttling(self):
        job = self._create_job(hosts=[1,2,3])
        job.synch_count = 3
        job.save()

        queue_entries = list(job.hostqueueentry_set.all())
        def _check_hqe_statuses(*statuses):
            for queue_entry, status in zip(queue_entries, statuses):
                self._check_statuses(queue_entry, status)

        self.mock_drone_manager.process_capacity = 2
        self._run_dispatcher() # verify runs on 1 and 2
        queue_entries = list(job.hostqueueentry_set.all())
        _check_hqe_statuses(HqeStatus.QUEUED,
                            HqeStatus.VERIFYING, HqeStatus.VERIFYING)
        self.assertEquals(len(self.mock_drone_manager.running_pidfile_ids()), 2)

        self.mock_drone_manager.finish_specific_process(
                'hosts/host3/1-verify', drone_manager.AUTOSERV_PID_FILE)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # verify runs on 3
        _check_hqe_statuses(HqeStatus.VERIFYING, HqeStatus.PENDING,
                            HqeStatus.PENDING)

        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # job won't run due to throttling
        _check_hqe_statuses(HqeStatus.STARTING, HqeStatus.STARTING,
                            HqeStatus.STARTING)
        self._assert_nothing_is_running()

        self.mock_drone_manager.process_capacity = 3
        self._run_dispatcher() # now job runs
        _check_hqe_statuses(HqeStatus.RUNNING, HqeStatus.RUNNING,
                            HqeStatus.RUNNING)

        self.mock_drone_manager.process_capacity = 2
        self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                               exit_status=271)
        self._run_dispatcher() # gathering won't run due to throttling
        _check_hqe_statuses(HqeStatus.GATHERING, HqeStatus.GATHERING,
                            HqeStatus.GATHERING)
        self._assert_nothing_is_running()

        self.mock_drone_manager.process_capacity = 3
        self._run_dispatcher() # now gathering runs

        self.mock_drone_manager.process_capacity = 0
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # parsing runs despite throttling
        _check_hqe_statuses(HqeStatus.PARSING, HqeStatus.PARSING,
                            HqeStatus.PARSING)


    def test_abort_starting_while_throttling(self):
        self._initialize_test()
        job = self._create_job(hosts=[1,2], synchronous=True)
        queue_entry = job.hostqueueentry_set.all()[0]
        job.run_verify = False
        job.run_reset = False
        job.reboot_after = model_attributes.RebootAfter.NEVER
        job.save()

        self.mock_drone_manager.process_capacity = 0
        self._run_dispatcher() # go to starting, but don't start job
        self._check_statuses(queue_entry, HqeStatus.STARTING,
                             HostStatus.PENDING)

        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.GATHERING,
                             HostStatus.RUNNING)

        self.mock_drone_manager.process_capacity = 5
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.ABORTED,
                             HostStatus.CLEANING)


    def test_simple_metahost_assignment(self):
        job = self._create_job(metahosts=[1])
        self._run_dispatcher()
        entry = job.hostqueueentry_set.all()[0]
        self.assertEquals(entry.host.hostname, 'host1')
        self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        # rest of job proceeds normally


    def test_metahost_fail_verify(self):
        self.hosts[1].labels.add(self.labels[0]) # put label1 also on host2
        job = self._create_job(metahosts=[1])
        self._run_dispatcher() # assigned to host1
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # host1 failed, gets reassigned to host2
        entry = job.hostqueueentry_set.all()[0]
        self.assertEquals(entry.host.hostname, 'host2')
        self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
        self._check_host_status(self.hosts[0], HostStatus.REPAIRING)

        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)


    def test_hostless_job(self):
        job = self._create_job(hostless=True)
        entry = job.hostqueueentry_set.all()[0]

        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.RUNNING)

        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.PARSING)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.COMPLETED)


    def test_pre_job_keyvals(self):
        job = self._create_job(hosts=[1])
        job.run_verify = False
        job.run_reset = False
        job.reboot_before = model_attributes.RebootBefore.NEVER
        job.save()
        models.JobKeyval.objects.create(job=job, key='mykey', value='myvalue')

        self._run_dispatcher()
        self._finish_job(job.hostqueueentry_set.all()[0])

        attached_files = self.mock_drone_manager.attached_files(
                '1-autotest_system/host1')
        job_keyval_path = '1-autotest_system/host1/keyval'
        self.assert_(job_keyval_path in attached_files, attached_files)
        keyval_contents = attached_files[job_keyval_path]
        keyval_dict = dict(line.strip().split('=', 1)
                           for line in keyval_contents.splitlines())
        self.assert_('job_queued' in keyval_dict, keyval_dict)
        self.assertEquals(keyval_dict['mykey'], 'myvalue')


# This runs the same scheduler tests with the archiving step disabled
class SchedulerFunctionalTestNoArchiving(SchedulerFunctionalTest):
    def _set_global_config_values(self):
        super(SchedulerFunctionalTestNoArchiving,
              self)._set_global_config_values()
        self.mock_config.set_config_value('SCHEDULER', 'enable_archiving',
                                          False)


    def _finish_parsing(self):
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()


    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                                   include_verify=True):
        if include_verify:
            self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # parsing + cleanup
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair, HQE unaffected
        return queue_entry


    def test_hostless_job(self):
        job = self._create_job(hostless=True)
        entry = job.hostqueueentry_set.all()[0]

        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.RUNNING)

        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.PARSING)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.COMPLETED)

    def test_synchronous_with_reset(self):
        # For crbug/621257.
        job = self._create_job(hosts=[1, 2])
        job.synch_count = 2
        job.reboot_before = model_attributes.RebootBefore.ALWAYS
        job.save()

        hqe1 = job.hostqueueentry_set.get(host__hostname='host1')
        hqe2 = job.hostqueueentry_set.get(host__hostname='host2')

        self._run_dispatcher()

        self._check_statuses(hqe1, HqeStatus.RESETTING, HostStatus.RESETTING)
        self._check_statuses(hqe2, HqeStatus.RESETTING, HostStatus.RESETTING)

        self.mock_drone_manager.finish_active_process_on_host(1)
        self._run_dispatcher()

        self._check_statuses(hqe1, HqeStatus.PENDING, HostStatus.PENDING)
        self._check_statuses(hqe2, HqeStatus.RESETTING, HostStatus.RESETTING)

        self.mock_drone_manager.finish_active_process_on_host(2)
        self._run_dispatcher()

        self._check_statuses(hqe1, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._check_statuses(hqe2, HqeStatus.RUNNING, HostStatus.RUNNING)


if __name__ == '__main__':
    unittest.main()