# Copyright 2016 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import json
import logging
import os
import time

import common
from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib import global_config
from autotest_lib.client.common_lib import hosts
from autotest_lib.client.common_lib.cros import retry
from autotest_lib.server import afe_utils
from autotest_lib.server import crashcollect
from autotest_lib.server.hosts import repair
from autotest_lib.server.hosts import cros_firmware

# _DEV_MODE_ALLOWED_POOLS - The set of pools that are allowed to be
# in dev mode (usually, those should be unmanaged devices).
#
_DEV_MODE_ALLOWED_POOLS = set(
    global_config.global_config.get_config_value(
            'CROS',
            'pools_dev_mode_allowed',
            type=str,
            default='',
            allow_blank=True).split(','))

# Setting to suppress the dev mode check; primarily used for moblab, where
# all DUTs are in dev mode.
_DEV_MODE_ALWAYS_ALLOWED = global_config.global_config.get_config_value(
            'CROS',
            'dev_mode_allowed',
            type=bool,
            default=False)
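
# For illustration only, a deployment might set these knobs with entries
# like the following in the global config (e.g. shadow_config.ini); the
# pool names here are made up:
#
#     [CROS]
#     pools_dev_mode_allowed: my-dev-pool,another-dev-pool
#     dev_mode_allowed: True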

# Triggers for the 'au', 'powerwash', and 'usb' repair actions.
# These are also used as dependencies in the `CrosHost` repair
# sequence, as follows:
#
# au:
#   - triggers: _CROS_AU_TRIGGERS
#   - depends on: _CROS_USB_TRIGGERS + _CROS_POWERWASH_TRIGGERS
#
# powerwash:
#   - triggers: _CROS_POWERWASH_TRIGGERS + _CROS_AU_TRIGGERS
#   - depends on: _CROS_USB_TRIGGERS
#
# usb:
#   - triggers: _CROS_USB_TRIGGERS + _CROS_POWERWASH_TRIGGERS +
#               _CROS_AU_TRIGGERS
#   - no dependencies
#
# N.B. AC power detection depends on software on the DUT, and there
# have been bugs where detection failed even though the DUT really
# did have power.  So, we make the 'power' verifier a trigger for
# reinstall repair actions, too.
#
# TODO(jrbarnette):  AU repair can't fix all problems reported by
# the 'cros' verifier; it's listed as an AU trigger as a
# simplification.  The ultimate fix is to split the 'cros' verifier
# into smaller individual verifiers.
_CROS_AU_TRIGGERS = ('power', 'rwfw', 'python', 'cros',)
_CROS_POWERWASH_TRIGGERS = ('tpm', 'good_au', 'ext4',)
_CROS_USB_TRIGGERS = ('ssh', 'writable',)
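
# For illustration, with the tuples above the 'usb' repair action triggers
# on the union of all three lists:
#
#     _CROS_USB_TRIGGERS + _CROS_POWERWASH_TRIGGERS + _CROS_AU_TRIGGERS
#     == ('ssh', 'writable', 'tpm', 'good_au', 'ext4',
#         'power', 'rwfw', 'python', 'cros')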


class ACPowerVerifier(hosts.Verifier):
    """Check for AC power and a reasonable battery charge."""

    def verify(self, host):
        # Temporarily work around a problem caused by some old FSI
        # builds that don't have the power_supply_info command by
        # ignoring failures.  The repair triggers believe that this
        # verifier can't be fixed by re-installing, which means if a DUT
        # gets stuck with one of those old builds, it can't be repaired.
        #
        # TODO(jrbarnette): This is for crbug.com/599158; we need a
        # better solution.
        try:
            info = host.get_power_supply_info()
        except Exception:
            logging.exception('get_power_supply_info() failed')
            return
        try:
            if info['Line Power']['online'] != 'yes':
                raise hosts.AutoservVerifyError(
                        'AC power is not plugged in')
        except KeyError:
            logging.info('Cannot determine AC power status - '
                         'skipping check.')
        try:
            if float(info['Battery']['percentage']) < 50.0:
                raise hosts.AutoservVerifyError(
                        'Battery is less than 50%')
        except KeyError:
            logging.info('Cannot determine battery status - '
                         'skipping check.')

    @property
    def description(self):
        return 'The DUT is plugged in to AC power'


class WritableVerifier(hosts.Verifier):
    """
    Confirm the stateful file systems are writable.

    The standard Linux response to certain unexpected file system errors
    (including hardware errors in block devices) is to change the file
    system status to read-only.  This checks that that hasn't happened.

    The test covers the two file systems that need to be writable for
    critical operations like AU:
      * The (unencrypted) stateful file system, which includes
        /mnt/stateful_partition.
      * The encrypted stateful partition, which includes /var.

    The test doesn't check the various bind mounts; those are expected
    to fail the same way as their underlying main mounts.  Whether the
    Linux kernel can guarantee that is untested...
    """

    # N.B. Order matters here:  Encrypted stateful is loop-mounted from
    # a file in unencrypted stateful, so we don't test for errors in
    # encrypted stateful if unencrypted fails.
    _TEST_DIRECTORIES = ['/mnt/stateful_partition', '/var/tmp']

    def verify(self, host):
        # This deliberately stops looking after the first error.
        # See above for the details.
        for testdir in self._TEST_DIRECTORIES:
            filename = os.path.join(testdir, 'writable_test')
            command = 'touch %s && rm %s' % (filename, filename)
            rv = host.run(command=command, ignore_status=True)
            if rv.exit_status != 0:
                msg = 'Can\'t create a file in %s' % testdir
                raise hosts.AutoservVerifyError(msg)

    @property
    def description(self):
        return 'The stateful filesystems are writable'


class EXT4fsErrorVerifier(hosts.Verifier):
    """
    Confirm we have not seen critical file system kernel errors.
    """
    def verify(self, host):
        # grep for stateful FS errors of the type "EXT4-fs error (device sda1):"
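        # The $(...) subshell below resolves the stateful partition's
        # block device.  Assuming the stateful mount carries no optional
        # per-mount tags, fields 5 and 9 of /proc/$$/mountinfo are the
        # mount point and the source device, so the pipeline reduces a
        # line like '/mnt/stateful_partition /dev/sda1' to 'sda1' (the
        # device name is illustrative), giving a final pattern of the
        # form "EXT4-fs error (device sda1):".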
        command = ("dmesg | grep -E \"EXT4-fs error \(device "
                   "$(cut -d ' ' -f 5,9 /proc/$$/mountinfo | "
                   "grep -e '^/mnt/stateful_partition ' | "
                   "cut -d ' ' -f 2 | cut -d '/' -f 3)\):\"")
        output = host.run(command=command, ignore_status=True).stdout
        if output:
            sample = output.splitlines()[0]
            message = 'Saw file system error: %s' % sample
            raise hosts.AutoservVerifyError(message)
        # Check for other critical FS errors.
        command = 'dmesg | grep "This should not happen!!  Data will be lost"'
        output = host.run(command=command, ignore_status=True).stdout
        if output:
            message = 'Saw file system error: Data will be lost'
            raise hosts.AutoservVerifyError(message)

    @property
    def description(self):
        return 'Did not find critical file system errors'


class UpdateSuccessVerifier(hosts.Verifier):
    """
    Checks that the DUT successfully finished its last provision job.

    At the start of any update (e.g. for a Provision job), the code
    creates a marker file named `host.PROVISION_FAILED`.  The file is
    located in a part of the stateful partition that will be removed if
    an update finishes successfully.  Thus, the presence of the file
    indicates that a prior update failed.

    The verifier tests for the existence of the marker file and fails if
    it still exists.
    """
    def verify(self, host):
        result = host.run('test -f %s' % host.PROVISION_FAILED,
                          ignore_status=True)
        if result.exit_status == 0:
            raise hosts.AutoservVerifyError(
                    'Last AU on this DUT failed')

    @property
    def description(self):
        return 'The most recent AU attempt on this DUT succeeded'


class TPMStatusVerifier(hosts.Verifier):
    """Verify that the host's TPM is in a good state."""

    def verify(self, host):
        if _is_virtual_machine(host):
            # We do not forward host TPM / emulated TPM to qemu VMs, so skip
            # this verification step.
            logging.debug('Skipped verification %s on VM', self)
            return

        # This cryptohome command emits status information in JSON format. It
        # looks something like this:
        # {
        #    "installattrs": {
        #       ...
        #    },
        #    "mounts": [ {
        #       ...
        #    } ],
        #    "tpm": {
        #       "being_owned": false,
        #       "can_connect": true,
        #       "can_decrypt": false,
        #       "can_encrypt": false,
        #       "can_load_srk": true,
        #       "can_load_srk_pubkey": true,
        #       "enabled": true,
        #       "has_context": true,
        #       "has_cryptohome_key": false,
        #       "has_key_handle": false,
        #       "last_error": 0,
        #       "owned": true
        #    }
        # }
        output = host.run('cryptohome --action=status').stdout.strip()
        try:
            status = json.loads(output)
        except ValueError:
            logging.info('Cannot determine the cryptohome status - '
                         'skipping check.')
            return
        try:
            tpm = status['tpm']
            if not tpm['enabled']:
                raise hosts.AutoservVerifyError(
                        'TPM is not enabled -- Hardware is not working.')
            if not tpm['can_connect']:
                raise hosts.AutoservVerifyError(
                        ('TPM connect failed -- '
                         'last_error=%d.' % tpm['last_error']))
            if tpm['owned'] and not tpm['can_load_srk']:
                raise hosts.AutoservVerifyError(
                        'Cannot load the TPM SRK')
            if tpm['can_load_srk'] and not tpm['can_load_srk_pubkey']:
                raise hosts.AutoservVerifyError(
                        'Cannot load the TPM SRK public key')
        except KeyError:
            logging.info('Cannot determine the cryptohome status - '
                         'skipping check.')

    @property
    def description(self):
        return 'The host\'s TPM is available and working'


class PythonVerifier(hosts.Verifier):
    """Confirm the presence of a working Python interpreter."""

    def verify(self, host):
        result = host.run('python -c "import cPickle"',
                          ignore_status=True)
        if result.exit_status != 0:
            message = 'The python interpreter is broken'
            if result.exit_status == 127:
                search = host.run('which python', ignore_status=True)
                if search.exit_status != 0 or not search.stdout:
                    message = ('Python is missing; may be caused by '
                               'powerwash')
            raise hosts.AutoservVerifyError(message)

    @property
    def description(self):
        return 'Python on the host is installed and working'


class DevModeVerifier(hosts.Verifier):
    """Verify that the host is not in dev mode."""

    def verify(self, host):
        # Some pools are allowed to be in dev mode
        info = host.host_info_store.get()
        if (_DEV_MODE_ALWAYS_ALLOWED or
                bool(info.pools & _DEV_MODE_ALLOWED_POOLS)):
            return

        result = host.run('crossystem devsw_boot', ignore_status=True).stdout
        if result != '0':
            raise hosts.AutoservVerifyError('The host is in dev mode')

    @property
    def description(self):
        return 'The host should not be in dev mode'


class HWIDVerifier(hosts.Verifier):
    """Verify that the host has a HWID & serial number."""

    def verify(self, host):
        try:
            info = host.host_info_store.get()

            hwid = host.run('crossystem hwid', ignore_status=True).stdout
            if hwid:
                info.attributes['HWID'] = hwid

            serial_number = host.run('vpd -g serial_number',
                                     ignore_status=True).stdout
            if serial_number:
                info.attributes['serial_number'] = serial_number

            if info != host.host_info_store.get():
                host.host_info_store.commit(info)
        except Exception as e:
            logging.exception('Failed to get HWID & serial number for host '
                              '%s: %s', host.hostname, str(e))

    @property
    def description(self):
        return 'The host should have a valid HWID and serial number'


class JetstreamServicesVerifier(hosts.Verifier):
    """Verify that Jetstream services are running."""

    # Retry for b/62576902
    @retry.retry(error.AutoservError, timeout_min=1, delay_sec=10)
    def verify(self, host):
        try:
            if not host.upstart_status('ap-controller'):
                raise hosts.AutoservVerifyError(
                    'ap-controller service is not running')
        except error.AutoservRunError:
            raise hosts.AutoservVerifyError(
                'ap-controller service not found')

        try:
            host.run('pgrep ap-controller')
        except error.AutoservRunError:
            raise hosts.AutoservVerifyError(
                'ap-controller process is not running')

    @property
    def description(self):
        return 'Jetstream services must be running'


class _ResetRepairAction(hosts.RepairAction):
    """Common handling for repair actions that reset a DUT."""

    def _collect_logs(self, host):
        """Collect logs from a successfully repaired DUT."""
        dirname = 'after_%s' % self.tag
        local_log_dir = crashcollect.get_crashinfo_dir(host, dirname)
        host.collect_logs('/var/log', local_log_dir, ignore_errors=True)
        # Collect crash info.
        crashcollect.get_crashinfo(host, None)

    def _check_reset_success(self, host):
        """Check whether reset succeeded, and gather logs if possible."""
        if host.wait_up(host.BOOT_TIMEOUT):
            try:
                # Collect logs once we regain ssh access before
                # clobbering them.
                self._collect_logs(host)
            except Exception:
                # If the DUT is up, we want to declare success, even if
                # log gathering fails for some reason.  So, if there's
                # a failure, just log it and move on.
                logging.exception('Unexpected failure in log '
                                  'collection during %s.',
                                  self.tag)
            return
        raise hosts.AutoservRepairError(
                'Host %s is still offline after %s.' %
                (host.hostname, self.tag))


class ServoSysRqRepair(_ResetRepairAction):
    """
    Repair a Chrome device by sending a system request to the kernel.

    Sending the Alt+VolUp+x key combination (aka sysrq-x) three times
    will ask the kernel to panic itself and reboot while preserving
    the kernel logs in console ramoops.
    """

    def repair(self, host):
        if not host.servo:
            raise hosts.AutoservRepairError(
                    '%s has no servo support.' % host.hostname)
        # Press Alt+VolUp+X three times.  We don't check DUT health
        # between presses, because killing Chrome alone is unlikely to
        # restore SSH access.
        for _ in range(3):
            try:
                host.servo.sysrq_x()
            except error.TestFail as ex:
                raise hosts.AutoservRepairError(
                      'cannot press sysrq-x: %s.' % str(ex))
            # less than 5 seconds between presses.
            time.sleep(2.0)
        self._check_reset_success(host)

    @property
    def description(self):
        return 'Reset the DUT via keyboard sysrq-x'


class ServoResetRepair(_ResetRepairAction):
    """Repair a Chrome device by resetting it with servo."""

    def repair(self, host):
        if not host.servo:
            raise hosts.AutoservRepairError(
                    '%s has no servo support.' % host.hostname)
        host.servo.get_power_state_controller().reset()
        self._check_reset_success(host)

    @property
    def description(self):
        return 'Reset the DUT via servo'


class CrosRebootRepair(repair.RebootRepair):
    """Repair a CrOS target by clearing dev mode and rebooting it."""

    def repair(self, host):
        # N.B. We need to reboot regardless of whether clearing
        # dev_mode succeeds or fails.
        host.run('/usr/share/vboot/bin/set_gbb_flags.sh 0',
                 ignore_status=True)
        host.run('crossystem disable_dev_request=1',
                 ignore_status=True)
        super(CrosRebootRepair, self).repair(host)

    @property
    def description(self):
        return 'Reset GBB flags and reboot the host'


class AutoUpdateRepair(hosts.RepairAction):
    """
    Repair by re-installing a test image using autoupdate.

    Try to install the DUT's designated "stable test image" using the
    standard procedure for installing a new test image via autoupdate.
    """

    def repair(self, host):
        afe_utils.machine_install_and_update_labels(host, repair=True)

    @property
    def description(self):
        return 'Re-install the stable build via AU'


class PowerWashRepair(AutoUpdateRepair):
    """
    Powerwash the DUT, then re-install using autoupdate.

    Powerwash the DUT, then attempt to re-install a stable test image as
    for `AutoUpdateRepair`.
    """

    def repair(self, host):
        host.run('echo "fast safe" > '
                 '/mnt/stateful_partition/factory_install_reset')
        host.reboot(timeout=host.POWERWASH_BOOT_TIMEOUT, wait=True)
        super(PowerWashRepair, self).repair(host)

    @property
    def description(self):
        return 'Powerwash and then re-install the stable build via AU'


class ServoInstallRepair(hosts.RepairAction):
    """
    Reinstall a test image from USB using servo.

    Use servo to re-install the DUT's designated "stable test image"
    from servo-attached USB storage.
    """

    def repair(self, host):
        if not host.servo:
            raise hosts.AutoservRepairError(
                    '%s has no servo support.' % host.hostname)
        host.servo_install(host.stage_image_for_servo())

    @property
    def description(self):
        return 'Reinstall from USB using servo'


class JetstreamRepair(hosts.RepairAction):
    """Repair by restarting Jetstream services."""

    def repair(self, host):
        host.cleanup_services()

    @property
    def description(self):
        return 'Restart Jetstream services'


def _cros_verify_dag():
    """Return the verification DAG for a `CrosHost`."""
    FirmwareStatusVerifier = cros_firmware.FirmwareStatusVerifier
    FirmwareVersionVerifier = cros_firmware.FirmwareVersionVerifier
    verify_dag = (
        (repair.SshVerifier,         'ssh',      ()),
        (DevModeVerifier,            'devmode',  ('ssh',)),
        (HWIDVerifier,               'hwid',     ('ssh',)),
        (ACPowerVerifier,            'power',    ('ssh',)),
        (EXT4fsErrorVerifier,        'ext4',     ('ssh',)),
        (WritableVerifier,           'writable', ('ssh',)),
        (TPMStatusVerifier,          'tpm',      ('ssh',)),
        (UpdateSuccessVerifier,      'good_au',  ('ssh',)),
        (FirmwareStatusVerifier,     'fwstatus', ('ssh',)),
        (FirmwareVersionVerifier,    'rwfw',     ('ssh',)),
        (PythonVerifier,             'python',   ('ssh',)),
        (repair.LegacyHostVerifier,  'cros',     ('ssh',)),
    )
    return verify_dag
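
# Note on structure: each entry in a verify DAG is a 3-tuple of
# (verifier_class, tag, dependency_tags); a verifier runs only after all
# of the verifiers named in its dependency list have passed.  As an
# illustration only, a hypothetical verifier gated on both ssh access and
# AC power would be listed as:
#
#     (MyNewVerifier, 'my_check', ('ssh', 'power',)),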


def _cros_basic_repair_actions():
    """Return the basic repair actions for a `CrosHost`."""
    FirmwareRepair = cros_firmware.FirmwareRepair
    repair_actions = (
        # RPM cycling must precede Servo reset:  if the DUT has a dead
        # battery, we need to reattach AC power before we reset via servo.
        (repair.RPMCycleRepair, 'rpm', (), ('ssh', 'power',)),
        (ServoSysRqRepair, 'sysrq', (), ('ssh',)),
        (ServoResetRepair, 'servoreset', (), ('ssh',)),

        # N.B. FirmwareRepair can't fix a 'good_au' failure directly,
        # because it doesn't remove the flag file that triggers the
        # failure.  We include it as a repair trigger because it's
        # possible that the last update failed because of the firmware,
        # and we want the repair steps below to be able to trust the
        # firmware.
        (FirmwareRepair, 'firmware', (), ('ssh', 'fwstatus', 'good_au',)),

        (CrosRebootRepair, 'reboot', ('ssh',), ('devmode', 'writable',)),
    )
    return repair_actions
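
# Note on structure: each repair action entry is a 4-tuple of
# (action_class, tag, dependencies, triggers).  Dependencies are verifier
# tags that must already pass before the action is attempted; triggers are
# the verifiers the action is expected to fix (see the trigger comments
# near the top of this file).  As an illustration only, a hypothetical
# action requiring 'ssh' and repairing a failed 'power' check would be:
#
#     (MyRepairAction, 'my_repair', ('ssh',), ('power',)),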


def _cros_extended_repair_actions(au_triggers=_CROS_AU_TRIGGERS,
                                  powerwash_triggers=_CROS_POWERWASH_TRIGGERS,
                                  usb_triggers=_CROS_USB_TRIGGERS):
    """Return the extended repair actions for a `CrosHost`."""

    # The dependencies and triggers for the 'au', 'powerwash', and 'usb'
    # repair actions stack up:  Each one is able to repair progressively
    # more verifiers than the one before.  The 'triggers' lists specify
    # the progression.

    repair_actions = (
        (AutoUpdateRepair, 'au',
                usb_triggers + powerwash_triggers, au_triggers),
        (PowerWashRepair, 'powerwash',
                usb_triggers, powerwash_triggers + au_triggers),
        (ServoInstallRepair, 'usb',
                (), usb_triggers + powerwash_triggers + au_triggers),
    )
    return repair_actions


def _cros_repair_actions():
    """Return the repair actions for a `CrosHost`."""
    repair_actions = (_cros_basic_repair_actions() +
                      _cros_extended_repair_actions())
    return repair_actions


def create_cros_repair_strategy():
    """Return a `RepairStrategy` for a `CrosHost`."""
    verify_dag = _cros_verify_dag()
    repair_actions = _cros_repair_actions()
    return hosts.RepairStrategy(verify_dag, repair_actions)
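
# A minimal usage sketch, assuming the RepairStrategy interface defined in
# autotest_lib.client.common_lib.hosts (verify() raises if verification
# fails; repair() runs verifiers and then repair actions as needed):
#
#     strategy = create_cros_repair_strategy()
#     strategy.verify(host)
#     strategy.repair(host)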


def _moblab_verify_dag():
    """Return the verification DAG for a `MoblabHost`."""
    FirmwareVersionVerifier = cros_firmware.FirmwareVersionVerifier
    verify_dag = (
        (repair.SshVerifier,         'ssh',     ()),
        (ACPowerVerifier,            'power',   ('ssh',)),
        (FirmwareVersionVerifier,    'rwfw',    ('ssh',)),
        (PythonVerifier,             'python',  ('ssh',)),
        (repair.LegacyHostVerifier,  'cros',    ('ssh',)),
    )
    return verify_dag


def _moblab_repair_actions():
    """Return the repair actions for a `MoblabHost`."""
    repair_actions = (
        (repair.RPMCycleRepair, 'rpm', (), ('ssh', 'power',)),
        (AutoUpdateRepair, 'au', ('ssh',), _CROS_AU_TRIGGERS),
    )
    return repair_actions


def create_moblab_repair_strategy():
    """
    Return a `RepairStrategy` for a `MoblabHost`.

    Moblab is a subset of the CrOS verify and repair.  Several pieces
    are removed because they're not expected to be meaningful.  Some
    others are removed for more specific reasons:

    'tpm':  Moblab DUTs don't run the tests that matter to this
        verifier.  TODO(jrbarnette)  This assertion is unproven.

    'good_au':  This verifier can't pass, because the Moblab AU
        procedure doesn't properly delete CrosHost.PROVISION_FAILED.
        TODO(jrbarnette) We should refactor _machine_install() so that
        it can be different for Moblab.

    'firmware':  Moblab DUTs shouldn't be in FAFT pools, so we don't try
        this.

    'powerwash':  Powerwash on Moblab causes trouble with deleting the
        DHCP leases file, so we skip it.
    """
    verify_dag = _moblab_verify_dag()
    repair_actions = _moblab_repair_actions()
    return hosts.RepairStrategy(verify_dag, repair_actions)


def _jetstream_repair_actions():
    """Return the repair actions for a `JetstreamHost`."""
    au_triggers = _CROS_AU_TRIGGERS + ('jetstream_services',)
    repair_actions = (
        _cros_basic_repair_actions() +
        (
            (JetstreamRepair, 'jetstream_repair',
             _CROS_USB_TRIGGERS + _CROS_POWERWASH_TRIGGERS, au_triggers),
        ) +
        _cros_extended_repair_actions(au_triggers=au_triggers))
    return repair_actions


def _jetstream_verify_dag():
    """Return the verification DAG for a `JetstreamHost`."""
    verify_dag = _cros_verify_dag() + (
        (JetstreamServicesVerifier, 'jetstream_services', ('ssh',)),
    )
    return verify_dag


def create_jetstream_repair_strategy():
    """
    Return a `RepairStrategy` for a `JetstreamHost`.

    The Jetstream repair strategy is based on the CrOS verify and repair,
    but adds the JetstreamServicesVerifier.
    """
    verify_dag = _jetstream_verify_dag()
    repair_actions = _jetstream_repair_actions()
    return hosts.RepairStrategy(verify_dag, repair_actions)


# TODO(pprabhu) Move this to a better place. I have no idea what that place
# would be.
def _is_virtual_machine(host):
    """Determine whether the given |host| is a virtual machine.

    @param host: a hosts.Host object.
    @returns True if the host is a virtual machine, False otherwise.
    """
    output = host.run('grep "model name" /proc/cpuinfo',
                      ignore_status=True)
    return output.stdout and 'qemu' in output.stdout.lower()
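
# Illustrative /proc/cpuinfo line on a QEMU VM (exact text varies with the
# QEMU version and the emulated CPU model):
#
#     model name      : QEMU Virtual CPU version 2.5+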