# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# glmark2 outputs a final performance score, and this test checks that score
# against the minimum requirement if min_score is set.

import logging
import os
import re
import string

from autotest_lib.client.bin import test, utils
from autotest_lib.client.common_lib import error
from autotest_lib.client.cros import service_stopper
from autotest_lib.client.cros.graphics import graphics_utils

GLMARK2_TEST_RE = (
    r'^\[(?P<scene>.*)\] (?P<options>.*): FPS: (?P<fps>\d+) FrameTime: '
    r'(?P<frametime>\d+.\d+) ms$')
GLMARK2_SCORE_RE = r'glmark2 Score: (\d+)'
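# Illustrative output lines that the two patterns above are meant to match
# (the numbers here are made up, not real measurements):
#   [build] use-vbo=false: FPS: 528 FrameTime: 1.894 ms    <- GLMARK2_TEST_RE
#   glmark2 Score: 530                                     <- GLMARK2_SCORE_RE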

# perf value description strings may only contain letters, numbers, periods,
# dashes and underscores.
# But glmark2 test names are usually in the form:
#   scene-name:opt=val:opt=v1,v2;v3,v4 or scene:<default>
# which we convert to:
#   scene-name.opt_val.opt_v1-v2_v3-v4 or scene.default
description_table = string.maketrans(':,=;', '.-__')
description_delete = '<>'
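# For example (hypothetical test names, shown only to illustrate the mapping):
# 'texture:texture-filter=nearest' becomes 'texture.texture-filter_nearest',
# and 'jellyfish:<default>' becomes 'jellyfish.default'.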


class graphics_GLMark2(graphics_utils.GraphicsTest):
    """Runs glmark2, which benchmarks only calls compatible with OpenGL ES 2.0"""
    version = 1
    preserve_srcdir = True
    _services = None

    def setup(self):
        self.job.setup_dep(['glmark2'])

    def initialize(self):
        super(graphics_GLMark2, self).initialize()
        # If UI is running, we must stop it and restore later.
        self._services = service_stopper.ServiceStopper(['ui'])
        self._services.stop_services()

    def cleanup(self):
        if self._services:
            self._services.restore_services()
        super(graphics_GLMark2, self).cleanup()
    @graphics_utils.GraphicsTest.failure_report_decorator('graphics_GLMark2')
    def run_once(self, size='800x600', hasty=False, min_score=None):
        dep = 'glmark2'
        dep_dir = os.path.join(self.autodir, 'deps', dep)
        self.job.install_pkg(dep, 'dep', dep_dir)

        glmark2 = os.path.join(self.autodir, 'deps/glmark2/glmark2')
        if not os.path.exists(glmark2):
            raise error.TestFail('Failed: Could not find test binary.')

        glmark2_data = os.path.join(self.autodir, 'deps/glmark2/data')

        options = []
        options.append('--data-path %s' % glmark2_data)
        options.append('--size %s' % size)
        options.append('--annotate')
        if hasty:
            options.append('-b :duration=0.2')
        else:
            options.append('-b :duration=2')
        cmd = glmark2 + ' ' + ' '.join(options)
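        # With the defaults above, cmd ends up looking roughly like (paths and
        # size depend on the autotest install location and the arguments):
        #   .../deps/glmark2/glmark2 --data-path .../deps/glmark2/data \
        #       --size 800x600 --annotate -b :duration=2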

        if os.environ.get('CROS_FACTORY'):
            from autotest_lib.client.cros import factory_setup_modules
            from cros.factory.test import ui
            ui.start_reposition_thread('^glmark')

        # TODO(ihf): Switch this test to use perf.PerfControl like
        #            graphics_GLBench once it is stable. crbug.com/344766.
        if not hasty:
            if not utils.wait_for_idle_cpu(60.0, 0.1):
                if not utils.wait_for_idle_cpu(20.0, 0.2):
                    raise error.TestFail('Failed: Could not get idle CPU.')
            if not utils.wait_for_cool_machine():
                raise error.TestFail('Failed: Could not get cold machine.')

        # In this test we are handling stderr manually, so stderr_is_expected=True.
        # Strangely, autotest treats CmdError/CmdTimeoutError as warnings only.
        try:
            result = utils.run(cmd,
                               stderr_is_expected=True,
                               stdout_tee=utils.TEE_TO_LOGS,
                               stderr_tee=utils.TEE_TO_LOGS)
        except error.CmdError:
            raise error.TestFail('Failed: CmdError running %s' % cmd)
        except error.CmdTimeoutError:
            raise error.TestFail('Failed: CmdTimeout running %s' % cmd)

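        # result is expected to be an autotest CmdResult: result.stdout carries
        # the per-scene FPS/FrameTime lines plus the final score line, while
        # result.stderr carries any 'Error:' lines, which are checked below.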
        logging.info(result)
        for line in result.stderr.splitlines():
            if line.startswith('Error:'):
                # Line already starts with 'Error:', no need to prepend.
                raise error.TestFail(line)

        # Numbers in hasty mode are not as reliable, so don't send them to
        # the dashboard etc.
        if not hasty:
            keyvals = {}
            score = None
            test_re = re.compile(GLMARK2_TEST_RE)
            for line in result.stdout.splitlines():
                match = test_re.match(line)
                if match:
                    test = '%s.%s' % (match.group('scene'),
                                      match.group('options'))
                    test = test.translate(description_table, description_delete)
                    frame_time = match.group('frametime')
                    keyvals[test] = frame_time
                    self.output_perf_value(
                        description=test,
                        value=frame_time,
                        units='ms',
                        higher_is_better=False)
                else:
                    # glmark2 outputs the final performance score as:
                    #  glmark2 Score: 530
                    match = re.findall(GLMARK2_SCORE_RE, line)
                    if match:
                        score = int(match[0])
            if score is None:
                raise error.TestFail('Failed: Unable to read benchmark score')
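            # At this point keyvals maps each translated test name to its frame
            # time, e.g. (made-up values): {'build.use-vbo_false': '1.894'};
            # the overall 'glmark2_score' entry is added below.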
            # Output numbers for plotting by harness.
            logging.info('GLMark2 score: %d', score)
            if os.environ.get('CROS_FACTORY'):
                from autotest_lib.client.cros import factory_setup_modules
                from cros.factory.event_log import EventLog
                EventLog('graphics_GLMark2').Log('glmark2_score', score=score)
            keyvals['glmark2_score'] = score
            self.write_perf_keyval(keyvals)
            self.output_perf_value(
                description='Score',
                value=score,
                units='score',
                higher_is_better=True)

            if min_score is not None and score < min_score:
                raise error.TestFail(
                    'Failed: Benchmark score %d < %d (minimum score '
                    'requirement)' % (score, min_score))