Searched refs: perf_args (Results 1 - 7 of 7)

  /external/toolchain-utils/crosperf/
mock_instance.py
    11: perf_args = 'record -a -e cycles'
    39: perf_args, '', '')
    42: perf_args, '', '')
benchmark.py
    24: perf_args,
    35: self.perf_args = perf_args
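From the two matches above, benchmark.py's Benchmark constructor takes perf_args as a positional parameter and stores it on the instance. A minimal sketch of that shape; the surrounding parameters are trimmed down and their names are assumptions, not the real signature:

    class Benchmark(object):
        """Holds one benchmark's settings, including raw perf(1) options."""

        def __init__(self, name, test_name, test_args, iterations,
                     rm_chroot_tmp, perf_args, suite=''):
            self.name = name
            self.test_name = test_name
            self.test_args = test_args
            self.iterations = iterations
            self.rm_chroot_tmp = rm_chroot_tmp
            # e.g. 'record -a -e cycles', as in mock_instance.py line 11.
            self.perf_args = perf_args
            self.suite = suite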
experiment_factory.py
    97: iterations, rm_chroot_tmp, perf_args, suite,
    102: iterations, rm_chroot_tmp, perf_args,
    123: perf_args = global_settings.GetField('perf_args')
    176: iterations, rm_chroot_tmp, perf_args, suite,
    181: perf_args, suite, show_all_results, retries,
    186: perf_args, suite, show_all_results, retries,
    196: perf_args,
    204: iterations, rm_chroot_tmp, perf_args, suite,
    208: rm_chroot_tmp, perf_args, suite
    [all...]
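The experiment_factory.py matches show perf_args being read once from the experiment's global settings (line 123) and then forwarded, together with iterations and rm_chroot_tmp, into every Benchmark the factory builds. A rough sketch of that flow, assuming a settings object exposing the GetField method seen above and the Benchmark class sketched earlier; field names other than perf_args are illustrative:

    def make_benchmark(name, benchmark_settings, global_settings):
        # One global perf_args value applies to every benchmark in the experiment.
        perf_args = global_settings.GetField('perf_args')
        rm_chroot_tmp = global_settings.GetField('rm_chroot_tmp')
        iterations = benchmark_settings.GetField('iterations')
        suite = benchmark_settings.GetField('suite')
        return Benchmark(name, name, '', iterations, rm_chroot_tmp,
                         perf_args, suite)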
benchmark_run.py
    179: if self.benchmark.perf_args and self.benchmark.suite == 'telemetry':
    181: self.benchmark.perf_args = ''
    183: if self.benchmark.perf_args and self.benchmark.suite == 'test_that':
    185: self.benchmark.perf_args = ''
    187: if self.benchmark.perf_args:
    188: perf_args_list = self.benchmark.perf_args.split(' ')
    190: perf_args = ' '.join(perf_args_list)
    192: raise SyntaxError('perf_args must start with either record or stat')
    194: ("--profiler_args='perf_options=\"%s\"'" % perf_args)]
experiment_factory_unittest.py
    100: perf_args, suite, show_all):
    103: bench_list, set_list, args, iters, rm_ch, perf_args, suite, show_all
benchmark_run_unittest.py
    46: '',  # perf_args
    93: '',  # perf_args
    350: self.test_benchmark.perf_args = 'record -e cycles'
    362: self.test_benchmark.perf_args = 'record -e cycles'
    368: self.test_benchmark.perf_args = 'junk args'
  /external/toolchain-utils/crosperf/experiment_files/
telemetry_perf_perf
    26: local perf_args="${1:+perf_args: $1}"
    30: $perf_args
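In the telemetry_perf_perf wrapper, bash's ${1:+...} expansion means the generated experiment file only gets a perf_args line when the script is given an argument. Invoking it with, for example, 'record -e cycles' expands line 26 to the text "perf_args: record -e cycles", which line 30 splices into the experiment file body; with no argument, $perf_args expands to nothing and the line is omitted.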
