    Searched refs:benchmark_name (Results 1 - 21 of 21)

  /external/google-benchmark/src/
complexity.cc 193 CHECK_EQ(reports[0].benchmark_name, run.benchmark_name);
212 mean_data.benchmark_name = reports[0].benchmark_name + "_mean";
237 stddev_data.benchmark_name = reports[0].benchmark_name + "_stddev";
286 std::string benchmark_name =
287 reports[0].benchmark_name.substr(0, reports[0].benchmark_name.find('/'));
291 big_o.benchmark_name = benchmark_name + "_BigO"
    [all...]
csv_reporter.cc 91 std::string name = run.benchmark_name;
console_reporter.cc 109 result.benchmark_name.c_str());
json_reporter.cc 120 out << indent << FormatKV("name", run.benchmark_name) << ",\n";
benchmark.cc 230 report.benchmark_name = b.name;
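
  The complexity.cc hits above show the library's naming convention for aggregate reports: the mean and stddev rows reuse the first run's full benchmark_name with a "_mean"/"_stddev" suffix, while the Big-O row first strips the per-argument part (everything from the first '/') before appending "_BigO". A minimal, self-contained sketch of just that step follows; the reduced Run struct and the MakeAggregates helper are illustrative stand-ins, not the library's actual API.

    #include <cassert>
    #include <string>
    #include <vector>

    struct Run {
      std::string benchmark_name;  // see the reporter.h hits below
    };

    // Mirrors complexity.cc lines 212/237/286-291: suffix the aggregate
    // names, trimming the '/N' argument only for the Big-O report.
    std::vector<Run> MakeAggregates(const std::vector<Run>& reports) {
      Run mean_data, stddev_data, big_o;
      mean_data.benchmark_name = reports[0].benchmark_name + "_mean";
      stddev_data.benchmark_name = reports[0].benchmark_name + "_stddev";
      std::string benchmark_name =
          reports[0].benchmark_name.substr(0, reports[0].benchmark_name.find('/'));
      big_o.benchmark_name = benchmark_name + "_BigO";
      return {mean_data, stddev_data, big_o};
    }

    int main() {
      std::vector<Run> reports = {{"BM_Sort/1024"}, {"BM_Sort/2048"}};
      auto aggs = MakeAggregates(reports);
      assert(aggs[0].benchmark_name == "BM_Sort/1024_mean");
      assert(aggs[1].benchmark_name == "BM_Sort/1024_stddev");
      assert(aggs[2].benchmark_name == "BM_Sort_BigO");
    }
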
  /external/libcxx/utils/google-benchmark/src/
complexity.cc 177 CHECK_EQ(reports[0].benchmark_name, run.benchmark_name);
190 mean_data.benchmark_name = reports[0].benchmark_name + "_mean";
210 stddev_data.benchmark_name = reports[0].benchmark_name + "_stddev";
254 std::string benchmark_name =
255 reports[0].benchmark_name.substr(0, reports[0].benchmark_name.find('/'));
259 big_o.benchmark_name = benchmark_name + "_BigO"
    [all...]
csv_reporter.cc 60 std::string name = run.benchmark_name;
console_reporter.cc 76 result.benchmark_name.c_str());
json_reporter.cc 120 out << indent << FormatKV("name", run.benchmark_name) << ",\n";
benchmark.cc 223 report.benchmark_name = b.name;
  /test/vts-testcase/performance/hwbinder_benchmark_adb/
HwBinderPerformanceAdbTest.py 130 benchmark_name = tokens[0]
132 logging.info(benchmark_name)
134 label_result.append(benchmark_name.replace(prefix, ""))
  /external/libcxx/utils/google-benchmark/test/
register_benchmark_test.cc 32 CHECK(name == run.benchmark_name) << "expected " << name << " got "
33 << run.benchmark_name;
skip_with_error_test.cc 36 CHECK(name == run.benchmark_name) << "expected " << name << " got "
37 << run.benchmark_name;
  /external/google-benchmark/test/
register_benchmark_test.cc 32 CHECK(name == run.benchmark_name) << "expected " << name << " got "
33 << run.benchmark_name;
skip_with_error_test.cc 36 CHECK(name == run.benchmark_name) << "expected " << name << " got "
37 << run.benchmark_name;
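
  Both test copies use the same assertion pattern: a custom reporter receives each Run and CHECKs its benchmark_name against the name the test registered. A hedged sketch of that pattern, with a plain assert standing in for benchmark's internal CHECK macro and a trimmed Run struct:

    #include <cassert>
    #include <iostream>
    #include <string>
    #include <vector>

    struct Run {
      std::string benchmark_name;
    };

    // Illustrative analogue of the reporters in register_benchmark_test.cc
    // and skip_with_error_test.cc.
    class TestReporter {
     public:
      explicit TestReporter(std::string expected)
          : expected_(std::move(expected)) {}

      void ReportRuns(const std::vector<Run>& runs) const {
        for (const Run& run : runs) {
          if (expected_ != run.benchmark_name) {
            std::cerr << "expected " << expected_ << " got "
                      << run.benchmark_name << "\n";
            assert(false);
          }
        }
      }

     private:
      std::string expected_;
    };

    int main() {
      TestReporter reporter("BM_Function");
      reporter.ReportRuns({{"BM_Function"}});  // passes silently
    }
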
  /external/toolchain-utils/crosperf/
experiment_status.py 137 benchmark_name = benchmark_run.benchmark.name
138 benchmark_iterations[benchmark_name].append(benchmark_run.iteration)
results_report.py 105 {'benchmark_name': {'perf_event_name': [LabelData]}}
129 def _ProcessPerfReport(self, perf_report, label, benchmark_name, iteration):
134 if benchmark_name not in self.perf_data:
135 self.perf_data[benchmark_name] = {event: [] for event in perf_of_run}
136 ben_data = self.perf_data[benchmark_name]
492 def _ReadExperimentPerfReport(results_directory, label_name, benchmark_name,
500 raw_dir_name = label_name + benchmark_name + str(benchmark_iteration + 1)
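
  results_report.py documents the shape of perf_data at its line 105: a dict keyed by benchmark name, then by perf event name, each holding a list of LabelData. For readers of the C++ half of this listing, the same shape as a typed alias; LabelData's fields here are hypothetical, crosperf defines its own:

    #include <map>
    #include <string>
    #include <vector>

    // Hypothetical stand-in for crosperf's LabelData.
    struct LabelData {
      std::string label;
      std::vector<double> values;
    };

    // C++ analogue of {'benchmark_name': {'perf_event_name': [LabelData]}}.
    using PerfData =
        std::map<std::string,                  // benchmark_name
                 std::map<std::string,         // perf_event_name
                          std::vector<LabelData>>>;
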
  /frameworks/base/libs/hwui/tests/macrobench/
TestSceneRunner.cpp 88 report.benchmark_name = info.name;
101 reports[0].benchmark_name = info.name;
102 reports[0].benchmark_name += ri.suffix;
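
  TestSceneRunner.cpp builds its reports by hand rather than through the benchmark runner: it seeds benchmark_name from the scene's name, then appends a per-result suffix. A short sketch of that pattern; ReportInfo and the example comments are hypothetical stand-ins for hwui's info and ri locals:

    #include <string>

    struct Run {
      std::string benchmark_name;
    };

    struct ReportInfo {
      std::string name;    // e.g. the macrobench scene name
      std::string suffix;  // e.g. a per-result suffix
    };

    Run MakeReport(const ReportInfo& info) {
      Run report;
      report.benchmark_name = info.name;     // as at lines 88/101
      report.benchmark_name += info.suffix;  // as at line 102
      return report;
    }
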
  /external/google-benchmark/include/benchmark/
reporter.h 61 std::string benchmark_name; member in struct:benchmark::BenchmarkReporter::Run
  /external/libcxx/utils/google-benchmark/include/benchmark/
reporter.h 59 std::string benchmark_name; member in struct:benchmark::BenchmarkReporter::Run
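
  Both reporter.h copies declare benchmark_name as a plain std::string member of BenchmarkReporter::Run, which is what every call site above reads or writes. A minimal custom reporter against that vintage of the API might look like the following; later google-benchmark releases replaced this field, so treat it as a sketch for the tree indexed here:

    #include <benchmark/benchmark.h>
    #include <iostream>

    class NamePrinter : public benchmark::BenchmarkReporter {
     public:
      bool ReportContext(const Context&) override { return true; }
      void ReportRuns(const std::vector<Run>& runs) override {
        for (const Run& run : runs)
          std::cout << run.benchmark_name << "\n";  // the member at line 61/59
      }
    };

    // Usage: NamePrinter printer; benchmark::RunSpecifiedBenchmarks(&printer);
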
  /external/toolchain-utils/user_activity_benchmarks/
process_hot_functions.py 239 for benchmark_name, metrics in benchmark_set_metrics.iteritems():
240 benchmark_set_metrics_lines.append(','.join([benchmark_name, ','.join(
245 0, 'benchmark_name,number_of_functions,distance_cum,distance_avg,'
