/system/netd/tests/benchmarks/ |
dns_benchmark.cpp |
    25   * The benchmark fixture runs in 3 different modes:
    68   #include <benchmark/benchmark.h>
    82   class DnsFixture : public ::benchmark::Fixture {
    90   void SetUp(const ::benchmark::State& state) override {
    105  void TearDown(const ::benchmark::State& state) override {
    120  void getaddrinfo_until_done(benchmark::State &state) {
    137  void benchmark_at_reporting_level(benchmark::State &state, int metricsLevel) {
    173  BENCHMARK_DEFINE_F(DnsFixture, getaddrinfo_log_nothing)(benchmark::State& state) {
    181  BENCHMARK_DEFINE_F(DnsFixture, getaddrinfo_log_metrics)(benchmark::State& state)
    [all...] |
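The dns_benchmark.cpp entry above uses google-benchmark's fixture API (::benchmark::Fixture with SetUp/TearDown plus BENCHMARK_DEFINE_F). A minimal sketch of that pattern follows; the ExampleFixture class and the LookupOnce body are placeholders for illustration, not the actual netd code:

    #include <benchmark/benchmark.h>

    // Fixture sketch: SetUp/TearDown bracket every registered benchmark run.
    class ExampleFixture : public ::benchmark::Fixture {
     public:
      void SetUp(const ::benchmark::State& state) override {
        // Acquire per-benchmark resources here (e.g. a test resolver).
      }
      void TearDown(const ::benchmark::State& state) override {
        // Release them here.
      }
    };

    // BENCHMARK_DEFINE_F writes the benchmark body against the fixture,
    // and BENCHMARK_REGISTER_F makes it visible to the runner.
    BENCHMARK_DEFINE_F(ExampleFixture, LookupOnce)(benchmark::State& state) {
      while (state.KeepRunning()) {
        // measured work goes here
      }
    }
    BENCHMARK_REGISTER_F(ExampleFixture, LookupOnce);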
/frameworks/minikin/tests/perftests/ |
FontCollection.cpp |
    16   #include <benchmark/benchmark.h>
    30   static void BM_FontCollection_construct(benchmark::State& state) {
    38   BENCHMARK(BM_FontCollection_construct);
    40   static void BM_FontCollection_hasVariationSelector(benchmark::State& state) {
    57   BENCHMARK(BM_FontCollection_hasVariationSelector)
    76   static void BM_FontCollection_itemize(benchmark::State& state) {
    98   BENCHMARK(BM_FontCollection_itemize)
|
/system/libhwbinder/vts/performance/ |
Benchmark_binder.cpp |
    17   #include <benchmark/benchmark.h>
    72   static void BM_sendVec_binder(benchmark::State& state) {
    84   state.SkipWithError("Failed to retrieve benchmark service.");
    92   BENCHMARK(BM_sendVec_binder)->RangeMultiplier(2)->Range(4, 65536);
    95   ::benchmark::Initialize(&argc, argv);
    100  ::benchmark::RunSpecifiedBenchmarks();
|
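The libhwbinder benchmark just above sweeps payload sizes with RangeMultiplier/Range and aborts the run with SkipWithError when its service is unavailable. A hedged sketch of that shape; the SetUpService helper is invented for illustration, and state.range(0) is spelled range_x() in some older releases of the library:

    #include <benchmark/benchmark.h>
    #include <cstdint>
    #include <vector>

    static bool SetUpService() { return true; }  // stand-in for the real service lookup

    static void BM_sendVec_example(benchmark::State& state) {
      if (!SetUpService()) {
        state.SkipWithError("Failed to retrieve benchmark service.");
        return;
      }
      std::vector<uint8_t> payload(state.range(0));
      while (state.KeepRunning()) {
        benchmark::DoNotOptimize(payload.data());  // stand-in for the transaction
      }
    }
    // Sweep 4 bytes .. 64 KiB in powers of two.
    BENCHMARK(BM_sendVec_example)->RangeMultiplier(2)->Range(4, 65536);

    int main(int argc, char** argv) {
      ::benchmark::Initialize(&argc, argv);
      ::benchmark::RunSpecifiedBenchmarks();
      return 0;
    }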
/external/autotest/server/site_tests/telemetry_AFDOGenerate/ |
telemetry_AFDOGenerate.py |
    58   # Temporarily disable this benchmark because it is failing a
    109  benchmark = benchmark_info[0]
    112  self._run_test_with_retry(benchmark, *args)
    117  logging.info('Ignoring failure from benchmark %s.',
    118  benchmark)
    185  def _run_test(self, benchmark, *args):
    186  """Run the benchmark using Telemetry.
    188  @param benchmark: Name of the benchmark to run.
    192  Also re-raise any exceptions thrown by run_telemetry benchmark
    [all...] |
/external/toolchain-utils/crosperf/ |
benchmark_run.py |
    6    """Module of benchmark runs."""
    33   def __init__(self, name, benchmark, label, iteration, cache_conditions,
    39   self.benchmark = benchmark
    53   self.test_args = benchmark.test_args
    71   self.benchmark.test_name, self.iteration, self.test_args,
    75   self.benchmark.suite, self.benchmark.show_all_results,
    76   self.benchmark.run_local)
    99   retval, self.benchmark.test_name
    [all...] |
suite_runner.py |
    60   def Run(self, machine, label, benchmark, test_args, profiler_args):
    61   for i in range(0, benchmark.retries + 1):
    63   if benchmark.suite == 'telemetry':
    65   ret_tup = self.Telemetry_Run(machine, label, benchmark, profiler_args)
    66   elif benchmark.suite == 'telemetry_Crosperf':
    68   ret_tup = self.Telemetry_Crosperf_Run(machine, label, benchmark,
    71   ret_tup = self.Test_That_Run(machine, label, benchmark, test_args,
    74   self.logger.LogOutput('benchmark %s failed. Retries left: %s' %
    75   (benchmark.name, benchmark.retries - i)
    [all...] |
results_organizer.py |
    22   for benchmark in benchmarks:
    23   if benchmark.name != bench or benchmark.iteration_adjusted:
    25   benchmark.iteration_adjusted = True
    26   benchmark.iterations *= (max_dup + 1)
    96   # the benchmark names. The value for a key is a list containing the names
    117  # Count how many iterations exist for each benchmark run.
    122  name = run.benchmark.name
    130  name = run.benchmark.name
    162  benchmark = benchmark_run.benchmar
    [all...] |
/external/caliper/caliper/src/main/java/com/google/caliper/worker/ |
RuntimeWorker.java |
    24   import com.google.caliper.runner.Running.Benchmark;
    51   RuntimeWorker(Object benchmark,
    54   super(benchmark, method);
    104  @Inject Micro(@Benchmark Object benchmark,
    107  super(benchmark, method, random, ticker, workerOptions);
    116  + "Otherwise, check your benchmark for errors.",
    117  benchmark.getClass(), benchmarkMethod.getName(),
    121  benchmarkMethod.invoke(benchmark, intReps);
    130  @Inject Pico(@Benchmark Object benchmark,
    [all...] |
/external/v8/tools/ |
perf-to-html.py |
    113  class Benchmark:
    171  def RenderOneBenchmark(self, benchmark):
    173  self.Print("<a name=\"" + benchmark.name() + "\">")
    174  self.Print(benchmark.name() + "</a> <a href=\"#top\">(top)</a>")
    176  self.Print("<table class=\"benchmark\">")
    184  tests = benchmark.tests()
    185  for test in benchmark.SortedTestKeys():
    207  for benchmark in data:
    208  if benchmark != "errors":
    209  self.Print("<li><a href=\"#" + benchmark + "\">" + benchmark + "</a></li>"
    [all...] |
/external/expat/ |
import_expat.sh |
    42   rm tests/benchmark/benchmark.dsp
    43   rm tests/benchmark/benchmark.dsw
|
/external/google-benchmark/test/ |
complexity_test.cc |
    7    #include "benchmark/benchmark.h"
    48   void BM_Complexity_O1(benchmark::State& state) {
    51   benchmark::DoNotOptimize(&i);
    56   BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity(benchmark::o1);
    57   BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity();
    58   BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity([](int) {
    67   // See https://github.com/google/benchmark/issues/272
    93   void BM_Complexity_O_N(benchmark::State& state)
    [all...] |
output_test_helper.cc |
    113  class TestReporter : public benchmark::BenchmarkReporter {
    115  TestReporter(std::vector<benchmark::BenchmarkReporter*> reps)
    140  std::vector<benchmark::BenchmarkReporter *> reporters_;
    153  regex(std::make_shared<benchmark::Regex>()) {
    188  benchmark::Initialize(&argc, argv);
    189  benchmark::ConsoleReporter CR(benchmark::ConsoleReporter::OO_None);
    190  benchmark::JSONReporter JR;
    191  benchmark::CSVReporter CSVR;
    196  benchmark::BenchmarkReporter& reporter
    [all...] |
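The complexity_test.cc entry in this group exercises google-benchmark's asymptotic-complexity fitting (Complexity() with an explicit order, an automatic fit, or a user-supplied lambda). A minimal sketch of how that API is driven; the sorting workload below is a placeholder, not the library's own test body:

    #include <benchmark/benchmark.h>
    #include <algorithm>
    #include <vector>

    static void BM_SortVector(benchmark::State& state) {
      const int n = state.range(0);
      while (state.KeepRunning()) {
        state.PauseTiming();                 // build the input outside the timed region
        std::vector<int> v(n);
        for (int i = 0; i < n; ++i) v[i] = n - i;
        state.ResumeTiming();
        std::sort(v.begin(), v.end());
      }
      state.SetComplexityN(n);               // the N used for the big-O fit
    }
    // Either name the expected order or let the library pick the best fit.
    BENCHMARK(BM_SortVector)->Range(1 << 10, 1 << 16)->Complexity(benchmark::oNLogN);
    BENCHMARK(BM_SortVector)->Range(1 << 10, 1 << 16)->Complexity();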
/external/libcxx/utils/google-benchmark/test/ |
complexity_test.cc |
    7    #include "benchmark/benchmark.h"
    48   void BM_Complexity_O1(benchmark::State& state) {
    51   benchmark::DoNotOptimize(&i);
    56   BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity(benchmark::o1);
    57   BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity();
    58   BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity([](int) {
    67   // See https://github.com/google/benchmark/issues/272
    93   void BM_Complexity_O_N(benchmark::State& state)
    [all...] |
output_test_helper.cc |
    113  class TestReporter : public benchmark::BenchmarkReporter {
    115  TestReporter(std::vector<benchmark::BenchmarkReporter*> reps)
    140  std::vector<benchmark::BenchmarkReporter *> reporters_;
    153  regex(std::make_shared<benchmark::Regex>()) {
    188  benchmark::Initialize(&argc, argv);
    189  benchmark::ConsoleReporter CR(benchmark::ConsoleReporter::OO_None);
    190  benchmark::JSONReporter JR;
    191  benchmark::CSVReporter CSVR;
    196  benchmark::BenchmarkReporter& reporter
    [all...] |
/external/google-benchmark/include/benchmark/ |
benchmark_api.h |
    6    static void BM_StringCreation(benchmark::State& state) {
    11   // Register the function as a benchmark
    12   BENCHMARK(BM_StringCreation);
    14   // Define another benchmark
    15   static void BM_StringCopy(benchmark::State& state) {
    20   BENCHMARK(BM_StringCopy);
    29   benchmark::Initialize(&argc, argv);
    30   benchmark::RunSpecifiedBenchmarks();
    40   static void BM_memcpy(benchmark::State& state) {
    49   BENCHMARK(BM_memcpy)->Arg(8)->Arg(64)->Arg(512)->Arg(1<<10)->Arg(8<<10)
    166  namespace benchmark {
    [all...] |
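The usage comment at the top of benchmark_api.h (the lines partially visible above) shows the core pattern: write a function over benchmark::State, loop on the state, register it with BENCHMARK, and optionally feed it sizes with Arg and report throughput with SetBytesProcessed. A reconstruction of that documented example; exact details vary between releases (state.range(0) versus the older range_x(), for instance):

    #include <benchmark/benchmark.h>
    #include <cstdint>
    #include <cstring>
    #include <string>

    static void BM_StringCreation(benchmark::State& state) {
      while (state.KeepRunning())
        std::string empty_string;
    }
    BENCHMARK(BM_StringCreation);  // register the function as a benchmark

    static void BM_StringCopy(benchmark::State& state) {
      std::string x = "hello";
      while (state.KeepRunning())
        std::string copy(x);
    }
    BENCHMARK(BM_StringCopy);

    // Arg() feeds a buffer size to each run; SetBytesProcessed turns the
    // report into a bytes/second figure.
    static void BM_memcpy(benchmark::State& state) {
      char* src = new char[state.range(0)];
      char* dst = new char[state.range(0)];
      std::memset(src, 'x', state.range(0));
      while (state.KeepRunning())
        std::memcpy(dst, src, state.range(0));
      state.SetBytesProcessed(int64_t(state.iterations()) * int64_t(state.range(0)));
      delete[] src;
      delete[] dst;
    }
    BENCHMARK(BM_memcpy)->Arg(8)->Arg(64)->Arg(512)->Arg(1 << 10)->Arg(8 << 10);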
/external/libcxx/utils/google-benchmark/include/benchmark/ |
benchmark_api.h |
    6    static void BM_StringCreation(benchmark::State& state) {
    11   // Register the function as a benchmark
    12   BENCHMARK(BM_StringCreation);
    14   // Define another benchmark
    15   static void BM_StringCopy(benchmark::State& state) {
    20   BENCHMARK(BM_StringCopy);
    29   benchmark::Initialize(&argc, argv);
    30   benchmark::RunSpecifiedBenchmarks();
    40   static void BM_memcpy(benchmark::State& state) {
    49   BENCHMARK(BM_memcpy)->Arg(8)->Arg(64)->Arg(512)->Arg(1<<10)->Arg(8<<10)
    166  namespace benchmark {
    [all...] |
/external/google-benchmark/src/ |
complexity.h |
    16   // Adapted to be used with google benchmark
    24   #include "benchmark/benchmark_api.h"
    25   #include "benchmark/reporter.h"
    27   namespace benchmark {
    60   } // end namespace benchmark
|
/external/libcxx/utils/google-benchmark/src/ |
complexity.h |
    16   // Adapted to be used with google benchmark
    24   #include "benchmark/benchmark_api.h"
    25   #include "benchmark/reporter.h"
    27   namespace benchmark {
    60   } // end namespace benchmark
|
/external/okhttp/benchmarks/src/main/java/com/squareup/okhttp/benchmarks/ |
OkHttp.java |
    35   @Override public void prepare(Benchmark benchmark) {
    36   super.prepare(benchmark);
    38   client.setProtocols(benchmark.protocols);
    40   if (benchmark.tls) {
|
/external/toolchain-utils/crb/ |
autotest_gatherer.py |
    47   row.append('Benchmark')
    56   for benchmark in all_benchmarks:
    58   row.append(benchmark)
    61   if benchmark in results:
    62   row.append(results[benchmark])
|
/frameworks/base/libs/androidfw/tests/ |
Theme_bench.cpp |
    17   #include "benchmark/benchmark.h"
    30   static void BM_ThemeApplyStyleFramework(benchmark::State& state) {
    45   BENCHMARK(BM_ThemeApplyStyleFramework);
    47   static void BM_ThemeApplyStyleFrameworkOld(benchmark::State& state) {
    62   BENCHMARK(BM_ThemeApplyStyleFrameworkOld);
    64   static void BM_ThemeGetAttribute(benchmark::State& state) {
    80   BENCHMARK(BM_ThemeGetAttribute);
    82   static void BM_ThemeGetAttributeOld(benchmark::State& state) {
    97   BENCHMARK(BM_ThemeGetAttributeOld)
    [all...] |
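Theme_bench.cpp registers an Old and a New variant of each operation so both code paths show up side by side in one report. When comparing variants like that, an explicit rate can read better than raw nanoseconds; a small sketch of that idea (ApplyStyle is a stand-in, not the real androidfw call):

    #include <benchmark/benchmark.h>

    static void ApplyStyle() { /* stand-in for the operation under test */ }

    static void BM_ApplyStyleVariant(benchmark::State& state) {
      while (state.KeepRunning()) {
        ApplyStyle();
      }
      // Also report items/s, which makes the Old-vs-New comparison easier to scan.
      state.SetItemsProcessed(state.iterations());
    }
    BENCHMARK(BM_ApplyStyleVariant);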
/external/autotest/tko/ |
machine_test_attribute_graph.cgi |
    18   benchmark = form["benchmark"].value
    24   where = { 'subdir' : benchmark, 'machine_idx' : machine.idx }
    32   title = "%s on %s" % (benchmark, machine.hostname)
|
/external/vixl/benchmarks/aarch32/ |
bench-branch-link-masm.cc |
    45   void benchmark(int iterations, InstructionSet isa) {
    101  benchmark(iterations, A32);
    104  benchmark(iterations, T32);
|
bench-branch-masm.cc |
    44   void benchmark(int iterations, InstructionSet isa) {
    91   benchmark(iterations, A32);
    94   benchmark(iterations, T32);
|
bench-dataop.cc |
    44   void benchmark(unsigned instructions, InstructionSet isa) {
    86   benchmark(instructions, A32);
    89   benchmark(instructions, T32);
|
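Unlike the google-benchmark entries above, the vixl benchmarks drive themselves through a hand-written benchmark(iterations, isa) function. A minimal sketch of that hand-rolled shape using std::chrono for timing; the InstructionSet enum and the EmitBranch workload below are placeholders, not vixl's API:

    #include <chrono>
    #include <cstdio>

    enum InstructionSet { A32, T32 };  // placeholder for vixl's instruction-set enum

    static void EmitBranch(InstructionSet /* isa */) {  // placeholder workload
      volatile int sink = 0;
      for (int i = 0; i < 64; ++i) sink = sink + i;
    }

    // Hand-rolled driver: run the workload a fixed number of times and print the rate.
    static void benchmark(int iterations, InstructionSet isa) {
      const auto start = std::chrono::steady_clock::now();
      for (int i = 0; i < iterations; ++i) EmitBranch(isa);
      const std::chrono::duration<double> elapsed =
          std::chrono::steady_clock::now() - start;
      std::printf("%d iterations in %.3fs (%.0f per second)\n", iterations,
                  elapsed.count(), iterations / elapsed.count());
    }

    int main() {
      benchmark(100000, A32);
      benchmark(100000, T32);
      return 0;
    }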