Cross-reference hits for the symbol test_name. Each entry shows the matching file, then the in-file line number and text of every hit; annotations such as '(local)' or '(member in ...)' carry over from the indexer, and '[all...]' marks hit lists the indexer truncated.

/external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/tool/servers/
  rebaselineserver.py
     45  test_name, _ = os.path.splitext(test_file)
     50  actual_pattern = os.path.basename(test_name) + '-actual.*'
     60  test_name, _ = os.path.splitext(test_file)
     61  test_directory = os.path.dirname(test_name)
     63  log('Rebaselining %s...' % test_name)
    263  test_name, _ = os.path.splitext(self.query['test'][0])
    266  file_name = test_name + '-expected.png'
    268  file_name = test_name + '-actual.png'
    270  file_name = test_name + '-expected.checksum'
    272  file_name = test_name + '-actual.checksum [all...]
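Note: the rebaseline server's hits above all reduce to one naming rule: strip the test's extension, then append an '-expected'/'-actual' suffix. A minimal standalone sketch of that rule (the helper name and sample path are illustrative, not from the source):

    import os

    def baseline_names(test_file):
        # Drop the test's own extension but keep the directory part.
        test_name, _ = os.path.splitext(test_file)
        # Glob-style pattern matching every '-actual' artifact for this test.
        actual_pattern = os.path.basename(test_name) + '-actual.*'
        return test_name + '-expected.png', test_name + '-actual.png', actual_pattern

    print(baseline_names('fast/forms/button.html'))
    # ('fast/forms/button-expected.png', 'fast/forms/button-actual.png', 'button-actual.*')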
/external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/port/
  mock_drt_unittest.py
     79  def input_line(self, port, test_name, checksum=None):
     80  url = port.create_driver(0).test_to_uri(test_name)
     96  def make_input_output(self, port, test_name, pixel_tests,
    100  expected_checksum = port.expected_checksum(test_name)
    102  drt_input = self.input_line(port, test_name, expected_checksum)
    103  text_output = expected_text or port.expected_text(test_name) or ''
    106  drt_output = self.expected_output(port, test_name, pixel_tests,
    110  def expected_output(self, port, test_name, pixel_tests, text_output, expected_checksum):
    122  def assertTest(self, test_name, pixel_tests, expected_checksum=None, drt_output=None, host=None, expected_text=None):
    127  drt_input, drt_output = self.make_input_output(port, test_name, [all...]
  mock_drt.py
    190  dirname, basename = self._port.split_test(driver_input.test_name)
    191  is_reftest = (self._port.reference_files(driver_input.test_name) or
    205  test_name = self._driver.uri_to_test(uri)
    207  test_name = self._port.relative_test_filename(uri)
    209  return DriverInput(test_name, 0, checksum, self._options.pixel_tests)
    213  actual_text = port.expected_text(test_input.test_name)
    214  actual_audio = port.expected_audio(test_input.test_name)
    222  if test_input.test_name.endswith('-mismatch.html'):
    227  actual_checksum = port.expected_checksum(test_input.test_name)
    228  actual_image = port.expected_image(test_input.test_name) [all...]
  base.py
    220  def virtual_baseline_search_path(self, test_name):
    221  suite = self.lookup_virtual_suite(test_name)
    426  def expected_baselines_by_extension(self, test_name):
    432  reference_files = self.reference_files(test_name)
    438  path = self.expected_filename(test_name, extension, return_default=False)
    447  def expected_baselines(self, test_name, suffix, all_baselines=False):
    451  test_name: name of test file (usually a relative path under LayoutTests/)
    474  baseline_filename = self._filesystem.splitext(test_name)[0] + '-expected' + suffix
    496  def expected_filename(self, test_name, suffix, return_default=True):
    505  test_name: name of test file (usually a relative path under LayoutTests/ [all...]
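Taken together, the base.py hits describe the baseline lookup convention: the expected file shares the test's stem, gets '-expected' plus the suffix, and is searched along a platform-specific list of baseline directories. A rough sketch of that lookup, assuming plain os.path calls in place of the port's filesystem wrapper and a caller-supplied search path:

    import os

    def expected_filename(test_name, suffix, baseline_search_path, default_dir):
        # foo/bar.html + '.txt' -> foo/bar-expected.txt
        baseline = os.path.splitext(test_name)[0] + '-expected' + suffix
        for directory in baseline_search_path:      # most specific platform dir first
            candidate = os.path.join(directory, baseline)
            if os.path.exists(candidate):
                return candidate
        # Fall back to the generic location (the real method can also return None
        # when return_default is False).
        return os.path.join(default_dir, baseline)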
/external/chromium_org/content/public/test/
  test_launcher.cc
     64  const std::string& test_name,
     69  std::string pre_test_name = test_name;
     94  new_cmd_line.AppendSwitchASCII("gtest_filter", test_name);
    105  << " ms) exceeded for " << test_name;
    111  // Runs test specified by |test_name| in a child process,
    115  const std::string& test_name,
    141  test_case, test_name, new_cmd_line, default_timeout, was_timeout);
    195  std::string test_name =  (local)
    224  std::string test_name =  (local)
    228  test_name, [all...]
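test_launcher.cc runs each test in its own child process, selecting it with a gtest_filter switch and enforcing a timeout. A hedged Python equivalent of that flow, using subprocess in place of the launcher's process machinery (the binary path and 45 s timeout are placeholders):

    import subprocess

    def run_single_gtest(binary, test_case, test_name, timeout_s=45):
        # Mirror of AppendSwitchASCII("gtest_filter", test_name): restrict the
        # child process to exactly one test.
        full_name = '%s.%s' % (test_case, test_name)
        cmd = [binary, '--gtest_filter=' + full_name]
        try:
            proc = subprocess.run(cmd, timeout=timeout_s)
            return proc.returncode == 0, False        # (passed, was_timeout)
        except subprocess.TimeoutExpired:
            print('Test timeout (%d s) exceeded for %s' % (timeout_s, full_name))
            return False, True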
/external/chromium/base/
  shared_memory_unittest.cc
    121  std::string test_name = "SharedMemoryOpenCloseTest";  (local)
    126  bool rv = memory1.Delete(test_name);
    128  rv = memory1.Delete(test_name);
    130  rv = memory1.Open(test_name, false);
    132  rv = memory1.CreateNamed(test_name, false, kDataSize);
    137  rv = memory2.Open(test_name, false);
    161  rv = memory1.Delete(test_name);
    163  rv = memory2.Delete(test_name);
    173  std::string test_name = test_name_stream.str();  (local)
    178  bool rv = memory1.CreateNamed(test_name, false, kDataSize) [all...]
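The unit test above exercises a create/open/delete cycle on a named base::SharedMemory segment. For comparison only, the same cycle with Python 3.8's multiprocessing.shared_memory (the segment name and size here are made up):

    from multiprocessing import shared_memory

    NAME = 'shm_open_close_demo'   # illustrative name; the unit test uses its own fixed name

    # Create the named segment, then open a second handle onto the same block.
    memory1 = shared_memory.SharedMemory(name=NAME, create=True, size=1024)
    memory2 = shared_memory.SharedMemory(name=NAME)      # open existing, no create

    memory1.buf[:5] = b'hello'
    assert bytes(memory2.buf[:5]) == b'hello'            # both handles see the write

    memory2.close()
    memory1.close()
    memory1.unlink()   # rough counterpart of Delete(): remove the named segment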
/external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/layout_package/
  json_results_generator.py
    122  test = test_result.test_name
    136  self.test_name = test
    140  test_name = test
    142  test_name = test.split('.')[1]
    147  if test_name.startswith('FAILS_'):
    149  elif test_name.startswith('FLAKY_'):
    151  elif test_name.startswith('DISABLED_'):
    220  test_results_map: A dictionary that maps test_name to TestResult.
    345  def _get_test_timing(self, test_name):
    347  for the given test_name."" [all...]
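The generator classifies each result by its gtest-style name prefix and keys results on the short 'Fixture.TestName' form. A small sketch of those two steps (the function names here are illustrative, not the module's):

    def classify_modifier(test_name):
        # gtest-style prefixes that encode how a result should be interpreted.
        if test_name.startswith('FAILS_'):
            return 'FAILS'
        if test_name.startswith('FLAKY_'):
            return 'FLAKY'
        if test_name.startswith('DISABLED_'):
            return 'DISABLED'
        return 'NONE'

    def short_name(full_name):
        # Full gtest ids look like 'Fixture.TestName'; results are keyed on the
        # part after the dot when one is present.
        return full_name.split('.')[1] if '.' in full_name else full_name

    assert classify_modifier('FLAKY_CanvasPixels') == 'FLAKY'
    assert short_name('CanvasSuite.FLAKY_CanvasPixels') == 'FLAKY_CanvasPixels'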
/external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/performance_tests/
  perftest.py
     89  def __init__(self, port, test_name, test_path, test_runner_count=DEFAULT_TEST_RUNNER_COUNT):
     91  self._test_name = test_name
     98  def test_name(self):  (member in class:PerfTest)
    102  return re.sub(r'\.\w+$', '', self.test_name())
    141  def log_statistics(test_name, values, unit):
    158  _log.info('RESULT %s= %s %s' % (test_name, mean, unit))
    206  _log.error('error: %s\n%s' % (self.test_name(), output.error))
    211  _log.error('timeout: %s' % self.test_name())
    213  _log.error('crash: %s' % self.test_name())
    262  def __init__(self, port, test_name, test_path, test_runner_count=1) [all...]
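perftest.py reports one buildbot-style 'RESULT <name>= <value> <unit>' line per metric. A trimmed sketch of that logging step, assuming this mean/stdev math stands in for the real statistics helper:

    import logging
    import math

    logging.basicConfig(level=logging.INFO)
    _log = logging.getLogger('perftest')

    def log_statistics(test_name, values, unit):
        # Report the mean of the collected samples in the buildbot 'RESULT' format.
        mean = sum(values) / float(len(values))
        stdev = math.sqrt(sum((v - mean) ** 2 for v in values) / float(len(values)))
        _log.info('RESULT %s= %s %s', test_name, mean, unit)
        _log.info('stdev %s %s', stdev, unit)
        return mean, stdev

    log_statistics('Dromaeo/dom-query', [1200.0, 1180.0, 1210.0], 'runs/s')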
/external/chromium_org/content/test/plugin/
  plugin_windowed_test.cc
     26  if (test_name() == "create_instance_in_paint" && test_id() == "2") {
     40  if ((test_name() == "create_instance_in_paint" && test_id() == "1") ||
     41  test_name() == "alert_in_window_message") {
     74  if (test_name() != "ensure_scripting_works_in_destroy")
    125  if (this_ptr->test_name() == "create_instance_in_paint" &&
    129  } else if (this_ptr->test_name() == "alert_in_window_message" &&
/ndk/sources/android/support/tests/minitest/
  minitest.cc
     11  const char* test_name;  (member in struct:__anon34926::TestInfo)
    164  void RegisterTest(const char* test_name,
    172  info->test_name = test_name;
    189  printf("[ RUNNING ] %s.%s\n", info->test_name, info->case_name);
    203  printf("[ %9s ] %s.%s\n", status, info->test_name, info->case_name);
/external/chromium_org/build/android/pylib/monkey/
  test_runner.py
     41  def RunTest(self, test_name):
     45  test_name: String to use for logging the test result.
     72  test_name, base_test_result.ResultType.PASS, log=output)
     75  test_name, base_test_result.ResultType.FAIL, log=output)
/external/chromium_org/content/browser/gpu/
  webgl_conformance_test.cc
     62  void RunTest(std::string url, std::string test_name) {
     64  test_expectations_.GetTestExpectation(test_name, bot_config_);
     66  LOG(WARNING) << "Test " << test_name << " is bypassed";
/external/chromium_org/cc/resources/
  tile_manager_perftest.cc
     57  void AfterTest(const std::string test_name) {
     60  test_name.c_str(),
    107  void RunManageTilesTest(const std::string test_name,
    117  AfterTest(test_name);
/external/chromium_org/chrome/test/nacl/
  nacl_browsertest_util.h
    146  #define MAYBE_PNACL(test_name) DISABLED_##test_name
    148  #define MAYBE_PNACL(test_name) test_name
/external/chromium_org/content/test/gpu/gpu_tests/
  webgl_conformance.py
    130  test_name = line_tokens[-1]
    132  if '.txt' in test_name:
    133  include_path = os.path.join(current_dir, test_name)
    137  test = os.path.join(current_dir, test_name)
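Here the conformance harness walks a manifest in which a line ending in '.txt' names another list to recurse into, while anything else is a test page. A reduced sketch of that parse loop (comment and blank-line handling simplified):

    import os

    def parse_test_list(path, tests=None):
        tests = tests if tests is not None else []
        current_dir = os.path.dirname(path)
        with open(path) as f:
            for line in f:
                line_tokens = line.split()
                if not line_tokens or line_tokens[0].startswith('//'):
                    continue                      # skip blanks and comments
                test_name = line_tokens[-1]
                if '.txt' in test_name:
                    # Nested '.txt' entry: recurse into the included list.
                    parse_test_list(os.path.join(current_dir, test_name), tests)
                else:
                    tests.append(os.path.join(current_dir, test_name))
        return tests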
/external/chromium_org/tools/telemetry/telemetry/page/
  gtest_test_results.py
     34  test_name = GTestTestResults._formatTestname(test)
     35  print '[ FAILED ]', test_name, '(%0.f ms)' % self._GetMs()
     54  test_name = GTestTestResults._formatTestname(test)
     55  print '[ OK ]', test_name, '(%0.f ms)' % self._GetMs()
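These telemetry hits print results in gtest's console style: a status tag, the formatted test name, and the elapsed milliseconds. A tiny sketch of that format, timing with a plain time.time() delta instead of the class's _GetMs() helper:

    import time

    def report(test_name, passed, start_time):
        # gtest-style console line: status tag, test name, elapsed time in ms.
        elapsed_ms = (time.time() - start_time) * 1000
        status = '[       OK ]' if passed else '[  FAILED  ]'
        print('%s %s (%0.f ms)' % (status, test_name, elapsed_ms))

    start = time.time()
    report('page_cycler.typical_25', True, start)
    # prints something like: [       OK ] page_cycler.typical_25 (0 ms)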
/external/chromium/sdch/open-vcdiff/src/gtest/internal/
  gtest-internal.h
    522  #define GTEST_TEST(test_case_name, test_name, parent_class)\
    523  class test_case_name##_##test_name##_Test : public parent_class {\
    525  test_case_name##_##test_name##_Test() {}\
    527  return new test_case_name##_##test_name##_Test;\
    532  GTEST_DISALLOW_COPY_AND_ASSIGN(test_case_name##_##test_name##_Test);\
    535  ::testing::TestInfo* const test_case_name##_##test_name##_Test::test_info_ =\
    538  #test_name, \
    542  test_case_name##_##test_name##_Test::NewTest);\
    543  void test_case_name##_##test_name##_Test::TestBody()
/external/chromium_org/base/test/expectations/
  parser_unittest.cc
     45  EXPECT_EQ("DouglasTest.PoopsOk", expectations_[0].test_name);
     61  EXPECT_EQ("OhMy.MeOhMy", expectations_[0].test_name);
     84  EXPECT_EQ("First.Test", expectations_[0].test_name);
     91  EXPECT_EQ("Second.Test", expectations_[1].test_name);
    109  EXPECT_EQ("Line.First", expectations_[0].test_name);
    118  EXPECT_EQ("Line.Second", expectations_[1].test_name);
    135  EXPECT_EQ("Foo=Bar", expectations_[0].test_name);
    144  EXPECT_EQ("Cow.GoesMoo", expectations_[1].test_name);
/external/chromium_org/chrome/browser/extensions/api/system_indicator/
  system_indicator_apitest.cc
     28  const std::string& test_name) {
     30  base::FilePath extdir = test_data_dir_.AppendASCII(test_name);
/external/chromium_org/chrome/browser/extensions/
  notifications_apitest.cc
     28  const std::string& test_name) {
     30  base::FilePath extdir = test_data_dir_.AppendASCII(test_name);
/external/chromium_org/chrome/browser/nacl_host/test/
  nacl_gdb_browsertest.cc
     37  void RunWithNaClGdb(std::string test_name) {
     55  RunTestViaHTTP(test_name);
/external/chromium_org/chrome/renderer/extensions/
  json_schema_unittest.cc
     26  void TestFunction(const std::string& test_name) {
     27  module_system_->CallModuleMethod("json_schema_test", test_name);
/external/chromium_org/gpu/config/
  gpu_test_expectations_parser.h
     41  int32 GetTestExpectation(const std::string& test_name,
     52  std::string test_name;  (member in struct:gpu::GPUTestExpectationsParser::GPUTestExpectationEntry)
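GetTestExpectation maps a test name plus the bot's GPU/OS configuration to an expectation value. A toy Python version of that lookup (the bit flags, entry layout, and matching rule are invented for illustration):

    PASS, FAIL, FLAKY, TIMEOUT, SKIP = 1, 2, 4, 8, 16     # illustrative bit flags

    class ExpectationEntry(object):
        def __init__(self, test_name, conditions, expectation):
            self.test_name = test_name        # e.g. 'conformance/textures/tex-image.html'
            self.conditions = conditions      # e.g. {'os': 'win', 'gpu': 'nvidia'}
            self.expectation = expectation

    def get_test_expectation(entries, test_name, bot_config):
        # Return the expectation of the first entry whose name and conditions
        # both match the bot; default to PASS when nothing applies.
        for entry in entries:
            if entry.test_name != test_name:
                continue
            if all(bot_config.get(key) == value for key, value in entry.conditions.items()):
                return entry.expectation
        return PASS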
/external/chromium_org/ppapi/native_client/tests/nacl_browser/inbrowser_test_runner/
  nacl.scons
     17  def AddTest(env, test_name, exe_list, parallel=False):
     30  nmf_list_js = env.Command(['%s_nmf_test_list.js' % test_name], [],
     37  '%s.out' % test_name, url='test_runner.html',
     47  env.AddNodeToTestSuite(node, ['chrome_browser_tests'], test_name,
/external/chromium_org/ppapi/tests/
  test_case.cc
     19  std::string StripPrefix(const std::string& test_name) {
     23  if (test_name.find(prefixes[i]) == 0)
     24  return test_name.substr(strlen(prefixes[i]));
     25  return test_name;
    179  bool TestCase::ShouldRunTest(const std::string& test_name,
    190  std::map<std::string, bool>::iterator iter = filter_tests_.find(test_name);
    194  skipped_tests_.insert(test_name);
    197  remaining_tests_.erase(test_name)
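test_case.cc strips the usual modifier prefixes from a test name before consulting the filter map. The same idea in a short Python sketch; the prefix set is an assumption, not copied from the source:

    PREFIXES = ('DISABLED_', 'FLAKY_', 'FAILS_')   # assumed prefix set

    def strip_prefix(test_name):
        for prefix in PREFIXES:
            if test_name.startswith(prefix):
                return test_name[len(prefix):]
        return test_name

    def should_run_test(test_name, filter_tests):
        # An empty filter runs everything; otherwise only tests that appear in
        # the map with a True value are run.
        if not filter_tests:
            return True
        return filter_tests.get(strip_prefix(test_name), False)

    assert strip_prefix('DISABLED_FileIO') == 'FileIO'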
|