1 # Copyright (C) 2010 Google Inc. All rights reserved. 2 # 3 # Redistribution and use in source and binary forms, with or without 4 # modification, are permitted provided that the following conditions are 5 # met: 6 # 7 # * Redistributions of source code must retain the above copyright 8 # notice, this list of conditions and the following disclaimer. 9 # * Redistributions in binary form must reproduce the above 10 # copyright notice, this list of conditions and the following disclaimer 11 # in the documentation and/or other materials provided with the 12 # distribution. 13 # * Neither the name of Google Inc. nor the names of its 14 # contributors may be used to endorse or promote products derived from 15 # this software without specific prior written permission. 16 # 17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 21 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
try:
    import jsonresults
    from jsonresults import *
except ImportError:
    # Parenthesized form works as a statement in Python 2 and a call in Python 3.
    print("ERROR: Add the TestResultServer, google_appengine and yaml/lib directories to your PYTHONPATH")
    raise

import json
import logging
import unittest

# A raw upload in the Chromium "full results" JSON format, wrapped in the
# ADD_RESULTS(...); JSONP padding that JsonResults._strip_prefix_suffix removes.
# Exercises nested directories, bugs/expected/actual fields, and all of the
# num_failures_by_type counters.
FULL_RESULT_EXAMPLE = """ADD_RESULTS({
    "seconds_since_epoch": 1368146629,
    "tests": {
        "media": {
            "encrypted-media": {
                "encrypted-media-v2-events.html": {
                    "bugs": ["crbug.com/1234"],
                    "expected": "TIMEOUT",
                    "actual": "TIMEOUT",
                    "time": 6.0
                },
                "encrypted-media-v2-syntax.html": {
                    "expected": "TIMEOUT",
                    "actual": "TIMEOUT"
                }
            },
            "progress-events-generated-correctly.html": {
                "expected": "PASS FAIL IMAGE TIMEOUT CRASH MISSING",
                "actual": "TIMEOUT",
                "time": 6.0
            },
            "W3C": {
                "audio": {
                    "src": {
                        "src_removal_does_not_trigger_loadstart.html": {
                            "expected": "PASS",
                            "actual": "PASS",
                            "time": 3.5
                        }
                    }
                },
                "video": {
                    "src": {
                        "src_removal_does_not_trigger_loadstart.html": {
                            "expected": "PASS",
                            "actual": "PASS",
                            "time": 1.1
                        },
                        "notrun.html": {
                            "expected": "NOTRUN",
                            "actual": "SKIP",
                            "time": 1.1
                        }
                    }
                }
            },
            "unexpected-skip.html": {
                "expected": "PASS",
                "actual": "SKIP"
            },
            "unexpected-fail.html": {
                "expected": "PASS",
                "actual": "FAIL"
            },
            "flaky-failed.html": {
                "expected": "PASS FAIL",
                "actual": "FAIL"
            },
            "media-document-audio-repaint.html": {
                "expected": "IMAGE",
                "actual": "IMAGE",
                "time": 0.1
            }
        }
    },
    "skipped": 2,
    "num_regressions": 0,
    "build_number": "3",
    "interrupted": false,
    "layout_tests_dir": "\/tmp\/cr\/src\/third_party\/WebKit\/LayoutTests",
    "version": 3,
    "builder_name": "Webkit",
    "num_passes": 10,
    "pixel_tests_enabled": true,
    "blink_revision": "1234",
    "has_pretty_patch": true,
    "fixable": 25,
    "num_flaky": 0,
    "num_failures_by_type": {
        "CRASH": 3,
        "MISSING": 0,
        "TEXT": 3,
        "IMAGE": 1,
        "PASS": 10,
        "SKIP": 2,
        "TIMEOUT": 16,
        "IMAGE+TEXT": 0,
        "FAIL": 2,
        "AUDIO": 0
    },
    "has_wdiff": true,
    "chromium_revision": "5678"
});""" # noqa

# Template for the legacy (pre-num_failures_by_type) aggregated-results JSON.
# [...] placeholders are filled in by JsonResultsTest._make_test_json.
JSON_RESULTS_OLD_TEMPLATE = (
    '{"[BUILDER_NAME]":{'
    '"allFixableCount":[[TESTDATA_COUNT]],'
    '"blinkRevision":[[TESTDATA_WEBKITREVISION]],'
    '"buildNumbers":[[TESTDATA_BUILDNUMBERS]],'
    '"chromeRevision":[[TESTDATA_CHROMEREVISION]],'
    '"failure_map": %s,'
    '"fixableCount":[[TESTDATA_COUNT]],'
    '"fixableCounts":[[TESTDATA_COUNTS]],'
    '"secondsSinceEpoch":[[TESTDATA_TIMES]],'
    '"tests":{[TESTDATA_TESTS]}'
    '},'
    '"version":[VERSION]'
    '}') % json.dumps(CHAR_TO_FAILURE)

# One [[TESTDATA_COUNT]] slot per failure type; join iterates the values
# directly (the identity comprehension it replaced produced the same list).
JSON_RESULTS_COUNTS = '{"' + '":[[TESTDATA_COUNT]],"'.join(CHAR_TO_FAILURE.values()) + '":[[TESTDATA_COUNT]]}'

# Template for the current (version 4) aggregated-results JSON.
JSON_RESULTS_TEMPLATE = (
    '{"[BUILDER_NAME]":{'
    '"blinkRevision":[[TESTDATA_WEBKITREVISION]],'
    '"buildNumbers":[[TESTDATA_BUILDNUMBERS]],'
    '"chromeRevision":[[TESTDATA_CHROMEREVISION]],'
    '"failure_map": %s,'
    '"num_failures_by_type":%s,'
    '"secondsSinceEpoch":[[TESTDATA_TIMES]],'
    '"tests":{[TESTDATA_TESTS]}'
    '},'
    '"version":[VERSION]'
    '}') % (json.dumps(CHAR_TO_FAILURE), JSON_RESULTS_COUNTS)

# Iterating the dict yields its keys, same as the comprehension it replaced.
JSON_RESULTS_COUNTS_TEMPLATE = '{"' + '":[TESTDATA],"'.join(CHAR_TO_FAILURE) + '":[TESTDATA]}'

JSON_RESULTS_TEST_LIST_TEMPLATE = '{"Webkit":{"tests":{[TESTDATA_TESTS]}}}'


class MockFile(object):
    """In-memory stand-in for the datastore-backed results-file objects
    that JsonResults.update_files reads and saves."""

    def __init__(self, name='results.json', data=''):
        self.master = 'MockMasterName'
        self.builder = 'MockBuilderName'
        self.test_type = 'MockTestType'
        self.name = name
        self.data = data

    def save(self, data):
        # Mirror the real file API: store the payload and report success.
        self.data = data
        return True


class JsonResultsTest(unittest.TestCase):
    def setUp(self):
        self._builder = "Webkit"
        # Silence merge-time warnings during tests; restored in tearDown.
        self.old_log_level = logging.root.level
        logging.root.setLevel(logging.ERROR)
    def tearDown(self):
        # Restore the log level saved in setUp.
        logging.root.setLevel(self.old_log_level)

    # Use this to get better error messages than just string compare gives.
    def assert_json_equal(self, a, b):
        """Compare two JSON documents (strings or already-parsed objects)
        structurally, so ordering/whitespace differences don't matter."""
        self.maxDiff = None
        a = json.loads(a) if isinstance(a, str) else a
        b = json.loads(b) if isinstance(b, str) else b
        self.assertEqual(a, b)

    def test_strip_prefix_suffix(self):
        # NOTE(review): the local name shadows the json module; harmless here
        # since the module isn't used in this method, but worth renaming.
        json = "['contents']"
        self.assertEqual(JsonResults._strip_prefix_suffix("ADD_RESULTS(" + json + ");"), json)
        self.assertEqual(JsonResults._strip_prefix_suffix(json), json)

    def _make_test_json(self, test_data, json_string=JSON_RESULTS_TEMPLATE, builder_name="Webkit"):
        """Expand a {builds, tests[, version]} fixture dict into an
        aggregated-results JSON string by filling the template placeholders.
        Returns "" when the fixture has no builds or no tests."""
        if not test_data:
            return ""

        builds = test_data["builds"]
        tests = test_data["tests"]
        if not builds or not tests:
            return ""

        counts = []
        build_numbers = []
        webkit_revision = []
        chrome_revision = []
        times = []
        for build in builds:
            # Derive per-build metadata deterministically from the build id.
            counts.append(JSON_RESULTS_COUNTS_TEMPLATE.replace("[TESTDATA]", build))
            build_numbers.append("1000%s" % build)
            webkit_revision.append("2000%s" % build)
            chrome_revision.append("3000%s" % build)
            times.append("100000%s000" % build)

        json_string = json_string.replace("[BUILDER_NAME]", builder_name)
        json_string = json_string.replace("[TESTDATA_COUNTS]", ",".join(counts))
        json_string = json_string.replace("[TESTDATA_COUNT]", ",".join(builds))
        json_string = json_string.replace("[TESTDATA_BUILDNUMBERS]", ",".join(build_numbers))
        json_string = json_string.replace("[TESTDATA_WEBKITREVISION]", ",".join(webkit_revision))
        json_string = json_string.replace("[TESTDATA_CHROMEREVISION]", ",".join(chrome_revision))
        json_string = json_string.replace("[TESTDATA_TIMES]", ",".join(times))

        version = str(test_data["version"]) if "version" in test_data else "4"
        json_string = json_string.replace("[VERSION]", version)
        json_string = json_string.replace("{[TESTDATA_TESTS]}", json.dumps(tests, separators=(',', ':'), sort_keys=True))
        return json_string

    def _test_merge(self, aggregated_data, incremental_data, expected_data, max_builds=jsonresults.JSON_RESULTS_MAX_BUILDS):
        """Merge incremental_data into aggregated_data and check the result.
        A falsy expected_data means the merge is expected to be rejected
        (any non-200 status)."""
        aggregated_results = self._make_test_json(aggregated_data, builder_name=self._builder)
        incremental_json, _ = JsonResults._get_incremental_json(self._builder, self._make_test_json(incremental_data, builder_name=self._builder), is_full_results_format=False)
        merged_results, status_code = JsonResults.merge(self._builder, aggregated_results, incremental_json, num_runs=max_builds, sort_keys=True)

        if expected_data:
            expected_results = self._make_test_json(expected_data, builder_name=self._builder)
            self.assert_json_equal(merged_results, expected_results)
            self.assertEqual(status_code, 200)
        else:
            self.assertTrue(status_code != 200)

    def _test_get_test_list(self, input_data, expected_data):
        """Check that get_test_list reduces input_data to just test names."""
        input_results = self._make_test_json(input_data)
        expected_results = JSON_RESULTS_TEST_LIST_TEMPLATE.replace("{[TESTDATA_TESTS]}", json.dumps(expected_data, separators=(',', ':')))
        actual_results = JsonResults.get_test_list(self._builder, input_results)
        self.assert_json_equal(actual_results, expected_results)

    def test_update_files_empty_aggregate_data(self):
        # With no existing aggregated data, both files become the incremental data.
        small_file = MockFile(name='results-small.json')
        large_file = MockFile(name='results.json')

        incremental_data = {
            "builds": ["2", "1"],
            "tests": {
                "001.html": {
                    "results": [[200, TEXT]],
                    "times": [[200, 0]],
                }
            }
        }
        incremental_string = self._make_test_json(incremental_data, builder_name=small_file.builder)

        self.assertTrue(JsonResults.update_files(small_file.builder, incremental_string, small_file, large_file, is_full_results_format=False))
        self.assert_json_equal(small_file.data, incremental_string)
        self.assert_json_equal(large_file.data, incremental_string)

    def test_update_files_null_incremental_data(self):
        # An empty incremental payload is rejected and leaves both files untouched.
        small_file = MockFile(name='results-small.json')
        large_file = MockFile(name='results.json')

        aggregated_data = {
            "builds": ["2", "1"],
            "tests": {
                "001.html": {
                    "results": [[200, TEXT]],
                    "times": [[200, 0]],
                }
            }
        }
        aggregated_string = self._make_test_json(aggregated_data, builder_name=small_file.builder)

        small_file.data = large_file.data = aggregated_string

        incremental_string = ""

        self.assertEqual(JsonResults.update_files(small_file.builder, incremental_string, small_file, large_file, is_full_results_format=False),
                         ('No incremental JSON data to merge.', 403))
        self.assert_json_equal(small_file.data, aggregated_string)
        self.assert_json_equal(large_file.data, aggregated_string)

    def test_update_files_empty_incremental_data(self):
        # Incremental data with no builds/tests is also rejected.
        small_file = MockFile(name='results-small.json')
        large_file = MockFile(name='results.json')

        aggregated_data = {
            "builds": ["2", "1"],
            "tests": {
                "001.html": {
                    "results": [[200, TEXT]],
                    "times": [[200, 0]],
                }
            }
        }
        aggregated_string = self._make_test_json(aggregated_data, builder_name=small_file.builder)

        small_file.data = large_file.data = aggregated_string

        incremental_data = {
            "builds": [],
            "tests": {}
        }
        incremental_string = self._make_test_json(incremental_data, builder_name=small_file.builder)

        self.assertEqual(JsonResults.update_files(small_file.builder, incremental_string, small_file, large_file, is_full_results_format=False),
                         ('No incremental JSON data to merge.', 403))
        self.assert_json_equal(small_file.data, aggregated_string)
        self.assert_json_equal(large_file.data, aggregated_string)

    def test_merge_with_empty_aggregated_results(self):
        # Merging into empty aggregated results yields the incremental results.
        incremental_data = {
            "builds": ["2", "1"],
            "tests": {
                "001.html": {
                    "results": [[200, TEXT]],
                    "times": [[200, 0]],
                }
            }
        }
        incremental_results, _ = JsonResults._get_incremental_json(self._builder, self._make_test_json(incremental_data), is_full_results_format=False)
        aggregated_results = ""
        merged_results, _ = JsonResults.merge(self._builder, aggregated_results, incremental_results, num_runs=jsonresults.JSON_RESULTS_MAX_BUILDS, sort_keys=True)
        self.assert_json_equal(merged_results, incremental_results)

    def test_failures_by_type_added(self):
        # Merging old-format data produces current-format (version 4) output.
        aggregated_results = self._make_test_json({
            "builds": ["2", "1"],
            "tests": {
                "001.html": {
                    "results": [[100, TEXT], [100, FAIL]],
                    "times": [[200, 0]],
                }
            }
        }, json_string=JSON_RESULTS_OLD_TEMPLATE)
        incremental_results = self._make_test_json({
            "builds": ["3"],
            "tests": {
                "001.html": {
                    "results": [[1, TEXT]],
                    "times": [[1, 0]],
                }
            }
        }, json_string=JSON_RESULTS_OLD_TEMPLATE)
        incremental_json, _ = JsonResults._get_incremental_json(self._builder, incremental_results, is_full_results_format=False)
        merged_results, _ = JsonResults.merge(self._builder, aggregated_results, incremental_json, num_runs=201, sort_keys=True)
        self.assert_json_equal(merged_results, self._make_test_json({
            "builds": ["3", "2", "1"],
            "tests": {
                "001.html": {
                    "results": [[101, TEXT], [100, FAIL]],
                    "times": [[201, 0]],
                }
            }
        }))

    def test_merge_full_results_format(self):
        # Converting FULL_RESULT_EXAMPLE: passing tests with small times are
        # dropped, times are rounded to ints, and unexpected results keep no
        # "expected" field.
        expected_incremental_results = {
            "Webkit": {
                "blinkRevision": ["1234"],
                "buildNumbers": ["3"],
                "chromeRevision": ["5678"],
                "failure_map": CHAR_TO_FAILURE,
                "num_failures_by_type": {"AUDIO": [0], "CRASH": [3], "FAIL": [2], "IMAGE": [1], "IMAGE+TEXT": [0], "MISSING": [0], "PASS": [10], "SKIP": [2], "TEXT": [3], "TIMEOUT": [16]},
                "secondsSinceEpoch": [1368146629],
                "tests": {
                    "media": {
                        "W3C": {
                            "audio": {
                                "src": {
                                    "src_removal_does_not_trigger_loadstart.html": {
                                        "results": [[1, PASS]],
                                        "times": [[1, 4]],
                                    }
                                }
                            }
                        },
                        "encrypted-media": {
                            "encrypted-media-v2-events.html": {
                                "bugs": ["crbug.com/1234"],
                                "expected": "TIMEOUT",
                                "results": [[1, TIMEOUT]],
                                "times": [[1, 6]],
                            },
                            "encrypted-media-v2-syntax.html": {
                                "expected": "TIMEOUT",
                                "results": [[1, TIMEOUT]],
                                "times": [[1, 0]],
                            }
                        },
                        "media-document-audio-repaint.html": {
                            "expected": "IMAGE",
                            "results": [[1, IMAGE]],
                            "times": [[1, 0]],
                        },
                        "progress-events-generated-correctly.html": {
                            "expected": "PASS FAIL IMAGE TIMEOUT CRASH MISSING",
                            "results": [[1, TIMEOUT]],
                            "times": [[1, 6]],
                        },
                        "flaky-failed.html": {
                            "expected": "PASS FAIL",
                            "results": [[1, FAIL]],
                            "times": [[1, 0]],
                        },
                        "unexpected-fail.html": {
                            "results": [[1, FAIL]],
                            "times": [[1, 0]],
                        },
                    }
                }
            },
            "version": 4
        }

        aggregated_results = ""
        incremental_json, _ = JsonResults._get_incremental_json(self._builder, FULL_RESULT_EXAMPLE, is_full_results_format=True)
        merged_results, _ = JsonResults.merge("Webkit", aggregated_results, incremental_json, num_runs=jsonresults.JSON_RESULTS_MAX_BUILDS, sort_keys=True)
        self.assert_json_equal(merged_results, expected_incremental_results)

    def test_merge_empty_aggregated_results(self):
        # No existing aggregated results.
        # Merged results == new incremental results.
        self._test_merge(
            # Aggregated results
            None,
            # Incremental results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[200, TEXT]],
                 "times": [[200, 0]]}}},
            # Expected result
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[200, TEXT]],
                 "times": [[200, 0]]}}})

    def test_merge_duplicate_build_number(self):
        # A build already present in the aggregated data is rejected.
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[100, TEXT]],
                 "times": [[100, 0]]}}},
            # Incremental results
            {"builds": ["2"],
             "tests": {"001.html": {
                 "results": [[1, TEXT]],
                 "times": [[1, 0]]}}},
            # Expected results
            None)

    def test_merge_incremental_single_test_single_run_same_result(self):
        # Incremental results has the latest build and same test results for
        # that run.
        # Insert the incremental results at the first place and sum number
        # of runs for TEXT (200 + 1) to get merged results.
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[200, TEXT]],
                 "times": [[200, 0]]}}},
            # Incremental results
            {"builds": ["3"],
             "tests": {"001.html": {
                 "results": [[1, TEXT]],
                 "times": [[1, 0]]}}},
            # Expected results
            {"builds": ["3", "2", "1"],
             "tests": {"001.html": {
                 "results": [[201, TEXT]],
                 "times": [[201, 0]]}}})

    def test_merge_single_test_single_run_different_result(self):
        # Incremental results has the latest build but different test results
        # for that run.
        # Insert the incremental results at the first place.
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[200, TEXT]],
                 "times": [[200, 0]]}}},
            # Incremental results
            {"builds": ["3"],
             "tests": {"001.html": {
                 "results": [[1, IMAGE]],
                 "times": [[1, 1]]}}},
            # Expected results
            {"builds": ["3", "2", "1"],
             "tests": {"001.html": {
                 "results": [[1, IMAGE], [200, TEXT]],
                 "times": [[1, 1], [200, 0]]}}})

    def test_merge_single_test_single_run_result_changed(self):
        # Incremental results has the latest build but results which differ from
        # the latest result (but are the same as an older result).
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[200, TEXT], [10, IMAGE]],
                 "times": [[200, 0], [10, 1]]}}},
            # Incremental results
            {"builds": ["3"],
             "tests": {"001.html": {
                 "results": [[1, IMAGE]],
                 "times": [[1, 1]]}}},
            # Expected results
            {"builds": ["3", "2", "1"],
             "tests": {"001.html": {
                 "results": [[1, IMAGE], [200, TEXT], [10, IMAGE]],
                 "times": [[1, 1], [200, 0], [10, 1]]}}})

    def test_merge_multiple_tests_single_run(self):
        # All tests have incremental updates.
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[200, TEXT]],
                 "times": [[200, 0]]},
                 "002.html": {
                     "results": [[100, IMAGE]],
                     "times": [[100, 1]]}}},
            # Incremental results
            {"builds": ["3"],
             "tests": {"001.html": {
                 "results": [[1, TEXT]],
                 "times": [[1, 0]]},
                 "002.html": {
                     "results": [[1, IMAGE]],
                     "times": [[1, 1]]}}},
            # Expected results
            {"builds": ["3", "2", "1"],
             "tests": {"001.html": {
                 "results": [[201, TEXT]],
                 "times": [[201, 0]]},
                 "002.html": {
                     "results": [[101, IMAGE]],
                     "times": [[101, 1]]}}})

    def test_merge_multiple_tests_single_run_one_no_result(self):
        # A test missing from the incremental run gets a NO_DATA entry.
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[200, TEXT]],
                 "times": [[200, 0]]},
                 "002.html": {
                     "results": [[100, IMAGE]],
                     "times": [[100, 1]]}}},
            # Incremental results
            {"builds": ["3"],
             "tests": {"002.html": {
                 "results": [[1, IMAGE]],
                 "times": [[1, 1]]}}},
            # Expected results
            {"builds": ["3", "2", "1"],
             "tests": {"001.html": {
                 "results": [[1, NO_DATA], [200, TEXT]],
                 "times": [[201, 0]]},
                 "002.html": {
                     "results": [[101, IMAGE]],
                     "times": [[101, 1]]}}})

    def test_merge_single_test_multiple_runs(self):
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[200, TEXT]],
                 "times": [[200, 0]]}}},
            # Incremental results
            {"builds": ["4", "3"],
             "tests": {"001.html": {
                 "results": [[2, IMAGE], [1, FAIL]],
                 "times": [[3, 2]]}}},
            # Expected results
            {"builds": ["4", "3", "2", "1"],
             "tests": {"001.html": {
                 "results": [[1, FAIL], [2, IMAGE], [200, TEXT]],
                 "times": [[3, 2], [200, 0]]}}})

    def test_merge_multiple_tests_multiple_runs(self):
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[200, TEXT]],
                 "times": [[200, 0]]},
                 "002.html": {
                     "results": [[10, IMAGE_PLUS_TEXT]],
                     "times": [[10, 0]]}}},
            # Incremental results
            {"builds": ["4", "3"],
             "tests": {"001.html": {
                 "results": [[2, IMAGE]],
                 "times": [[2, 2]]},
                 "002.html": {
                     "results": [[1, CRASH]],
                     "times": [[1, 1]]}}},
            # Expected results
            {"builds": ["4", "3", "2", "1"],
             "tests": {"001.html": {
                 "results": [[2, IMAGE], [200, TEXT]],
                 "times": [[2, 2], [200, 0]]},
                 "002.html": {
                     "results": [[1, CRASH], [10, IMAGE_PLUS_TEXT]],
                     "times": [[1, 1], [10, 0]]}}})

    def test_merge_incremental_result_older_build(self):
        # Test the build in incremental results is older than the most recent
        # build in aggregated results.
        self._test_merge(
            # Aggregated results
            {"builds": ["3", "1"],
             "tests": {"001.html": {
                 "results": [[5, TEXT]],
                 "times": [[5, 0]]}}},
            # Incremental results
            {"builds": ["2"],
             "tests": {"001.html": {
                 "results": [[1, TEXT]],
                 "times": [[1, 0]]}}},
            # Expected: the merge still happens; the older build is prepended.
            {"builds": ["2", "3", "1"],
             "tests": {"001.html": {
                 "results": [[6, TEXT]],
                 "times": [[6, 0]]}}})

    def test_merge_incremental_result_same_build(self):
        # Test the build in incremental results is same as the build in
        # aggregated results.
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[5, TEXT]],
                 "times": [[5, 0]]}}},
            # Incremental results
            {"builds": ["3", "2"],
             "tests": {"001.html": {
                 "results": [[2, TEXT]],
                 "times": [[2, 0]]}}},
            # Expected: the merge happens and the duplicate build is kept.
            {"builds": ["3", "2", "2", "1"],
             "tests": {"001.html": {
                 "results": [[7, TEXT]],
                 "times": [[7, 0]]}}})

    def test_merge_remove_new_test(self):
        # Tests that only ever passed / were not run are pruned from the output.
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[199, TEXT]],
                 "times": [[199, 0]]},
             }},
            # Incremental results
            {"builds": ["3"],
             "tests": {"001.html": {
                 "results": [[1, TEXT]],
                 "times": [[1, 0]]},
                 "002.html": {
                     "results": [[1, PASS]],
                     "times": [[1, 0]]},
                 "notrun.html": {
                     "results": [[1, NOTRUN]],
                     "times": [[1, 0]]},
                 "003.html": {
                     "results": [[1, NO_DATA]],
                     "times": [[1, 0]]},
             }},
            # Expected results
            {"builds": ["3", "2", "1"],
             "tests": {"001.html": {
                 "results": [[200, TEXT]],
                 "times": [[200, 0]]},
             }},
            max_builds=200)

    def test_merge_remove_test(self):
        # Tests whose failures age out of the tracked window are removed,
        # including whole (now-empty) directories.
        self._test_merge(
            # Aggregated results
            {
                "builds": ["2", "1"],
                "tests": {
                    "directory": {
                        "directory": {
                            "001.html": {
                                "results": [[200, PASS]],
                                "times": [[200, 0]]
                            }
                        }
                    },
                    "002.html": {
                        "results": [[10, TEXT]],
                        "times": [[10, 0]]
                    },
                    "003.html": {
                        "results": [[190, PASS], [9, NO_DATA], [1, TEXT]],
                        "times": [[200, 0]]
                    },
                }
            },
            # Incremental results
            {
                "builds": ["3"],
                "tests": {
                    "directory": {
                        "directory": {
                            "001.html": {
                                "results": [[1, PASS]],
                                "times": [[1, 0]]
                            }
                        }
                    },
                    "002.html": {
                        "results": [[1, PASS]],
                        "times": [[1, 0]]
                    },
                    "003.html": {
                        "results": [[1, PASS]],
                        "times": [[1, 0]]
                    },
                }
            },
            # Expected results
            {
                "builds": ["3", "2", "1"],
                "tests": {
                    "002.html": {
                        "results": [[1, PASS], [10, TEXT]],
                        "times": [[11, 0]]
                    }
                }
            },
            max_builds=200)

    def test_merge_updates_expected(self):
        # "expected"/"bugs" annotations are taken from the incremental run;
        # values matching the defaults (e.g. expected == PASS) are dropped.
        self._test_merge(
            # Aggregated results
            {
                "builds": ["2", "1"],
                "tests": {
                    "directory": {
                        "directory": {
                            "001.html": {
                                "expected": "FAIL",
                                "results": [[200, PASS]],
                                "times": [[200, 0]]
                            }
                        }
                    },
                    "002.html": {
                        "bugs": ["crbug.com/1234"],
                        "expected": "FAIL",
                        "results": [[10, TEXT]],
                        "times": [[10, 0]]
                    },
                    "003.html": {
                        "expected": "FAIL",
                        "results": [[190, PASS], [9, NO_DATA], [1, TEXT]],
                        "times": [[200, 0]]
                    },
                    "004.html": {
                        "results": [[199, PASS], [1, TEXT]],
                        "times": [[200, 0]]
                    },
                }
            },
            # Incremental results
            {
                "builds": ["3"],
                "tests": {
                    "002.html": {
                        "expected": "PASS",
                        "results": [[1, PASS]],
                        "times": [[1, 0]]
                    },
                    "003.html": {
                        "expected": "TIMEOUT",
                        "results": [[1, PASS]],
                        "times": [[1, 0]]
                    },
                    "004.html": {
                        "bugs": ["crbug.com/1234"],
                        "results": [[1, PASS]],
                        "times": [[1, 0]]
                    },
                }
            },
            # Expected results
            {
                "builds": ["3", "2", "1"],
                "tests": {
                    "002.html": {
                        "results": [[1, PASS], [10, TEXT]],
                        "times": [[11, 0]]
                    },
                    "003.html": {
                        "expected": "TIMEOUT",
                        "results": [[191, PASS], [9, NO_DATA]],
                        "times": [[200, 0]]
                    },
                    "004.html": {
                        "bugs": ["crbug.com/1234"],
                        "results": [[200, PASS]],
                        "times": [[200, 0]]
                    },
                }
            },
            max_builds=200)


    def test_merge_keep_test_with_all_pass_but_slow_time(self):
        # All-pass tests are kept when their runtime meets JSON_RESULTS_MIN_TIME.
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[200, PASS]],
                 "times": [[200, jsonresults.JSON_RESULTS_MIN_TIME]]},
                 "002.html": {
                     "results": [[10, TEXT]],
                     "times": [[10, 0]]}}},
            # Incremental results
            {"builds": ["3"],
             "tests": {"001.html": {
                 "results": [[1, PASS]],
                 "times": [[1, 1]]},
                 "002.html": {
                     "results": [[1, PASS]],
                     "times": [[1, 0]]}}},
            # Expected results
            {"builds": ["3", "2", "1"],
             "tests": {"001.html": {
                 "results": [[201, PASS]],
                 "times": [[1, 1], [200, jsonresults.JSON_RESULTS_MIN_TIME]]},
                 "002.html": {
                     "results": [[1, PASS], [10, TEXT]],
                     "times": [[11, 0]]}}})

    def test_merge_pruning_slow_tests_for_debug_builders(self):
        # Debug builders use a higher time threshold before keeping all-pass tests.
        self._builder = "MockBuilder(dbg)"
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[200, PASS]],
                 "times": [[200, 3 * jsonresults.JSON_RESULTS_MIN_TIME]]},
                 "002.html": {
                     "results": [[10, TEXT]],
                     "times": [[10, 0]]}}},
            # Incremental results
            {"builds": ["3"],
             "tests": {"001.html": {
                 "results": [[1, PASS]],
                 "times": [[1, 1]]},
                 "002.html": {
                     "results": [[1, PASS]],
                     "times": [[1, 0]]},
                 "003.html": {
                     "results": [[1, PASS]],
                     "times": [[1, jsonresults.JSON_RESULTS_MIN_TIME]]}}},
            # Expected results
            {"builds": ["3", "2", "1"],
             "tests": {"001.html": {
                 "results": [[201, PASS]],
                 "times": [[1, 1], [200, 3 * jsonresults.JSON_RESULTS_MIN_TIME]]},
                 "002.html": {
                     "results": [[1, PASS], [10, TEXT]],
                     "times": [[11, 0]]}}})

    def test_merge_prune_extra_results(self):
        # Remove items from test results and times that exceed the max number
        # of builds to track.
        max_builds = jsonresults.JSON_RESULTS_MAX_BUILDS
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[max_builds, TEXT], [1, IMAGE]],
                 "times": [[max_builds, 0], [1, 1]]}}},
            # Incremental results
            {"builds": ["3"],
             "tests": {"001.html": {
                 "results": [[1, TIMEOUT]],
                 "times": [[1, 1]]}}},
            # Expected results
            {"builds": ["3", "2", "1"],
             "tests": {"001.html": {
                 "results": [[1, TIMEOUT], [max_builds, TEXT]],
                 "times": [[1, 1], [max_builds, 0]]}}})

    def test_merge_prune_extra_results_small(self):
        # Remove items from test results and times that exceed the max number
        # of builds to track, using smaller threshold.
        max_builds = jsonresults.JSON_RESULTS_MAX_BUILDS_SMALL
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[max_builds, TEXT], [1, IMAGE]],
                 "times": [[max_builds, 0], [1, 1]]}}},
            # Incremental results
            {"builds": ["3"],
             "tests": {"001.html": {
                 "results": [[1, TIMEOUT]],
                 "times": [[1, 1]]}}},
            # Expected results
            {"builds": ["3", "2", "1"],
             "tests": {"001.html": {
                 "results": [[1, TIMEOUT], [max_builds, TEXT]],
                 "times": [[1, 1], [max_builds, 0]]}}},
            int(max_builds))

    def test_merge_prune_extra_results_with_new_result_of_same_type(self):
        # Test that merging in a new result of the same type as the last result
        # causes old results to fall off.
        max_builds = jsonresults.JSON_RESULTS_MAX_BUILDS_SMALL
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"001.html": {
                 "results": [[max_builds, TEXT], [1, NO_DATA]],
                 "times": [[max_builds, 0], [1, 1]]}}},
            # Incremental results
            {"builds": ["3"],
             "tests": {"001.html": {
                 "results": [[1, TEXT]],
                 "times": [[1, 0]]}}},
            # Expected results
            {"builds": ["3", "2", "1"],
             "tests": {"001.html": {
                 "results": [[max_builds, TEXT]],
                 "times": [[max_builds, 0]]}}},
            int(max_builds))

    def test_merge_build_directory_hierarchy(self):
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"bar": {"baz": {
                 "003.html": {
                     "results": [[25, TEXT]],
                     "times": [[25, 0]]}}},
                 "foo": {
                     "001.html": {
                         "results": [[50, TEXT]],
                         "times": [[50, 0]]},
                     "002.html": {
                         "results": [[100, IMAGE]],
                         "times": [[100, 0]]}}},
             "version": 4},
            # Incremental results
            {"builds": ["3"],
             "tests": {"baz": {
                 "004.html": {
                     "results": [[1, IMAGE]],
                     "times": [[1, 0]]}},
                 "foo": {
                     "001.html": {
                         "results": [[1, TEXT]],
                         "times": [[1, 0]]},
                     "002.html": {
                         "results": [[1, IMAGE]],
                         "times": [[1, 0]]}}},
             "version": 4},
            # Expected results
            {"builds": ["3", "2", "1"],
             "tests": {"bar": {"baz": {
                 "003.html": {
                     "results": [[1, NO_DATA], [25, TEXT]],
                     "times": [[26, 0]]}}},
                 "baz": {
                     "004.html": {
                         "results": [[1, IMAGE]],
                         "times": [[1, 0]]}},
                 "foo": {
                     "001.html": {
                         "results": [[51, TEXT]],
                         "times": [[51, 0]]},
                     "002.html": {
                         "results": [[101, IMAGE]],
                         "times": [[101, 0]]}}},
             "version": 4})

    # FIXME(aboxhall): Add some tests for xhtml/svg test results.

    def test_get_test_name_list(self):
        # Get test name list only. Don't include non-test-list data and
        # of test result details.
        # FIXME: This also tests a temporary bug in the data where directory-level
        # results have a results and times values. Once that bug is fixed,
        # remove this test-case and assert we don't ever hit it.
        self._test_get_test_list(
            # Input results
            {"builds": ["3", "2", "1"],
             "tests": {"foo": {
                 "001.html": {
                     "results": [[200, PASS]],
                     "times": [[200, 0]]},
                 "results": [[1, NO_DATA]],
                 "times": [[1, 0]]},
                 "002.html": {
                     "results": [[10, TEXT]],
                     "times": [[10, 0]]}}},
            # Expected results
            {"foo": {"001.html": {}}, "002.html": {}})

    def test_gtest(self):
        # gtest names contain dots but are flat (not directory hierarchies).
        self._test_merge(
            # Aggregated results
            {"builds": ["2", "1"],
             "tests": {"foo.bar": {
                 "results": [[50, TEXT]],
                 "times": [[50, 0]]},
                 "foo.bar2": {
                     "results": [[100, IMAGE]],
                     "times": [[100, 0]]},
                 "test.failed": {
                     "results": [[5, FAIL]],
                     "times": [[5, 0]]},
             },
             "version": 3},
            # Incremental results
            {"builds": ["3"],
             "tests": {"foo.bar2": {
                 "results": [[1, IMAGE]],
                 "times": [[1, 0]]},
                 "foo.bar3": {
                     "results": [[1, TEXT]],
                     "times": [[1, 0]]},
                 "test.failed": {
                     "results": [[5, FAIL]],
                     "times": [[5, 0]]},
             },
             "version": 4},
            # Expected results
            {"builds": ["3", "2", "1"],
             "tests": {"foo.bar": {
                 "results": [[1, NO_DATA], [50, TEXT]],
                 "times": [[51, 0]]},
                 "foo.bar2": {
                     "results": [[101, IMAGE]],
                     "times": [[101, 0]]},
                 "foo.bar3": {
                     "results": [[1, TEXT]],
                     "times": [[1, 0]]},
                 "test.failed": {
                     "results": [[10, FAIL]],
                     "times": [[10, 0]]},
             },
             "version": 4})

if __name__ == '__main__':
    unittest.main()