# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

try:
    import jsonresults
    from jsonresults import JsonResults
except ImportError:
    print "ERROR: Add the TestResultServer, google_appengine and yaml/lib directories to your PYTHONPATH"

import unittest


JSON_RESULTS_TEMPLATE = (
    '{"Webkit":{'
    '"allFixableCount":[[TESTDATA_COUNT]],'
    '"buildNumbers":[[TESTDATA_BUILDNUMBERS]],'
    '"chromeRevision":[[TESTDATA_CHROMEREVISION]],'
    '"deferredCounts":[[TESTDATA_COUNTS]],'
    '"fixableCount":[[TESTDATA_COUNT]],'
    '"fixableCounts":[[TESTDATA_COUNTS]],'
    '"secondsSinceEpoch":[[TESTDATA_TIMES]],'
    '"tests":{[TESTDATA_TESTS]},'
    '"webkitRevision":[[TESTDATA_WEBKITREVISION]],'
    '"wontfixCounts":[[TESTDATA_COUNTS]]'
    '},'
    '"version":3'
    '}')

JSON_RESULTS_COUNTS_TEMPLATE = (
    '{'
    '"C":[TESTDATA],'
    '"F":[TESTDATA],'
    '"I":[TESTDATA],'
    '"O":[TESTDATA],'
    '"P":[TESTDATA],'
    '"T":[TESTDATA],'
    '"X":[TESTDATA],'
    '"Z":[TESTDATA]}')

JSON_RESULTS_TESTS_TEMPLATE = (
    '"[TESTDATA_TEST_NAME]":{'
    '"results":[[TESTDATA_TEST_RESULTS]],'
    '"times":[[TESTDATA_TEST_TIMES]]}')

JSON_RESULTS_PREFIX = "ADD_RESULTS("
JSON_RESULTS_SUFFIX = ");"

JSON_RESULTS_TEST_LIST_TEMPLATE = (
    '{"Webkit":{"tests":{[TESTDATA_TESTS]}}}')


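# The templates above mirror the aggregated-results JSON handled by
# JsonResults: each [TESTDATA_*] placeholder is filled in by
# JsonResultsTest._make_test_json below.  Per-test "results" and "times"
# arrays are run-length encoded as [count, value] pairs, which the merge
# cases below rely on (for example, [200,"F"] merged with a new [1,"F"]
# run becomes [201,"F"]).
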
class JsonResultsTest(unittest.TestCase):
    def setUp(self):
        self._builder = "Webkit"

    def _make_test_json(self, test_data):
        """Builds an ADD_RESULTS(...) payload from (builds, tests) test data."""
        if not test_data:
            return JSON_RESULTS_PREFIX + JSON_RESULTS_SUFFIX

        (builds, tests) = test_data
        if not builds or not tests:
            return JSON_RESULTS_PREFIX + JSON_RESULTS_SUFFIX

        json = JSON_RESULTS_TEMPLATE

        counts = []
        build_numbers = []
        webkit_revision = []
        chrome_revision = []
        times = []
        for build in builds:
            counts.append(JSON_RESULTS_COUNTS_TEMPLATE.replace("[TESTDATA]", build))
            build_numbers.append("1000%s" % build)
            webkit_revision.append("2000%s" % build)
            chrome_revision.append("3000%s" % build)
            times.append("100000%s000" % build)

        json = json.replace("[TESTDATA_COUNTS]", ",".join(counts))
        json = json.replace("[TESTDATA_COUNT]", ",".join(builds))
        json = json.replace("[TESTDATA_BUILDNUMBERS]", ",".join(build_numbers))
        json = json.replace("[TESTDATA_WEBKITREVISION]", ",".join(webkit_revision))
        json = json.replace("[TESTDATA_CHROMEREVISION]", ",".join(chrome_revision))
        json = json.replace("[TESTDATA_TIMES]", ",".join(times))

        json_tests = []
        for test in tests:
            t = JSON_RESULTS_TESTS_TEMPLATE.replace("[TESTDATA_TEST_NAME]", test[0])
            t = t.replace("[TESTDATA_TEST_RESULTS]", test[1])
            t = t.replace("[TESTDATA_TEST_TIMES]", test[2])
            json_tests.append(t)

        json = json.replace("[TESTDATA_TESTS]", ",".join(json_tests))

        return JSON_RESULTS_PREFIX + json + JSON_RESULTS_SUFFIX

    def _test_merge(self, aggregated_data, incremental_data, expected_data, max_builds=jsonresults.JSON_RESULTS_MAX_BUILDS):
        """Merges incremental results into aggregated results and checks the output."""
        aggregated_results = self._make_test_json(aggregated_data)
        incremental_results = self._make_test_json(incremental_data)
        merged_results = JsonResults.merge(self._builder,
            aggregated_results, incremental_results, max_builds,
            sort_keys=True)

        if expected_data:
            expected_results = self._make_test_json(expected_data)
            self.assertEquals(merged_results, expected_results)
        else:
            self.assertFalse(merged_results)

    def _test_get_test_list(self, input_data, expected_data):
        """Extracts the test name list from full results and checks the output."""
        input_results = self._make_test_json(input_data)

        json_tests = []
        for test in expected_data:
            json_tests.append("\"" + test + "\":{}")

        expected_results = JSON_RESULTS_PREFIX + \
            JSON_RESULTS_TEST_LIST_TEMPLATE.replace(
                "[TESTDATA_TESTS]", ",".join(json_tests)) + \
            JSON_RESULTS_SUFFIX

        actual_results = JsonResults.get_test_list(self._builder, input_results)
        self.assertEquals(actual_results, expected_results)

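    # Each merge case below passes (builds, tests) tuples to _test_merge:
    # "builds" is a list of build ids, most recent first, and each test entry
    # is [test name, run-length encoded results, run-length encoded times].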
    def test(self):
        # Empty incremental results json.
        # Nothing to merge.
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
            # Incremental results
            None,
            # Expect no merge to happen.
            None)

        # No actual incremental test results (only prefix and suffix) to merge.
        # Nothing to merge.
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
            # Incremental results
            ([], []),
            # Expect no merge to happen.
            None)

        # No existing aggregated results.
        # Merged results == new incremental results.
        self._test_merge(
            # Aggregated results
            None,
            # Incremental results
            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
            # Expected results
            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]))

        # Single test for a single run.
        # Incremental results have the latest build and the same test results
        # for that run.
        # Insert the incremental results at the front and sum the number of
        # runs for "F" (200 + 1) to get the merged results.
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
            # Incremental results
            (["3"], [["001.html", "[1,\"F\"]", "[1,0]"]]),
            # Expected results
            (["3", "2", "1"], [["001.html", "[201,\"F\"]", "[201,0]"]]))

        # Single test for a single run.
        # Incremental results have the latest build but different test results
        # for that run.
        # Insert the incremental results at the front.
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
            # Incremental results
            (["3"], [["001.html", "[1, \"I\"]", "[1,1]"]]),
            # Expected results
            (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"F\"]", "[1,1],[200,0]"]]))

        # Single test for a single run.
        # Incremental results have the latest build but different test results
        # for that run.
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[200,\"F\"],[10,\"I\"]", "[200,0],[10,1]"]]),
            # Incremental results
            (["3"], [["001.html", "[1,\"I\"]", "[1,1]"]]),
            # Expected results
            (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"F\"],[10,\"I\"]", "[1,1],[200,0],[10,1]"]]))

        # Multiple tests for a single run.
        # All tests have incremental updates.
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[100,\"I\"]", "[100,1]"]]),
            # Incremental results
            (["3"], [["001.html", "[1,\"F\"]", "[1,0]"], ["002.html", "[1,\"I\"]", "[1,1]"]]),
            # Expected results
            (["3", "2", "1"], [["001.html", "[201,\"F\"]", "[201,0]"], ["002.html", "[101,\"I\"]", "[101,1]"]]))

        # Multiple tests for a single run.
        # Only one test has an incremental update; the other test gets a
        # "no data" ("N") entry for the new build.
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[100,\"I\"]", "[100,1]"]]),
            # Incremental results
            (["3"], [["002.html", "[1,\"I\"]", "[1,1]"]]),
            # Expected results
            (["3", "2", "1"], [["001.html", "[1,\"N\"],[200,\"F\"]", "[201,0]"], ["002.html", "[101,\"I\"]", "[101,1]"]]))

        # Single test for multiple runs.
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
            # Incremental results
            (["4", "3"], [["001.html", "[2, \"I\"]", "[2,2]"]]),
            # Expected results
            (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"F\"]", "[2,2],[200,0]"]]))

        # Multiple tests for multiple runs.
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[10,\"Z\"]", "[10,0]"]]),
            # Incremental results
            (["4", "3"], [["001.html", "[2, \"I\"]", "[2,2]"], ["002.html", "[1,\"C\"]", "[1,1]"]]),
            # Expected results
            (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"F\"]", "[2,2],[200,0]"], ["002.html", "[1,\"C\"],[10,\"Z\"]", "[1,1],[10,0]"]]))

        # Test that the build in the incremental results is older than the
        # most recent build in the aggregated results.
        # The incremental results should be dropped and no merge should happen.
        self._test_merge(
            # Aggregated results
            (["3", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
            # Incremental results
            (["2"], [["001.html", "[1, \"F\"]", "[1,0]"]]),
            # Expect no merge to happen.
            None)

        # Test that the build in the incremental results is the same as a
        # build in the aggregated results.
        # The incremental results should be dropped and no merge should happen.
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
            # Incremental results
            (["3", "2"], [["001.html", "[2, \"F\"]", "[2,0]"]]),
            # Expect no merge to happen.
            None)

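        # The following merge cases exercise the pruning rules: a test whose
        # runs are all "N" (no data) is dropped, an all-pass test is dropped
        # unless some run took >= 1 second, and result/time entries beyond the
        # max-builds limit fall off the end of the run-length encoded lists.
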
        # Remove a test when there is no data ("N") for it in any run.
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[200,\"N\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
            # Incremental results
            (["3"], [["001.html", "[1,\"N\"]", "[1,0]"], ["002.html", "[1,\"P\"]", "[1,0]"]]),
            # Expected results
            (["3", "2", "1"], [["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]]))

        # Remove a test when all runs pass and the max running time is < 1 second.
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
            # Incremental results
            (["3"], [["001.html", "[1,\"P\"]", "[1,0]"], ["002.html", "[1,\"P\"]", "[1,0]"]]),
            # Expected results
            (["3", "2", "1"], [["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]]))

        # Do not remove a test when all runs pass but the max running time is >= 1 second.
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
            # Incremental results
            (["3"], [["001.html", "[1,\"P\"]", "[1,1]"], ["002.html", "[1,\"P\"]", "[1,0]"]]),
            # Expected results
            (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[1,1],[200,0]"], ["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]]))

        # Remove items from test results and times that exceed the max number
        # of builds to track.
        max_builds = str(jsonresults.JSON_RESULTS_MAX_BUILDS)
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[" + max_builds + ",\"F\"],[1,\"I\"]", "[" + max_builds + ",0],[1,1]"]]),
            # Incremental results
            (["3"], [["001.html", "[1,\"T\"]", "[1,1]"]]),
            # Expected results
            (["3", "2", "1"], [["001.html", "[1,\"T\"],[" + max_builds + ",\"F\"]", "[1,1],[" + max_builds + ",0]"]]))

        # Remove items from test results and times that exceed the max number
        # of builds to track, using the smaller threshold.
        max_builds = str(jsonresults.JSON_RESULTS_MAX_BUILDS_SMALL)
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[" + max_builds + ",\"F\"],[1,\"I\"]", "[" + max_builds + ",0],[1,1]"]]),
            # Incremental results
            (["3"], [["001.html", "[1,\"T\"]", "[1,1]"]]),
            # Expected results
            (["3", "2", "1"], [["001.html", "[1,\"T\"],[" + max_builds + ",\"F\"]", "[1,1],[" + max_builds + ",0]"]]),
            int(max_builds))

        # Test that merging in a new result of the same type as the last result
        # causes old results to fall off.
        max_builds = str(jsonresults.JSON_RESULTS_MAX_BUILDS_SMALL)
        self._test_merge(
            # Aggregated results
            (["2", "1"], [["001.html", "[" + max_builds + ",\"F\"],[1,\"N\"]", "[" + max_builds + ",0],[1,1]"]]),
            # Incremental results
            (["3"], [["001.html", "[1,\"F\"]", "[1,0]"]]),
            # Expected results
            (["3", "2", "1"], [["001.html", "[" + max_builds + ",\"F\"]", "[" + max_builds + ",0]"]]),
            int(max_builds))

        # Get the test name list only. Don't include non-test-list data or any
        # test result details.
        self._test_get_test_list(
            # Input results
            (["3", "2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
            # Expected results
            ["001.html", "002.html"])


if __name__ == '__main__':
    unittest.main()