# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""SiteCompare command to time page loads.

Loads a series of URLs in a series of browsers (and browser versions)
and measures how long each page takes to load in each browser. Outputs a
comma-delimited file. The first line is "URL,[browser names]"; each
additional line is a URL followed by comma-delimited times (in seconds),
or the string "timeout" or "crashed".
"""

import os         # Functions for walking the directory tree
import tempfile   # Get a temporary directory to hold intermediates

import command_line
import drivers    # Functions for driving keyboard/mouse/windows, OS-specific
import operators  # Functions that, given two bitmaps as input, produce
                  #   output depending on the performance of an operation
import scrapers   # Functions that know how to capture a render from
                  #   particular browsers


def CreateCommand(cmdline):
  """Inserts the command and arguments into a command line for parsing."""
  cmd = cmdline.AddCommand(
    ["timeload"],
    "Measures how long a series of URLs takes to load "
    "in one or more browsers.",
    None,
    ExecuteTimeLoad)

  cmd.AddArgument(
    ["-b", "--browsers"], "List of browsers to use. Comma-separated",
    type="string", required=True)
  cmd.AddArgument(
    ["-bp", "--browserpaths"], "List of paths to browsers. Comma-separated",
    type="string", required=False)
  cmd.AddArgument(
    ["-bv", "--browserversions"],
    "List of versions of browsers. Comma-separated",
    type="string", required=False)
  cmd.AddArgument(
    ["-u", "--url"], "URL to time")
  cmd.AddArgument(
    ["-l", "--list"], "List of URLs to time", type="readfile")
  cmd.AddMutualExclusion(["--url", "--list"])
  cmd.AddArgument(
    ["-s", "--startline"], "First line of URL list", type="int")
  cmd.AddArgument(
    ["-e", "--endline"], "Last line of URL list (exclusive)", type="int")
  cmd.AddArgument(
    ["-c", "--count"], "Number of lines of URL file to use", type="int")
  cmd.AddDependency("--startline", "--list")
  cmd.AddRequiredGroup(["--url", "--list"])
  cmd.AddDependency("--endline", "--list")
  cmd.AddDependency("--count", "--list")
  cmd.AddMutualExclusion(["--count", "--endline"])
  cmd.AddDependency("--count", "--startline")
  cmd.AddArgument(
    ["-t", "--timeout"], "Amount of time (seconds) to wait for browser to "
    "finish loading",
    type="int", default=60)
  cmd.AddArgument(
    ["-log", "--logfile"], "File to write output", type="string",
    required=True)
  cmd.AddArgument(
    ["-sz", "--size"], "Browser window size", default=(800, 600),
    type="coords")


def ExecuteTimeLoad(command):
  """Executes the TimeLoad command."""
  browsers = command["--browsers"].split(",")
  num_browsers = len(browsers)

  if command["--browserversions"]:
    browser_versions = command["--browserversions"].split(",")
  else:
    browser_versions = [None] * num_browsers

  if command["--browserpaths"]:
    browser_paths = command["--browserpaths"].split(",")
  else:
    browser_paths = [None] * num_browsers

  if len(browser_versions) != num_browsers:
    raise ValueError(
      "--browserversions must be the same length as --browsers")
  if len(browser_paths) != num_browsers:
    raise ValueError(
      "--browserpaths must be the same length as --browsers")

  unknown = [b for b in browsers if b not in ["chrome", "ie", "firefox"]]
  if unknown:
    raise ValueError("unknown browsers: %r" % unknown)
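
  # Resolve a scraper (the browser-driving backend) for each requested
  # browser/version pair; fail up front on unsupported combinations.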
ValueError("unknown browsers: %r" % b) 95 96 scraper_list = [] 97 98 for b in xrange(num_browsers): 99 version = browser_versions[b] 100 if not version: version = None 101 102 scraper = scrapers.GetScraper( (browsers[b], version) ) 103 if not scraper: 104 raise ValueError("could not find scraper for (%r, %r)" % 105 (browsers[b], version)) 106 scraper_list.append(scraper) 107 108 if command["--url"]: 109 url_list = [command["--url"]] 110 else: 111 startline = command["--startline"] 112 if command["--count"]: 113 endline = startline+command["--count"] 114 else: 115 endline = command["--endline"] 116 url_list = [url.strip() for url in 117 open(command["--list"], "r").readlines()[startline:endline]] 118 119 log_file = open(command["--logfile"], "w") 120 121 log_file.write("URL") 122 for b in xrange(num_browsers): 123 log_file.write(",%s" % browsers[b]) 124 125 if browser_versions[b]: log_file.write(" %s" % browser_versions[b]) 126 log_file.write("\n") 127 128 results = {} 129 for url in url_list: 130 results[url] = [None] * num_browsers 131 132 for b in xrange(num_browsers): 133 result = scraper_list[b].Time(url_list, command["--size"], 134 command["--timeout"], 135 path=browser_paths[b]) 136 137 for (url, time) in result: 138 results[url][b] = time 139 140 # output the results 141 for url in url_list: 142 log_file.write(url) 143 for b in xrange(num_browsers): 144 log_file.write(",%r" % results[url][b]) 145