OpenGrok
Home
Sort by relevance
Sort by last modified time
Full Search
Definition
Symbol
File Path
History
|
|
Help
Searched
refs:Scrape
(Results
1 - 9
of
9
) sorted by relevance
/external/chromium_org/tools/site_compare/scrapers/chrome/
chrome011010.py
19
def
Scrape
(urls, outdir, size, pos, timeout=20, **kwargs):
23
urls: list of URLs to
scrape
35
return chromebase.
Scrape
(urls, outdir, size, pos, timeout, kwargs)
chrome01970.py
19
def
Scrape
(urls, outdir, size, pos, timeout=20, **kwargs):
23
urls: list of URLs to
scrape
35
return chromebase.
Scrape
(urls, outdir, size, pos, timeout, kwargs)
chromebase.py
50
def
Scrape
(urls, outdir, size, pos, timeout, kwargs):
54
urls: list of URLs to
scrape
90
#
Scrape
the page
189
#
Scrape
three sites and save the results
190
Scrape
([
/external/chromium_org/tools/site_compare/scrapers/firefox/
firefox2.py
24
# 3) fails badly if an existing Firefox window is open when the
scrape
76
def
Scrape
(urls, outdir, size, pos, timeout=20, **kwargs):
80
urls: list of URLs to
scrape
128
#
Scrape
the page
240
#
Scrape
three sites and save the results
241
Scrape
(
/external/chromium_org/tools/site_compare/scrapers/ie/
ie7.py
67
def
Scrape
(urls, outdir, size, pos, timeout=20, **kwargs):
71
urls: list of URLs to
scrape
111
#
Scrape
the page
200
#
Scrape
three sites and save the results
201
Scrape
(
/external/chromium_org/tools/site_compare/
site_compare.py
31
import commands.
scrape
#
scrape
a URL or series of URLs to a bitmap
namespace
37
def
Scrape
(browsers, urls, window_size=(1024, 768),
73
scraper.
Scrape
(urls, full_path, window_size, window_pos, timeout, kwargs)
169
commands.
scrape
.CreateCommand(cmdline)
/external/chromium_org/tools/site_compare/commands/
compare2.py
52
["-o", "--outdir"], "Directory to store
scrape
files", metaname="DIR")
113
"""Helper class to hold information about a
scrape
."""
137
scrape_info.result = scrape_info.scraper.
Scrape
(
maskmaker.py
77
"Number of times to
scrape
before giving up", type="int", default=50)
80
"Percentage of different pixels (0-100) above which the
scrape
will be"
84
"Number of times a
scrape
can fail before giving up on the URL.",
137
#
Scrape
each URL
160
# Find the stored
scrape
path
178
# Do the
scrape
179
result = scraper.
Scrape
(
186
print "
Scrape
failed with error '%r'" % result
192
# Load the new
scrape
193
scrape
= Image.open(mask_scrape_filename
[
all
...]
/external/chromium_org/chrome/browser/resources/gaia_auth/
saml_injected.js
13
* 3.
Scrape
password fields, making the password available to Chrome OS even if
62
* A class to
scrape
password from type=password input elements under a given
Completed in 80 milliseconds