    Searched refs:generate_tokens (Results 1 - 25 of 98)


  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/lib2to3/pgen2/
driver.py 88 tokens = tokenize.generate_tokens(stream.readline)
105 tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
tokenize.py 6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
171 tuples generated by generate_tokens().
180 for token_info in generate_tokens(readline):
336 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
339 t2 = [tok[:2] for tok in generate_tokens(readline)]
345 def generate_tokens(readline): function
347 The generate_tokens() generator requires one argment, readline, which
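
The snippets above share one pattern: wrap source text in a StringIO object and hand
its readline method to generate_tokens(), which yields one 5-tuple per token. A
minimal sketch of that pattern, using Python 3 names (io.StringIO; the Python 2 files
indexed here use StringIO.StringIO) and a made-up source string:

    import io
    import tokenize

    source = "x = 1 + 2\n"  # hypothetical input, not taken from the indexed files
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        # Each token is a 5-tuple: (type, string, (srow, scol), (erow, ecol), line).
        print(tokenize.tok_name[tok[0]], repr(tok[1]))
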
  /external/python/cpython2/Lib/test/
test_tokenize.py 2 from tokenize import (untokenize, generate_tokens, NUMBER, NAME, OP,
21 for type, token, start, end, line in generate_tokens(f.readline):
63 for tok in generate_tokens(StringIO(indent_error_file).readline):
520 g = generate_tokens(StringIO(s).readline) # tokenize the string
600 token_list = list(generate_tokens(f.readline))
605 tokens2 = [tok[:2] for tok in generate_tokens(readline)]
703 tokens = generate_tokens(StringIO(code).readline)
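
The test file above exercises the untokenize() round trip that the docstrings quote:
keep only the first two fields of each token and feed them back. A minimal sketch of
that check, with a made-up code string (the real tests read tokens from files):

    import io
    from tokenize import generate_tokens, untokenize

    code = "a = 1; b = 2\n"  # hypothetical input
    t1 = [tok[:2] for tok in generate_tokens(io.StringIO(code).readline)]
    # With only (type, string) pairs, untokenize() guarantees that its output
    # tokenizes back to the same pairs, not that the text is byte-identical.
    readline = io.StringIO(untokenize(t1)).readline
    t2 = [tok[:2] for tok in generate_tokens(readline)]
    assert t1 == t2
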
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Tools/scripts/
finddiv.py 58 g = tokenize.generate_tokens(fp.readline)
  /external/chromium-trace/catapult/common/py_utils/py_utils/refactor/
offset_token.py 66 tokenize_tokens = tokenize.generate_tokens(f.readline)
  /external/python/cpython2/Tools/scripts/
finddiv.py 58 g = tokenize.generate_tokens(fp.readline)
  /external/python/cpython3/Tools/scripts/
finddiv.py 58 g = tokenize.generate_tokens(fp.readline)
  /external/python/cpython2/Lib/lib2to3/pgen2/
driver.py 88 tokens = tokenize.generate_tokens(stream.readline)
105 tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
  /external/python/cpython3/Lib/lib2to3/pgen2/
driver.py 88 tokens = tokenize.generate_tokens(stream.readline)
105 tokens = tokenize.generate_tokens(io.StringIO(text).readline)
  /prebuilts/gdb/darwin-x86/lib/python2.7/lib2to3/pgen2/
driver.py 88 tokens = tokenize.generate_tokens(stream.readline)
105 tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
tokenize.py 6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
171 tuples generated by generate_tokens().
180 for token_info in generate_tokens(readline):
336 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
339 t2 = [tok[:2] for tok in generate_tokens(readline)]
345 def generate_tokens(readline): function
347 The generate_tokens() generator requires one argment, readline, which
  /prebuilts/gdb/linux-x86/lib/python2.7/lib2to3/pgen2/
driver.py 88 tokens = tokenize.generate_tokens(stream.readline)
105 tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
tokenize.py 6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
171 tuples generated by generate_tokens().
180 for token_info in generate_tokens(readline):
336 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
339 t2 = [tok[:2] for tok in generate_tokens(readline)]
345 def generate_tokens(readline): function
347 The generate_tokens() generator requires one argment, readline, which
  /prebuilts/python/darwin-x86/2.7.5/lib/python2.7/lib2to3/pgen2/
driver.py 88 tokens = tokenize.generate_tokens(stream.readline)
105 tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
tokenize.py 6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
171 tuples generated by generate_tokens().
180 for token_info in generate_tokens(readline):
336 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
339 t2 = [tok[:2] for tok in generate_tokens(readline)]
345 def generate_tokens(readline): function
347 The generate_tokens() generator requires one argment, readline, which
  /prebuilts/python/linux-x86/2.7.5/lib/python2.7/lib2to3/pgen2/
driver.py 88 tokens = tokenize.generate_tokens(stream.readline)
105 tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
tokenize.py 6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
171 tuples generated by generate_tokens().
180 for token_info in generate_tokens(readline):
336 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
339 t2 = [tok[:2] for tok in generate_tokens(readline)]
345 def generate_tokens(readline): function
347 The generate_tokens() generator requires one argment, readline, which
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.10/Lib/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
35 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
167 tuples generated by generate_tokens().
176 for token_info in generate_tokens(readline):
261 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
264 t2 = [tok[:2] for tok in generate_tokens(readline)]
270 def generate_tokens(readline): function
272 The generate_tokens() generator requires one argument, readline, which
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
34 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
166 tuples generated by generate_tokens().
175 for token_info in generate_tokens(readline):
255 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
258 t2 = [tok[:2] for tok in generate_tokens(readline)]
264 def generate_tokens(readline): function
266 The generate_tokens() generator requires one argment, readline, which
  /external/python/cpython2/Lib/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
35 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
167 tuples generated by generate_tokens().
176 for token_info in generate_tokens(readline):
278 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
281 t2 = [tok[:2] for tok in generate_tokens(readline)]
287 def generate_tokens(readline): function
289 The generate_tokens() generator requires one argument, readline, which
  /prebuilts/gdb/darwin-x86/lib/python2.7/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
34 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
166 tuples generated by generate_tokens().
175 for token_info in generate_tokens(readline):
255 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
258 t2 = [tok[:2] for tok in generate_tokens(readline)]
264 def generate_tokens(readline): function
266 The generate_tokens() generator requires one argment, readline, which
  /prebuilts/gdb/linux-x86/lib/python2.7/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
34 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
166 tuples generated by generate_tokens().
175 for token_info in generate_tokens(readline):
255 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
258 t2 = [tok[:2] for tok in generate_tokens(readline)]
264 def generate_tokens(readline): function
266 The generate_tokens() generator requires one argment, readline, which
  /prebuilts/python/darwin-x86/2.7.5/lib/python2.7/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
34 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
166 tuples generated by generate_tokens().
175 for token_info in generate_tokens(readline):
255 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
258 t2 = [tok[:2] for tok in generate_tokens(readline)]
264 def generate_tokens(readline): function
266 The generate_tokens() generator requires one argment, readline, which
  /prebuilts/python/linux-x86/2.7.5/lib/python2.7/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
34 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
166 tuples generated by generate_tokens().
175 for token_info in generate_tokens(readline):
255 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
258 t2 = [tok[:2] for tok in generate_tokens(readline)]
264 def generate_tokens(readline): function
266 The generate_tokens() generator requires one argment, readline, which
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/lib2to3/
patcomp.py 36 tokens = tokenize.generate_tokens(StringIO.StringIO(input).readline)
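
The driver.py and finddiv.py hits tokenize an open file rather than a string, passing
the file object's readline method straight through. A minimal sketch of that usage
(the filename is hypothetical; tokenize.open() is a Python 3 helper that opens a
source file with its declared encoding):

    import tokenize

    with tokenize.open("example.py") as fp:  # hypothetical file
        for tok in tokenize.generate_tokens(fp.readline):
            if tok[1] in ("/", "/="):  # the operators finddiv.py scans for
                print("division operator at line", tok[2][0])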

