Home | Sort by relevance | Sort by last modified time
    Searched defs:generate_tokens (Results 1 - 15 of 15) sorted by null

  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.10/Lib/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
35 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
167 tuples generated by generate_tokens().
176 for token_info in generate_tokens(readline):
261 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
264 t2 = [tok[:2] for tok in generate_tokens(readline)]
270 def generate_tokens(readline): function
272 The generate_tokens() generator requires one argument, readline, which
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/lib2to3/pgen2/
tokenize.py 6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
171 tuples generated by generate_tokens().
180 for token_info in generate_tokens(readline):
336 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
339 t2 = [tok[:2] for tok in generate_tokens(readline)]
345 def generate_tokens(readline): function
347 The generate_tokens() generator requires one argument, readline, which
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
34 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
166 tuples generated by generate_tokens().
175 for token_info in generate_tokens(readline):
255 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
258 t2 = [tok[:2] for tok in generate_tokens(readline)]
264 def generate_tokens(readline): function
266 The generate_tokens() generator requires one argument, readline, which
  /external/python/cpython2/Lib/lib2to3/pgen2/
tokenize.py 6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
171 tuples generated by generate_tokens().
180 for token_info in generate_tokens(readline):
338 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
341 t2 = [tok[:2] for tok in generate_tokens(readline)]
347 def generate_tokens(readline): function
349 The generate_tokens() generator requires one argument, readline, which
  /external/python/cpython2/Lib/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
35 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
167 tuples generated by generate_tokens().
176 for token_info in generate_tokens(readline):
278 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
281 t2 = [tok[:2] for tok in generate_tokens(readline)]
287 def generate_tokens(readline): function
289 The generate_tokens() generator requires one argument, readline, which
  /external/python/cpython3/Lib/lib2to3/pgen2/
tokenize.py 6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
171 tuples generated by generate_tokens().
180 for token_info in generate_tokens(readline):
338 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
341 t2 = [tok[:2] for tok in generate_tokens(readline)]
347 def generate_tokens(readline): function
349 The generate_tokens() generator requires one argument, readline, which
  /prebuilts/gdb/darwin-x86/lib/python2.7/lib2to3/pgen2/
tokenize.py 6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
171 tuples generated by generate_tokens().
180 for token_info in generate_tokens(readline):
336 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
339 t2 = [tok[:2] for tok in generate_tokens(readline)]
345 def generate_tokens(readline): function
347 The generate_tokens() generator requires one argument, readline, which
  /prebuilts/gdb/darwin-x86/lib/python2.7/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
34 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
166 tuples generated by generate_tokens().
175 for token_info in generate_tokens(readline):
255 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
258 t2 = [tok[:2] for tok in generate_tokens(readline)]
264 def generate_tokens(readline): function
266 The generate_tokens() generator requires one argument, readline, which
  /prebuilts/gdb/linux-x86/lib/python2.7/lib2to3/pgen2/
tokenize.py 6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
171 tuples generated by generate_tokens().
180 for token_info in generate_tokens(readline):
336 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
339 t2 = [tok[:2] for tok in generate_tokens(readline)]
345 def generate_tokens(readline): function
347 The generate_tokens() generator requires one argument, readline, which
  /prebuilts/gdb/linux-x86/lib/python2.7/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
34 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
166 tuples generated by generate_tokens().
175 for token_info in generate_tokens(readline):
255 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
258 t2 = [tok[:2] for tok in generate_tokens(readline)]
264 def generate_tokens(readline): function
266 The generate_tokens() generator requires one argument, readline, which
  /prebuilts/python/darwin-x86/2.7.5/lib/python2.7/lib2to3/pgen2/
tokenize.py 6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
171 tuples generated by generate_tokens().
180 for token_info in generate_tokens(readline):
336 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
339 t2 = [tok[:2] for tok in generate_tokens(readline)]
345 def generate_tokens(readline): function
347 The generate_tokens() generator requires one argument, readline, which
  /prebuilts/python/darwin-x86/2.7.5/lib/python2.7/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
34 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
166 tuples generated by generate_tokens().
175 for token_info in generate_tokens(readline):
255 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
258 t2 = [tok[:2] for tok in generate_tokens(readline)]
264 def generate_tokens(readline): function
266 The generate_tokens() generator requires one argument, readline, which
  /prebuilts/python/linux-x86/2.7.5/lib/python2.7/lib2to3/pgen2/
tokenize.py 6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
171 tuples generated by generate_tokens().
180 for token_info in generate_tokens(readline):
336 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
339 t2 = [tok[:2] for tok in generate_tokens(readline)]
345 def generate_tokens(readline): function
347 The generate_tokens() generator requires one argument, readline, which
  /prebuilts/python/linux-x86/2.7.5/lib/python2.7/
tokenize.py 3 generate_tokens(readline) is a generator that breaks a stream of
34 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
166 tuples generated by generate_tokens().
175 for token_info in generate_tokens(readline):
255 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
258 t2 = [tok[:2] for tok in generate_tokens(readline)]
264 def generate_tokens(readline): function
266 The generate_tokens() generator requires one argument, readline, which
  /external/python/cpython3/Lib/
tokenize.py 729 def generate_tokens(readline): function

Completed in 565 milliseconds