    Searched refs:tokenize (Results 51 - 75 of 368)


  /prebuilts/python/darwin-x86/2.7.5/lib/python2.7/lib2to3/pgen2/
driver.py 26 from . import grammar, parse, token, tokenize, pgen
40 # XXX Move the prefix computation into a wrapper around tokenize.
59 if type in (tokenize.COMMENT, tokenize.NL):
88 tokens = tokenize.generate_tokens(stream.readline)
105 tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
  /prebuilts/python/linux-x86/2.7.5/lib/python2.7/lib2to3/pgen2/
driver.py 26 from . import grammar, parse, token, tokenize, pgen
40 # XXX Move the prefix computation into a wrapper around tokenize.
59 if type in (tokenize.COMMENT, tokenize.NL):
88 tokens = tokenize.generate_tokens(stream.readline)
105 tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
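
The driver.py hits above use the stdlib tokenize module's line-based API. A minimal sketch of that pattern, with a made-up input string and the Python 3 spelling (io.StringIO instead of the old StringIO module):

    import io
    import tokenize

    text = "x = 1  # a comment\n"
    for type, value, start, end, line in tokenize.generate_tokens(io.StringIO(text).readline):
        if type in (tokenize.COMMENT, tokenize.NL):
            continue  # driver.py skips these while building up the prefix
        print(tokenize.tok_name[type], repr(value))
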
  /external/apache-xml/src/main/java/org/apache/xpath/compiler/
Lexer.java 96 void tokenize(String pat) throws javax.xml.transform.TransformerException method in class:Lexer
98 tokenize(pat, null); method
109 void tokenize(String pat, Vector targetStrings) method in class:Lexer
  /external/swiftshader/third_party/subzero/src/
IceRangeSpec.cpp 51 auto Tokens = RangeSpec::tokenize(Token, RangeSpec::DELIM_RANGE);
97 std::vector<std::string> RangeSpec::tokenize(const std::string &Spec, function in class:Ice::RangeSpec
115 auto Tokens = tokenize(Spec, DELIM_LIST);
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Tools/scripts/
finddiv.py 21 import tokenize
58 g = tokenize.generate_tokens(fp.readline)
  /external/python/cpython2/Tools/scripts/
finddiv.py 21 import tokenize
58 g = tokenize.generate_tokens(fp.readline)
  /external/python/cpython3/Tools/scripts/
finddiv.py 21 import tokenize
58 g = tokenize.generate_tokens(fp.readline)
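
finddiv.py scans a token stream for the operators affected by true division. A hedged sketch of that loop (simplified, not the script's exact code):

    import tokenize

    def finddiv(filename):
        """Report each '/' or '/=' operator with its line number."""
        with open(filename) as fp:
            for type, value, (row, col), _, line in tokenize.generate_tokens(fp.readline):
                if type == tokenize.OP and value in ("/", "/="):
                    print("%s:%d: %s" % (filename, row, line.rstrip()))
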
  /frameworks/ex/common/java/com/android/common/
Rfc822Validator.java 97 Rfc822Token[] tokens = Rfc822Tokenizer.tokenize(text);
154 Rfc822Token[] tokens = Rfc822Tokenizer.tokenize(cs);
  /system/tools/hidl/docs/src/lexer/
DocLexer.kt 26 override fun tokenize(str: String): List<Token> {
HidlLexer.kt 28 override fun tokenize(str: String): List<Token> {
52 //tokenize string using the doc comment lexer,
73 //tokenize comment string and append all
74 tokens += DocLexer.tokenize(sb.toString())
  /external/minijail/
util.h 88 * tokenize: locate the next token in @stringp using the @delim
98 char *tokenize(char **stringp, const char *delim);
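
The util.h comment describes a strtok_r-style contract: return the next token from @stringp and advance it past the delimiter. A rough Python analogue of that contract, for a single-character delimiter only (the real implementation is the C function the header declares):

    def tokenize(state, delim):
        # Mimic 'char *tokenize(char **stringp, const char *delim)':
        # consume up to the next delimiter, advancing the remainder.
        rest = state.get("rest")
        if not rest:
            return None
        token, _, state["rest"] = rest.partition(delim)
        return token

    state = {"rest": "ro,nosuid,nodev"}
    while (tok := tokenize(state, ",")) is not None:
        print(tok)  # ro, nosuid, nodev
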
  /external/python/cpython3/Lib/idlelib/
runscript.py 22 import tokenize
69 with tokenize.open(filename) as f:
71 tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
72 except tokenize.TokenError as msg:
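
The runscript.py hit chains two stdlib helpers: tokenize.open() reopens a file using the encoding declared by its BOM or coding cookie, and tabnanny.process_tokens() flags ambiguous tab/space indentation. A sketch of that check with a placeholder filename:

    import tabnanny
    import tokenize

    filename = "script.py"  # placeholder
    try:
        with tokenize.open(filename) as f:
            tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
    except tokenize.TokenError as msg:
        print("Token error:", msg)
    except tabnanny.NannyNag as nag:
        print("Ambiguous indentation at line", nag.get_lineno())
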
  /external/swiftshader/third_party/LLVM/lib/MC/MCDisassembler/
EDInst.h 90 /// The result of the tokenize() function
156 /// tokenize - populates the Tokens member of the instruction, returning 0 on
158 int tokenize();
  /hardware/libhardware/modules/sensors/dynamic_sensor/HidUtils/test/
HidParserExample2.cpp 49 // tokenize it
50 std::vector<HidItem> hidItemVector = HidItem::tokenize(descriptor);
  /prebuilts/go/darwin-x86/src/cmd/asm/internal/asm/
pseudo_test.go 15 func tokenize(s string) [][]lex.Token { func
21 res = append(res, lex.Tokenize(o))
59 if !parser.pseudo(test.pseudo, tokenize(test.operands)) {
  /prebuilts/go/linux-x86/src/cmd/asm/internal/asm/
pseudo_test.go 15 func tokenize(s string) [][]lex.Token { func
21 res = append(res, lex.Tokenize(o))
59 if !parser.pseudo(test.pseudo, tokenize(test.operands)) {
  /prebuilts/ndk/r16/sources/third_party/shaderc/third_party/glslang/hlsl/
hlslTokenStream.cpp 108 scanner.tokenize(token);
  /external/python/cpython3/Lib/test/
test_tokenize.py 2 from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
14 # Tests for the tokenize module.
25 for type, token, start, end, line in tokenize(f.readline):
66 for tok in tokenize(readline):
193 for toktype, token, start, end, line in tokenize(f.readline):
    [all...]
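
test_tokenize.py exercises the bytes-level entry point: tokenize.tokenize() takes a readline callable that returns bytes and yields an ENCODING token first. A minimal sketch with made-up source bytes:

    import io
    import tokenize

    src = b"spam = 42\n"
    for type, token, start, end, line in tokenize.tokenize(io.BytesIO(src).readline):
        print(tokenize.tok_name[type], repr(token))
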
  /external/mockftpserver/MockFtpServer/src/test/groovy/org/mockftpserver/fake/command/
NlstCommandHandlerTest.groovy 51 def actualLines = session.sentData[0].tokenize(endOfLine()) as Set
  /external/python/cpython3/Lib/
linecache.py 11 import tokenize
136 with tokenize.open(fullname) as fp:
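
linecache.py falls back on tokenize.open() for the same reason: it decodes Python source with whatever encoding the file itself declares. A short sketch (placeholder path):

    import tokenize

    with tokenize.open("module.py") as fp:
        print(fp.encoding)  # detected from the BOM or coding cookie
        lines = fp.readlines()
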
  /packages/providers/ContactsProvider/src/com/android/providers/contacts/
HanziToPinyin.java 98 private void tokenize(char character, Token token) { method in class:HanziToPinyin
160 tokenize(character, token); method
  /prebuilts/go/darwin-x86/src/go/scanner/
example_test.go 14 // src is the input that we want to tokenize.
  /prebuilts/go/linux-x86/src/go/scanner/
example_test.go 14 // src is the input that we want to tokenize.
  /system/tools/hidl/docs/src/
main.kt 40 val tokens = HidlLexer.tokenize(fp)
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/lib2to3/pgen2/
grammar.py 10 token module; the Python tokenize module reports all operators as the
19 from . import token, tokenize
129 # Map from operator to number (since tokenize doesn't do this)
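
The grammar.py comment notes that tokenize reports every operator as a generic OP token, so pgen2 keeps its own operator-to-number map. Recent CPython (3.8+) exposes the same mapping as token.EXACT_TOKEN_TYPES; a quick check:

    import token

    print(token.EXACT_TOKEN_TYPES["+"] == token.PLUS)  # True
    print(token.EXACT_TOKEN_TYPES["("] == token.LPAR)  # True
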

