    Searched refs: Tokenize (Results 1 - 13 of 13)

  /external/google-breakpad/src/processor/
tokenize.h
  30: // Implements a Tokenize function for splitting up strings.
  50: // Tokenize, but may be treated as a failure if the caller expects an
  53: bool Tokenize(char *line,
  57: // For convenience, since you need a char* to pass to Tokenize.
tokenize.cc
  45: bool Tokenize(char *line,
basic_source_line_resolver.cc
  50: #include "processor/tokenize.h"
  471: if (!Tokenize(file_line, kWhitespace, 2, &tokens)) {
  499: if (!Tokenize(function_line, kWhitespace, 4, &tokens)) {
  531: if (!Tokenize(line_line, kWhitespace, 4, &tokens)) {
  580: if (!Tokenize(public_line, kWhitespace, 3, &tokens)) {
windows_frame_info.h
  50: #include "processor/tokenize.h"
  126: if (!Tokenize(&buffer[0], " \r\n", 11, &tokens))
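The call sites in basic_source_line_resolver.cc above show the usage pattern: hand Tokenize a mutable line buffer, a separator set, the expected token count, and a vector<char*> to fill; it returns false when the line does not split into exactly that many tokens. A minimal caller sketch, assuming the declaration truncated at tokenize.h:53 completes as bool Tokenize(char *line, const char *separators, int max_tokens, std::vector<char*> *tokens) in namespace google_breakpad (inferred from the call sites; ParseFileRecord and its record layout are hypothetical illustrations, not breakpad code):

    // Hypothetical caller: split one "<id> <name>" symbol-file record,
    // in the style of basic_source_line_resolver.cc:471 above.
    #include <cstdlib>
    #include <string>
    #include <vector>

    #include "processor/tokenize.h"

    static const char kWhitespace[] = " \r\n";

    bool ParseFileRecord(char *file_line, int *id, std::string *name) {
      std::vector<char*> tokens;
      // Tokenize writes NUL terminators into file_line (hence the
      // char*, not const std::string& -- the header comment at
      // tokenize.h:57 above refers to making such a mutable copy).
      if (!google_breakpad::Tokenize(file_line, kWhitespace, 2, &tokens))
        return false;  // Wrong token count: malformed record.
      *id = std::atoi(tokens[0]);
      *name = tokens[1];
      return true;
    }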
  /external/vixl/src/vixl/a64/
debugger-a64.cc
  61: static Token* Tokenize(const char* arg);
  94: static Token* Tokenize(const char* arg);
  116: static Token* Tokenize(const char* arg);
  140: static Token* Tokenize(const char* arg);
  158: static Token* Tokenize(const char* arg);
  175: static Token* Tokenize(const char* arg);
  207: static Token* Tokenize(const char* arg);
  805: Token* Token::Tokenize(const char* arg) {
  810: // The order is important. For example Identifier::Tokenize would consider
  813: Token* token = RegisterToken::Tokenize(arg)
  [all...]
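The definition at line 805 is the front door to a chain of per-class recognizers: each of the seven Token subclasses declared at lines 61-207 exposes its own static Tokenize(const char*), and the base-class Tokenize tries them in priority order, taking the first non-NULL result. A sketch of that dispatch shape with toy recognizers (everything beyond RegisterToken, the ordering comment at line 810, and the NULL-on-no-match convention is an assumption here):

    #include <cctype>
    #include <cstddef>

    class Token {
     public:
      virtual ~Token() {}
      // Returns a fresh token, or NULL if arg is not recognized.
      static Token* Tokenize(const char* arg);
    };

    class RegisterToken : public Token {
     public:
      static Token* Tokenize(const char* arg) {
        // Toy recognizer: names like "x0", "x19".
        if (arg[0] == 'x' && std::isdigit(static_cast<unsigned char>(arg[1])))
          return new RegisterToken();
        return NULL;
      }
    };

    class IdentifierToken : public Token {
     public:
      static Token* Tokenize(const char* arg) {
        // Greedy: accepts any purely alphanumeric symbol -- including
        // "x0", which is why the ordering below matters.
        for (const char* p = arg; *p != '\0'; ++p) {
          if (!std::isalnum(static_cast<unsigned char>(*p))) return NULL;
        }
        return new IdentifierToken();
      }
    };

    Token* Token::Tokenize(const char* arg) {
      if (arg == NULL || *arg == '\0') return NULL;
      // The order is important (debugger-a64.cc:810): the specific
      // recognizer must get first refusal before the greedy one.
      if (Token* token = RegisterToken::Tokenize(arg)) return token;
      if (Token* token = IdentifierToken::Tokenize(arg)) return token;
      return NULL;
    }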
  /external/chromium-trace/catapult/common/py_utils/py_utils/refactor/
offset_token.py
  8: import tokenize
  55: def Tokenize(f):
  66: tokenize_tokens = tokenize.generate_tokens(f.readline)
  90: while offset_tokens[0].type == tokenize.NL:
  100: # Convert OffsetTokens to tokenize tokens.
  113: # tokenize can't handle whitespace before line continuations.
  115: return tokenize.untokenize(tokenize_tokens).replace('\\\n', ' \\\n')
snippet.py
  9: import tokenize
  206: tokens = offset_token.Tokenize(f)
  216: # by the tokenize module to annotate the syntax tree with the information
  228: tokens[0].type == tokenize.COMMENT or tokens[0].type == tokenize.NL):
  232: # tokenize has 0 or 1 depending on if the file has one.
  /sdk/find_java2/src/
JavaFinder.cpp
  274: tok = pathTokens.Tokenize(_T(";"), curPos);
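This hit is MFC/ATL's CString::Tokenize, which returns successive tokens while advancing its int& cursor, and yields an empty string (setting the cursor to -1) once the input is exhausted. A sketch of the loop such a call typically sits in, here splitting a PATH-style value on ';' (only the Tokenize call itself appears in the hit; the surrounding names are assumptions):

    #include <tchar.h>
    #include <atlstr.h>  // CString (ATL); MFC builds get it via afx headers

    void VisitPathEntries(const CString& pathTokens) {
      int curPos = 0;
      CString tok = pathTokens.Tokenize(_T(";"), curPos);
      while (!tok.IsEmpty()) {
        // ... e.g. probe tok for a Java installation ...
        tok = pathTokens.Tokenize(_T(";"), curPos);
      }
    }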
  /external/google-breakpad/src/testing/gtest/scripts/
pump.py
  382: def Tokenize(s):
  579: tokens = list(Tokenize(pump_src_text))
  /external/protobuf/gtest/scripts/
pump.py
  376: def Tokenize(s):
  571: for token in Tokenize(s):
  /external/vulkan-validation-layers/tests/gtest-1.7.0/scripts/
pump.py
  382: def Tokenize(s):
  579: tokens = list(Tokenize(pump_src_text))
  /packages/apps/Test/connectivity/sl4n/rapidjson/include/rapidjson/
reader.h
  [all...]
  /prebuilts/tools/common/m2/repository/net/sourceforge/saxon/saxon/9.1.0.8/
saxon-9.1.0.8.jar 
