/external/google-breakpad/src/processor/ |
tokenize.h |
    30  // Implements a Tokenize function for splitting up strings.
    50  // Tokenize, but may be treated as a failure if the caller expects an
    53  bool Tokenize(char *line,
    57  // For convenience, since you need a char* to pass to Tokenize.
|
tokenize.cc | 45 bool Tokenize(char *line,
|
basic_source_line_resolver.cc |
    50  #include "processor/tokenize.h"
   471  if (!Tokenize(file_line, kWhitespace, 2, &tokens)) {
   499  if (!Tokenize(function_line, kWhitespace, 4, &tokens)) {
   531  if (!Tokenize(line_line, kWhitespace, 4, &tokens)) {
   580  if (!Tokenize(public_line, kWhitespace, 3, &tokens)) {
|
windows_frame_info.h | 50 #include "processor/tokenize.h" 126 if (!Tokenize(&buffer[0], " \r\n", 11, &tokens))
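
The Breakpad symbol parser above splits each symbol-file line in place and checks that the expected number of fields came back. Below is a minimal sketch of that calling convention, assuming the four-argument signature suggested by the call sites (mutable line, separator set, expected token count, output vector of char pointers); it is a hedged illustration, not the Breakpad implementation.

// Sketch only, assuming the signature implied by the call sites above;
// this is not the Breakpad implementation.
#include <cstring>   // strtok_r (POSIX)
#include <iostream>
#include <vector>

static const char kWhitespace[] = " \r\n";

bool Tokenize(char* line, const char* separators, int max_tokens,
              std::vector<char*>* tokens) {
  tokens->clear();
  char* save = nullptr;
  for (char* token = strtok_r(line, separators, &save);
       token != nullptr && static_cast<int>(tokens->size()) < max_tokens;
       token = strtok_r(nullptr, separators, &save)) {
    tokens->push_back(token);
  }
  // Callers like basic_source_line_resolver.cc treat "fewer tokens than
  // expected" as a parse failure, hence the bool return.
  return static_cast<int>(tokens->size()) == max_tokens;
}

int main() {
  char file_line[] = "1 subdir/file.cc";
  std::vector<char*> tokens;
  if (!Tokenize(file_line, kWhitespace, 2, &tokens)) {
    std::cerr << "unexpected token count\n";
    return 1;
  }
  std::cout << "index=" << tokens[0] << " name=" << tokens[1] << "\n";
  return 0;
}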
|
/external/libtextclassifier/smartselect/ |
tokenizer.h | 39 std::vector<Token> Tokenize(const std::string& utf8_text) const;
|
tokenizer.cc | 67 std::vector<Token> Tokenizer::Tokenize(const std::string& utf8_text) const {
|
feature-processor.h | 114 std::vector<Token> Tokenize(const std::string& utf8_text) const;
|
feature-processor.cc |
   183  std::vector<Token> FeatureProcessor::Tokenize(
   187  return tokenizer_.Tokenize(utf8_text);
   204  return tokenizer_.Tokenize(utf8_text);
   546  *tokens = Tokenize(context);
   743  // There is no span to tokenize.
   760  std::vector<Token> tokens = tokenizer_.Tokenize(text);
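
From the signatures above, Tokenizer::Tokenize turns UTF-8 text into a vector of Token spans and FeatureProcessor::Tokenize simply forwards to its tokenizer_ member. The following is a hedged sketch of that shape; the Token fields, byte-offset spans, and whitespace-only splitting are assumptions of the sketch, not the library's actual rules (the real tokenizer is driven by codepoint-range configuration).

// Sketch only: the Token fields, byte-offset spans, and whitespace splitting
// are assumptions; the real tokenizer is configured with codepoint ranges.
#include <iostream>
#include <string>
#include <vector>

struct Token {
  std::string value;
  int start;  // span into the input text (byte offsets in this sketch)
  int end;
};

class Tokenizer {
 public:
  std::vector<Token> Tokenize(const std::string& utf8_text) const {
    std::vector<Token> tokens;
    size_t begin = 0;
    for (size_t i = 0; i <= utf8_text.size(); ++i) {
      if (i == utf8_text.size() || utf8_text[i] == ' ') {
        if (i > begin) {
          tokens.push_back({utf8_text.substr(begin, i - begin),
                            static_cast<int>(begin), static_cast<int>(i)});
        }
        begin = i + 1;
      }
    }
    return tokens;
  }
};

class FeatureProcessor {
 public:
  std::vector<Token> Tokenize(const std::string& utf8_text) const {
    return tokenizer_.Tokenize(utf8_text);  // forwards, as at lines 187/204
  }

 private:
  Tokenizer tokenizer_;
};

int main() {
  FeatureProcessor feature_processor;
  for (const Token& t : feature_processor.Tokenize("one, two, three")) {
    std::cout << t.value << " [" << t.start << "," << t.end << ")\n";
  }
  return 0;
}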
|
/frameworks/base/tools/aapt2/java/ |
AnnotationProcessor.cpp | 73 for (StringPiece line : util::Tokenize(comment, '\n')) { 88 for (StringPiece line : util::Tokenize(result, '\n')) {
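
aapt2's util::Tokenize is used above as a range: the call returns an object that a range-for walks, yielding one StringPiece per separator-delimited piece. Here is a rough sketch of that usage pattern, approximating StringPiece with std::string_view and returning the pieces eagerly rather than lazily; it is not the aapt2 implementation.

// Hedged sketch of the iteration pattern above; StringPiece is approximated
// with std::string_view, and the pieces are produced eagerly for simplicity.
#include <iostream>
#include <string_view>
#include <vector>

namespace util {

std::vector<std::string_view> Tokenize(std::string_view str, char sep) {
  std::vector<std::string_view> pieces;
  size_t start = 0;
  while (true) {
    size_t pos = str.find(sep, start);
    pieces.push_back(str.substr(start, pos - start));
    if (pos == std::string_view::npos) break;
    start = pos + 1;
  }
  return pieces;
}

}  // namespace util

int main() {
  std::string_view comment = "line one\nline two\nline three";
  for (std::string_view line : util::Tokenize(comment, '\n')) {
    std::cout << "* " << line << "\n";
  }
  return 0;
}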
|
/system/tools/hidl/utils/ |
StringHelper.cpp |
    64  void StringHelper::Tokenize(const std::string &in,
   103  Tokenize(in, &components);
   118  Tokenize(in, &components);
   127  Tokenize(in, &components);
   136  Tokenize(in, &components);
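
The two-argument form visible above, StringHelper::Tokenize(in, &components), fills a caller-provided vector with the pieces of the input. A rough sketch of that shape follows; the splitting rule (underscores only) is an assumption of the sketch, while in the hidl sources the callers are case-conversion helpers, so the real routine breaks identifiers into words.

// Sketch only: the underscore delimiter is assumed; the real StringHelper
// splits identifiers into words for its case-conversion helpers.
#include <iostream>
#include <string>
#include <vector>

struct StringHelper {
  static void Tokenize(const std::string& in, std::vector<std::string>* vec) {
    vec->clear();
    std::string current;
    for (char c : in) {
      if (c == '_') {  // assumed delimiter for this sketch
        if (!current.empty()) vec->push_back(current);
        current.clear();
      } else {
        current.push_back(c);
      }
    }
    if (!current.empty()) vec->push_back(current);
  }
};

int main() {
  std::vector<std::string> components;
  StringHelper::Tokenize("hidl_memory_token", &components);
  for (const std::string& c : components) std::cout << c << "\n";
  return 0;
}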
|
/external/vixl/src/aarch64/ |
debugger-aarch64.cc |
    63  static Token* Tokenize(const char* arg);
   100  static Token* Tokenize(const char* arg);
   122  static Token* Tokenize(const char* arg);
   148  static Token* Tokenize(const char* arg);
   166  static Token* Tokenize(const char* arg);
   183  static Token* Tokenize(const char* arg);
   216  static Token* Tokenize(const char* arg);
   831  Token* Token::Tokenize(const char* arg) {
   836  // The order is important. For example Identifier::Tokenize would consider
   839  Token* token = RegisterToken::Tokenize(arg)
        [all...]
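
The vixl debugger declares a static Tokenize(const char*) on each token class and, in Token::Tokenize at line 831, tries them in a fixed order until one matches, which is why the comment in the listing stresses that the catch-all identifier class must come last. Below is a simplified sketch of that dispatch pattern; the token classes and matching rules are made-up stand-ins, not the vixl ones.

// Sketch of the "try each class in order" dispatch; classes and rules are
// simplified stand-ins for the vixl token hierarchy.
#include <cctype>
#include <iostream>
#include <memory>
#include <string>

struct Token {
  virtual ~Token() = default;
  virtual std::string Describe() const = 0;
  static Token* Tokenize(const char* arg);
};

struct RegisterToken : Token {
  explicit RegisterToken(std::string n) : name(std::move(n)) {}
  std::string name;
  std::string Describe() const override { return "register " + name; }
  static Token* Tokenize(const char* arg) {
    // Assumed rule: 'x' or 'w' followed by a digit names a register.
    if ((arg[0] == 'x' || arg[0] == 'w') &&
        std::isdigit(static_cast<unsigned char>(arg[1]))) {
      return new RegisterToken(arg);
    }
    return nullptr;
  }
};

struct IdentifierToken : Token {
  explicit IdentifierToken(std::string n) : name(std::move(n)) {}
  std::string name;
  std::string Describe() const override { return "identifier " + name; }
  static Token* Tokenize(const char* arg) { return new IdentifierToken(arg); }
};

// The order is important: IdentifierToken::Tokenize accepts anything, so it
// has to be the last candidate, mirroring the comment in the listing.
Token* Token::Tokenize(const char* arg) {
  Token* token = RegisterToken::Tokenize(arg);
  if (token != nullptr) return token;
  return IdentifierToken::Tokenize(arg);
}

int main() {
  std::unique_ptr<Token> a(Token::Tokenize("x0"));
  std::unique_ptr<Token> b(Token::Tokenize("main"));
  std::cout << a->Describe() << "\n" << b->Describe() << "\n";
  return 0;
}
|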
/external/libtextclassifier/tests/ |
feature-processor_test.cc |
   224  std::vector<Token> tokens = feature_processor.Tokenize("one, two, three");
   257  tokens = feature_processor3.Tokenize("zero, one, two, three, four");
   365  1, feature_processor.Tokenize("aaa bbb ccc")),
   368  1, feature_processor.Tokenize("aaa bbb ???")),
   371  1, feature_processor.Tokenize("??? ??? ???")),
   538  std::vector<Token> tokens = feature_processor.Tokenize("???????????????????");
   557  feature_processor.Tokenize("?????? ?????? ??? ?? ??");
   601  std::vector<Token> tokens = feature_processor.Tokenize(
|
tokenizer_test.cc |
   105  std::vector<Token> tokens = tokenizer.Tokenize("Hello world!");
   238  tokens = tokenizer.Tokenize(
   242  tokens = tokenizer.Tokenize("??? hello ???????");
|
/system/tools/hidl/utils/include/hidl-util/ |
StringHelper.h | 85 static void Tokenize(const std::string &in,
|
/frameworks/base/tools/aapt2/util/ |
Util_test.cpp |
    84  auto tokenizer = util::Tokenize(StringPiece("this| is|the|end"), '|');
    98  auto tokenizer = util::Tokenize(StringPiece(""), '|');
   107  auto tokenizer = util::Tokenize(StringPiece("one."), '.');
|
Util.cpp | 124 for (const StringPiece& piece : Tokenize(str, '.')) { 148 for (const StringPiece& piece : Tokenize(str, '.')) {
|
Files.cpp | 159 for (StringPiece part : util::Tokenize(package, '.')) { 208 for (StringPiece line : util::Tokenize(contents, ' ')) {
|
/external/chromium-trace/catapult/common/py_utils/py_utils/refactor/ |
offset_token.py |
     8  import tokenize
    55  def Tokenize(f):
    66  tokenize_tokens = tokenize.generate_tokens(f.readline)
    90  while offset_tokens[0].type == tokenize.NL:
   100  # Convert OffsetTokens to tokenize tokens.
   113  # tokenize can't handle whitespace before line continuations.
   115  return tokenize.untokenize(tokenize_tokens).replace('\\\n', ' \\\n')
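
offset_token.py builds on Python's standard-library tokenize module: Tokenize(f) feeds f.readline to tokenize.generate_tokens(), wraps the results as OffsetTokens, and later rebuilds source with tokenize.untokenize(). The snippet below is a small self-contained illustration of that stdlib round trip (Python 3); the OffsetToken bookkeeping itself is omitted.

# Stdlib-only illustration of the generate_tokens()/untokenize() round trip
# that offset_token.py relies on; the OffsetToken wrapper is omitted.
import io
import tokenize

source = "x = 1\ny = x + 2\n"

# tokenize.generate_tokens() wants a readline callable, just like Tokenize(f).
tokens = list(tokenize.generate_tokens(io.StringIO(source).readline))

for tok in tokens:
    print(tok.type, tokenize.tok_name[tok.type], repr(tok.string))

# untokenize() rebuilds equivalent source text from the token stream.
rebuilt = tokenize.untokenize(tokens)
assert rebuilt == source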
|
/prebuilts/go/darwin-x86/src/cmd/asm/internal/asm/ |
expr_test.go | 62 p.start(lex.Tokenize(test.input)) 121 p.start(lex.Tokenize(test.input))
|
pseudo_test.go |
    15  func tokenize(s string) [][]lex.Token {
    21  res = append(res, lex.Tokenize(o))
    60  if !parser.pseudo(test.pseudo, tokenize(test.operands)) {
|
/prebuilts/go/linux-x86/src/cmd/asm/internal/asm/ |
expr_test.go | 62 p.start(lex.Tokenize(test.input)) 121 p.start(lex.Tokenize(test.input))
|
pseudo_test.go |
    15  func tokenize(s string) [][]lex.Token {
    21  res = append(res, lex.Tokenize(o))
    60  if !parser.pseudo(test.pseudo, tokenize(test.operands)) {
|
/frameworks/base/tools/aapt2/ |
Flags.cpp | 125 for (StringPiece line : util::Tokenize(flag.description, '\n')) {
|
/prebuilts/go/darwin-x86/src/cmd/asm/internal/lex/ |
lex.go | 145 // Tokenize turns a string into a list of Tokens; used to parse the -D flag and in tests. 146 func Tokenize(str string) []Token {
|
/prebuilts/go/linux-x86/src/cmd/asm/internal/lex/ |
lex.go | 145 // Tokenize turns a string into a list of Tokens; used to parse the -D flag and in tests. 146 func Tokenize(str string) []Token {
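
lex.Tokenize above turns a string into a []Token for parsing the -D flag and for the assembler tests, but it lives in an internal package and cannot be imported directly. The following is a stand-alone sketch of the same shape using text/scanner; the Token struct and the scanner-based lexing are assumptions of the sketch, not the cmd/asm implementation.

// Sketch of a Tokenize(str) []Token in the spirit of lex.Tokenize; the Token
// type and text/scanner lexing are assumptions, not the cmd/asm lexer.
package main

import (
	"fmt"
	"strings"
	"text/scanner"
)

// Token pairs a scanner token class with its text; it stands in for lex.Token.
type Token struct {
	Kind rune
	Text string
}

// Tokenize turns a string into a list of Tokens.
func Tokenize(str string) []Token {
	var s scanner.Scanner
	s.Init(strings.NewReader(str))
	var tokens []Token
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		tokens = append(tokens, Token{Kind: tok, Text: s.TokenText()})
	}
	return tokens
}

func main() {
	for _, t := range Tokenize("MOVQ $42, AX") {
		fmt.Printf("%-10s %q\n", scanner.TokenString(t.Kind), t.Text)
	}
}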
|