/external/eclipse-basebuilder/basebuilder-3.6.2/org.eclipse.releng.basebuilder/plugins/org.eclipse.test.performance.ui/src/org/eclipse/test/performance/ui/GenerateResults.java
    389   StringTokenizer tokenizer = new StringTokenizer(configProperties, ";");
    391   while (tokenizer.hasMoreTokens()) {
    392   String labelDescriptor = tokenizer.nextToken();
    [all...]
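Several of the Java hits in this list use java.util.StringTokenizer to split a delimiter-separated string. A minimal sketch of the pattern visible in the GenerateResults.java snippet above (the sample input string is an illustrative assumption, not a value from the source):

    import java.util.StringTokenizer;

    public class ConfigSplitter {
        public static void main(String[] args) {
            // Hypothetical input; the real configProperties value is read elsewhere.
            String configProperties = "linux,eclipse;win32,eclipse";

            // Split on ';', as in the snippet: each token is one config descriptor.
            StringTokenizer tokenizer = new StringTokenizer(configProperties, ";");
            while (tokenizer.hasMoreTokens()) {
                String labelDescriptor = tokenizer.nextToken();
                System.out.println(labelDescriptor);
            }
        }
    }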
/external/v8/benchmarks/earley-boyer.js
    [all...]
/external/apache-xml/src/main/java/org/apache/xml/serializer/dom3/LSSerializerImpl.java
    1467  StringTokenizer tokenizer = new StringTokenizer(origPath, "%");
    [all...]
/external/chromium/net/http/http_auth_handler_digest.cc
    217   // Check if tokenizer failed.
/external/chromium_org/net/http/http_auth_handler_digest.cc
    213   // Check if tokenizer failed.
/external/chromium_org/third_party/WebKit/Source/devtools/front_end/cm/javascript.js
    8     // Tokenizer
/external/chromium_org/third_party/skia/src/animator/SkScriptRuntime.cpp
    16    // script tokenizer
/external/chromium_org/third_party/sqlite/src/ext/fts3/fts3Int.h
    126   sqlite3_tokenizer *pTokenizer;  /* tokenizer for inserts and queries */
/external/chromium_org/third_party/sqlite/src/test/misc2.test
    368   # Ticket #453. If the SQL ended with "-", the tokenizer was calling that
/external/llvm/include/llvm/Support/YAMLParser.h
    68    /// for benchmarking the tokenizer.
/external/llvm/lib/Support/CommandLine.cpp
    607   TokenizerCallback Tokenizer,
    624   Tokenizer(Str, Saver, NewArgv);
    631   bool cl::ExpandResponseFiles(StringSaver &Saver, TokenizerCallback Tokenizer,
    654   if (!ExpandResponseFile(Arg + 1, Saver, Tokenizer, ExpandedArgv)) {
    [all...]
/external/skia/src/animator/SkScriptRuntime.cpp
    16    // script tokenizer
/libcore/luni/src/main/java/org/apache/harmony/xml/parsers/DocumentBuilderImpl.java
    162   * actually more of a tokenizer, and we are doing a classical recursive
/packages/apps/Mms/src/com/android/mms/ui/RecipientsEditor.java
    387   implements MultiAutoCompleteTextView.Tokenizer {
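RecipientsEditor implements MultiAutoCompleteTextView.Tokenizer, the Android interface that tells a MultiAutoCompleteTextView where one completion token starts and ends and how to terminate it. A minimal sketch of such an implementation for comma-separated tokens (an illustrative tokenizer, not the one in RecipientsEditor, which also handles recipient-specific cases):

    import android.widget.MultiAutoCompleteTextView;

    // Illustrative comma tokenizer, similar in spirit to the framework's
    // MultiAutoCompleteTextView.CommaTokenizer.
    public class SimpleCommaTokenizer implements MultiAutoCompleteTextView.Tokenizer {
        @Override
        public int findTokenStart(CharSequence text, int cursor) {
            int i = cursor;
            // Walk backwards until just past the previous comma.
            while (i > 0 && text.charAt(i - 1) != ',') {
                i--;
            }
            // Skip whitespace at the start of the token.
            while (i < cursor && text.charAt(i) == ' ') {
                i++;
            }
            return i;
        }

        @Override
        public int findTokenEnd(CharSequence text, int cursor) {
            int i = cursor;
            // The token runs to the next comma, or to the end of the text.
            while (i < text.length()) {
                if (text.charAt(i) == ',') {
                    return i;
                }
                i++;
            }
            return text.length();
        }

        @Override
        public CharSequence terminateToken(CharSequence text) {
            int i = text.length();
            // Trim trailing spaces before checking for an existing comma.
            while (i > 0 && text.charAt(i - 1) == ' ') {
                i--;
            }
            if (i > 0 && text.charAt(i - 1) == ',') {
                return text; // already terminated
            }
            return text + ", ";
        }
    }

A production implementation would typically return a spannable from terminateToken so existing text spans survive; the plain concatenation here keeps the sketch short.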
/prebuilts/python/darwin-x86/2.7.5/lib/python2.7/tokenize.py
    14    It is designed to match the working of the Python tokenizer exactly, except
/prebuilts/python/linux-x86/2.7.5/lib/python2.7/tokenize.py
    14    It is designed to match the working of the Python tokenizer exactly, except
/external/apache-xml/src/main/java/org/apache/xml/serializer/ToStream.java
    2783  StringTokenizer tokenizer =
    [all...]
/external/protobuf/src/Makefile.in
    118   tokenizer.lo zero_copy_stream_impl.lo importer.lo parser.lo
    319   google/protobuf/io/tokenizer.h \
    517   google/protobuf/io/tokenizer.h \
    576   google/protobuf/io/tokenizer.cc \
    [all...]
/external/chromium_org/net/cookies/cookie_store_unittest.h
    237   base::StringTokenizer tokenizer(line, " ;");
    238   while (tokenizer.GetNext())
    239   EXPECT_TRUE(tokens.insert(tokenizer.token()).second);
    [all...]
/external/chromium_org/third_party/sqlite/fts2.patch
    54    ** tokenizer.
    978   + * The ICU tokenizer considers '*' a break character, so the code below
    980   + * ICU tokenizer returns it as the next token. So eat it here until a
    [all...]
/external/v8/tools/test.py
    890   class Tokenizer(object):
    891   """A simple string tokenizer that chops expressions into variables,
    1047  tokens = Tokenizer(expr).Tokenize()
    [all...]
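The Tokenizer class in v8's tools/test.py chops a test-status expression into variables and operators before parsing. A rough Java sketch of the same idea (the token categories and the sample expression are assumptions for illustration, not the script's actual grammar):

    import java.util.ArrayList;
    import java.util.List;

    public class ExprTokenizer {
        public static List<String> tokenize(String expr) {
            List<String> tokens = new ArrayList<>();
            int i = 0;
            while (i < expr.length()) {
                char c = expr.charAt(i);
                if (Character.isWhitespace(c)) {
                    i++; // skip spaces between tokens
                } else if (Character.isLetterOrDigit(c) || c == '_') {
                    // A variable or bare word: letters, digits, underscores.
                    int start = i;
                    while (i < expr.length()
                            && (Character.isLetterOrDigit(expr.charAt(i))
                                || expr.charAt(i) == '_')) {
                        i++;
                    }
                    tokens.add(expr.substring(start, i));
                } else {
                    // An operator run such as ==, &&, ||.
                    int start = i;
                    while (i < expr.length() && "=&|!<>".indexOf(expr.charAt(i)) >= 0) {
                        i++;
                    }
                    if (i == start) {
                        i++; // unknown single character, emit as-is
                    }
                    tokens.add(expr.substring(start, i));
                }
            }
            return tokens;
        }

        public static void main(String[] args) {
            // Prints: [arch, ==, x64, &&, mode, ==, release]
            System.out.println(tokenize("arch == x64 && mode == release"));
        }
    }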
/prebuilts/python/darwin-x86/2.7.5/lib/python2.7/config/Makefile
    275   PARSER_OBJS= $(POBJS) Parser/myreadline.o Parser/tokenizer.o
    295   Parser/tokenizer.h
    632   Parser/tokenizer_pgen.o: $(srcdir)/Parser/tokenizer.c
    [all...]
/prebuilts/python/linux-x86/2.7.5/lib/python2.7/config/Makefile
    276   PARSER_OBJS= $(POBJS) Parser/myreadline.o Parser/tokenizer.o
    296   Parser/tokenizer.h
    633   Parser/tokenizer_pgen.o: $(srcdir)/Parser/tokenizer.c
    [all...]
/external/eclipse-basebuilder/basebuilder-3.6.2/org.eclipse.releng.basebuilder/plugins/org.eclipse.build.tools/buildTools.jar
/external/chromium_org/third_party/yasm/source/patched-yasm/modules/parsers/gas/gas-parse.c
    418   /* File name is not in quotes, so need to switch to a different tokenizer
    451   /* We need to poke back on the \n that was consumed by the tokenizer */
    [all...]