/external/chromium/net/base/
  pem_tokenizer_unittest.cc
    20: PEMTokenizer tokenizer(string_piece, accepted_types);
    21: EXPECT_TRUE(tokenizer.GetNext());
    23: EXPECT_EQ("EXPECTED-BLOCK", tokenizer.block_type());
    24: EXPECT_EQ("MatchesAcceptedBlockType", tokenizer.data());
    26: EXPECT_FALSE(tokenizer.GetNext());
    38: PEMTokenizer tokenizer(string_piece, accepted_types);
    39: EXPECT_TRUE(tokenizer.GetNext());
    41: EXPECT_EQ("EXPECTED-BLOCK", tokenizer.block_type());
    42: EXPECT_EQ("MatchesAcceptedBlockType", tokenizer.data());
    44: EXPECT_FALSE(tokenizer.GetNext())
    [all...]

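The hits above exercise Chromium's PEMTokenizer, which walks a string and yields only the PEM blocks whose labels appear in an accepted-types list. A minimal usage sketch follows; the net/base/pem_tokenizer.h header path, the net namespace, and the exact constructor signature are assumptions inferred from the snippet rather than checked against this exact revision.

    #include <cstdio>
    #include <string>
    #include <vector>

    #include "net/base/pem_tokenizer.h"  // assumed header location in this snapshot

    // Print every accepted PEM block found in |input|.
    void DumpCertificateBlocks(const std::string& input) {
      std::vector<std::string> accepted_types;
      accepted_types.push_back("CERTIFICATE");

      net::PEMTokenizer tokenizer(input, accepted_types);
      while (tokenizer.GetNext()) {
        // block_type() is the label between "-----BEGIN ..." and "-----END ...";
        // data() is the decoded payload of that block.
        printf("%s: %zu bytes\n",
               tokenizer.block_type().c_str(), tokenizer.data().size());
      }
    }
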
/external/protobuf/src/google/protobuf/io/
  tokenizer_unittest.cc
    39: #include <google/protobuf/io/tokenizer.h>
    182: EXPECT_TRUE(Tokenizer::ParseInteger(text, kuint64max, &result));
    197: Tokenizer::TokenType type;
    207: { "hello", Tokenizer::TYPE_IDENTIFIER },
    210: { "123", Tokenizer::TYPE_INTEGER },
    211: { "0xab6", Tokenizer::TYPE_INTEGER },
    212: { "0XAB6", Tokenizer::TYPE_INTEGER },
    213: { "0X1234567", Tokenizer::TYPE_INTEGER },
    214: { "0x89abcdef", Tokenizer::TYPE_INTEGER },
    215: { "0x89ABCDEF", Tokenizer::TYPE_INTEGER }
    [all...]

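This unit test covers google::protobuf::io::Tokenizer, the lexer protobuf uses for .proto files and text format. A hedged sketch of driving it directly over an in-memory buffer follows; the ErrorCollector::AddError signature is assumed from the protobuf 2.x-era tokenizer.h referenced above.

    #include <cstdio>
    #include <string>

    #include <google/protobuf/io/tokenizer.h>
    #include <google/protobuf/io/zero_copy_stream_impl_lite.h>

    using google::protobuf::io::ArrayInputStream;
    using google::protobuf::io::ErrorCollector;
    using google::protobuf::io::Tokenizer;

    // Reports lexer errors to stderr; AddError() is the only pure-virtual method.
    class StderrErrorCollector : public ErrorCollector {
     public:
      void AddError(int line, int column, const std::string& message) {
        fprintf(stderr, "%d:%d: %s\n", line, column, message.c_str());
      }
    };

    int main() {
      const std::string input = "hello 123 0xab6";
      ArrayInputStream stream(input.data(), static_cast<int>(input.size()));
      StderrErrorCollector errors;
      Tokenizer tokenizer(&stream, &errors);

      // Next() advances to the following token; current() exposes its type and text.
      while (tokenizer.Next()) {
        printf("type=%d text=%s\n",
               static_cast<int>(tokenizer.current().type),
               tokenizer.current().text.c_str());
      }
      return 0;
    }
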
/external/doclava/src/com/google/doclava/apicheck/
  ApiFile.java
    67: final Tokenizer tokenizer = new Tokenizer(filename, (new String(buf, 0, size)).toCharArray()); local
    71: String token = tokenizer.getToken();
    76: parsePackage(api, tokenizer);
    78: throw new ApiParseException("expected package got " + token, tokenizer.getLine());
    88: private static void parsePackage(ApiInfo api, Tokenizer tokenizer)
    94: token = tokenizer.requireToken();
    95: assertIdent(tokenizer, token)
    [all...]

/frameworks/native/libs/utils/
  Tokenizer.cpp
    17: #define LOG_TAG "Tokenizer"
    26: #include <utils/Tokenizer.h>
    28: // Enables debug output for the tokenizer.
    38: Tokenizer::Tokenizer(const String8& filename, FileMap* fileMap, char* buffer,
    45: Tokenizer::~Tokenizer() {
    54: status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) {
    97: *outTokenizer = new Tokenizer(filename, fileMap, buffer, ownBuffer, length)
    [all...]

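This is the line-oriented ASCII tokenizer that the input-configuration parsers below (PropertyMap, VirtualKeyMap, KeyLayoutMap) are built on. A minimal consumer sketch, assuming the nextToken()/skipDelimiters()/nextLine()/isEol()/isEof() members declared in utils/Tokenizer.h; only open(), getFilename() and getLineNumber() are confirmed by the hits in this listing.

    #define LOG_TAG "TokenizerExample"

    #include <utils/Errors.h>
    #include <utils/Log.h>
    #include <utils/String8.h>
    #include <utils/Tokenizer.h>

    using namespace android;

    static const char* kWhitespace = " \t\r";

    // Logs every whitespace-separated token in |path|, one line at a time.
    status_t dumpTokens(const String8& path) {
        Tokenizer* tokenizer;
        status_t status = Tokenizer::open(path, &tokenizer);
        if (status != NO_ERROR) {
            return status;  // e.g. file not found
        }
        while (!tokenizer->isEof()) {
            tokenizer->skipDelimiters(kWhitespace);
            if (tokenizer->isEol()) {
                tokenizer->nextLine();
                continue;
            }
            String8 token = tokenizer->nextToken(kWhitespace);
            ALOGD("%s:%d: '%s'", tokenizer->getFilename().string(),
                    tokenizer->getLineNumber(), token.string());
        }
        delete tokenizer;
        return NO_ERROR;
    }
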
  PropertyMap.cpp
    121: Tokenizer* tokenizer; local
    122: status_t status = Tokenizer::open(filename, &tokenizer);
    134: Parser parser(map, tokenizer);
    139: tokenizer->getFilename().string(), tokenizer->getLineNumber(),
    148: delete tokenizer;
    156: PropertyMap::Parser::Parser(PropertyMap* map, Tokenizer* tokenizer)
    [all...]

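PropertyMap drives the Tokenizer above to parse simple key/value configuration files (for example input device .idc files). From the consumer side that looks roughly like the sketch below; load() and tryGetProperty() are recalled from utils/PropertyMap.h rather than shown in this listing, and the property key is purely illustrative.

    #define LOG_TAG "PropertyMapExample"

    #include <utils/Errors.h>
    #include <utils/Log.h>
    #include <utils/PropertyMap.h>
    #include <utils/String8.h>

    using namespace android;

    // Loads a key/value file and reads one string property from it.
    void loadDeviceConfig(const String8& path) {
        PropertyMap* map;
        status_t status = PropertyMap::load(path, &map);
        if (status != NO_ERROR) {
            ALOGE("Could not load %s, status=%d", path.string(), status);
            return;
        }
        String8 value;
        if (map->tryGetProperty(String8("device.internal"), value)) {  // illustrative key
            ALOGD("device.internal = %s", value.string());
        }
        delete map;
    }
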
/external/protobuf/python/google/protobuf/internal/
  text_format_test.py
    301: tokenizer = text_format._Tokenizer(text)
    302: methods = [(tokenizer.ConsumeIdentifier, 'identifier1'),
    304: (tokenizer.ConsumeString, 'string1'),
    305: (tokenizer.ConsumeIdentifier, 'identifier2'),
    307: (tokenizer.ConsumeInt32, 123),
    308: (tokenizer.ConsumeIdentifier, 'identifier3'),
    310: (tokenizer.ConsumeString, 'string'),
    311: (tokenizer.ConsumeIdentifier, 'identifiER_4'),
    313: (tokenizer.ConsumeFloat, 1.1e+2),
    314: (tokenizer.ConsumeIdentifier, 'ID5')
    [all...]

/external/antlr/antlr-3.4/runtime/ObjC/Framework/
  ANTLRTreePatternParser.m
    47: //tokenizer = aTokenizer;
    50: //ttype = [tokenizer nextToken]; // kickstart
    62: tokenizer = aTokenizer;
    63: if ( tokenizer ) [tokenizer retain];
    77: if ( tokenizer ) [tokenizer release];
    102: ttype = [tokenizer nextToken];
    127: ttype = [tokenizer nextToken];
    137: ttype = [tokenizer nextToken]
    [all...]

/packages/apps/Gallery2/src/com/android/gallery3d/data/
  Face.java
    34: StringTokenizer tokenizer = new StringTokenizer(rect); local
    36: while (tokenizer.hasMoreElements()) {
    37: mPosition.left = Integer.parseInt(tokenizer.nextToken());
    38: mPosition.top = Integer.parseInt(tokenizer.nextToken());
    39: mPosition.right = Integer.parseInt(tokenizer.nextToken());
    40: mPosition.bottom = Integer.parseInt(tokenizer.nextToken());

/frameworks/native/opengl/libagl/
  Tokenizer.cpp
    1: /* libs/opengles/Tokenizer.cpp
    20: #include "Tokenizer.h"
    26: ANDROID_BASIC_TYPES_TRAITS(Tokenizer::run_t)
    28: Tokenizer::Tokenizer()
    32: Tokenizer::Tokenizer(const Tokenizer& other)
    37: Tokenizer::~Tokenizer()
    [all...]
  Tokenizer.h
    1: /* libs/opengles/Tokenizer.h
    29: class Tokenizer
    32: Tokenizer();
    33: Tokenizer(const Tokenizer& other);
    34: ~Tokenizer();

/external/antlr/antlr-3.4/runtime/CSharp2/Sources/Antlr3.Runtime/Antlr.Runtime.Tree/
  TreePatternParser.cs
    37: protected TreePatternLexer tokenizer; field in class:Antlr.Runtime.Tree.TreePatternParser
    42: public TreePatternParser(TreePatternLexer tokenizer, TreeWizard wizard, ITreeAdaptor adaptor) {
    43: this.tokenizer = tokenizer;
    46: ttype = tokenizer.NextToken(); // kickstart
    66: ttype = tokenizer.NextToken();
    90: ttype = tokenizer.NextToken();
    98: ttype = tokenizer.NextToken();
    102: label = tokenizer.sval.ToString();
    103: ttype = tokenizer.NextToken()
    [all...]

/external/antlr/antlr-3.4/runtime/CSharp3/Sources/Antlr3.Runtime/Tree/
  TreePatternParser.cs
    39: protected TreePatternLexer tokenizer; field in class:Antlr.Runtime.Tree.TreePatternParser
    44: public TreePatternParser( TreePatternLexer tokenizer, TreeWizard wizard, ITreeAdaptor adaptor )
    46: this.tokenizer = tokenizer;
    49: ttype = tokenizer.NextToken(); // kickstart
    75: ttype = tokenizer.NextToken();
    105: ttype = tokenizer.NextToken();
    115: ttype = tokenizer.NextToken();
    120: label = tokenizer.sval.ToString();
    121: ttype = tokenizer.NextToken()
    [all...]

/external/antlr/antlr-3.4/runtime/Java/src/main/java/org/antlr/runtime/tree/
  TreePatternParser.java
    34: protected TreePatternLexer tokenizer; field in class:TreePatternParser
    39: public TreePatternParser(TreePatternLexer tokenizer, TreeWizard wizard, TreeAdaptor adaptor) {
    40: this.tokenizer = tokenizer;
    43: ttype = tokenizer.nextToken(); // kickstart
    64: ttype = tokenizer.nextToken();
    89: ttype = tokenizer.nextToken();
    97: ttype = tokenizer.nextToken();
    101: label = tokenizer.sval.toString();
    102: ttype = tokenizer.nextToken()
    [all...]

/external/webkit/Source/WebCore/manual-tests/
  open-close-tokenizer-crash.html
    3: <input type="button" value="Click Here" onclick="window.open('resources/open-close-tokenizer-crash.html', 'foo');">

/frameworks/base/include/androidfw/
  VirtualKeyMap.h
    25: #include <utils/Tokenizer.h>
    62: Tokenizer* mTokenizer;
    65: Parser(VirtualKeyMap* map, Tokenizer* tokenizer);
  KeyLayoutMap.h
    23: #include <utils/Tokenizer.h>
    92: Tokenizer* mTokenizer;
    95: Parser(KeyLayoutMap* map, Tokenizer* tokenizer);

/external/apache-xml/src/main/java/org/apache/xml/utils/
  StylesheetPIHandler.java
    152: StringTokenizer tokenizer = new StringTokenizer(data, " \t=\n", true); local
    157: while (tokenizer.hasMoreTokens())
    160: token = tokenizer.nextToken();
    163: if (tokenizer.hasMoreTokens() &&
    170: token = tokenizer.nextToken();
    171: while (tokenizer.hasMoreTokens() &&
    173: token = tokenizer.nextToken();
    179: token = tokenizer.nextToken();
    180: while (tokenizer.hasMoreTokens() &&
    182: token = tokenizer.nextToken()
    [all...]

/frameworks/native/include/utils/
  Tokenizer.h
    28: * A simple tokenizer for loading and parsing ASCII text files line by line.
    30: class Tokenizer {
    31: Tokenizer(const String8& filename, FileMap* fileMap, char* buffer,
    35: ~Tokenizer();
    40: * Returns NO_ERROR and a tokenizer for the file, if successful.
    43: static status_t open(const String8& filename, Tokenizer** outTokenizer);
    48: * Returns NO_ERROR and a tokenizer for the string, if successful.
    52: const char* contents, Tokenizer** outTokenizer);
    119: Tokenizer(const Tokenizer& other); // not copyabl
    [all...]

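Besides open(), the header declares a fromContents() factory (the hit at line 52 is its tail) for tokenizing a string that is already in memory. A sketch under that assumption; the leading filename argument and the nextToken()/skipDelimiters() calls are taken from the same header and from usages elsewhere in this listing.

    #include <utils/Errors.h>
    #include <utils/String8.h>
    #include <utils/Tokenizer.h>

    using namespace android;

    // Split one "key value" line held in memory, without touching the filesystem.
    void parseInlineLine() {
        static const char* kContents = "touch.deviceType touchScreen\n";  // illustrative content

        Tokenizer* tokenizer;
        if (Tokenizer::fromContents(String8("<inline>"), kContents, &tokenizer) != NO_ERROR) {
            return;
        }
        tokenizer->skipDelimiters(" \t\r");
        String8 key = tokenizer->nextToken(" \t\r");
        tokenizer->skipDelimiters(" \t\r");
        String8 value = tokenizer->nextToken(" \t\r");
        // key == "touch.deviceType", value == "touchScreen"
        delete tokenizer;
    }
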
/external/chromium/base/
  sys_info_chromeos.cc
    96: StringTokenizer tokenizer(version, ".");
    100: for (int i = 0; i < 4 && tokenizer.GetNext(); i++) {
    102: StringToInt(tokenizer.token_begin(),
    103: tokenizer.token_end(),
    107: StringToInt(tokenizer.token_begin(),
    108: tokenizer.token_end(),
    111: StringToInt(tokenizer.token_begin(),
    112: tokenizer.token_end(),

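base's StringTokenizer is a non-copying splitter: GetNext() advances to the next token and token()/token_begin()/token_end() expose it. A short sketch of the dotted-version split performed above, assuming the base/string_tokenizer.h header path of this Chromium snapshot (it later moved under base/strings/).

    #include <cstdio>
    #include <string>

    #include "base/string_tokenizer.h"

    // Print each dot-separated component of a Chrome OS style version string.
    void PrintVersionComponents(const std::string& version) {
      StringTokenizer tokenizer(version, ".");
      while (tokenizer.GetNext()) {
        // token() copies the current piece; token_begin()/token_end() avoid the copy.
        printf("component: %s\n", tokenizer.token().c_str());
      }
    }
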
/frameworks/base/libs/androidfw/
  VirtualKeyMap.cpp
    24: #include <utils/Tokenizer.h>
    51: Tokenizer* tokenizer; local
    52: status_t status = Tokenizer::open(filename, &tokenizer);
    64: Parser parser(map, tokenizer);
    69: tokenizer->getFilename().string(), tokenizer->getLineNumber(),
    78: delete tokenizer;
    86: VirtualKeyMap::Parser::Parser(VirtualKeyMap* map, Tokenizer* tokenizer)
    [all...]

/external/webkit/Source/WebCore/inspector/front-end/
  SourceTokenizer.js
    90: var tokenizer = this._tokenizers[tokenizerClass];
    91: if (!tokenizer) {
    92: tokenizer = new WebInspector[tokenizerClass]();
    93: this._tokenizers[mimeType] = tokenizer;
    95: return tokenizer;

/external/protobuf/python/google/protobuf/
  text_format.py
    136: tokenizer = _Tokenizer(text)
    137: while not tokenizer.AtEnd():
    138: _MergeField(tokenizer, message)
    141: def _MergeField(tokenizer, message):
    145: tokenizer: A tokenizer to parse the field name and values.
    152: if tokenizer.TryConsume('['):
    153: name = [tokenizer.ConsumeIdentifier()]
    154: while tokenizer.TryConsume('.'):
    155: name.append(tokenizer.ConsumeIdentifier()
    [all...]

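text_format.py's _Tokenizer backs protobuf's text-format parser on the Python side; the C++ runtime exposes the same functionality through google::protobuf::TextFormat. Since the sketches in this listing use C++, here is the C++ counterpart; MyMessage and my_message.pb.h stand in for any generated protobuf class and are hypothetical.

    #include <string>

    #include <google/protobuf/text_format.h>

    #include "my_message.pb.h"  // hypothetical generated header defining MyMessage

    // Parse a text-format message and serialize it back to text.
    bool RoundTrip(const std::string& text_proto, std::string* out) {
      MyMessage message;
      if (!google::protobuf::TextFormat::ParseFromString(text_proto, &message)) {
        return false;  // malformed text format
      }
      return google::protobuf::TextFormat::PrintToString(message, out);
    }
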
/external/webkit/Source/WebCore/dom/
  xml_expat_tokenizer.cpp
    145: void callAndRemoveFirstCallback(XMLTokenizer* tokenizer) {
    148: cb->call(tokenizer);
    159: virtual void call(XMLTokenizer* tokenizer) = 0;
    170: virtual void call(XMLTokenizer* tokenizer) {
    171: tokenizer->startElementNs(name, (const XML_Char**)(atts));
    180: virtual void call(XMLTokenizer* tokenizer) {
    181: tokenizer->endElementNs();
    190: virtual void call(XMLTokenizer* tokenizer) {
    191: tokenizer->characters(s, len);
    204: virtual void call(XMLTokenizer* tokenizer) {
    662: XMLTokenizer *tokenizer = static_cast<XMLTokenizer *>(userdata); local
    668: XMLTokenizer *tokenizer = static_cast<XMLTokenizer *>(userdata); local
    674: XMLTokenizer *tokenizer = static_cast<XMLTokenizer *>(userdata); local
    680: XMLTokenizer *tokenizer = static_cast<XMLTokenizer *>(userdata); local
    686: XMLTokenizer *tokenizer = static_cast<XMLTokenizer *>(userdata); local
    692: XMLTokenizer *tokenizer = static_cast<XMLTokenizer *>(userdata); local
    698: XMLTokenizer *tokenizer = static_cast<XMLTokenizer *>(userdata); local
    [all...]

/frameworks/base/test-runner/src/junit/runner/
  ClassPathTestCollector.java
    60: StringTokenizer tokenizer= new StringTokenizer(classPath, separator); local
    61: while (tokenizer.hasMoreTokens())
    62: result.addElement(tokenizer.nextToken());

/external/chromium/chrome/common/
  auto_start_linux.cc
    79: StringTokenizer tokenizer(contents, "\n");
    81: while (tokenizer.GetNext()) {
    82: if (tokenizer.token().substr(0, token.length()) == token) {
    83: *value = tokenizer.token().substr(token.length());

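The same StringTokenizer also works as a line splitter: tokenizing on "\n" and prefix-matching each line is how auto_start_linux.cc looks up a key in a .desktop file. A hedged sketch of that pattern, with the key name left to the caller:

    #include <string>

    #include "base/string_tokenizer.h"  // assumed path, as in the snippet above

    // Scan newline-separated "Key=Value" content for |key| and copy its value.
    bool FindValueForKey(const std::string& contents,
                         const std::string& key,
                         std::string* value) {
      const std::string prefix = key + "=";
      StringTokenizer tokenizer(contents, "\n");
      while (tokenizer.GetNext()) {
        const std::string line = tokenizer.token();
        if (line.compare(0, prefix.length(), prefix) == 0) {
          *value = line.substr(prefix.length());
          return true;
        }
      }
      return false;
    }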