/external/chromium_org/third_party/WebKit/Source/bindings/scripts/ |
blink_idl_lexer.py | 31 The lexer uses the PLY (Python Lex-Yacc) library to build a tokenizer which
|
/external/chromium_org/third_party/closure_linter/closure_linter/ |
full_test.py | 85 'tokenizer.js',
|
/external/protobuf/src/google/protobuf/compiler/ |
parser.h |
  47 #include <google/protobuf/io/tokenizer.h>
  73 bool Parse(io::Tokenizer* input, FileDescriptorProto* file);
  145 inline bool LookingAtType(io::Tokenizer::TokenType token_type);
  291 io::Tokenizer* input_;
|
/external/smack/src/org/xbill/DNS/ |
DSRecord.java | 89 rdataFromString(Tokenizer st, Name origin) throws IOException {
|
GPOSRecord.java | 94 rdataFromString(Tokenizer st, Name origin) throws IOException {
|
NAPTRRecord.java | 71 rdataFromString(Tokenizer st, Name origin) throws IOException {
|
NSEC3PARAMRecord.java | 93 rdataFromString(Tokenizer st, Name origin) throws IOException
|
SOARecord.java | 65 rdataFromString(Tokenizer st, Name origin) throws IOException {
|
TLSARecord.java | 102 rdataFromString(Tokenizer st, Name origin) throws IOException {
|
APLRecord.java |
  174 rdataFromString(Tokenizer st, Name origin) throws IOException {
  177 Tokenizer.Token t = st.get();
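All of the dnsjava record classes above parse their text-format rdata through the shared org.xbill.DNS.Tokenizer passed into rdataFromString(Tokenizer, Name). Below is a minimal sketch of that pattern using a hypothetical record class; it assumes the Tokenizer accessors getUInt16(), getUInt8(), and getHex() exist in this dnsjava version, and the field names are illustrative, not the actual implementation of any listed record.

    import java.io.IOException;
    import org.xbill.DNS.Name;
    import org.xbill.DNS.Tokenizer;

    // Hypothetical, DS-like record: real dnsjava records extend Record and
    // override rdataFromString(Tokenizer, Name); name-valued fields would
    // use the origin argument (e.g. st.getName(origin)).
    class ExampleDsLikeRecord {
        int footprint;   // key tag
        int alg;         // algorithm number
        int digestid;    // digest type
        byte[] digest;   // digest bytes

        void rdataFromString(Tokenizer st, Name origin) throws IOException {
            footprint = st.getUInt16();  // read a 16-bit integer token
            alg = st.getUInt8();         // read an 8-bit integer token
            digestid = st.getUInt8();
            digest = st.getHex();        // read the remaining hex string
        }
    }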
|
/frameworks/native/opengl/libagl/ |
TextureObjectManager.h | 36 #include "Tokenizer.h"
|
/packages/apps/UnifiedEmail/tests/src/com/android/mail/ |
EmailAddressTest.java | 96 // Strings that will fallthrough to the rfc822 tokenizer
|
/packages/providers/ContactsProvider/src/com/android/providers/contacts/ |
SearchIndexManager.java |
  408 * Token separator that matches SQLite's "simple" tokenizer.
  417 * Tokenize a string in the way as that of SQLite's "simple" tokenizer.
  431 * Tokenizes the query and normalizes/hex encodes each token. The tokenizer uses the same
  432 * rules as SQLite's "simple" tokenizer. Each token is added to the retokenizer and then
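The comments above describe splitting the query by the same rules as SQLite's "simple" FTS tokenizer and then hex-encoding each token. A rough, hypothetical Java sketch of that idea follows; it is not the ContactsProvider code, and it simplifies the rules to ASCII alphanumerics only (the real tokenizer also treats bytes above 0x7f as token characters).

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical helper: tokens are maximal runs of ASCII alphanumerics,
    // lower-cased, and each token is then hex-encoded.
    class SimpleTokenizerSketch {
        static List<String> tokenizeAndHexEncode(String query) {
            List<String> out = new ArrayList<String>();
            StringBuilder token = new StringBuilder();
            for (int i = 0; i <= query.length(); i++) {
                // Sentinel space at the end flushes the final token.
                char c = i < query.length() ? query.charAt(i) : ' ';
                boolean isTokenChar =
                        (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9');
                if (isTokenChar) {
                    token.append(Character.toLowerCase(c));
                } else if (token.length() > 0) {
                    out.add(hexEncode(token.toString()));
                    token.setLength(0);
                }
            }
            return out;
        }

        private static String hexEncode(String s) {
            StringBuilder sb = new StringBuilder();
            for (char c : s.toCharArray()) {
                sb.append(Integer.toHexString(c));
            }
            return sb.toString();
        }
    }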
|
/external/apache-harmony/luni/src/test/api/common/org/apache/harmony/luni/tests/java/util/ |
StringTokenizerTest.java |
  41 assertTrue("Created incorrect tokenizer", st.countTokens() == 5
  55 assertTrue("Created incorrect tokenizer", st.countTokens() == 8
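The Harmony test above only checks java.util.StringTokenizer's countTokens() against known inputs (the exact input strings are truncated in this listing). A minimal example of the same behavior, with an assumed input string that yields five whitespace-separated tokens:

    import java.util.StringTokenizer;

    public class StringTokenizerDemo {
        public static void main(String[] args) {
            // Default delimiters are whitespace, so this input has 5 tokens.
            StringTokenizer st = new StringTokenizer("This is a test String");
            System.out.println(st.countTokens());   // prints 5

            while (st.hasMoreTokens()) {
                System.out.println(st.nextToken()); // This / is / a / test / String
            }
        }
    }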
|
/external/chromium_org/third_party/WebKit/Source/core/html/parser/ |
HTMLDocumentParser.cpp |
  334 OwnPtr<HTMLTokenizer> tokenizer = m_tokenizer.release(); local
  337 if (!tokenizer) {
  343 // Currently we're only smart enough to reuse the speculation buffer if the tokenizer
  349 && tokenizer->state() == HTMLTokenizer::DataState
  356 discardSpeculationsAndResumeFrom(chunk, token.release(), tokenizer.release());
  359 void HTMLDocumentParser::discardSpeculationsAndResumeFrom(PassOwnPtr<ParsedChunk> lastChunkBeforeScript, PassOwnPtr<HTMLToken> token, PassOwnPtr<HTMLTokenizer> tokenizer)
  367 checkpoint->tokenizer = tokenizer;
  [all...]
 |
/external/chromium_org/third_party/sqlite/src/src/ |
complete.c |
  12 ** An tokenizer for SQL
  15 ** This code used to be part of the tokenizer.c source file. But by
|
/external/chromium_org/tools/gn/ |
input_conversion.cc |
  17 #include "tools/gn/tokenizer.h"
  95 std::vector<Token> tokens = Tokenizer::Tokenize(&input_file, err);
|
input_file_manager.cc |
  13 #include "tools/gn/tokenizer.h"
  223 std::vector<Token> tokens = Tokenizer::Tokenize(file, err);
|
/external/protobuf/vsprojects/ |
libprotobuf.vcproj |
  259 RelativePath="..\src\google\protobuf\io\tokenizer.h"
  403 RelativePath="..\src\google\protobuf\io\tokenizer.cc"
|
/frameworks/base/core/java/android/text/util/ |
Rfc822Tokenizer.java |
  25 * This class works as a Tokenizer for MultiAutoCompleteTextView for
  30 public class Rfc822Tokenizer implements MultiAutoCompleteTextView.Tokenizer {
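As the class comment says, Rfc822Tokenizer implements MultiAutoCompleteTextView.Tokenizer so an address field can auto-complete one RFC 822 address at a time; it also provides a static tokenize() helper that splits a raw header value into Rfc822Token objects. A small usage sketch (the surrounding setup method and log tag are illustrative, not from the listed sources):

    import android.text.util.Rfc822Token;
    import android.text.util.Rfc822Tokenizer;
    import android.widget.MultiAutoCompleteTextView;

    // Sketch: wire the tokenizer into an address field and split a raw
    // comma-separated header value into individual addresses.
    void setUpAddressField(MultiAutoCompleteTextView addressView) {
        // Completion now operates on one address at a time.
        addressView.setTokenizer(new Rfc822Tokenizer());

        Rfc822Token[] tokens =
                Rfc822Tokenizer.tokenize("Alice <alice@example.com>, bob@example.com");
        for (Rfc822Token token : tokens) {
            // token.getName() -> display name (may be null),
            // token.getAddress() -> the bare address.
            android.util.Log.d("Rfc822Demo", token.getAddress());
        }
    }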
|
/external/chromium_org/content/common/android/ |
address_parser_internal.cc |
  252 String16Tokenizer* tokenizer,
  394 // Ran out of words, extract more from the tokenizer.
  397 if (!tokenizer->GetNext())
  399 } while (tokenizer->token_is_delim());
  401 Word(tokenizer->token_begin(), tokenizer->token_end()));
|
/external/chromium_org/third_party/protobuf/src/google/protobuf/compiler/ |
parser.h |
  47 #include <google/protobuf/io/tokenizer.h>
  73 bool Parse(io::Tokenizer* input, FileDescriptorProto* file);
  150 inline bool LookingAtType(io::Tokenizer::TokenType token_type);
  234 void StartAt(const io::Tokenizer::Token& token);
  239 void EndAt(const io::Tokenizer::Token& token);
  420 io::Tokenizer* input_;
|
/external/chromium_org/third_party/angle_dx11/src/compiler/preprocessor/ |
Tokenizer.cpp |
  1 #line 16 "./Tokenizer.l"
  12 #line 13 "./Tokenizer.cpp"
  522 #include "Tokenizer.h"
  568 #define YY_EXTRA_TYPE pp::Tokenizer::Context*
  [all...]
/external/chromium_org/third_party/WebKit/Source/devtools/front_end/ |
JavaScriptFormatter.js |
  198 function Tokenizer(content)
  200 this._readNextToken = parse.tokenizer(content);
  204 Tokenizer.prototype = {
  231 function JavaScriptFormatter(tokenizer, builder)
  233 this._tokenizer = tokenizer;
|
/external/eclipse-basebuilder/basebuilder-3.6.2/org.eclipse.releng.basebuilder/plugins/org.eclipse.test.performance.ui/src/org/eclipse/test/internal/performance/results/ui/ |
ComponentResultsView.java |
  496 StringTokenizer tokenizer = new StringTokenizer(partName, ".");
  497 while (tokenizer.hasMoreTokens()) {
  498 String token = tokenizer.nextToken();
  500 if (tokenizer.hasMoreTokens()) {
  519 if (tokenizer.hasMoreTokens()) buffer.append(' ');
|