    Searched full:tokenizer (Results 1 - 25 of 614)

  /external/chromium_org/net/cert/
pem_tokenizer_unittest.cc 20 PEMTokenizer tokenizer(string_piece, accepted_types);
21 EXPECT_TRUE(tokenizer.GetNext());
23 EXPECT_EQ("EXPECTED-BLOCK", tokenizer.block_type());
24 EXPECT_EQ("MatchesAcceptedBlockType", tokenizer.data());
26 EXPECT_FALSE(tokenizer.GetNext());
38 PEMTokenizer tokenizer(string_piece, accepted_types);
39 EXPECT_TRUE(tokenizer.GetNext());
41 EXPECT_EQ("EXPECTED-BLOCK", tokenizer.block_type());
42 EXPECT_EQ("MatchesAcceptedBlockType", tokenizer.data());
44 EXPECT_FALSE(tokenizer.GetNext())
    [all...]
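
A minimal sketch of driving the net::PEMTokenizer API exercised above, outside the unit test. The constructor, GetNext(), block_type() and data() calls come from the snippet; the header path, the "CERTIFICATE" block type and the ExtractCertificates helper are illustrative assumptions.

    // Sketch only: assumes a Chromium checkout providing net::PEMTokenizer.
    #include <string>
    #include <vector>
    #include "net/cert/pem_tokenizer.h"

    // Collects the decoded payload of every "CERTIFICATE" block in |pem_input|.
    std::vector<std::string> ExtractCertificates(const std::string& pem_input) {
      std::vector<std::string> accepted_types = {"CERTIFICATE"};
      net::PEMTokenizer tokenizer(pem_input, accepted_types);
      std::vector<std::string> blocks;
      while (tokenizer.GetNext()) {
        // block_type() names the matched block; data() is its decoded body.
        blocks.push_back(tokenizer.data());
      }
      return blocks;
    }
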
  /external/protobuf/src/google/protobuf/io/
tokenizer_unittest.cc 39 #include <google/protobuf/io/tokenizer.h>
182 EXPECT_TRUE(Tokenizer::ParseInteger(text, kuint64max, &result));
197 Tokenizer::TokenType type;
207 { "hello", Tokenizer::TYPE_IDENTIFIER },
210 { "123", Tokenizer::TYPE_INTEGER },
211 { "0xab6", Tokenizer::TYPE_INTEGER },
212 { "0XAB6", Tokenizer::TYPE_INTEGER },
213 { "0X1234567", Tokenizer::TYPE_INTEGER },
214 { "0x89abcdef", Tokenizer::TYPE_INTEGER },
215 { "0x89ABCDEF", Tokenizer::TYPE_INTEGER }
    [all...]
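
A minimal sketch exercising the google::protobuf::io::Tokenizer API tested above, assuming the 2.x-era protobuf vendored here (newer releases rename some ErrorCollector methods). Next(), current() and the TYPE_* constants appear in the snippet; the ArrayInputStream setup and the PrintErrorCollector subclass are assumptions.

    // Sketch only: 2.x-era protobuf assumed.
    #include <iostream>
    #include <string>
    #include <google/protobuf/io/tokenizer.h>
    #include <google/protobuf/io/zero_copy_stream_impl_lite.h>

    using google::protobuf::io::ArrayInputStream;
    using google::protobuf::io::ErrorCollector;
    using google::protobuf::io::Tokenizer;

    // ErrorCollector is abstract; this minimal subclass just prints errors.
    class PrintErrorCollector : public ErrorCollector {
     public:
      void AddError(int line, int column, const std::string& message) override {
        std::cerr << "error at " << line << ":" << column << ": " << message << "\n";
      }
    };

    int main() {
      std::string text = "hello 123 0xab6 1.5 \"str\"";
      ArrayInputStream input(text.data(), static_cast<int>(text.size()));
      PrintErrorCollector errors;
      Tokenizer tokenizer(&input, &errors);
      // Next() advances to the following token; current() exposes its type
      // (TYPE_IDENTIFIER, TYPE_INTEGER, ...) and raw text.
      while (tokenizer.Next()) {
        std::cout << tokenizer.current().text << " -> "
                  << static_cast<int>(tokenizer.current().type) << "\n";
      }
      return 0;
    }
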
  /external/chromium_org/third_party/protobuf/src/google/protobuf/io/
tokenizer_unittest.cc 40 #include <google/protobuf/io/tokenizer.h>
183 EXPECT_TRUE(Tokenizer::ParseInteger(text, kuint64max, &result));
198 Tokenizer::TokenType type;
208 { "hello", Tokenizer::TYPE_IDENTIFIER },
211 { "123", Tokenizer::TYPE_INTEGER },
212 { "0xab6", Tokenizer::TYPE_INTEGER },
213 { "0XAB6", Tokenizer::TYPE_INTEGER },
214 { "0X1234567", Tokenizer::TYPE_INTEGER },
215 { "0x89abcdef", Tokenizer::TYPE_INTEGER },
216 { "0x89ABCDEF", Tokenizer::TYPE_INTEGER }
    [all...]
  /external/chromium_org/third_party/sqlite/src/ext/fts2/
README.tokenizers 5 the text tokenizer implementation to be used when indexing text
6 by specifying a "tokenizer" clause as part of the CREATE VIRTUAL TABLE
10 <columns ...> [, tokenizer <tokenizer-name> [<tokenizer-args>]]
13 The built-in tokenizers (valid values to pass as <tokenizer name>) are
16 <tokenizer-args> should consist of zero or more white-space separated
17 arguments to pass to the selected tokenizer implementation. The
19 tokenizer.
23 FTS2 allows users to provide custom tokenizer implementations. The
    [all...]
fts2_tokenizer.h 10 ** sqlite3_tokenizer_module is a singleton defining the tokenizer
14 ** sqlite3_tokenizer is used to define a particular tokenizer, perhaps
17 ** sqlite3_tokenizer_cursor is generated by a tokenizer to generate
30 ** Structures used by the tokenizer interface. When a new tokenizer
36 ** the tokenizer clause of the CREATE VIRTUAL TABLE statement to the
37 ** sqlite3_tokenizer_module.xCreate() function of the requested tokenizer
39 ** sqlite3_tokenizer structure representing the specific tokenizer to
40 ** be used for the fts2 table (customized by the tokenizer clause arguments).
60 ** Create a new tokenizer. The values in the argv[] array are th
    [all...]
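
A hedged sketch of driving a tokenizer through the sqlite3_tokenizer_module vtable this header describes. The xCreate/xOpen/xNext/xClose/xDestroy member names and signatures follow the public fts2/fts3 tokenizer headers but should be verified against fts2_tokenizer.h; the PrintTokens helper and the explicit pModule/pTokenizer wiring mirror what the FTS core is described as doing after xCreate()/xOpen().

    // Sketch only: verify struct members against fts2_tokenizer.h before use.
    #include <cstdio>
    #include "sqlite3.h"
    #include "fts2_tokenizer.h"

    // Tokenizes nText bytes of text with the given module and prints each
    // token together with its byte offsets and position index.
    int PrintTokens(const sqlite3_tokenizer_module *mod,
                    const char *text, int nText) {
      sqlite3_tokenizer *tok = 0;
      sqlite3_tokenizer_cursor *cur = 0;
      int rc = mod->xCreate(0, 0, &tok);   // no <tokenizer-args>
      if (rc != SQLITE_OK) return rc;
      tok->pModule = mod;                  // FTS fills these in after xCreate/xOpen
      rc = mod->xOpen(tok, text, nText, &cur);
      if (rc == SQLITE_OK) {
        cur->pTokenizer = tok;
        const char *z; int n, iStart, iEnd, iPos;
        while (mod->xNext(cur, &z, &n, &iStart, &iEnd, &iPos) == SQLITE_OK) {
          std::printf("%.*s [%d,%d) pos=%d\n", n, z, iStart, iEnd, iPos);
        }
        mod->xClose(cur);
      }
      mod->xDestroy(tok);
      return rc;
    }
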
  /external/doclava/src/com/google/doclava/apicheck/
ApiFile.java 67 final Tokenizer tokenizer = new Tokenizer(filename, (new String(buf, 0, size)).toCharArray()); local
71 String token = tokenizer.getToken();
76 parsePackage(api, tokenizer);
78 throw new ApiParseException("expected package got " + token, tokenizer.getLine());
88 private static void parsePackage(ApiInfo api, Tokenizer tokenizer)
94 token = tokenizer.requireToken();
95 assertIdent(tokenizer, token)
    [all...]
  /system/core/libutils/
Tokenizer.cpp 17 #define LOG_TAG "Tokenizer"
26 #include <utils/Tokenizer.h>
28 // Enables debug output for the tokenizer.
38 Tokenizer::Tokenizer(const String8& filename, FileMap* fileMap, char* buffer,
45 Tokenizer::~Tokenizer() {
54 status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) {
97 *outTokenizer = new Tokenizer(filename, fileMap, buffer, ownBuffer, length)
    [all...]
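
A hedged sketch of the libutils Tokenizer usage pattern. Tokenizer::open() appears in the snippet above; nextToken(), skipDelimiters(), nextLine(), isEol() and isEof() are assumed from how other code in the tree drives this class.

    // Sketch only: assumes <utils/Tokenizer.h> from the Android source tree.
    #include <cstdio>
    #include <utils/Errors.h>
    #include <utils/String8.h>
    #include <utils/Tokenizer.h>

    static const char* kWhitespace = " \t\r";

    // Prints every whitespace-separated token of the file at |path|.
    android::status_t DumpTokens(const char* path) {
        android::Tokenizer* tokenizer = NULL;
        android::status_t status =
                android::Tokenizer::open(android::String8(path), &tokenizer);
        if (status != android::OK) return status;
        while (!tokenizer->isEof()) {
            while (!tokenizer->isEol()) {
                tokenizer->skipDelimiters(kWhitespace);
                android::String8 token = tokenizer->nextToken(kWhitespace);
                if (!token.isEmpty()) {
                    std::printf("%s\n", token.string());
                }
            }
            tokenizer->nextLine();
        }
        delete tokenizer;
        return android::OK;
    }
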
  /external/chromium_org/third_party/sqlite/src/ext/fts3/
README.tokenizers 5 the text tokenizer implementation to be used when indexing text
10 <columns ...> [, tokenize <tokenizer-name> [<tokenizer-args>]]
13 The built-in tokenizers (valid values to pass as <tokenizer name>) are
16 <tokenizer-args> should consist of zero or more white-space separated
17 arguments to pass to the selected tokenizer implementation. The
19 tokenizer.
23 FTS3 allows users to provide custom tokenizer implementations. The
24 interface used to create a new tokenizer is defined and described in
27 Registering a new FTS3 tokenizer is similar to registering a new
    [all...]
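
A hedged sketch of the tokenize clause this README documents, driven through the standard sqlite3 C API. It assumes an SQLite build with FTS3 compiled in (SQLITE_ENABLE_FTS3) and uses the space-separated form shown above; the docs table, the sample rows and the "porter" choice are illustrative.

    // Sketch only: requires an SQLite build with FTS3 enabled.
    #include <cstdio>
    #include <sqlite3.h>

    int main() {
      sqlite3 *db = 0;
      if (sqlite3_open(":memory:", &db) != SQLITE_OK) return 1;
      char *errmsg = 0;
      // Select the built-in "porter" stemming tokenizer instead of the
      // default "simple" tokenizer when indexing the content column.
      int rc = sqlite3_exec(
          db,
          "CREATE VIRTUAL TABLE docs USING fts3(content, tokenize porter);"
          "INSERT INTO docs(content) VALUES('tokenizers tokenize tokens');"
          "SELECT * FROM docs WHERE docs MATCH 'token';",
          0, 0, &errmsg);
      if (rc != SQLITE_OK) {
        std::fprintf(stderr, "fts3 error: %s\n", errmsg);
        sqlite3_free(errmsg);
      }
      sqlite3_close(db);
      return rc == SQLITE_OK ? 0 : 1;
    }
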
fts3_tokenizer.h 10 ** sqlite3_tokenizer_module is a singleton defining the tokenizer
14 ** sqlite3_tokenizer is used to define a particular tokenizer, perhaps
17 ** sqlite3_tokenizer_cursor is generated by a tokenizer to generate
30 ** Structures used by the tokenizer interface. When a new tokenizer
36 ** the tokenizer clause of the CREATE VIRTUAL TABLE statement to the
37 ** sqlite3_tokenizer_module.xCreate() function of the requested tokenizer
39 ** sqlite3_tokenizer structure representing the specific tokenizer to
40 ** be used for the fts3 table (customized by the tokenizer clause arguments).
60 ** Create a new tokenizer. The values in the argv[] array are th
    [all...]
  /external/protobuf/python/google/protobuf/internal/
text_format_test.py 301 tokenizer = text_format._Tokenizer(text)
302 methods = [(tokenizer.ConsumeIdentifier, 'identifier1'),
304 (tokenizer.ConsumeString, 'string1'),
305 (tokenizer.ConsumeIdentifier, 'identifier2'),
307 (tokenizer.ConsumeInt32, 123),
308 (tokenizer.ConsumeIdentifier, 'identifier3'),
310 (tokenizer.ConsumeString, 'string'),
311 (tokenizer.ConsumeIdentifier, 'identifiER_4'),
313 (tokenizer.ConsumeFloat, 1.1e+2),
314 (tokenizer.ConsumeIdentifier, 'ID5')
    [all...]
  /external/chromium_org/third_party/sqlite/src/ext/fts1/
fts1_tokenizer.h 10 ** sqlite3_tokenizer_module is a singleton defining the tokenizer
14 ** sqlite3_tokenizer is used to define a particular tokenizer, perhaps
17 ** sqlite3_tokenizer_cursor is generated by a tokenizer to generate
30 ** Structures used by the tokenizer interface.
40 ** Create and destroy a tokenizer. argc/argv are passed down from
69 const sqlite3_tokenizer_module *pModule; /* The module for this tokenizer */
70 /* Tokenizer implementations will typically add additional fields */
74 sqlite3_tokenizer *pTokenizer; /* Tokenizer for this cursor. */
75 /* Tokenizer implementations will typically add additional fields */
79 ** Get the module for a tokenizer which generates tokens based on
    [all...]
tokenizer.h 10 ** sqlite3_tokenizer_module is a singleton defining the tokenizer
14 ** sqlite3_tokenizer is used to define a particular tokenizer, perhaps
17 ** sqlite3_tokenizer_cursor is generated by a tokenizer to generate
30 ** Structures used by the tokenizer interface.
40 ** Create and destroy a tokenizer. argc/argv are passed down from
69 sqlite3_tokenizer_module *pModule; /* The module for this tokenizer */
70 /* Tokenizer implementations will typically add additional fields */
74 sqlite3_tokenizer *pTokenizer; /* Tokenizer for this cursor. */
75 /* Tokenizer implementations will typically add additional fields */
79 ** Get the module for a tokenizer which generates tokens based on
    [all...]
  /external/chromium_org/tools/gn/
tokenizer_unittest.cc 8 #include "tools/gn/tokenizer.h"
23 std::vector<Token> results = Tokenizer::Tokenize(&input_file, &err);
38 TEST(Tokenizer, Empty) {
43 std::vector<Token> results = Tokenizer::Tokenize(&empty_string_input, &err);
49 results = Tokenizer::Tokenize(&whitespace_input, &err);
53 TEST(Tokenizer, Identifier) {
60 TEST(Tokenizer, Integer) {
68 TEST(Tokenizer, IntegerNoSpace) {
76 TEST(Tokenizer, String) {
86 TEST(Tokenizer, Operator)
    [all...]
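
A heavily hedged sketch of the tokenize-and-inspect pattern from the test above. Only Tokenizer::Tokenize(&input_file, &err) appears in the snippet; the InputFile/SourceFile/Err/Token setup is an assumption about how the GN sources are organized and should be checked against tools/gn before relying on it.

    // Sketch only: the setup around Tokenizer::Tokenize() is assumed.
    #include <string>
    #include <vector>
    #include "tools/gn/err.h"
    #include "tools/gn/input_file.h"
    #include "tools/gn/source_file.h"
    #include "tools/gn/token.h"
    #include "tools/gn/tokenizer.h"

    // Returns true if |input| tokenizes cleanly, leaving the tokens in |out|.
    bool TokenizeString(const std::string& input, std::vector<Token>* out) {
      InputFile input_file(SourceFile("//test"));
      input_file.SetContents(input);
      Err err;
      *out = Tokenizer::Tokenize(&input_file, &err);
      return !err.has_error();
    }
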
  /external/antlr/antlr-3.4/runtime/ObjC/Framework/
ANTLRTreePatternParser.m 47 //tokenizer = aTokenizer;
50 //ttype = [tokenizer nextToken]; // kickstart
62 tokenizer = aTokenizer;
63 if ( tokenizer ) [tokenizer retain];
77 if ( tokenizer ) [tokenizer release];
102 ttype = [tokenizer nextToken];
127 ttype = [tokenizer nextToken];
137 ttype = [tokenizer nextToken]
    [all...]
  /external/deqp/framework/opengl/
gluVarTypeUtil.cpp 92 VarTokenizer tokenizer(nameWithPath);
93 TCU_CHECK(tokenizer.getToken() == VarTokenizer::TOKEN_IDENTIFIER);
94 return tokenizer.getIdentifier();
99 VarTokenizer tokenizer(nameWithPath);
101 if (tokenizer.getToken() == VarTokenizer::TOKEN_IDENTIFIER)
102 tokenizer.advance();
105 while (tokenizer.getToken() != VarTokenizer::TOKEN_END)
109 if (tokenizer.getToken() == VarTokenizer::TOKEN_PERIOD)
111 tokenizer.advance();
112 TCU_CHECK(tokenizer.getToken() == VarTokenizer::TOKEN_IDENTIFIER)
    [all...]
  /external/chromium_org/third_party/angle/src/compiler/preprocessor/
generate_parser.sh 25 run_flex Tokenizer.l Tokenizer.cpp
27 patch --silent --forward < 64bit-tokenizer-safety.patch
  /external/chromium_org/third_party/protobuf/python/google/protobuf/internal/
text_format_test.py 462 tokenizer = text_format._Tokenizer(text)
463 methods = [(tokenizer.ConsumeIdentifier, 'identifier1'),
465 (tokenizer.ConsumeString, 'string1'),
466 (tokenizer.ConsumeIdentifier, 'identifier2'),
468 (tokenizer.ConsumeInt32, 123),
469 (tokenizer.ConsumeIdentifier, 'identifier3'),
471 (tokenizer.ConsumeString, 'string'),
472 (tokenizer.ConsumeIdentifier, 'identifiER_4'),
474 (tokenizer.ConsumeFloat, 1.1e+2),
475 (tokenizer.ConsumeIdentifier, 'ID5')
    [all...]
  /packages/apps/Gallery2/src/com/android/gallery3d/data/
Face.java 34 StringTokenizer tokenizer = new StringTokenizer(rect); local
36 while (tokenizer.hasMoreElements()) {
37 mPosition.left = Integer.parseInt(tokenizer.nextToken());
38 mPosition.top = Integer.parseInt(tokenizer.nextToken());
39 mPosition.right = Integer.parseInt(tokenizer.nextToken());
40 mPosition.bottom = Integer.parseInt(tokenizer.nextToken());
  /frameworks/native/opengl/libagl/
Tokenizer.cpp 1 /* libs/opengles/Tokenizer.cpp
20 #include "Tokenizer.h"
26 ANDROID_BASIC_TYPES_TRAITS(Tokenizer::run_t)
28 Tokenizer::Tokenizer()
32 Tokenizer::Tokenizer(const Tokenizer& other)
37 Tokenizer::~Tokenizer()
    [all...]
  /external/chromium_org/chrome/browser/extensions/api/log_private/
syslog_parser.cc 40 base::StringTokenizer tokenizer(input, " ");
41 if (!tokenizer.GetNext()) {
46 std::string time = tokenizer.token();
51 if (!tokenizer.GetNext()) {
56 if (!tokenizer.GetNext()) {
61 ParseProcess(tokenizer.token(), entry.get());
87 base::StringTokenizer tokenizer(input, kProcessInfoDelimiters);
88 if (!tokenizer.GetNext()) {
93 entry->process = tokenizer.token();
95 if (tokenizer.GetNext())
    [all...]
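
A minimal sketch of base::StringTokenizer as used by the syslog parser above, assuming a Chromium checkout. GetNext() and token() appear in the snippet; the SplitFields helper is illustrative.

    // Sketch only: assumes Chromium's base/strings/string_tokenizer.h.
    #include <string>
    #include <vector>
    #include "base/strings/string_tokenizer.h"

    // Splits one syslog-style line into whitespace-separated fields
    // (timestamp, severity, process info, message, ...).
    std::vector<std::string> SplitFields(const std::string& input) {
      base::StringTokenizer tokenizer(input, " ");
      std::vector<std::string> fields;
      while (tokenizer.GetNext())
        fields.push_back(tokenizer.token());
      return fields;
    }
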
  /external/antlr/antlr-3.4/runtime/CSharp2/Sources/Antlr3.Runtime/Antlr.Runtime.Tree/
TreePatternParser.cs 37 protected TreePatternLexer tokenizer; field in class:Antlr.Runtime.Tree.TreePatternParser
42 public TreePatternParser(TreePatternLexer tokenizer, TreeWizard wizard, ITreeAdaptor adaptor) {
43 this.tokenizer = tokenizer;
46 ttype = tokenizer.NextToken(); // kickstart
66 ttype = tokenizer.NextToken();
90 ttype = tokenizer.NextToken();
98 ttype = tokenizer.NextToken();
102 label = tokenizer.sval.ToString();
103 ttype = tokenizer.NextToken()
    [all...]
  /external/antlr/antlr-3.4/runtime/CSharp3/Sources/Antlr3.Runtime/Tree/
TreePatternParser.cs 39 protected TreePatternLexer tokenizer; field in class:Antlr.Runtime.Tree.TreePatternParser
44 public TreePatternParser( TreePatternLexer tokenizer, TreeWizard wizard, ITreeAdaptor adaptor )
46 this.tokenizer = tokenizer;
49 ttype = tokenizer.NextToken(); // kickstart
75 ttype = tokenizer.NextToken();
105 ttype = tokenizer.NextToken();
115 ttype = tokenizer.NextToken();
120 label = tokenizer.sval.ToString();
121 ttype = tokenizer.NextToken()
    [all...]
  /external/antlr/antlr-3.4/runtime/Java/src/main/java/org/antlr/runtime/tree/
TreePatternParser.java 34 protected TreePatternLexer tokenizer; field in class:TreePatternParser
39 public TreePatternParser(TreePatternLexer tokenizer, TreeWizard wizard, TreeAdaptor adaptor) {
40 this.tokenizer = tokenizer;
43 ttype = tokenizer.nextToken(); // kickstart
64 ttype = tokenizer.nextToken();
89 ttype = tokenizer.nextToken();
97 ttype = tokenizer.nextToken();
101 label = tokenizer.sval.toString();
102 ttype = tokenizer.nextToken()
    [all...]
  /external/chromium_org/extensions/common/
csp_validator.cc 41 bool HasOnlySecureTokens(base::StringTokenizer& tokenizer,
43 while (tokenizer.GetNext()) {
44 std::string source = tokenizer.token();
99 base::StringTokenizer& tokenizer,
107 status->is_secure = HasOnlySecureTokens(tokenizer, type);
134 base::StringTokenizer tokenizer(input, " \t\r\n");
135 if (!tokenizer.GetNext())
138 std::string directive_name = tokenizer.token();
141 if (UpdateStatus(directive_name, tokenizer, &default_src_status, type))
143 if (UpdateStatus(directive_name, tokenizer, &script_src_status, type)
    [all...]
  /external/chromium_org/testing/android/
native_test_util.cc 16 base::StringTokenizer tokenizer(command_line, base::kWhitespaceASCII);
17 tokenizer.set_quote_chars("\"");
18 while (tokenizer.GetNext()) {
20 base::RemoveChars(tokenizer.token(), "\"", &token);
