    Searched refs: Tokenizer (Results 1 - 25 of 194)


  /external/protobuf/src/google/protobuf/io/
tokenizer_unittest.cc 40 #include <google/protobuf/io/tokenizer.h>
183 EXPECT_TRUE(Tokenizer::ParseInteger(text, kuint64max, &result));
198 Tokenizer::TokenType type;
208 { "hello", Tokenizer::TYPE_IDENTIFIER },
211 { "123", Tokenizer::TYPE_INTEGER },
212 { "0xab6", Tokenizer::TYPE_INTEGER },
213 { "0XAB6", Tokenizer::TYPE_INTEGER },
214 { "0X1234567", Tokenizer::TYPE_INTEGER },
215 { "0x89abcdef", Tokenizer::TYPE_INTEGER },
216 { "0x89ABCDEF", Tokenizer::TYPE_INTEGER }
    [all...]
tokenizer.h 52 class Tokenizer;
83 class LIBPROTOBUF_EXPORT Tokenizer {
85 // Construct a Tokenizer that reads and tokenizes text from the given
88 Tokenizer(ZeroCopyInputStream* input, ErrorCollector* error_collector);
89 ~Tokenizer();
192 // comes from a TYPE_FLOAT token parsed by Tokenizer. If it doesn't, the
197 // comes from a TYPE_STRING token parsed by Tokenizer. If it doesn't, the
207 // parsed by a Tokenizer, the result is undefined (possibly an assert
249 GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer);
385 inline const Tokenizer::Token& Tokenizer::current()
    [all...]
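
The protobuf io::Tokenizer hits above (the unittest plus the header) outline the whole public surface: construct the tokenizer over a ZeroCopyInputStream with an ErrorCollector, pull tokens with Next()/current(), and post-process numeric text with the static ParseInteger helper, as the unittest table does. A minimal pull-loop along those lines; the sample input and the StderrCollector class are illustrative, and the exact AddError signature shifts slightly between protobuf releases:

    #include <iostream>
    #include <string>
    #include <google/protobuf/io/tokenizer.h>
    #include <google/protobuf/io/zero_copy_stream_impl_lite.h>

    using namespace google::protobuf;
    using namespace google::protobuf::io;

    // Illustrative collector; the AddError signature differs slightly between
    // protobuf releases, so treat this override as a sketch rather than canon.
    class StderrCollector : public ErrorCollector {
     public:
      void AddError(int line, int column, const std::string& message) override {
        std::cerr << line << ":" << column << ": " << message << "\n";
      }
    };

    int main() {
      const std::string text = "hello 123 0xab6";   // illustrative input
      ArrayInputStream input(text.data(), static_cast<int>(text.size()));
      StderrCollector errors;
      Tokenizer tokenizer(&input, &errors);

      while (tokenizer.Next()) {                    // pull tokens until end of input
        const Tokenizer::Token& t = tokenizer.current();
        if (t.type == Tokenizer::TYPE_INTEGER) {    // decimal or hex, per the unittest table
          uint64 value = 0;
          Tokenizer::ParseInteger(t.text, kuint64max, &value);
          std::cout << "integer " << value << "\n";
        } else if (t.type == Tokenizer::TYPE_IDENTIFIER) {
          std::cout << "identifier " << t.text << "\n";
        }
      }
      return 0;
    }
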
  /system/core/init/parser/
tokenizer.cpp 15 #include "tokenizer.h"
19 Tokenizer::Tokenizer(const std::string& data)
31 Tokenizer::~Tokenizer() {}
33 const Tokenizer::Token& Tokenizer::current() {
37 bool Tokenizer::Next() {
88 void Tokenizer::AdvChar() {
98 void Tokenizer::AdvWhiteSpace()
    [all...]
tokenizer_test.cpp 15 #include "tokenizer.h"
26 Tokenizer tokenizer(data); \
27 ASSERT_EQ(Tokenizer::TOK_START, tokenizer.current().type)
30 ASSERT_TRUE(tokenizer.Next()); \
31 ASSERT_EQ(test_text, tokenizer.current().text); \
32 ASSERT_EQ(Tokenizer::TOK_TEXT, tokenizer.current().type)
35 ASSERT_TRUE(tokenizer.Next());
    [all...]
tokenizer.h 32 // Comments are denoted with '#' and the tokenizer will ignore
37 class Tokenizer {
39 Tokenizer(const std::string& data);
40 ~Tokenizer();
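
The init parser's Tokenizer above is a much smaller, string-backed affair: construct it over the whole input, observe that it starts on TOK_START, then walk it with Next()/current() exactly as tokenizer_test.cpp does. A minimal sketch of that loop; the input line is illustrative, and the assumption that Next() returns false once the input is exhausted is not shown in these hits:

    #include <iostream>
    #include <string>
    #include "tokenizer.h"   // system/core/init/parser/tokenizer.h

    int main() {
      Tokenizer tokenizer("mkdir /dev/pts 0755 root root");   // illustrative input

      // A freshly constructed tokenizer reports TOK_START, as the test asserts.
      while (tokenizer.Next()) {
        const Tokenizer::Token& token = tokenizer.current();
        if (token.type == Tokenizer::TOK_TEXT)
          std::cout << "text: " << token.text << "\n";
      }
      return 0;
    }
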
  /external/parameter-framework/upstream/utility/
Tokenizer.h 37 /** Tokenizer class
40 * of delimiters (@see Tokenizer::defaultDelimiters).
42 class Tokenizer : private utility::NonCopyable
45 /** Constructs a Tokenizer
54 Tokenizer(const std::string &input, const std::string &delimiters = defaultDelimiters,
56 ~Tokenizer(){};
Tokenizer.cpp 30 #include "Tokenizer.h"
35 const string Tokenizer::defaultDelimiters = " \n\r\t\v\f";
37 Tokenizer::Tokenizer(const string &input, const string &delimiters, bool mergeDelimiters)
42 vector<string> Tokenizer::split()
  /external/parameter-framework/upstream/test/tokenizer/
Test.cpp 31 #include "Tokenizer.h"
44 SCENARIO("Tokenizer tests")
46 GIVEN ("A default tokenizer") {
49 Tokenizer tokenizer("a bcd ef");
53 CHECK(tokenizer.split() == expected);
58 Tokenizer tokenizer("");
62 CHECK(tokenizer.split() == expected);
67 Tokenizer tokenizer(" a \n\t bc ")
    [all...]
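
The parameter-framework Tokenizer above is the simplest of the lot: one constructor taking the input string, an optional delimiter set (defaulting to " \n\r\t\v\f"), an optional mergeDelimiters flag, and a single split() call returning the pieces. A short sketch matching the scenarios in Test.cpp; the behaviour of mergeDelimiters = false (keeping empty fields) is inferred from the constructor signature rather than shown in these hits:

    #include <iostream>
    #include <string>
    #include <vector>
    #include "Tokenizer.h"   // parameter-framework upstream/utility/Tokenizer.h

    int main() {
      Tokenizer whitespace("a bcd ef");            // default delimiters
      for (const std::string& word : whitespace.split())
        std::cout << "word: " << word << "\n";     // a, bcd, ef

      // Custom delimiter with mergeDelimiters disabled so empty fields survive
      // (an assumption about the flag's semantics, not shown in the hits above).
      Tokenizer csv("a,b,,c", ",", false);
      std::cout << "fields: " << csv.split().size() << "\n";
      return 0;
    }
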
  /external/v8/tools/gyp/tools/Xcode/Specifications/
gyp.xclangspec 73 Tokenizer = "xcode.lang.gyp.lexer.toplevel";
108 Tokenizer = "xcode.lang.gyp.lexer";
121 Tokenizer = "xcode.lang.gyp.lexer";
133 Tokenizer = "xcode.lang.gyp.lexer";
144 Tokenizer = "xcode.lang.gyp.lexer";
155 Tokenizer = "xcode.lang.gyp.lexer";
168 Tokenizer = "xcode.lang.gyp.lexer";
183 Tokenizer = "xcode.lang.gyp.lexer";
  /frameworks/native/opengl/libagl/
Tokenizer.h 1 /* libs/opengles/Tokenizer.h
29 class Tokenizer
32 Tokenizer();
33 Tokenizer(const Tokenizer& other);
34 ~Tokenizer();
Tokenizer.cpp 1 /* libs/opengles/Tokenizer.cpp
20 #include "Tokenizer.h"
26 ANDROID_BASIC_TYPES_TRAITS(Tokenizer::run_t)
28 Tokenizer::Tokenizer()
32 Tokenizer::Tokenizer(const Tokenizer& other)
37 Tokenizer::~Tokenizer()
    [all...]
  /system/core/include/utils/
Tokenizer.h 28 * A simple tokenizer for loading and parsing ASCII text files line by line.
30 class Tokenizer {
31 Tokenizer(const String8& filename, FileMap* fileMap, char* buffer,
35 ~Tokenizer();
40 * Returns NO_ERROR and a tokenizer for the file, if successful.
43 static status_t open(const String8& filename, Tokenizer** outTokenizer);
48 * Returns NO_ERROR and a tokenizer for the string, if successful.
52 const char* contents, Tokenizer** outTokenizer);
119 Tokenizer(const Tokenizer& other); // not copyable
    [all...]
  /system/core/libutils/
Tokenizer.cpp 17 #define LOG_TAG "Tokenizer"
26 #include <utils/Tokenizer.h>
28 // Enables debug output for the tokenizer.
38 Tokenizer::Tokenizer(const String8& filename, FileMap* fileMap, char* buffer,
45 Tokenizer::~Tokenizer() {
52 status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) {
95 *outTokenizer = new Tokenizer(filename, fileMap, buffer, ownBuffer, length)
    [all...]
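
The libutils Tokenizer above is the file-oriented one used by Android's key-map parsers: a static open() maps the file and returns NO_ERROR plus a heap-allocated tokenizer, which the caller owns and must delete. The per-line accessors (nextToken(), nextLine(), isEof()) sit in the portion of the header elided from this hit list, so their use below is an assumption, as is the example file path:

    #include <stdio.h>
    #include <utils/Tokenizer.h>
    #include <utils/String8.h>

    int main() {
      android::Tokenizer* tokenizer = nullptr;
      android::status_t status = android::Tokenizer::open(
          android::String8("/system/usr/keylayout/Generic.kl"),  // illustrative path
          &tokenizer);
      if (status != android::NO_ERROR) {
        fprintf(stderr, "open failed: %d\n", status);
        return 1;
      }
      // nextToken()/nextLine()/isEof() are assumed from the elided part of the header.
      while (!tokenizer->isEof()) {
        android::String8 token = tokenizer->nextToken(" \t");
        if (!token.isEmpty())
          printf("first token on line: %s\n", token.string());
        tokenizer->nextLine();
      }
      delete tokenizer;                            // caller owns the tokenizer from open()
      return 0;
    }
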
  /prebuilts/misc/darwin-x86_64/protobuf2.5/include/google/protobuf/io/
tokenizer.h 52 class Tokenizer;
82 class LIBPROTOBUF_EXPORT Tokenizer {
84 // Construct a Tokenizer that reads and tokenizes text from the given
87 Tokenizer(ZeroCopyInputStream* input, ErrorCollector* error_collector);
88 ~Tokenizer();
191 // comes from a TYPE_FLOAT token parsed by Tokenizer. If it doesn't, the
196 // comes from a TYPE_STRING token parsed by Tokenizer. If it doesn't, the
206 // parsed by a Tokenizer, the result is undefined (possibly an assert
233 GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer);
367 inline const Tokenizer::Token& Tokenizer::current()
    [all...]
  /prebuilts/misc/linux-x86_64/protobuf2.5/include/google/protobuf/io/
tokenizer.h 52 class Tokenizer;
82 class LIBPROTOBUF_EXPORT Tokenizer {
84 // Construct a Tokenizer that reads and tokenizes text from the given
87 Tokenizer(ZeroCopyInputStream* input, ErrorCollector* error_collector);
88 ~Tokenizer();
191 // comes from a TYPE_FLOAT token parsed by Tokenizer. If it doesn't, the
196 // comes from a TYPE_STRING token parsed by Tokenizer. If it doesn't, the
206 // parsed by a Tokenizer, the result is undefined (possibly an assert
233 GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer);
367 inline const Tokenizer::Token& Tokenizer::current()
    [all...]
  /prebuilts/misc/windows/protobuf2.5/include/google/protobuf/io/
tokenizer.h 52 class Tokenizer;
82 class LIBPROTOBUF_EXPORT Tokenizer {
84 // Construct a Tokenizer that reads and tokenizes text from the given
87 Tokenizer(ZeroCopyInputStream* input, ErrorCollector* error_collector);
88 ~Tokenizer();
191 // comes from a TYPE_FLOAT token parsed by Tokenizer. If it doesn't, the
196 // comes from a TYPE_STRING token parsed by Tokenizer. If it doesn't, the
206 // parsed by a Tokenizer, the result is undefined (possibly an assert
233 GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer);
367 inline const Tokenizer::Token& Tokenizer::current()
    [all...]
  /prebuilts/tools/darwin-x86_64/protoc/include/google/protobuf/io/
tokenizer.h 53 class Tokenizer;
91 class LIBPROTOBUF_EXPORT Tokenizer {
93 // Construct a Tokenizer that reads and tokenizes text from the given
96 Tokenizer(ZeroCopyInputStream* input, ErrorCollector* error_collector);
97 ~Tokenizer();
200 // comes from a TYPE_FLOAT token parsed by Tokenizer. If it doesn't, the
205 // comes from a TYPE_STRING token parsed by Tokenizer. If it doesn't, the
215 // parsed by a Tokenizer, the result is undefined (possibly an assert
257 GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer);
394 inline const Tokenizer::Token& Tokenizer::current()
    [all...]
  /prebuilts/tools/linux-x86_64/protoc/include/google/protobuf/io/
tokenizer.h 52 class Tokenizer;
82 class LIBPROTOBUF_EXPORT Tokenizer {
84 // Construct a Tokenizer that reads and tokenizes text from the given
87 Tokenizer(ZeroCopyInputStream* input, ErrorCollector* error_collector);
88 ~Tokenizer();
191 // comes from a TYPE_FLOAT token parsed by Tokenizer. If it doesn't, the
196 // comes from a TYPE_STRING token parsed by Tokenizer. If it doesn't, the
206 // parsed by a Tokenizer, the result is undefined (possibly an assert
233 GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer);
367 inline const Tokenizer::Token& Tokenizer::current()
    [all...]
  /external/deqp/executor/
xeXMLParser.cpp 57 Tokenizer::Tokenizer (void)
65 Tokenizer::~Tokenizer (void)
69 void Tokenizer::clear (void)
77 void Tokenizer::error (const std::string& what)
82 void Tokenizer::feed (const deUint8* bytes, int numBytes)
98 int Tokenizer::getChar (int offset) const
108 void Tokenizer::advance (void)
338 void Tokenizer::getString (std::string& dst) const
    [all...]
xeXMLParser.hpp 83 class Tokenizer
86 Tokenizer (void);
87 ~Tokenizer (void);
89 void clear (void); //!< Resets tokenizer to initial state.
103 Tokenizer (const Tokenizer& other);
104 Tokenizer& operator= (const Tokenizer& other);
190 Tokenizer m_tokenizer;
203 inline void Tokenizer::getTokenStr (std::string& dst) const
    [all...]
  /frameworks/native/include/input/
VirtualKeyMap.h 25 #include <utils/Tokenizer.h>
62 Tokenizer* mTokenizer;
65 Parser(VirtualKeyMap* map, Tokenizer* tokenizer);
  /external/clang/lib/ASTMatchers/Dynamic/
Parser.cpp 57 /// \brief Simple tokenizer for the parser.
294 const TokenInfo NameToken = Tokenizer->consumeNextToken();
296 if (Tokenizer->nextTokenKind() != TokenInfo::TK_OpenParen) {
306 if ((Tokenizer->nextTokenKind() == TokenInfo::TK_Comma ||
307 Tokenizer->nextTokenKind() == TokenInfo::TK_CloseParen ||
308 Tokenizer->nextTokenKind() == TokenInfo::TK_Eof) &&
328 const TokenInfo OpenToken = Tokenizer->consumeNextToken();
349 while (Tokenizer->nextTokenKind() != TokenInfo::TK_Eof) {
350 if (Tokenizer->nextTokenKind() == TokenInfo::TK_CloseParen) {
352 EndToken = Tokenizer->consumeNextToken()
    [all...]
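
The ASTMatchers dynamic Parser hits show the usual single-token-lookahead pattern: peek with nextTokenKind() to decide, consumeNextToken() to advance, and stop on an end-of-input kind. The clang-internal TokenInfo/Tokenizer types are not public API, so the sketch below re-creates the pattern with hypothetical stand-in types rather than the real classes:

    #include <iostream>
    #include <queue>
    #include <string>
    #include <utility>

    // Hypothetical stand-ins for the clang-internal TokenInfo/tokenizer pair,
    // used only to illustrate the peek/consume lookahead pattern from Parser.cpp.
    struct TokenInfo {
      enum Kind { TK_Ident, TK_OpenParen, TK_CloseParen, TK_Comma, TK_Eof };
      Kind kind;
      std::string text;
    };

    class LookaheadTokenizer {
     public:
      explicit LookaheadTokenizer(std::queue<TokenInfo> tokens)
          : tokens_(std::move(tokens)) {}
      TokenInfo::Kind nextTokenKind() const {        // peek without consuming
        return tokens_.empty() ? TokenInfo::TK_Eof : tokens_.front().kind;
      }
      TokenInfo consumeNextToken() {                 // take the token and advance
        if (tokens_.empty()) return {TokenInfo::TK_Eof, ""};
        TokenInfo t = tokens_.front();
        tokens_.pop();
        return t;
      }
     private:
      std::queue<TokenInfo> tokens_;
    };

    int main() {
      std::queue<TokenInfo> toks;
      for (TokenInfo t : {TokenInfo{TokenInfo::TK_Ident, "hasName"},
                          TokenInfo{TokenInfo::TK_OpenParen, "("},
                          TokenInfo{TokenInfo::TK_Ident, "foo"},
                          TokenInfo{TokenInfo::TK_CloseParen, ")"}})
        toks.push(t);

      LookaheadTokenizer Tokenizer(toks);   // named to mirror the member in Parser.cpp
      // Same shape as the matcher-expression loop: peek, branch, then consume.
      while (Tokenizer.nextTokenKind() != TokenInfo::TK_Eof) {
        if (Tokenizer.nextTokenKind() == TokenInfo::TK_CloseParen) {
          const TokenInfo EndToken = Tokenizer.consumeNextToken();
          std::cout << "closing " << EndToken.text << "\n";
          continue;
        }
        std::cout << "token " << Tokenizer.consumeNextToken().text << "\n";
      }
      return 0;
    }
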
  /external/ImageMagick/MagickCore/
token.h 32 Tokenizer(TokenInfo *,const unsigned int,char *,const size_t,const char *,
  /external/chromium-trace/catapult/third_party/vinn/third_party/parse5/lib/simple_api/
tokenizer_proxy.js 3 var Tokenizer = require('../tokenization/tokenizer'),
13 //Tokenizer proxy
14 //NOTE: this proxy simulates adjustment of the Tokenizer which is performed by the standard parser during tree construction.
16 this.tokenizer = new Tokenizer(html, options);
26 var token = this.tokenizer.getNextToken();
28 if (token.type === Tokenizer.START_TAG_TOKEN)
31 else if (token.type === Tokenizer.END_TAG_TOKEN)
34 else if (token.type === Tokenizer.NULL_CHARACTER_TOKEN && this.inForeignContent)
    [all...]
  /external/chromium-trace/catapult/third_party/vinn/third_party/parse5/lib/tree_construction/
parser.js 3 var Tokenizer = require('../tokenization/tokenizer'),
88 _[INITIAL_MODE][Tokenizer.CHARACTER_TOKEN] =
89 _[INITIAL_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = tokenInInitialMode;
90 _[INITIAL_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = ignoreToken;
91 _[INITIAL_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
92 _[INITIAL_MODE][Tokenizer.DOCTYPE_TOKEN] = doctypeInInitialMode;
93 _[INITIAL_MODE][Tokenizer.START_TAG_TOKEN] =
94 _[INITIAL_MODE][Tokenizer.END_TAG_TOKEN] =
95 _[INITIAL_MODE][Tokenizer.EOF_TOKEN] = tokenInInitialMode
    [all...]
