HomeSort by relevance Sort by last modified time
    Searched full:tokenizer (Results 76 - 100 of 504) sorted by null

1 2 3 4 5 6 7 8 9 10 11 >>

  /external/smack/src/org/xbill/DNS/
DHCIDRecord.java 43 rdataFromString(Tokenizer st, Name origin) throws IOException {
SingleNameBase.java 42 rdataFromString(Tokenizer st, Name origin) throws IOException {
UNKRecord.java 33 rdataFromString(Tokenizer st, Name origin) throws IOException {
Master.java 23 private Tokenizer st;
38 st = new Tokenizer(file);
87 st = new Tokenizer(in);
257 catch (Tokenizer.TokenizerException e) {
274 Tokenizer.Token token;
293 if (token.type == Tokenizer.WHITESPACE) {
294 Tokenizer.Token next = st.get();
295 if (next.type == Tokenizer.EOL)
297 else if (next.type == Tokenizer.EOF)
305 else if (token.type == Tokenizer.EOL
    [all...]
ISDNRecord.java 54 rdataFromString(Tokenizer st, Name origin) throws IOException {
57 Tokenizer.Token t = st.get();
  /prebuilts/python/darwin-x86/2.7.5/include/python2.7/
parsetok.h 2 /* Parser-tokenizer link interface */
  /prebuilts/python/linux-x86/2.7.5/include/python2.7/
parsetok.h 2 /* Parser-tokenizer link interface */
  /system/core/include/utils/
PropertyMap.h 23 #include <utils/Tokenizer.h>
86 Tokenizer* mTokenizer;
89 Parser(PropertyMap* map, Tokenizer* tokenizer);
  /external/antlr/antlr-3.4/runtime/ObjC/ANTLR.framework/Headers/
ANTLRTreePatternParser.h 39 ANTLRTreePatternLexer *tokenizer; variable
50 - (id) initWithTokenizer:(ANTLRTreePatternLexer *)tokenizer
  /external/antlr/antlr-3.4/runtime/ObjC/ANTLR.framework/Versions/A/Headers/
ANTLRTreePatternParser.h 39 ANTLRTreePatternLexer *tokenizer; variable
50 - (id) initWithTokenizer:(ANTLRTreePatternLexer *)tokenizer
  /external/antlr/antlr-3.4/runtime/ObjC/ANTLR.framework/Versions/Current/Headers/
ANTLRTreePatternParser.h 39 ANTLRTreePatternLexer *tokenizer; variable
50 - (id) initWithTokenizer:(ANTLRTreePatternLexer *)tokenizer
  /external/chromium/base/
string_tokenizer.h 14 // StringTokenizerT is a simple string tokenizer class. It works like an
17 // configure the tokenizer to return delimiters.
101 // The string object must live longer than the tokenizer. (In particular this
114 // Set the options for this tokenizer. By default, this is 0.
118 // a quote char is encountered, the tokenizer will switch into a mode where
124 // Call this method to advance the tokenizer to the next delimiter. This
125 // returns false if the tokenizer is complete. This method must be called
139 // Returns true if token is a delimiter. When the tokenizer is constructed
  /external/chromium/net/http/
http_auth_handler_unittest.cc 38 HttpAuth::ChallengeTokenizer tokenizer(
44 mock_handler.InitFromChallenge(&tokenizer, target,
  /external/chromium_org/base/strings/
string_tokenizer.h 15 // StringTokenizerT is a simple string tokenizer class. It works like an
18 // configure the tokenizer to return delimiters.
102 // The string object must live longer than the tokenizer. (In particular this
115 // Set the options for this tokenizer. By default, this is 0.
119 // a quote char is encountered, the tokenizer will switch into a mode where
125 // Call this method to advance the tokenizer to the next delimiter. This
126 // returns false if the tokenizer is complete. This method must be called
140 // Returns true if token is a delimiter. When the tokenizer is constructed
  /external/chromium_org/net/http/
http_auth_handler_unittest.cc 36 HttpAuth::ChallengeTokenizer tokenizer(
43 mock_handler.InitFromChallenge(&tokenizer, target,
  /external/chromium_org/third_party/WebKit/Source/devtools/front_end/cm/
htmlembedded.js 10 //tokenizer when in html mode
20 //tokenizer when in scripting mode
  /frameworks/native/opengl/libagl/
TokenManager.h 28 #include "Tokenizer.h"
46 Tokenizer mTokenizer;
  /libcore/luni/src/test/java/libcore/java/io/
OldStreamTokenizerTest.java 97 StreamTokenizer tokenizer = new StreamTokenizer(new ByteArrayInputStream(data)); local
99 tokenizer.nextToken();
103 String result = tokenizer.toString();
115 StreamTokenizer tokenizer = new StreamTokenizer(new ByteArrayInputStream(data)); local
117 tokenizer.nextToken();
121 String result = tokenizer.toString();
  /external/chromium_org/third_party/protobuf/java/src/main/java/com/google/protobuf/
TextFormat.java 573 private static final class Tokenizer {
611 /** Construct a tokenizer that parses tokens from the given text. */
612 private Tokenizer(final CharSequence text) {
1059 final Tokenizer tokenizer = new Tokenizer(input); local
    [all...]
  /external/protobuf/java/src/main/java/com/google/protobuf/
TextFormat.java 407 private static final class Tokenizer {
445 /** Construct a tokenizer that parses tokens from the given text. */
446 private Tokenizer(final CharSequence text) {
852 final Tokenizer tokenizer = new Tokenizer(input); local
854 while (!tokenizer.atEnd()) {
855 mergeField(tokenizer, extensionRegistry, builder);
860 * Parse a single field from {@code tokenizer} and merge it into
863 private static void mergeField(final Tokenizer tokenizer
    [all...]
  /external/antlr/antlr-3.4/runtime/Ruby/lib/antlr3/tree/
wizard.rb 193 def initialize( tokenizer, token_scheme, adaptor )
194 @tokenizer = tokenizer
197 @token_type = tokenizer.next_token
215 @token_type = @tokenizer.next_token
230 @token_type = @tokenizer.next_token
237 ( @token_type = @tokenizer.next_token ) == :identifier or return nil
238 label = @tokenizer.text
239 ( @token_type = @tokenizer.next_token ) == :colon or return nil
240 @token_type = @tokenizer.next_toke
    [all...]
  /external/chromium/chrome/browser/
shell_integration_linux.cc 310 StringTokenizer tokenizer(xdg_data_dirs, ":");
311 while (tokenizer.GetNext()) {
312 FilePath data_dir(tokenizer.token());
375 StringTokenizer tokenizer(template_contents, "\n");
376 while (tokenizer.GetNext()) {
377 if (tokenizer.token().substr(0, 5) == "Exec=") {
378 std::string exec_path = tokenizer.token().substr(5);
400 } else if (tokenizer.token().substr(0, 5) == "Name=") {
411 } else if (tokenizer.token().substr(0, 11) == "GenericName" ||
412 tokenizer.token().substr(0, 7) == "Comment" |
    [all...]
  /external/antlr/antlr-3.4/runtime/Python/antlr3/
treewizard.py 164 def __init__(self, tokenizer, wizard, adaptor):
165 self.tokenizer = tokenizer
168 self.ttype = tokenizer.nextToken() # kickstart
189 self.ttype = self.tokenizer.nextToken()
209 self.ttype = self.tokenizer.nextToken()
218 self.ttype = self.tokenizer.nextToken()
222 label = self.tokenizer.sval
223 self.ttype = self.tokenizer.nextToken()
227 self.ttype = self.tokenizer.nextToken() # move to ID following colo
    [all...]
  /external/protobuf/src/google/protobuf/compiler/
parser.cc 46 #include <google/protobuf/io/tokenizer.h>
114 inline bool Parser::LookingAtType(io::Tokenizer::TokenType token_type) {
119 return LookingAtType(io::Tokenizer::TYPE_END);
150 if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) {
161 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) {
163 if (!io::Tokenizer::ParseInteger(input_->current().text,
179 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) {
180 if (!io::Tokenizer::ParseInteger(input_->current().text, max_value,
195 if (LookingAtType(io::Tokenizer::TYPE_FLOAT)) {
196 *output = io::Tokenizer::ParseFloat(input_->current().text)
    [all...]
  /external/chromium_org/third_party/protobuf/src/google/protobuf/io/
tokenizer.h 52 class Tokenizer;
82 class LIBPROTOBUF_EXPORT Tokenizer {
84 // Construct a Tokenizer that reads and tokenizes text from the given
87 Tokenizer(ZeroCopyInputStream* input, ErrorCollector* error_collector);
88 ~Tokenizer();
191 // comes from a TYPE_FLOAT token parsed by Tokenizer. If it doesn't, the
196 // comes from a TYPE_STRING token parsed by Tokenizer. If it doesn't, the
206 // parsed by a Tokenizer, the result is undefined (possibly an assert
233 GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer);
367 inline const Tokenizer::Token& Tokenizer::current()
    [all...]

Completed in 342 milliseconds

1 2 3 4 5 6 7 8 9 10 11 >>