    Searched refs:tokens (Results 426 - 450 of 1090)


  /external/antlr/antlr-3.4/runtime/ObjC/ANTLR.framework/Versions/Current/Headers/
ANTLRBufferedTreeNodeStream.h 71 id<ANTLRTokenStream> tokens; variable
89 @property (retain, getter=getTokenStream, setter=setTokenStream:) id<ANTLRTokenStream> tokens; variable
123 - (void) setTokenStream:(id<ANTLRTokenStream>) tokens;
  /external/antlr/antlr-3.4/runtime/ObjC/Framework/
ANTLRBufferedTreeNodeStream.h 72 id<ANTLRTokenStream> tokens; variable
90 @property (retain, getter=getTokenStream, setter=setTokenStream:) id<ANTLRTokenStream> tokens; variable
125 - (void) setTokenStream:(id<ANTLRTokenStream>) tokens;
  /external/chromium_org/chrome/browser/ui/webui/
identity_internals_ui.cc 87 // Gets all of the tokens stored in IdentityAPI token cache and returns them
97 // A vector of token revokers that are currently revoking tokens.
223 extensions::IdentityAPI::CachedTokens tokens = local
228 iter = tokens.begin(); iter != tokens.end(); ++iter) {
  /external/chromium_org/extensions/common/permissions/
socket_permission_entry.cc 131 std::vector<std::string> tokens; local
132 base::SplitStringDontTrim(pattern, kColon, &tokens);
133 return ParseHostPattern(type, tokens, entry);
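The socket_permission_entry.cc hits above split a permission pattern on ':' before handing the pieces to ParseHostPattern. A minimal standard-library C++ sketch of that splitting step (the helper name and sample pattern are illustrative, not Chromium's base::SplitStringDontTrim, which also keeps a trailing empty piece):

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Split a permission pattern such as "tcp-connect:*.example.com:80" on ':'
// without trimming whitespace, mimicking the tokens vector built in
// socket_permission_entry.cc.
std::vector<std::string> SplitOnColon(const std::string& pattern) {
  std::vector<std::string> tokens;
  std::stringstream stream(pattern);
  std::string piece;
  while (std::getline(stream, piece, ':'))
    tokens.push_back(piece);
  return tokens;
}

int main() {
  for (const std::string& token : SplitOnColon("tcp-connect:*.example.com:80"))
    std::cout << token << '\n';  // tcp-connect / *.example.com / 80
}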
  /external/chromium_org/google_apis/gaia/
gaia_auth_fetcher.cc 293 // All tokens should be session tokens except the gaia auth token.
380 // Helper method that extracts tokens from a successful reply.
392 vector<pair<string, string> > tokens; local
393 base::SplitStringIntoKeyValuePairs(data, '=', '\n', &tokens);
394 for (vector<pair<string, string> >::iterator i = tokens.begin();
395 i != tokens.end(); ++i) {
430 vector<pair<string, string> > tokens;
431 base::SplitStringIntoKeyValuePairs(data, '=', '\n', &tokens);
432 for (vector<pair<string, string> >::iterator i = tokens.begin()
870 base::StringPairs tokens; local
    [all...]
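The gaia_auth_fetcher.cc hits extract tokens from a newline-delimited "key=value" reply via base::SplitStringIntoKeyValuePairs(data, '=', '\n', &tokens). Below is a self-contained C++ approximation using only the standard library; the function name and sample data are made up for illustration and the real helper also reports parse failures to its caller.

#include <iostream>
#include <sstream>
#include <string>
#include <utility>
#include <vector>

// Parse a reply body of the form "SID=...\nLSID=...\nAuth=..." into
// (key, value) pairs, approximating what gaia_auth_fetcher.cc gets back
// from base::SplitStringIntoKeyValuePairs(data, '=', '\n', &tokens).
std::vector<std::pair<std::string, std::string>> ParseKeyValueReply(
    const std::string& data) {
  std::vector<std::pair<std::string, std::string>> tokens;
  std::istringstream lines(data);
  std::string line;
  while (std::getline(lines, line, '\n')) {
    const std::string::size_type eq = line.find('=');
    if (eq == std::string::npos)
      continue;  // Skip malformed lines; the real helper signals failure.
    tokens.emplace_back(line.substr(0, eq), line.substr(eq + 1));
  }
  return tokens;
}

int main() {
  for (const auto& kv : ParseKeyValueReply("SID=abc\nLSID=def\nAuth=ghi"))
    std::cout << kv.first << " -> " << kv.second << '\n';
}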
  /external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/tool/bot/
commitannouncer.py 163 tokens = line[len(svn_string):].split()
164 if not tokens:
166 revision = tokens[0]
  /external/chromium_org/third_party/mesa/src/src/gallium/auxiliary/draw/
draw_pipe_aaline.c 53 /** Approx number of new tokens for instructions in aa_transform_inst() */
356 const uint newLen = tgsi_num_tokens(orig_fs->tokens) + NUM_NEW_TOKENS;
359 aaline_fs.tokens = tgsi_alloc_tokens(newLen);
360 if (aaline_fs.tokens == NULL)
373 tgsi_transform_shader(orig_fs->tokens,
374 (struct tgsi_token *) aaline_fs.tokens,
379 tgsi_dump(orig_fs->tokens, 0);
381 tgsi_dump(aaline_fs.tokens, 0);
391 FREE((void *)aaline_fs.tokens);
395 FREE((void *)aaline_fs.tokens);
    [all...]
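Both draw_pipe_aaline.c and draw_pipe_aapoint.c follow the same token-buffer pattern: size a new buffer as tgsi_num_tokens(orig_fs->tokens) plus a fixed NUM_NEW_TOKENS headroom, run tgsi_transform_shader() into it, and FREE() it on every failure path. A hedged C++ sketch of that sizing-and-copy step, using hypothetical stand-ins (Token, AllocTransformedTokens) rather than the real TGSI API:

#include <cstdlib>
#include <cstring>
#include <iostream>

// Hypothetical token type; Mesa's real code uses struct tgsi_token together
// with tgsi_num_tokens(), tgsi_alloc_tokens() and tgsi_transform_shader().
struct Token { unsigned word; };

// Size the output buffer as "original length + fixed headroom", the same
// sizing rule draw_pipe_aaline.c uses (NUM_NEW_TOKENS extra instructions),
// then copy the original stream in as the starting point for a transform.
Token* AllocTransformedTokens(const Token* orig, unsigned orig_len,
                              unsigned extra, unsigned* new_len) {
  *new_len = orig_len + extra;
  Token* out = static_cast<Token*>(std::calloc(*new_len, sizeof(Token)));
  if (out == nullptr)
    return nullptr;  // caller must bail out, as the aaline code does
  std::memcpy(out, orig, orig_len * sizeof(Token));
  return out;
}

int main() {
  Token orig[4] = {{1}, {2}, {3}, {4}};
  unsigned new_len = 0;
  Token* grown = AllocTransformedTokens(orig, 4, 53, &new_len);
  if (grown != nullptr) {
    std::cout << "new length: " << new_len << '\n';
    std::free(grown);  // mirrors FREE(aaline_fs.tokens) on the cleanup paths
  }
}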
draw_pipe_aapoint.c 56 /** Approx number of new tokens for instructions in aa_transform_inst() */
500 const uint newLen = tgsi_num_tokens(orig_fs->tokens) + NUM_NEW_TOKENS;
504 aapoint_fs.tokens = tgsi_alloc_tokens(newLen);
505 if (aapoint_fs.tokens == NULL)
518 tgsi_transform_shader(orig_fs->tokens,
519 (struct tgsi_token *) aapoint_fs.tokens,
524 tgsi_dump(orig_fs->tokens, 0);
526 tgsi_dump(aapoint_fs.tokens, 0);
535 FREE((void *)aapoint_fs.tokens);
539 FREE((void *)aapoint_fs.tokens);
    [all...]
  /external/chromium_org/third_party/mesa/src/src/gallium/drivers/rbug/
rbug_objects.c 210 rb_shader->tokens = tgsi_dup_tokens(state->tokens);
248 FREE(rb_shader->tokens);
  /external/chromium_org/ui/base/x/
selection_utils.cc 83 std::vector<std::string> tokens; local
84 Tokenize(unparsed, "\n", &tokens);
85 return tokens;
  /external/mesa3d/src/gallium/auxiliary/draw/
draw_pipe_aaline.c 53 /** Approx number of new tokens for instructions in aa_transform_inst() */
356 const uint newLen = tgsi_num_tokens(orig_fs->tokens) + NUM_NEW_TOKENS;
359 aaline_fs.tokens = tgsi_alloc_tokens(newLen);
360 if (aaline_fs.tokens == NULL)
373 tgsi_transform_shader(orig_fs->tokens,
374 (struct tgsi_token *) aaline_fs.tokens,
379 tgsi_dump(orig_fs->tokens, 0);
381 tgsi_dump(aaline_fs.tokens, 0);
391 FREE((void *)aaline_fs.tokens);
395 FREE((void *)aaline_fs.tokens);
    [all...]
draw_pipe_aapoint.c 56 /** Approx number of new tokens for instructions in aa_transform_inst() */
500 const uint newLen = tgsi_num_tokens(orig_fs->tokens) + NUM_NEW_TOKENS;
504 aapoint_fs.tokens = tgsi_alloc_tokens(newLen);
505 if (aapoint_fs.tokens == NULL)
518 tgsi_transform_shader(orig_fs->tokens,
519 (struct tgsi_token *) aapoint_fs.tokens,
524 tgsi_dump(orig_fs->tokens, 0);
526 tgsi_dump(aapoint_fs.tokens, 0);
535 FREE((void *)aapoint_fs.tokens);
539 FREE((void *)aapoint_fs.tokens);
    [all...]
  /external/mesa3d/src/gallium/drivers/rbug/
rbug_objects.c 210 rb_shader->tokens = tgsi_dup_tokens(state->tokens);
248 FREE(rb_shader->tokens);
  /external/robolectric/src/main/java/com/xtremelabs/robolectric/shadows/
ShadowAccountManager.java 192 HashMap<String, String> tokens = cachedAuthTokenValues.get(account);
193 return (tokens != null) ? tokens.get(authTokenType) : null;
  /frameworks/av/drm/mediadrm/plugins/clearkey/
JsonWebKey.cpp 179 * Parses a JSON objects string and initializes a vector of tokens.
184 Vector<String8>* tokens) {
207 tokens->clear();
215 tokens->add(token);
236 // Computes number of tokens. A token marks the type, offset in
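JsonWebKey.cpp describes its parser as producing tokens that record a type and offsets into the JSON input, which are then copied into a Vector<String8>. A rough C++ sketch of that offset-to-string step, with a hypothetical JsonToken type and hand-picked offsets (the real code computes them with a JSON tokenizer):

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical offset-based token: it marks where a JSON value starts and
// ends, and the parser copies that slice of the input into a string vector.
struct JsonToken {
  std::size_t start;  // offset of the first character of the value
  std::size_t end;    // offset one past the last character
};

std::vector<std::string> ExtractTokens(const std::string& json,
                                       const std::vector<JsonToken>& marks) {
  std::vector<std::string> tokens;
  tokens.clear();  // the real code clears the output vector before filling it
  for (const JsonToken& t : marks)
    tokens.push_back(json.substr(t.start, t.end - t.start));
  return tokens;
}

int main() {
  const std::string json = R"({"kty":"oct","k":"GawgguFyGrWKav7AX4VKUg"})";
  // The offsets below were chosen by hand for this example string.
  for (const std::string& token : ExtractTokens(json, {{2, 5}, {8, 11}}))
    std::cout << token << '\n';  // kty / oct
}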
  /frameworks/base/media/java/android/media/session/
MediaSessionManager.java 340 public void onActiveSessionsChanged(final List<MediaSession.Token> tokens) {
347 int size = tokens.size();
349 controllers.add(new MediaController(mContext, tokens.get(i)));
  /frameworks/opt/chips/src/com/android/ex/chips/
RecipientEntry.java 120 final Rfc822Token[] tokens = Rfc822Tokenizer.tokenize(address); local
121 final String tokenizedAddress = tokens.length > 0 ? tokens[0].getAddress() : address;
  /frameworks/opt/net/wifi/service/java/com/android/server/wifi/
WifiNative.java 339 String[] tokens = ret.split(" = "); local
340 if (tokens.length == 2) return tokens[1];
514 String[] tokens = ret.split(" "); local
516 if (tokens.length == 2) return Integer.parseInt(tokens[1]);
986 String[] tokens = status.split("\\n"); local
    [all...]
  /packages/apps/QuickSearchBox/tests/src/com/android/quicksearchbox/
LevenshteinFormatterTest.java 44 Token[] tokens = mFormatter.tokenize(input); local
45 assertEquals(output.length, tokens.length);
47 assertEquals(output[i], tokens[i].toString());
61 verifyTokenizeResult("two tokens", "two", "tokens");
  /external/antlr/antlr-3.4/runtime/CSharp2/Sources/Antlr3.Runtime/Antlr.Runtime.Tree/
BufferedTreeNodeStream.cs 134 protected ITokenStream tokens; field in class:Antlr.Runtime.Tree.BufferedTreeNodeStream
195 return tokens;
198 tokens = value;
515 if (tokens != null) {
525 return tokens.ToString(beginTokenIndex, endTokenIndex);
  /external/antlr/antlr-3.4/runtime/CSharp3/Sources/Antlr3.Runtime/Tree/
BufferedTreeNodeStream.cs 143 protected ITokenStream tokens; field in class:Antlr.Runtime.Tree.BufferedTreeNodeStream
216 return tokens;
220 tokens = value;
612 if ( tokens != null )
626 return tokens.ToString( beginTokenIndex, endTokenIndex );
  /external/antlr/antlr-3.4/runtime/Java/src/main/java/org/antlr/runtime/
BaseRecognizer.java 148 //System.out.println("viable tokens="+follow.toString(getTokenNames()));
378 * rule invocation, the parser pushes the set of tokens that can
381 * enclosing rule. This local follow set only includes tokens
389 * thing to do is to consume tokens until you see something that
391 * You don't want the exact set of viable next tokens because the
393 * rest of the input looking for one of the missing tokens.
405 * At each rule invocation, the set of tokens that could follow
430 * all context-sensitive FOLLOW sets--the set of all tokens that
432 * resync to one of those tokens. Note that FOLLOW(c)='^' and if
475 * viable tokens that can possibly come next (lookahead depth 1
    [all...]
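The BaseRecognizer.java comments above describe ANTLR's recovery strategy: at each rule invocation the parser pushes the set of tokens that can follow that rule, and on a mismatch it consumes input until the lookahead appears in the union of those context-sensitive FOLLOW sets. A conceptual C++ sketch of that resync loop (not ANTLR's actual Java code; all names are illustrative):

#include <cstddef>
#include <iostream>
#include <set>
#include <vector>

using TokenType = int;

// Union the FOLLOW sets pushed for each enclosing rule, then discard tokens
// until one of them shows up, so parsing can resume in some enclosing rule.
TokenType Recover(const std::vector<std::set<TokenType>>& follow_stack,
                  const std::vector<TokenType>& input, std::size_t& pos) {
  std::set<TokenType> resync;
  for (const auto& follow : follow_stack)
    resync.insert(follow.begin(), follow.end());

  while (pos < input.size() && resync.count(input[pos]) == 0)
    ++pos;  // panic mode: throw tokens away until a viable follower appears
  return pos < input.size() ? input[pos] : -1;  // -1 stands in for EOF here
}

int main() {
  std::vector<std::set<TokenType>> follow_stack = {{5}, {7, 9}};
  std::vector<TokenType> input = {3, 4, 4, 9, 5};
  std::size_t pos = 0;
  std::cout << "resynced at token " << Recover(follow_stack, input, pos)
            << " (index " << pos << ")\n";  // token 9 at index 3
}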
  /external/chromium_org/third_party/skia/tools/
render_pictures_main.cpp 514 SkTArray<SkString> tokens; local
515 SkStrSplit(FLAGS_descriptions[i], "=", &tokens);
516 SkASSERT(tokens.count() == 2);
517 jsonSummary.addDescription(tokens[0].c_str(), tokens[1].c_str());
  /prebuilts/python/darwin-x86/2.7.5/lib/python2.7/lib2to3/pgen2/
tokenize.py 7 text into Python tokens. It accepts a readline-like method which is called
18 that it produces COMMENT tokens for comments and gives type OP for all
24 are the same, except instead of generating tokens, tokeneater is a callback
186 self.tokens = []
195 self.tokens.append(" " * col_offset)
204 self.tokens.append(token)
209 return "".join(self.tokens)
214 toks_append = self.tokens.append
325 """Transform tokens back into Python source code.
329 only two tokens are passed, the resulting output is poor
    [all...]
  /prebuilts/python/darwin-x86/2.7.5/lib/python2.7/
tokenize.py 4 text into Python tokens. It accepts a readline-like method which is called
15 that it produces COMMENT tokens for comments and gives type OP for all
21 are the same, except instead of generating tokens, tokeneater is a callback
181 self.tokens = []
190 self.tokens.append(" " * col_offset)
199 self.tokens.append(token)
204 return "".join(self.tokens)
209 toks_append = self.tokens.append
244 """Transform tokens back into Python source code.
248 only two tokens are passed, the resulting output is poor
    [all...]
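Both copies of tokenize.py shown above rebuild source text the same way: an Untokenizer appends column padding and token strings to a list and joins them at the end. A loose C++ analogue of that accumulate-and-join pattern (class and method names are invented for this sketch):

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Collect token texts plus any column padding, then concatenate them,
// roughly like self.tokens.append(...) and "".join(self.tokens) in
// the Python Untokenizer.
class Untokenizer {
 public:
  void AddPadding(std::size_t col_offset) {
    tokens_.push_back(std::string(col_offset, ' '));
  }
  void AddToken(const std::string& token) { tokens_.push_back(token); }
  std::string Result() const {
    std::string out;
    for (const std::string& piece : tokens_)
      out += piece;
    return out;
  }

 private:
  std::vector<std::string> tokens_;
};

int main() {
  Untokenizer u;
  u.AddToken("x");
  u.AddPadding(1);
  u.AddToken("=");
  u.AddPadding(1);
  u.AddToken("1");
  std::cout << u.Result() << '\n';  // x = 1
}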
