HomeSort by relevance Sort by last modified time
    Searched defs:tokens (Results 51 - 75 of 249) sorted by null

1 2 3 4 5 6 7 8 9 10

  /packages/apps/QuickSearchBox/tests/src/com/android/quicksearchbox/util/
LevenshteinDistanceTest.java 60 Token[] tokens = new Token[strings.length]; local
63 tokens[i] = new Token(str.toCharArray(), 0, str.length());
65 return tokens;
  /external/antlr/antlr-3.4/runtime/Java/src/main/java/org/antlr/runtime/
LegacyCommonTokenStream.java 32 /** The most common stream of tokens is one where every token is buffered up
33 * and tokens are prefiltered for a certain channel (the parser will only
34 * see these tokens and cannot change the filter channel number during the
37 * TODO: how to access the full token stream? How to track all tokens matched per rule?
45 protected List tokens; field in class:LegacyCommonTokenStream
47 /** Map<tokentype, channel> to override some Tokens' channel numbers */
50 /** Set<tokentype>; discard any tokens with this type */
53 /** Skip tokens on any channel but this one; this is how we skip whitespace... */
56 /** By default, track all incoming tokens */
64 /** The index into the tokens list of the current token (next toke
    [all...]
  /external/antlr/antlr-3.4/tool/src/main/java/org/antlr/tool/
AssignTokenTypesBehavior.java 43 protected Map<String,Integer> tokens = new TreeMap<String, Integer>(); field in class:AssignTokenTypesBehavior
66 /** Track string literals (could be in tokens{} section) */
69 // if lexer, don't allow aliasing in tokens section
89 // Don't record literals for lexers, they are things to match not tokens
94 // or in the tokens{} section
109 tokens.get(t.getText())==null )
111 tokens.put(t.getText(), UNASSIGNED);
126 // don't add Tokens rule
136 tokens.put(t.getText(), UNASSIGNED);
168 // duplicate but identical alias; might be tokens {A='a'} an
    [all...]
  /packages/providers/ContactsProvider/src/com/android/providers/contacts/
HanziToPinyin.java 401 * Convert the input to a array of tokens. The sequence of ASCII or Unknown characters without
406 ArrayList<Token> tokens = new ArrayList<Token>(); local
408 // return empty tokens.
409 return tokens;
422 addToken(sb, tokens, tokenType);
426 addToken(sb, tokens, tokenType);
432 addToken(sb, tokens, tokenType);
440 addToken(sb, tokens, tokenType);
442 tokens.add(t);
446 addToken(sb, tokens, tokenType)
    [all...]
ProfileAwareUriMatcher.java 70 String[] tokens = PATH_SPLIT_PATTERN.split(path); local
71 if (tokens != null) {
75 // also count as lookup tokens, since the vcard is specified by lookup key.
77 for (int i = 0; i < tokens.length; i++) {
78 String token = tokens[i];
  /cts/suite/audio_quality/lib/src/task/
TaskProcess.cpp 253 std::vector<android::String8>& tokens = *(paramTokens.get()); local
254 for (size_t i = 0; i < tokens.size(); i++) {
255 UniquePtr<std::vector<android::String8> > itemTokens(StringUtil::split(tokens[i], ':'));
298 std::vector<android::String8>* tokens = tokenPtr.get(); local
299 if (tokens == NULL) {
303 if (tokens->size() != 2) {
307 if (StringUtil::compare(tokens->at(0), "builtin") == 0) {
309 } else if (StringUtil::compare(tokens->at(0), "script") == 0) {
315 mName.append(tokens->at(1));
  /cts/tests/tests/text/src/android/text/util/cts/
Rfc822TokenizerTest.java 114 Rfc822Token[] tokens = Rfc822Tokenizer.tokenize(""); local
115 assertEquals(0, tokens.length);
118 tokens = Rfc822Tokenizer.tokenize(text);
119 assertEquals(2, tokens.length);
120 localAssertEquals(tokens[0], "Berg", "berg\\@google.com", "home");
121 localAssertEquals(tokens[1], null, "tom\\@google.com", "work");
124 tokens = Rfc822Tokenizer.tokenize(text);
125 assertEquals(2, tokens.length);
126 localAssertEquals(tokens[0], "Foo Bar", "foo\\@google.com", "something");
127 localAssertEquals(tokens[1], null, "blah\\@google.com", "something")
    [all...]
  /external/antlr/antlr-3.4/runtime/ActionScript/project/src/org/antlr/runtime/
TokenRewriteStream.as 38 * all the time. As the buffer of tokens is converted to strings, the
46 * index i does not change the index values for tokens i+1..n-1.
56 * var tokens:TokenRewriteStream = new TokenRewriteStream(lex);
57 * var parser:T = new T(tokens);
65 * trace(tokens.toString());
75 * tokens.insertAfter("pass1", t, "text to put after t");}
76 * tokens.insertAfter("pass2", u, "text after u");}
77 * trace(tokens.toString("pass1"));
78 * trace(tokens.toString("pass2"));
142 if ( fromIndex > toIndex || fromIndex<0 || toIndex<0 || toIndex >= tokens.length )
    [all...]
  /external/antlr/antlr-3.4/runtime/CSharp2/Sources/Antlr3.Runtime/Antlr.Runtime/
LegacyCommonTokenStream.cs 43 * The most common stream of tokens is one where every token is buffered up
44 * and tokens are prefiltered for a certain channel (the parser will only
45 * see these tokens and cannot change the filter channel number during the
49 * <remarks>TODO: how to access the full token stream? How to track all tokens matched per rule?</remarks>
61 protected List<IToken> tokens; field in class:Antlr.Runtime.LegacyCommonTokenStream
63 /** <summary>Map<tokentype, channel> to override some Tokens' channel numbers</summary> */
66 /** <summary>Set<tokentype>; discard any tokens with this type</summary> */
69 /** <summary>Skip tokens on any channel but this one; this is how we skip whitespace...</summary> */
72 /** <summary>By default, track all incoming tokens</summary> */
79 * The index into the tokens list of the current token (next toke
    [all...]
  /external/antlr/antlr-3.4/runtime/CSharp3/Sources/Antlr3.Runtime/
LegacyCommonTokenStream.cs 41 * The most common stream of tokens is one where every token is buffered up
42 * and tokens are prefiltered for a certain channel (the parser will only
43 * see these tokens and cannot change the filter channel number during the
47 * <remarks>TODO: how to access the full token stream? How to track all tokens matched per rule?</remarks>
60 protected List<IToken> tokens; field in class:Antlr.Runtime.LegacyCommonTokenStream
62 /** <summary>Map<tokentype, channel> to override some Tokens' channel numbers</summary> */
65 /** <summary>Set<tokentype>; discard any tokens with this type</summary> */
68 /** <summary>Skip tokens on any channel but this one; this is how we skip whitespace...</summary> */
71 /** <summary>By default, track all incoming tokens</summary> */
78 * The index into the tokens list of the current token (next toke
    [all...]
  /external/antlr/antlr-3.4/runtime/Ruby/lib/antlr3/
streams.rb 72 parsers with the means to sequential walk through series of tokens.
79 In a similar fashion to CommonTokenStream, CommonTreeNodeStream feeds tokens
82 the two-dimensional shape of the tree using special UP and DOWN tokens. The
99 is the <i>integer token type of the token</i> <tt>k</tt> tokens ahead of the
108 <b>TokenStreams</b>, this is the <i>full token structure</i> <tt>k</tt> tokens
282 sequence of tokens. Unlike simple character-based streams, such as StringStream,
287 <i>channel</i> feature, which allows you to hold on to all tokens of interest
288 while only presenting a specific set of interesting tokens to a parser. For
291 whitespace to channel value HIDDEN as it creates the tokens.
295 yield tokens that have the same value for <tt>channel</tt>. The stream skip
1044 def tokens( start = nil, stop = nil ) method in class:ANTLR3.that.CommonTokenStream
    [all...]
  /external/antlr/antlr-3.4/tool/src/main/java/org/antlr/analysis/
MachineProbe.java 102 List<String> tokens = new ArrayList<String>(); local
104 tokens.add(label.toString(g));
105 return tokens.toString();
110 * tokens (from grammar) associated with path through NFA following the
117 List<Token> tokens = new ArrayList<Token>(); local
137 tokens.add(token);
145 return tokens;
  /external/bluetooth/glib/gio/
gicon.c 166 GPtrArray *tokens; local
180 tokens = g_ptr_array_new ();
181 if (!icon_iface->to_tokens (icon, tokens, &version))
183 g_ptr_array_free (tokens, TRUE);
189 all the tokens are url escaped to ensure they have no spaces in them */
195 for (i = 0; i < tokens->len; i++)
199 token = g_ptr_array_index (tokens, i);
209 g_ptr_array_free (tokens, TRUE);
301 g_icon_new_from_tokens (char **tokens,
317 num_tokens = g_strv_length (tokens);
458 gchar **tokens; local
    [all...]
  /external/chromium/chrome/common/extensions/docs/examples/extensions/irc/servlet/src/org/chromium/
IRCProxyWebSocket.java 57 String tokens[] = data.split(":"); local
58 socket_ = SocketFactory.getDefault().createSocket(tokens[0],
59 Integer.parseInt(tokens[1]));
  /external/jmonkeyengine/engine/src/core-plugins/com/jme3/font/plugins/
BitmapFontLoader.java 64 String[] tokens = line.split(regex); local
65 if (tokens[0].equals("info")){
67 for (int i = 1; i < tokens.length; i++){
68 if (tokens[i].equals("size")){
69 charSet.setRenderedSize(Integer.parseInt(tokens[i + 1]));
72 }else if (tokens[0].equals("common")){
74 for (int i = 1; i < tokens.length; i++){
75 String token = tokens[i];
77 charSet.setLineHeight(Integer.parseInt(tokens[i + 1]));
79 charSet.setBase(Integer.parseInt(tokens[i + 1]))
    [all...]
  /external/smali/smali/src/test/java/
LexerTest.java 130 String tokensFile = String.format("LexerTest%s%s.tokens", File.separatorChar, test);
161 List tokens = tokenStream.getTokens(); local
165 for (int i=0; i<tokens.size(); i++) {
166 token = (CommonToken)tokens.get(i);
173 Assert.fail("Too many tokens");
202 Assert.fail(String.format("Not enough tokens. Expecting %d tokens, but got %d", expectedTokens.size(),
  /external/srec/portable/src/
PFileSystem.c 181 LCHAR** tokens = NULL; local
207 CHKLOG(rc, PFileSystemLinearToPathTokens(path, &tokens, &tokenLen));
212 if (LSTRCMP(tokens[i], L("../")) == 0)
220 else if (LSTRCMP(tokens[i], L("./")) == 0)
232 LSTRCAT(path, tokens[i]);
233 FREE(tokens[i]);
234 tokens[i] = NULL;
236 FREE(tokens);
239 if (tokens != NULL)
243 FREE(tokens[i])
    [all...]
  /external/webkit/Source/ThirdParty/ANGLE/src/compiler/preprocessor/
tokens.c 45 // tokens.c
337 TokenStream *tokens; member in struct:TokenInputSrc
343 int token = ReadToken(in->tokens, yylvalpp);
367 in->tokens = ts;
435 /////////////////////////////////////// End of tokens.c ///////////////////////////////////////
  /frameworks/base/media/java/android/media/audiofx/
BassBoost.java 234 int tokens = st.countTokens(); local
Virtualizer.java 236 int tokens = st.countTokens(); local
  /frameworks/base/wifi/java/android/net/wifi/p2p/
WifiP2pGroup.java 82 String[] tokens = supplicantEvent.split(" "); local
84 if (tokens.length < 3) {
88 if (tokens[0].startsWith("P2P-GROUP")) {
89 mInterface = tokens[1];
90 mIsGroupOwner = tokens[2].equals("GO");
104 } else if (tokens[0].equals("P2P-INVITATION-RECEIVED")) {
105 for (String token : tokens) {
  /frameworks/native/opengl/tools/glgen/src/
CFunc.java 112 String[] tokens = s.split("\\s"); local
116 String ftypeName = tokens[i++];
119 ftypeName = tokens[i++];
123 String fname = tokens[i++];
126 fname = tokens[i++];
132 while (i < tokens.length) {
133 String tok = tokens[i++];
149 argTypeName = tokens[i++];
157 argName = tokens[i++];
  /packages/apps/QuickSearchBox/src/com/android/quicksearchbox/
LevenshteinSuggestionFormatter.java 73 * Finds which tokens in the target match tokens in the source.
75 * @param source List of source tokens (i.e. user query)
76 * @param target List of target tokens (i.e. suggestion)
77 * @return The indices into source which target tokens correspond to. A non-negative value n at
103 // There can't be more tokens than characters, make an array that is large enough
104 Token[] tokens = new Token[len]; local
116 tokens[tokenCount++] = new Token(chars, start, end);
121 System.arraycopy(tokens, 0, ret, 0, tokenCount);
  /external/smali/smali/src/main/java/org/jf/smali/
main.java 271 CommonTokenStream tokens; local
282 tokens = new CommonTokenStream((TokenSource)lexer);
289 tokens = new CommonTokenStream((TokenSource)lexer);
293 tokens.getTokens();
295 for (int i=0; i<tokens.size(); i++) {
296 Token token = tokens.get(i);
305 smaliParser parser = new smaliParser(tokens);
318 treeStream.setTokenStream(tokens);
414 Option printTokensOption = OptionBuilder.withLongOpt("print-tokens")
  /dalvik/vm/compiler/template/
gen-template.py 49 # Set handler_size_bytes to the value of tokens[1], and handler_size_bits to
53 def setHandlerSize(tokens):
55 if len(tokens) != 2:
61 handler_size_bytes = bytes = int(tokens[1])
76 def importFile(tokens):
77 if len(tokens) != 2:
79 source = tokens[1]
81 appendSourceFile(tokens[1], getGlobalSubDict(), asm_fp, None)
90 def setAsmStub(tokens):
92 if len(tokens) != 2
385 tokens = line.split(' ') # tokenize variable
    [all...]

Completed in 950 milliseconds

1 2 3 4 5 6 7 8 9 10