/packages/apps/QuickSearchBox/tests/src/com/android/quicksearchbox/ |
LevenshteinFormatterTest.java |
    44 Token[] tokens = mFormatter.tokenize(input);
    94 Token[] sourceTokens = mFormatter.tokenize(source);
    95 Token[] targetTokens = mFormatter.tokenize(target);
|
/prebuilts/python/darwin-x86/2.7.5/lib/python2.7/idlelib/ |
ScriptBinding.py |
    24 import tokenize namespace
    72 tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
    73 except tokenize.TokenError, msg:
|
/prebuilts/python/linux-x86/2.7.5/lib/python2.7/idlelib/ |
ScriptBinding.py |
    24 import tokenize namespace
    72 tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
    73 except tokenize.TokenError, msg:
|
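The ScriptBinding.py hits above come from IDLE's whitespace check: it feeds the file's token stream to tabnanny and catches tokenizer errors. A minimal standalone sketch of that pattern, assuming a hypothetical check_whitespace() helper and file name (Python 2.7, matching the prebuilts listed here):

    import tabnanny
    import tokenize

    def check_whitespace(filename):
        # Run tabnanny over the file's token stream, as ScriptBinding.py
        # does at line 72, and report tokenizer or indentation problems.
        with open(filename) as f:
            try:
                tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
            except tokenize.TokenError as msg:
                return "Token error: %s" % msg
            except tabnanny.NannyNag as nag:
                return "Inconsistent indentation at line %d" % nag.get_lineno()
        return None    # no whitespace problems found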
/external/sqlite/android/ |
sqlite3_android.cpp |
    258 static void tokenize(sqlite3_context * context, int argc, sqlite3_value ** argv) function
    260 //ALOGD("enter tokenize");
    266 ALOGE("Tokenize requires 4 to 6 arguments");
    333 // Get the raw bytes for the string to tokenize
    449 err = sqlite3_create_function(handle, "_TOKENIZE", 4, SQLITE_UTF16, collator, tokenize, NULL, NULL);
    453 err = sqlite3_create_function(handle, "_TOKENIZE", 5, SQLITE_UTF16, collator, tokenize, NULL, NULL);
    457 err = sqlite3_create_function(handle, "_TOKENIZE", 6, SQLITE_UTF16, collator, tokenize, NULL, NULL);
|
/packages/apps/UnifiedEmail/src/com/android/mail/compose/ |
FromAddressSpinnerAdapter.java | 107 return String.format(sFormatString, Rfc822Tokenizer.tokenize(address)[0].getAddress());
|
/prebuilts/python/darwin-x86/2.7.5/lib/python2.7/ |
cgitb.py |
    32 import tokenize namespace
    84 for ttype, token, start, end, line in tokenize.generate_tokens(reader):
    85 if ttype == tokenize.NEWLINE: break
    86 if ttype == tokenize.NAME and token not in keyword.kwlist:
|
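The cgitb.py hits at lines 84-86 scan a single source line for the identifiers it references. A rough sketch of that technique, with a hypothetical names_on_line() helper (Python 2.7):

    import keyword
    import tokenize

    def names_on_line(source_line):
        # Feed one line to generate_tokens and collect NAME tokens that are
        # not keywords, stopping at the logical end of the line.
        names = []
        readline = iter([source_line + "\n", ""]).next
        for ttype, token, start, end, line in tokenize.generate_tokens(readline):
            if ttype == tokenize.NEWLINE:
                break
            if ttype == tokenize.NAME and token not in keyword.kwlist:
                names.append(token)
        return names

    print names_on_line("total = price * quantity")   # ['total', 'price', 'quantity']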
inspect.py |
    39 import tokenize namespace
    651 elif type == tokenize.NEWLINE:
    658 elif type == tokenize.INDENT:
    661 elif type == tokenize.DEDENT:
    668 elif self.indent == 0 and type not in (tokenize.COMMENT, tokenize.NL):
    677 tokenize.tokenize(iter(lines).next, blockfinder.tokeneater) [all...] |
tokenize.py |
    20 tokenize(readline, tokeneater=printtoken)
    34 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
    155 def tokenize(readline, tokeneater=printtoken): function
    157 The tokenize() function accepts two parameters: one representing the
    158 input stream, and one providing an output mechanism for tokenize().
    254 # Output text will tokenize the back to the input
    351 ("<tokenize>", lnum, pos, line))
    424 tokenize(open(sys.argv[1]).readline)
    426 tokenize(sys.stdin.readline)
|
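The __all__ entry at line 34 of tokenize.py shows the module also exports untokenize, and the comment at line 254 alludes to its round-trip property: text rebuilt from (type, string) pairs tokenizes back to the same pairs. A small sketch of that invariant (Python 2.7; whitespace in the rebuilt source may differ from the original):

    import tokenize
    from StringIO import StringIO

    source = "def add(a, b):\n    return a + b\n"
    pairs = [(ttype, tok) for ttype, tok, _, _, _ in
             tokenize.generate_tokens(StringIO(source).readline)]
    rebuilt = tokenize.untokenize(pairs)          # source text, possibly respaced
    repairs = [(ttype, tok) for ttype, tok, _, _, _ in
               tokenize.generate_tokens(StringIO(rebuilt).readline)]
    assert pairs == repairs                       # token types and strings survive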
/prebuilts/python/linux-x86/2.7.5/lib/python2.7/ |
cgitb.py |
    32 import tokenize namespace
    84 for ttype, token, start, end, line in tokenize.generate_tokens(reader):
    85 if ttype == tokenize.NEWLINE: break
    86 if ttype == tokenize.NAME and token not in keyword.kwlist:
|
inspect.py |
    39 import tokenize namespace
    651 elif type == tokenize.NEWLINE:
    658 elif type == tokenize.INDENT:
    661 elif type == tokenize.DEDENT:
    668 elif self.indent == 0 and type not in (tokenize.COMMENT, tokenize.NL):
    677 tokenize.tokenize(iter(lines).next, blockfinder.tokeneater) [all...] |
tokenize.py |
    20 tokenize(readline, tokeneater=printtoken)
    34 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
    155 def tokenize(readline, tokeneater=printtoken): function
    157 The tokenize() function accepts two parameters: one representing the
    158 input stream, and one providing an output mechanism for tokenize().
    254 # Output text will tokenize the back to the input
    351 ("<tokenize>", lnum, pos, line))
    424 tokenize(open(sys.argv[1]).readline)
    426 tokenize(sys.stdin.readline)
|
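Line 155 of tokenize.py defines the older callback interface: tokenize(readline, tokeneater) pushes every token to a callable instead of returning a generator. A minimal sketch, using a hypothetical print_names tokeneater (Python 2.7):

    import tokenize
    from StringIO import StringIO

    def print_names(ttype, token, start, end, line):
        # Called once per token; report only identifiers and their positions.
        if ttype == tokenize.NAME:
            print "%d:%d\t%s" % (start[0], start[1], token)

    tokenize.tokenize(StringIO("x = foo(1) + bar\n").readline, print_names)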
/packages/apps/UnifiedEmail/tests/src/com/android/mail/compose/ |
ComposeActivityTest.java |
    258 Rfc822Tokenizer.tokenize(to[0])[0].getAddress());
    281 Rfc822Tokenizer.tokenize(to[0])[0].getAddress());
    384 Rfc822Tokenizer.tokenize(to[0])[0].getAddress());
    407 assertEquals(refReplyToAccount, Rfc822Tokenizer.tokenize(to[0])[0].getAddress());
    454 Rfc822Tokenizer.tokenize(to[0])[0].getAddress());
    458 ccMap.add(Rfc822Tokenizer.tokenize(recip.trim())[0].getAddress());
    686 Rfc822Tokenizer.tokenize(to[0])[0].getAddress()); [all...] |
/external/chromium_org/third_party/WebKit/Source/devtools/front_end/ |
CSSFormatter.js |
    48 var tokenize = WebInspector.CodeMirrorUtils.createTokenizer("text/css");
    53 tokenize(line, this._tokenCallback.bind(this, i));
|
/external/chromium_org/third_party/libjingle/source/talk/base/ |
stringencode.h |
    162 size_t tokenize(const std::string& source, char delimiter,
    165 // Tokenize and append the tokens to fields. Return the new size of fields.
    176 size_t tokenize(const std::string& source, char delimiter, char start_mark,
|
/external/libvpx/libvpx/vp8/ |
vp8cx.mk |
    57 VP8_CX_SRCS-yes += encoder/tokenize.h
    71 VP8_CX_SRCS-yes += encoder/tokenize.c
|
/packages/providers/ContactsProvider/src/com/android/providers/contacts/ |
NameLookupBuilder.java |
    100 int tokenCount = mSplitter.tokenize(mNames, name);
    143 int tokenCount = mSplitter.tokenize(mNames, name);
|
/prebuilts/python/darwin-x86/2.7.5/lib/python2.7/lib2to3/ |
patcomp.py |
    18 from .pgen2 import driver, literals, token, tokenize, parse, grammar namespace
    36 tokens = tokenize.generate_tokens(StringIO.StringIO(input).readline)
|
/prebuilts/python/darwin-x86/2.7.5/lib/python2.7/lib2to3/tests/ |
test_parser.py |
    20 from lib2to3.pgen2 import tokenize namespace
    167 encoding = tokenize.detect_encoding(fp.readline)[0]
|
/prebuilts/python/linux-x86/2.7.5/lib/python2.7/lib2to3/ |
patcomp.py |
    18 from .pgen2 import driver, literals, token, tokenize, parse, grammar namespace
    36 tokens = tokenize.generate_tokens(StringIO.StringIO(input).readline)
|
/prebuilts/python/linux-x86/2.7.5/lib/python2.7/lib2to3/tests/ |
test_parser.py |
    20 from lib2to3.pgen2 import tokenize namespace
    167 encoding = tokenize.detect_encoding(fp.readline)[0]
|
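The lib2to3 hits above show two uses of pgen2's tokenize module: patcomp.py (line 36) tokenizes an in-memory pattern string, and test_parser.py (line 167) sniffs a source file's declared encoding. A hedged sketch of both, where the pattern text and example.py are placeholders (Python 2.7):

    import StringIO
    from lib2to3.pgen2 import tokenize

    # Tokenize a 2to3 pattern held in memory, as patcomp.py does.
    pattern = "power< 'eval' trailer< '(' any ')' > >"
    tokens = list(tokenize.generate_tokens(StringIO.StringIO(pattern + "\n").readline))

    # Detect the declared encoding of a source file, as test_parser.py does.
    with open("example.py") as fp:
        encoding = tokenize.detect_encoding(fp.readline)[0]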
/packages/apps/Bluetooth/src/com/android/bluetooth/map/ |
BluetoothMapbMessageMmsEmail.java |
    448 Rfc822Token tokens[] = Rfc822Tokenizer.tokenize(headerValue);
    452 Rfc822Token tokens[] = Rfc822Tokenizer.tokenize(headerValue);
    456 Rfc822Token tokens[] = Rfc822Tokenizer.tokenize(headerValue);
    460 Rfc822Token tokens[] = Rfc822Tokenizer.tokenize(headerValue);
    464 Rfc822Token tokens[] = Rfc822Tokenizer.tokenize(headerValue);
|
/packages/apps/UnifiedEmail/src/com/android/mail/providers/ |
Address.java |
    121 final Rfc822Token[] tokens = Rfc822Tokenizer.tokenize(rawAddress);
    192 Rfc822Token[] tokens = Rfc822Tokenizer.tokenize(addressList);
    215 Rfc822Token[] tokens = Rfc822Tokenizer.tokenize(addressList);
|
/prebuilts/python/darwin-x86/2.7.5/lib/python2.7/lib2to3/pgen2/ |
tokenize.py |
    23 tokenize(readline, tokeneater=printtoken)
    37 __all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize",
    160 def tokenize(readline, tokeneater=printtoken): function
    162 The tokenize() function accepts two parameters: one representing the
    163 input stream, and one providing an output mechanism for tokenize().
    256 in the same way as the tokenize() generator.
    335 # Output text will tokenize the back to the input
    427 ("<tokenize>", lnum, pos, line))
    499 if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline)
    500 else: tokenize(sys.stdin.readline [all...] |
/prebuilts/python/linux-x86/2.7.5/lib/python2.7/lib2to3/pgen2/ |
tokenize.py |
    23 tokenize(readline, tokeneater=printtoken)
    37 __all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize",
    160 def tokenize(readline, tokeneater=printtoken): function
    162 The tokenize() function accepts two parameters: one representing the
    163 input stream, and one providing an output mechanism for tokenize().
    256 in the same way as the tokenize() generator.
    335 # Output text will tokenize the back to the input
    427 ("<tokenize>", lnum, pos, line))
    499 if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline)
    500 else: tokenize(sys.stdin.readline [all...] |
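Lines 499-500 above are the module's command-line entry point: dump a token stream for a file argument, or for stdin, using the default printtoken tokeneater. A sketch of the same idea as a standalone script (Python 2.7):

    import sys
    from lib2to3.pgen2 import tokenize

    if __name__ == "__main__":
        if len(sys.argv) > 1:
            tokenize.tokenize(open(sys.argv[1]).readline)   # token dump of the file
        else:
            tokenize.tokenize(sys.stdin.readline)           # token dump of stdin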
/external/chromium/third_party/libjingle/source/talk/base/ |
stringencode.h | 135 size_t tokenize(const std::string& source, char delimiter,
|