    Searched refs:tokenize (Results 1 - 25 of 162)


  /external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/coverage/
phystokens.py 3 import keyword, re, token, tokenize namespace
9 tokenize.generate_tokens() doesn't return a token for the backslash that
38 if last_ttype == tokenize.COMMENT:
77 ws_tokens = [token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL]
81 tokgen = tokenize.generate_tokens(StringIO(source).readline)
98 tok_class = tokenize.tok_name.get(ttype, 'xx').lower()[:3]
backward.py 75 # Python 3.2 provides `tokenize.open`, the best way to open source files.
76 import tokenize namespace
78 open_source = tokenize.open # pylint: disable=E1101
81 detect_encoding = tokenize.detect_encoding # pylint: disable=E1101
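
For context, a minimal Python 3 sketch of the generate_tokens() pattern the phystokens.py hits rely on; the sample source string is made up, and only tokenize.generate_tokens and tokenize.tok_name come from the hits above:

    import io
    import tokenize

    source = "x = 1  # set x\n"

    # generate_tokens() takes a readline callable and yields
    # (type, string, start, end, line) tuples for each token.
    for ttype, ttext, start, end, line in tokenize.generate_tokens(io.StringIO(source).readline):
        # Same trick as phystokens.py: abbreviate the token class name, e.g. 'nam', 'op', 'num'.
        print(tokenize.tok_name.get(ttype, 'xx').lower()[:3], repr(ttext))
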
  /external/deqp/framework/randomshaders/
rsgStatement.hpp 44 virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
62 void tokenize (GeneratorState& state, TokenStream& str) const;
78 void tokenize (GeneratorState& state, TokenStream& str) const;
98 void tokenize (GeneratorState& state, TokenStream& str) const;
119 void tokenize (GeneratorState& state, TokenStream& str) const;
145 void tokenize (GeneratorState& state, TokenStream& str) const;
rsgExpression.hpp 57 virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
74 void tokenize (GeneratorState& state, TokenStream& str) const { DE_UNREF(state); str << Token(m_variable->getName()); } function in class:rsg::VariableAccess
113 void tokenize (GeneratorState& state, TokenStream& str) const;
131 void tokenize (GeneratorState& state, TokenStream& str) const;
149 void tokenize (GeneratorState& state, TokenStream& str) const;
167 void tokenize (GeneratorState& state, TokenStream& str) const;
189 void tokenize (GeneratorState& state, TokenStream& str) const;
214 void tokenize (GeneratorState& state, TokenStream& str) const;
233 void tokenize (GeneratorState& state, TokenStream& str) const;
255 void tokenize (GeneratorState& state, TokenStream& str) const
    [all...]
rsgShader.cpp 93 void Shader::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::Shader
99 // Tokenize global declaration statements
101 m_globalStatements[ndx]->tokenize(state, str);
103 // Tokenize all functions
107 m_functions[ndx]->tokenize(state, str);
110 // Tokenize main
112 m_mainFunction.tokenize(state, str);
125 void Function::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::Function
146 // Tokenize body
147 m_functionBlock.tokenize(state, str)
    [all...]
rsgStatement.cpp 203 void BlockStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::BlockStatement
208 (*i)->tokenize(state, str);
219 void ExpressionStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::ExpressionStatement
222 m_expression->tokenize(state, str);
333 void DeclarationStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::DeclarationStatement
340 m_expression->tokenize(state, str);
456 void ConditionalStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::ConditionalStatement
462 m_condition->tokenize(state, str);
469 m_trueStatement->tokenize(state, str);
473 m_trueStatement->tokenize(state, str)
556 void AssignStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::AssignStatement
    [all...]
rsgShader.hpp 54 void tokenize (GeneratorState& state, TokenStream& stream) const;
106 void tokenize (GeneratorState& state, TokenStream& str) const;
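
The rsg* hits above all share one shape: every statement/expression node implements tokenize(GeneratorState&, TokenStream&) and recursively asks its children to append their tokens to the same stream (Shader::tokenize walks the global statements, the functions, then main). A hypothetical, language-neutral Python sketch of that visitor-style pattern, not the deqp implementation itself:

    class Identifier:
        def __init__(self, name):
            self.name = name
        def tokenize(self, state, stream):
            stream.append(self.name)          # leaf node: emit its own token

    class Block:
        def __init__(self, children):
            self.children = children
        def tokenize(self, state, stream):
            stream.append("{")
            for child in self.children:       # recurse, sharing one token stream
                child.tokenize(state, stream)
            stream.append("}")

    stream = []
    Block([Identifier("x")]).tokenize(None, stream)
    # stream == ['{', 'x', '}']
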
  /external/chromium_org/third_party/WebKit/Source/core/html/parser/
CSSPreloadScanner.h 66 inline void tokenize(UChar, const SegmentedString&);
  /external/chromium_org/third_party/libjingle/source/talk/base/
stringencode_unittest.cc 236 EXPECT_EQ(5ul, tokenize("one two three four five", ' ', &fields));
238 EXPECT_EQ(1ul, tokenize("one", ' ', &fields));
242 EXPECT_EQ(5ul, tokenize(" one two three four five ", ' ', &fields));
244 EXPECT_EQ(1ul, tokenize(" one ", ' ', &fields));
246 EXPECT_EQ(0ul, tokenize(" ", ' ', &fields));
253 tokenize("find middle one", ' ', &fields);
259 tokenize(" find middle one ", ' ', &fields);
263 tokenize(" ", ' ', &fields);
283 ASSERT_EQ(0ul, tokenize("D \"A B", ' ', '(', ')', NULL));
286 tokenize("A B C", ' ', '"', '"', &fields)
    [all...]
  /external/chromium_org/third_party/webrtc/base/
stringencode_unittest.cc 219 EXPECT_EQ(5ul, tokenize("one two three four five", ' ', &fields));
221 EXPECT_EQ(1ul, tokenize("one", ' ', &fields));
225 EXPECT_EQ(5ul, tokenize(" one two three four five ", ' ', &fields));
227 EXPECT_EQ(1ul, tokenize(" one ", ' ', &fields));
229 EXPECT_EQ(0ul, tokenize(" ", ' ', &fields));
236 tokenize("find middle one", ' ', &fields);
242 tokenize(" find middle one ", ' ', &fields);
246 tokenize(" ", ' ', &fields);
266 ASSERT_EQ(0ul, tokenize("D \"A B", ' ', '(', ')', NULL));
269 tokenize("A B C", ' ', '"', '"', &fields)
    [all...]
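
The two stringencode_unittest.cc copies (libjingle and its webrtc fork) pin down the semantics of this tokenize(): runs of the delimiter are collapsed and leading/trailing delimiters are ignored, so " one two three four five " still yields five fields and a delimiter-only string yields none. That is roughly the behavior of Python's no-argument str.split(), shown here only as an analogy:

    "one two three four five".split()   # ['one', 'two', 'three', 'four', 'five']
    " one ".split()                     # ['one']
    " ".split()                         # []
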
  /external/chromium_org/third_party/WebKit/Source/devtools/front_end/elements/
DOMSyntaxHighlighter.js 81 var tokenize = WebInspector.moduleManager.instance(WebInspector.TokenizerFactory).createTokenizer(this._mimeType);
85 tokenize(line, processToken.bind(this));
  /prebuilts/python/darwin-x86/2.7.5/lib/python2.7/
tabnanny.py 26 import tokenize namespace
27 if not hasattr(tokenize, 'NL'):
28 raise ValueError("tokenize.NL doesn't exist -- tokenize module too old")
106 process_tokens(tokenize.generate_tokens(f.readline))
108 except tokenize.TokenError, msg:
274 INDENT = tokenize.INDENT
275 DEDENT = tokenize.DEDENT
276 NEWLINE = tokenize.NEWLINE
277 JUNK = tokenize.COMMENT, tokenize.N
    [all...]
  /prebuilts/python/linux-x86/2.7.5/lib/python2.7/
tabnanny.py 26 import tokenize namespace
27 if not hasattr(tokenize, 'NL'):
28 raise ValueError("tokenize.NL doesn't exist -- tokenize module too old")
106 process_tokens(tokenize.generate_tokens(f.readline))
108 except tokenize.TokenError, msg:
274 INDENT = tokenize.INDENT
275 DEDENT = tokenize.DEDENT
276 NEWLINE = tokenize.NEWLINE
277 JUNK = tokenize.COMMENT, tokenize.N
    [all...]
  /external/chromium_org/third_party/WebKit/Source/core/css/parser/
MediaQueryTokenizerTest.cpp 65 MediaQueryTokenizer::tokenize(testCases[i].input, tokens);
106 MediaQueryTokenizer::tokenize(testCases[i].input, tokens);
126 MediaQueryTokenizer::tokenize(input.toString(), tokens);
MediaConditionTest.cpp 43 MediaQueryTokenizer::tokenize(testCases[i].input, tokens);
MediaQueryTokenizer.h 22 static void tokenize(String, Vector<MediaQueryToken>&);
  /packages/apps/QuickSearchBox/src/com/android/quicksearchbox/
LevenshteinSuggestionFormatter.java 43 final Token[] queryTokens = tokenize(query);
44 final Token[] suggestionTokens = tokenize(suggestion);
99 Token[] tokenize(final String seq) { method in class:LevenshteinSuggestionFormatter
  /cts/tests/tests/text/src/android/text/util/cts/
Rfc822TokenizerTest.java 114 Rfc822Token[] tokens = Rfc822Tokenizer.tokenize("");
118 tokens = Rfc822Tokenizer.tokenize(text);
124 tokens = Rfc822Tokenizer.tokenize(text);
130 Rfc822Tokenizer.tokenize(null);
  /external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/
pep8.py 102 import tokenize namespace
370 if (token_type == tokenize.OP and
373 prev_type == tokenize.NAME and
443 prev_type = tokenize.OP
446 if token_type in (tokenize.NL, tokenize.NEWLINE, tokenize.ERRORTOKEN):
457 elif token_type == tokenize.OP:
464 if ((prev_type != tokenize.OP or prev_text in '}])') and not
465 (prev_type == tokenize.NAME and iskeyword(prev_text)))
    [all...]
  /prebuilts/python/darwin-x86/2.7.5/lib/python2.7/lib2to3/pgen2/
driver.py 26 from . import grammar, parse, token, tokenize, pgen namespace
40 # XXX Move the prefix computation into a wrapper around tokenize.
59 if type in (tokenize.COMMENT, tokenize.NL):
88 tokens = tokenize.generate_tokens(stream.readline)
105 tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
  /prebuilts/python/linux-x86/2.7.5/lib/python2.7/lib2to3/pgen2/
driver.py 26 from . import grammar, parse, token, tokenize, pgen namespace
40 # XXX Move the prefix computation into a wrapper around tokenize.
59 if type in (tokenize.COMMENT, tokenize.NL):
88 tokens = tokenize.generate_tokens(stream.readline)
105 tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
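
Both driver.py copies belong to lib2to3's pgen2, where tokenize.generate_tokens feeds the parser. A hypothetical minimal use of that driver (lib2to3 ships with CPython 2.7/3.x, though it is deprecated in recent releases):

    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver

    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    tree = d.parse_string("x = 1\n")   # tokenized internally via tokenize.generate_tokens
    print(tree)                        # a pytree Node; str() reproduces the source
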
  /external/apache-xml/src/main/java/org/apache/xpath/compiler/
Lexer.java 96 void tokenize(String pat) throws javax.xml.transform.TransformerException method in class:Lexer
98 tokenize(pat, null); method
109 void tokenize(String pat, Vector targetStrings) method in class:Lexer
  /external/chromium_org/third_party/WebKit/Source/devtools/front_end/cm/
coffeescript.js 70 state.tokenize = longComment;
71 return state.tokenize(stream, state);
122 state.tokenize = tokenFactory(stream.current(), "string");
123 return state.tokenize(stream, state);
128 state.tokenize = tokenFactory(stream.current(), "string-2");
129 return state.tokenize(stream, state);
175 state.tokenize = tokenBase;
185 state.tokenize = tokenBase;
196 state.tokenize = tokenBase;
253 var style = state.tokenize(stream, state)
    [all...]
clike.js 22 state.tokenize = tokenString(ch);
23 return state.tokenize(stream, state);
35 state.tokenize = tokenComment;
69 state.tokenize = null;
78 state.tokenize = null;
111 tokenize: null,
127 var style = (state.tokenize || tokenBase)(stream, state);
148 if (state.tokenize != tokenBase && state.tokenize != null) return CodeMirror.Pass;
182 state.tokenize = cppHook
    [all...]
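
coffeescript.js and clike.js both use CodeMirror's usual hand-off trick: when the base tokenizer hits a multi-line construct it stores a specialised tokenizer in state.tokenize, and that function restores the base tokenizer once the construct ends. A hypothetical Python sketch of the same idea (not CodeMirror's API):

    def token_base(stream, state):
        ch = stream.pop(0)
        if ch == '"':
            state["tokenize"] = token_string      # enter string mode
            return state["tokenize"](stream, state)
        return "plain"

    def token_string(stream, state):
        while stream:
            if stream.pop(0) == '"':
                state["tokenize"] = token_base    # string closed: back to base
                break
        return "string"

    state = {"tokenize": token_base}
    stream = list('"hi" x')
    while stream:
        print(state["tokenize"](stream, state))   # string, plain, plain
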
  /frameworks/ex/common/java/com/android/common/
Rfc822Validator.java 97 Rfc822Token[] tokens = Rfc822Tokenizer.tokenize(text);
154 Rfc822Token[] tokens = Rfc822Tokenizer.tokenize(cs);

