    Searched refs:tokenize (Results 1 - 25 of 368)


  /external/minijail/
util_unittest.cc 25 TEST(tokenize, null_stringp) {
26 ASSERT_EQ(nullptr, tokenize(nullptr, nullptr));
27 ASSERT_EQ(nullptr, tokenize(nullptr, ""));
28 ASSERT_EQ(nullptr, tokenize(nullptr, ","));
31 ASSERT_EQ(nullptr, tokenize(&p, nullptr));
35 TEST(tokenize, null_delim) {
38 ASSERT_EQ(str, tokenize(&p, nullptr));
43 ASSERT_EQ(str, tokenize(&p, ""));
48 // Sanity check for the tokenize func.
49 TEST(tokenize, basic)
    [all...]
  /hardware/libhardware/modules/sensors/dynamic_sensor/HidUtils/
HidItem.h 41 // tokenize from a unsigned char vector
42 static std::vector<HidItem> tokenize(const std::vector<uint8_t> &descriptor);
43 static std::vector<HidItem> tokenize(const uint8_t *begin, size_t size);
44 static std::vector<HidItem> tokenize(std::istream &is);
HidItem.cpp 49 std::vector<HidItem> HidItem::tokenize(const uint8_t *begin, size_t size) { function in class:HidUtil::HidItem
53 return tokenize(is);
56 std::vector<HidItem> HidItem::tokenize(const std::vector<uint8_t> &descriptor) { function in class:HidUtil::HidItem
60 return tokenize(is);
63 std::vector<HidItem> HidItem::tokenize(std::istream &is) { function in class:HidUtil::HidItem
  /external/selinux/libsepol/include/sepol/policydb/
util.h 38 * The tokenize function may be used to
41 extern int tokenize(char *line_buf, char delim, int num_args, ...);
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Tools/scripts/
checkappend.py 39 import tokenize
106 tokenize.tokenize(self.file.readline, self.tokeneater)
107 except tokenize.TokenError, msg:
113 NEWLINE=tokenize.NEWLINE,
114 JUNK=(tokenize.COMMENT, tokenize.NL),
115 OP=tokenize.OP,
116 NAME=tokenize.NAME):
cleanfuture.py 42 import tokenize
145 # Line-getter for tokenize.
157 STRING = tokenize.STRING
158 NL = tokenize.NL
159 NEWLINE = tokenize.NEWLINE
160 COMMENT = tokenize.COMMENT
161 NAME = tokenize.NAME
162 OP = tokenize.OP
165 get = tokenize.generate_tokens(self.getline).next
184 startline = srow - 1 # tokenize is one-based
    [all...]
reindent.py 44 import tokenize
163 # that we can use tokenize's 1-based line numbering easily.
172 # signal that tokenize doesn't know what to do about them;
177 tokenize.tokenize(self.getline, self.tokeneater)
248 # Line-getter for tokenize.
257 # Line-eater for tokenize.
259 INDENT=tokenize.INDENT,
260 DEDENT=tokenize.DEDENT,
261 NEWLINE=tokenize.NEWLINE,
    [all...]
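
Taken together, the three edk2 scripts above use the callback form of the Python 2 tokenize API: tokenize.tokenize(readline, tokeneater) pulls lines from readline and invokes tokeneater once per token, raising tokenize.TokenError on unterminated input. A minimal runnable sketch of that pattern (Python 2; the source string here is a stand-in for the scripts' file objects):

    import StringIO
    import tokenize

    SOURCE = "x = 1  # comment\n"

    def tokeneater(ttype, ttext, start, end, line):
        # Called once per token; start/end are 1-based (row, col) pairs.
        print tokenize.tok_name[ttype], repr(ttext), start

    try:
        tokenize.tokenize(StringIO.StringIO(SOURCE).readline, tokeneater)
    except tokenize.TokenError, msg:
        # Unterminated strings or brackets land here, as in checkappend.py.
        print "tokenize error:", msg
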
  /external/python/cpython2/Tools/scripts/
checkappend.py 39 import tokenize
106 tokenize.tokenize(self.file.readline, self.tokeneater)
107 except tokenize.TokenError, msg:
113 NEWLINE=tokenize.NEWLINE,
114 JUNK=(tokenize.COMMENT, tokenize.NL),
115 OP=tokenize.OP,
116 NAME=tokenize.NAME):
cleanfuture.py 42 import tokenize
145 # Line-getter for tokenize.
157 STRING = tokenize.STRING
158 NL = tokenize.NL
159 NEWLINE = tokenize.NEWLINE
160 COMMENT = tokenize.COMMENT
161 NAME = tokenize.NAME
162 OP = tokenize.OP
165 get = tokenize.generate_tokens(self.getline).next
184 startline = srow - 1 # tokenize is one-based
    [all...]
reindent.py 44 import tokenize
170 # that we can use tokenize's 1-based line numbering easily.
179 # signal that tokenize doesn't know what to do about them;
188 tokenize.tokenize(self.getline, self.tokeneater)
259 # Line-getter for tokenize.
268 # Line-eater for tokenize.
270 INDENT=tokenize.INDENT,
271 DEDENT=tokenize.DEDENT,
272 NEWLINE=tokenize.NEWLINE
    [all...]
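
cleanfuture.py uses the iterator form instead: tokenize.generate_tokens(readline) yields 5-tuples, and binding the iterator's .next (line 165 above) lets the parser pull tokens on demand. A small Python 2 sketch with a hypothetical one-line source:

    import StringIO
    import tokenize

    get = tokenize.generate_tokens(
        StringIO.StringIO("from __future__ import division\n").readline).next
    ttype, ttext, (srow, scol), (erow, ecol), line = get()
    assert ttype == tokenize.NAME and ttext == "from"
    startline = srow - 1  # tokenize rows are 1-based; list indices are not
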
  /external/deqp/framework/randomshaders/
rsgStatement.hpp 44 virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
62 void tokenize (GeneratorState& state, TokenStream& str) const;
78 void tokenize (GeneratorState& state, TokenStream& str) const;
98 void tokenize (GeneratorState& state, TokenStream& str) const;
119 void tokenize (GeneratorState& state, TokenStream& str) const;
145 void tokenize (GeneratorState& state, TokenStream& str) const;
rsgExpression.hpp 57 virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
74 void tokenize (GeneratorState& state, TokenStream& str) const { DE_UNREF(state); str << Token(m_variable->getName()); } function in class:rsg::VariableAccess
113 void tokenize (GeneratorState& state, TokenStream& str) const;
131 void tokenize (GeneratorState& state, TokenStream& str) const;
149 void tokenize (GeneratorState& state, TokenStream& str) const;
167 void tokenize (GeneratorState& state, TokenStream& str) const;
189 void tokenize (GeneratorState& state, TokenStream& str) const;
214 void tokenize (GeneratorState& state, TokenStream& str) const;
233 void tokenize (GeneratorState& state, TokenStream& str) const;
255 void tokenize (GeneratorState& state, TokenStream& str) const
    [all...]
rsgShader.cpp 93 void Shader::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::Shader
99 // Tokenize global declaration statements
101 m_globalStatements[ndx]->tokenize(state, str);
103 // Tokenize all functions
107 m_functions[ndx]->tokenize(state, str);
110 // Tokenize main
112 m_mainFunction.tokenize(state, str);
125 void Function::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::Function
146 // Tokenize body
147 m_functionBlock.tokenize(state, str)
    [all...]
rsgStatement.cpp 203 void BlockStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::BlockStatement
208 (*i)->tokenize(state, str);
219 void ExpressionStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::ExpressionStatement
222 m_expression->tokenize(state, str);
333 void DeclarationStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::DeclarationStatement
340 m_expression->tokenize(state, str);
456 void ConditionalStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::ConditionalStatement
462 m_condition->tokenize(state, str);
469 m_trueStatement->tokenize(state, str);
473 m_trueStatement->tokenize(state, str)
556 void AssignStatement::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::AssignStatement
    [all...]
  /external/chromium-trace/catapult/common/py_utils/py_utils/refactor/
offset_token.py 8 import tokenize
55 def Tokenize(f):
66 tokenize_tokens = tokenize.generate_tokens(f.readline)
90 while offset_tokens[0].type == tokenize.NL:
100 # Convert OffsetTokens to tokenize tokens.
113 # tokenize can't handle whitespace before line continuations.
115 return tokenize.untokenize(tokenize_tokens).replace('\\\n', ' \\\n')
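
offset_token.py round-trips source through the tokenizer: lex with generate_tokens, adjust the token stream, then rebuild text with tokenize.untokenize, which reproduces the input exactly when given full 5-tuples. A sketch of that round trip (Python 2, hypothetical input):

    import StringIO
    import tokenize

    src = "a = (1,\n     2)\n"
    toks = list(tokenize.generate_tokens(StringIO.StringIO(src).readline))
    rebuilt = tokenize.untokenize(toks)
    assert rebuilt == src  # exact, including the NL token inside the parens

The final .replace('\\\n', ' \\\n') above works around the limitation noted in the comment on line 113: tokenize can't handle whitespace before line continuations.
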
  /external/mesa3d/src/gallium/state_trackers/clover/llvm/
util.hpp 44 tokenize(const std::string &s) { function in namespace:clover::llvm
  /prebuilts/ndk/r16/sources/third_party/shaderc/third_party/glslang/glslang/MachineIndependent/
ScanContext.h 59 int tokenize(TPpContext*, TParserToken&);
  /cts/tests/tests/text/src/android/text/util/cts/
Rfc822TokenizerTest.java 130 Rfc822Token[] tokens = Rfc822Tokenizer.tokenize("");
134 tokens = Rfc822Tokenizer.tokenize(text);
140 tokens = Rfc822Tokenizer.tokenize(text);
148 Rfc822Tokenizer.tokenize(null);
154 Rfc822Tokenizer.tokenize("", list);
158 Rfc822Tokenizer.tokenize(text, list);
165 Rfc822Tokenizer.tokenize(text, list);
174 Rfc822Tokenizer.tokenize(null);
  /external/autotest/utils/
reindent.py 44 import tokenize
162 # that we can use tokenize's 1-based line numbering easily.
171 # signal that tokenize doesn't know what to do about them;
176 tokenize.tokenize(self.getline, self.tokeneater)
247 # Line-getter for tokenize.
256 # Line-eater for tokenize.
258 INDENT=tokenize.INDENT,
259 DEDENT=tokenize.DEDENT,
260 NEWLINE=tokenize.NEWLINE
    [all...]
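
reindent.py's tokeneater (here and in the copies above) binds the token-type constants as default arguments, an old CPython micro-optimization that turns global lookups into fast locals, and tracks block structure from INDENT/DEDENT events. A Python 2 sketch of that pattern:

    import StringIO
    import tokenize

    class IndentWatcher(object):
        def __init__(self):
            self.level = 0

        def tokeneater(self, ttype, ttext, start, end, line,
                       INDENT=tokenize.INDENT, DEDENT=tokenize.DEDENT):
            # Default args freeze the constants, as the scripts above do.
            if ttype == INDENT:
                self.level += 1
            elif ttype == DEDENT:
                self.level -= 1

    w = IndentWatcher()
    tokenize.tokenize(StringIO.StringIO("if x:\n    y = 1\n").readline,
                      w.tokeneater)
    print w.level  # 0: tokenize emits matching DEDENTs at end of input
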
  /external/python/cpython3/Tools/scripts/
cleanfuture.py 42 import tokenize
145 # Line-getter for tokenize.
157 STRING = tokenize.STRING
158 NL = tokenize.NL
159 NEWLINE = tokenize.NEWLINE
160 COMMENT = tokenize.COMMENT
161 NAME = tokenize.NAME
162 OP = tokenize.OP
165 get = tokenize.generate_tokens(self.getline).__next__
184 startline = srow - 1 # tokenize is one-based
    [all...]
highlight.py 11 import tokenize
35 tok_type = tokenize.COMMENT
37 for tok in tokenize.generate_tokens(readline):
41 if tok_type == tokenize.COMMENT:
43 elif tok_type == tokenize.OP and tok_str[:1] not in '{}[](),.:;@':
45 elif tok_type == tokenize.STRING:
47 if prev_tok_type == tokenize.INDENT or scol==0:
49 elif tok_type == tokenize.NAME:
untabify.py 8 import tokenize
30 with tokenize.open(filename) as f:
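
The Python 3 copies show the ported API: the iterator is advanced with __next__ rather than .next (compare line 165 here with the Python 2 version above), untabify.py opens files with tokenize.open(), which honors the coding cookie or BOM, and highlight.py classifies tokens by type. A Python 3 sketch of that classification, reading from an in-memory stream instead of a file:

    import io
    import tokenize

    src = "name = 'value'  # trailing comment\n"
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        if tok.type == tokenize.COMMENT:
            kind = "comment"
        elif tok.type == tokenize.STRING:
            kind = "string"
        elif tok.type == tokenize.NAME:
            kind = "name"
        else:
            continue
        print(kind, tok.string, tok.start)  # .start is a 1-based (row, col)
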
  /external/google-breakpad/src/testing/scripts/generator/cpp/
ast.py 46 from cpp import tokenize
549 if parts[-1].token_type == tokenize.NAME:
579 if (type_name and type_name[-1].token_type == tokenize.NAME and
580 p.token_type == tokenize.NAME):
581 type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
738 if token.token_type == tokenize.NAME:
749 if next.token_type == tokenize.SYNTAX and next.name == '(':
754 syntax = tokenize.SYNTAX
763 new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';'
    [all...]
  /external/googletest/googlemock/scripts/generator/cpp/
ast.py 46 from cpp import tokenize
551 if parts[-1].token_type == tokenize.NAME:
581 if (type_name and type_name[-1].token_type == tokenize.NAME and
582 p.token_type == tokenize.NAME):
583 type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
739 if token.token_type == tokenize.NAME:
750 if next.token_type == tokenize.SYNTAX and next.name == '(':
755 syntax = tokenize.SYNTAX
764 new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';'
    [all...]
  /external/v8/testing/gmock/scripts/generator/cpp/
ast.py 46 from cpp import tokenize
551 if parts[-1].token_type == tokenize.NAME:
581 if (type_name and type_name[-1].token_type == tokenize.NAME and
582 p.token_type == tokenize.NAME):
583 type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
739 if token.token_type == tokenize.NAME:
750 if next.token_type == tokenize.SYNTAX and next.name == '(':
755 syntax = tokenize.SYNTAX
764 new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';'
    [all...]
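
The three ast.py copies import the generator's own cpp.tokenize module, not the stdlib one: its tokens carry token_type and name attributes, and tokenize.Token(type, text, start, end) builds synthetic tokens. The recurring fragment above splices a whitespace SYNTAX token between adjacent NAME tokens so multi-word type names ("unsigned int") survive re-joining. A hedged sketch of just that step, using only the names visible in the matches; the helper name is hypothetical, and the import resolves only inside the generator package:

    from cpp import tokenize

    def append_type_part(type_name, p):
        # Hypothetical wrapper around the inlined check shown above:
        # if two NAME tokens would abut, insert a synthetic space first.
        if (type_name and type_name[-1].token_type == tokenize.NAME
                and p.token_type == tokenize.NAME):
            type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
        type_name.append(p)
        return type_name
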
