/external/chromium-trace/trace-viewer/third_party/closure_linter/closure_linter/common/ |
matcher.py | 46 def __init__(self, regex, token_type, result_mode=None, line_start=False): 51 token_type: The type of token a successful match indicates. 58 self.type = token_type
|
tokens.py | 50 def __init__(self, string, token_type, line, line_number, values=None): 55 token_type: The type of token. 62 self.type = token_type 97 def IsType(self, token_type): 101 token_type: The type to test for. 106 return self.type == token_type
|
tokenizer.py | 78 def _CreateToken(self, string, token_type, line, line_number, values=None): 83 token_type: The type of token. 93 return tokens.Token(string, token_type, line, line_number, values)
|
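The matcher.py, tokens.py and tokenizer.py hits above all follow one shape: a matcher pairs a regex with the token_type it produces, the token stores that type as self.type, and IsType() compares against it, with the tokenizer acting as the token factory. A minimal sketch of that pattern (the class and method signatures match the snippets, but the bodies here are illustrative, not the actual closure_linter source):

    import re

    class Matcher(object):
      """Pairs a regex with the token type a successful match indicates."""

      def __init__(self, regex, token_type, result_mode=None, line_start=False):
        self.regex = re.compile(regex)
        self.type = token_type
        self.result_mode = result_mode
        self.line_start = line_start

    class Token(object):
      """A single token: its text, its type, and where it was found."""

      def __init__(self, string, token_type, line, line_number, values=None):
        self.string = string
        self.type = token_type
        self.line = line
        self.line_number = line_number
        self.values = values

      def IsType(self, token_type):
        """Tests whether this token is of the given type."""
        return self.type == token_type

    class Tokenizer(object):
      def _CreateToken(self, string, token_type, line, line_number, values=None):
        # Subclasses can override this to build more specialized token classes.
        return Token(string, token_type, line, line_number, values)
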
/external/chromium_org/third_party/closure_linter/closure_linter/common/ |
matcher.py | 46 def __init__(self, regex, token_type, result_mode=None, line_start=False): 51 token_type: The type of token a successful match indicates. 58 self.type = token_type
|
tokens.py | 50 def __init__(self, string, token_type, line, line_number, values=None): 55 token_type: The type of token. 62 self.type = token_type 97 def IsType(self, token_type): 101 token_type: The type to test for. 106 return self.type == token_type
|
/external/chromium_org/sandbox/win/tools/finder/ |
main.cc | 32 sandbox::TokenLevel token_type = sandbox::USER_LOCKDOWN; local 48 token_type = sandbox::USER_LOCKDOWN; 50 token_type = sandbox::USER_RESTRICTED; 52 token_type = sandbox::USER_LIMITED; 54 token_type = sandbox::USER_INTERACTIVE; 56 token_type = sandbox::USER_NON_ADMIN; 58 token_type = sandbox::USER_RESTRICTED_SAME_ACCESS; 60 token_type = sandbox::USER_UNPROTECTED; 141 finder_obj.Init(token_type, object_type, access_type, file_output);
|
finder.cc | 24 DWORD Finder::Init(sandbox::TokenLevel token_type, 38 err_code = sandbox::CreateRestrictedToken(&token_handle_, token_type,
|
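Unlike the lexer hits elsewhere in this list, token_type here is a Windows sandbox setting: main.cc translates a command-line choice into one of the sandbox::TokenLevel values and finder.cc forwards it to CreateRestrictedToken(). The real code is a C++ if/else chain over the argument string; the equivalent lookup, sketched in Python with the same level names (the dict keys and function are illustrative, not part of the sandbox API):

    # Level names mirror the sandbox::TokenLevel constants chosen in main.cc.
    _TOKEN_LEVELS = {
        'lockdown': 'USER_LOCKDOWN',                      # default in main.cc
        'restricted': 'USER_RESTRICTED',
        'limited': 'USER_LIMITED',
        'interactive': 'USER_INTERACTIVE',
        'non_admin': 'USER_NON_ADMIN',
        'restricted_same_access': 'USER_RESTRICTED_SAME_ACCESS',
        'unprotected': 'USER_UNPROTECTED',
    }

    def parse_token_type(arg):
      """Map a token-level argument onto a level name, defaulting to lockdown."""
      return _TOKEN_LEVELS.get(arg, 'USER_LOCKDOWN')

    print(parse_token_type('limited'))   # USER_LIMITED
    print(parse_token_type('bogus'))     # falls back to USER_LOCKDOWN
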
/external/chromium/testing/gmock/scripts/generator/cpp/ |
tokenize.py | 74 def __init__(self, token_type, name, start, end): 75 self.token_type = token_type 147 token_type = UNKNOWN 151 token_type = NAME 159 token_type = CONSTANT 162 token_type = CONSTANT 173 token_type = SYNTAX 183 token_type = SYNTAX 186 token_type = CONSTANT ... |
ast.py | 549 if parts[-1].token_type == tokenize.NAME: 579 if (type_name and type_name[-1].token_type == tokenize.NAME and 580 p.token_type == tokenize.NAME): 713 if token.token_type == _INTERNAL_TOKEN: 738 if token.token_type == tokenize.NAME: 749 if next.token_type == tokenize.SYNTAX and next.name == '(': 805 elif token.token_type == tokenize.SYNTAX: 811 if (token.token_type == tokenize.NAME and 815 elif token.token_type == tokenize.PREPROCESSOR: 854 while (last_token.token_type != expected_token_type or ... |
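In this generator, tokenize.py assigns token_type by inspecting the current character (identifiers become NAME, numeric and string literals CONSTANT, punctuation SYNTAX), and ast.py then drives its parsing decisions by comparing token.token_type against those constants. A reduced sketch of that classification loop (the constants and Token fields match the snippets; the loop body is simplified, not the real generator code):

    NAME, CONSTANT, SYNTAX, UNKNOWN = 'NAME', 'CONSTANT', 'SYNTAX', 'UNKNOWN'

    class Token(object):
      def __init__(self, token_type, name, start, end):
        self.token_type = token_type
        self.name = name
        self.start = start
        self.end = end

    def classify(source):
      """Yield Token objects for a toy subset of C++ source text."""
      i = 0
      while i < len(source):
        c = source[i]
        if c.isspace():
          i += 1
          continue
        start = i
        if c.isalpha() or c == '_':          # identifiers and keywords
          token_type = NAME
          while i < len(source) and (source[i].isalnum() or source[i] == '_'):
            i += 1
        elif c.isdigit():                     # numeric literals
          token_type = CONSTANT
          while i < len(source) and source[i].isalnum():
            i += 1
        else:                                 # everything else is punctuation
          token_type = SYNTAX
          i += 1
        yield Token(token_type, source[start:i], start, i)
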
/external/openfst/src/include/fst/extensions/far/ |
print-strings.h | 46 typename StringPrinter<Arc>::TokenType token_type; local 48 token_type = StringPrinter<Arc>::SYMBOL; 50 token_type = StringPrinter<Arc>::BYTE; 52 token_type = StringPrinter<Arc>::UTF8; 95 token_type, syms ? syms : fst.InputSymbols()); 127 if (token_type == StringPrinter<Arc>::SYMBOL)
|
compile-strings.h | 54 TokenType token_type, 59 token_type_(token_type), symbols_(syms), done_(false), 60 compiler_(token_type, syms, unknown_label, allow_negative_labels) { 166 typename StringCompiler<Arc>::TokenType token_type; local 168 token_type = StringCompiler<Arc>::SYMBOL; 170 token_type = StringCompiler<Arc>::BYTE; 172 token_type = StringCompiler<Arc>::UTF8; 246 entry_type, token_type, allow_negative_labels, 260 << " in file " << inputs[i] << " failed with token_type = "
|
farscript.h | 211 const FarTokenType token_type; member in struct:fst::script::FarPrintStringsArgs 224 const FarTokenType token_type, const string &begin_key, 229 ifilenames(ifilenames), entry_type(entry_type), token_type(token_type), 240 args->ifilenames, args->entry_type, args->token_type, 250 const FarTokenType token_type,
|
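In the FAR extension, token_type (SYMBOL, BYTE, or UTF8) controls how StringCompiler and StringPrinter translate between strings and FST label sequences: whitespace-separated symbols looked up in a symbol table, raw bytes, or Unicode code points. A rough illustration of the three interpretations (plain Python, not the OpenFst API; the symbol table here is a stand-in dict):

    def string_to_labels(s, token_type, symbol_table=None):
      """Map a string to integer labels the way the three token types would."""
      if token_type == 'BYTE':
        return list(s.encode('utf-8'))                   # one label per byte
      if token_type == 'UTF8':
        return [ord(ch) for ch in s]                     # one label per code point
      if token_type == 'SYMBOL':
        return [symbol_table[tok] for tok in s.split()]  # whitespace-separated symbols
      raise ValueError('unknown token_type: %r' % token_type)

    # The same input yields different label sequences depending on token_type.
    print(string_to_labels('hé', 'BYTE'))    # [104, 195, 169]
    print(string_to_labels('hé', 'UTF8'))    # [104, 233]
    print(string_to_labels('a b', 'SYMBOL', {'a': 1, 'b': 2}))  # [1, 2]
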
/external/chromium/testing/gtest/scripts/ |
pump.py | 140 def __init__(self, start=None, end=None, value=None, token_type=None): 150 self.token_type = token_type 154 self.start, self.value, self.token_type) 160 self.token_type) 171 for (regex, token_type) in token_table: 178 best_match_token_type = token_type 197 (start_column, length, token_type) = m 202 return MakeToken(lines, found_start, found_end, token_type) 237 def MakeToken(lines, start, end, token_type): ...
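pump.py's lexer walks a token_table of (regex, token_type) pairs, keeps the match that starts earliest, and wraps the winning span into a token via MakeToken. A condensed sketch of that earliest-match loop (the table contents here are invented; only the overall shape mirrors the script):

    import collections
    import re

    Token = collections.namedtuple('Token', ['start', 'end', 'value', 'token_type'])

    # Hypothetical table; pump.py's real table matches $$-style directives instead.
    TOKEN_TABLE = [
        (re.compile(r'\d+'), 'NUMBER'),
        (re.compile(r'[A-Za-z_]\w*'), 'WORD'),
        (re.compile(r'\S'), 'PUNCT'),
    ]

    def next_token(line, pos):
      """Return the token whose match starts earliest at or after pos."""
      best = None
      for regex, token_type in TOKEN_TABLE:
        m = regex.search(line, pos)
        if m and (best is None or m.start() < best[0].start()):
          best = (m, token_type)
      if best is None:
        return None
      m, token_type = best
      return Token(m.start(), m.end(), m.group(), token_type)

    def tokenize(line):
      pos, tokens = 0, []
      while True:
        tok = next_token(line, pos)
        if tok is None:
          return tokens
        tokens.append(tok)
        pos = tok.end

    print(tokenize('add 2 widgets!'))
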
/external/chromium_org/testing/gtest/scripts/ |
pump.py | 140 def __init__(self, start=None, end=None, value=None, token_type=None): 150 self.token_type = token_type 154 self.start, self.value, self.token_type) 160 self.token_type) 171 for (regex, token_type) in token_table: 178 best_match_token_type = token_type 197 (start_column, length, token_type) = m 202 return MakeToken(lines, found_start, found_end, token_type) 237 def MakeToken(lines, start, end, token_type): ...
/external/gtest/scripts/ |
pump.py | 140 def __init__(self, start=None, end=None, value=None, token_type=None): 150 self.token_type = token_type 154 self.start, self.value, self.token_type) 160 self.token_type) 171 for (regex, token_type) in token_table: 178 best_match_token_type = token_type 197 (start_column, length, token_type) = m 202 return MakeToken(lines, found_start, found_end, token_type) 237 def MakeToken(lines, start, end, token_type): ...
/external/chromium_org/sandbox/win/src/ |
restricted_token_utils.h | 27 // restricted. The token_type determines if the token will be used as a primary 38 TokenType token_type);
|
/external/chromium-trace/trace-viewer/third_party/closure_linter/closure_linter/ |
ecmametadatapass.py | 264 token_type = token.type 279 if token_type == TokenType.START_PAREN: 287 elif token_type == TokenType.END_PAREN: 315 elif (token_type == TokenType.KEYWORD and 323 elif token_type == TokenType.START_PARAMETERS: 326 elif token_type == TokenType.END_PARAMETERS: 329 elif token_type == TokenType.START_BRACKET: 336 elif token_type == TokenType.END_BRACKET: 339 elif token_type == TokenType.START_BLOCK: 354 elif token_type == TokenType.END_BLOCK: ... |
indentation.py | 139 token_type = token.type 145 if token_type == Type.END_PAREN: 148 elif token_type == Type.END_PARAMETERS: 151 elif token_type == Type.END_BRACKET: 154 elif token_type == Type.END_BLOCK: 181 elif token_type == Type.KEYWORD and token.string in ('case', 'default'): 191 elif token_type == Type.SEMICOLON: 194 not_binary_operator = (token_type != Type.OPERATOR or 212 if token_type in Type.COMMENT_TYPES: 232 if token_type == Type.START_BRACKET ...
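Both linter passes are driven by the same dispatch: read token_type = token.type once, then branch on it to push or pop a piece of parser state (parens, brackets, blocks, parameters). A stripped-down sketch of that bookkeeping (the TokenType names follow the snippets; the stack handling is simplified and hypothetical, the real passes use long if/elif chains and richer context objects):

    class TokenType(object):
      START_PAREN, END_PAREN = 'start_paren', 'end_paren'
      START_BRACKET, END_BRACKET = 'start_bracket', 'end_bracket'
      START_BLOCK, END_BLOCK = 'start_block', 'end_block'

    _OPENERS = {
        TokenType.START_PAREN: TokenType.END_PAREN,
        TokenType.START_BRACKET: TokenType.END_BRACKET,
        TokenType.START_BLOCK: TokenType.END_BLOCK,
    }
    _CLOSERS = {end: start for start, end in _OPENERS.items()}

    def process(tokens):
      """Track open contexts the way the metadata/indentation passes do."""
      stack = []
      for token in tokens:
        token_type = token.type
        if token_type in _OPENERS:
          stack.append(token_type)              # entering a new context
        elif token_type in _CLOSERS:
          if not stack or stack[-1] != _CLOSERS[token_type]:
            raise ValueError('unbalanced %s' % token_type)
          stack.pop()                           # leaving the current context
      return stack                              # empty if everything balanced
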
/external/chromium_org/third_party/closure_linter/closure_linter/ |
ecmametadatapass.py | 264 token_type = token.type 279 if token_type == TokenType.START_PAREN: 287 elif token_type == TokenType.END_PAREN: 315 elif (token_type == TokenType.KEYWORD and 323 elif token_type == TokenType.START_PARAMETERS: 326 elif token_type == TokenType.END_PARAMETERS: 329 elif token_type == TokenType.START_BRACKET: 336 elif token_type == TokenType.END_BRACKET: 339 elif token_type == TokenType.START_BLOCK: 354 elif token_type == TokenType.END_BLOCK: ... |
indentation.py | 139 token_type = token.type 145 if token_type == Type.END_PAREN: 148 elif token_type == Type.END_PARAMETERS: 151 elif token_type == Type.END_BRACKET: 154 elif token_type == Type.END_BLOCK: 181 elif token_type == Type.KEYWORD and token.string in ('case', 'default'): 191 elif token_type == Type.SEMICOLON: 194 not_binary_operator = (token_type != Type.OPERATOR or 212 if token_type in Type.COMMENT_TYPES: 232 if token_type == Type.START_BRACKET ...
/external/bison/examples/calc++/ |
calc++-driver.hh | 10 yy::calcxx_parser::token_type \
|
/external/chromium_org/v8/src/ |
token.cc | 57 const char Token::token_type[] = { member in class:v8::internal::Token
|
/external/v8/src/ |
token.cc | 57 const char Token::token_type[] = { member in class:v8::internal::Token
|
/external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/ |
pep8.py | 369 token_type, text, start, end, line = tokens[index] 370 if (token_type == tokenize.OP and 377 prev_type = token_type 445 for token_type, text, start, end, line in tokens: 446 if token_type in (tokenize.NL, tokenize.NEWLINE, tokenize.ERRORTOKEN): 457 elif token_type == tokenize.OP: 471 prev_type = token_type 551 for token_type, text, start, end, line in tokens: 552 if token_type == tokenize.NL: 554 if token_type == tokenize.COMMENT ... |
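pep8.py consumes the standard library's tokenize stream, where each token unpacks as a (token_type, text, start, end, line) 5-tuple, and the checks filter on types such as tokenize.OP, tokenize.COMMENT, tokenize.NL and tokenize.NEWLINE. A small self-contained example of iterating that stream (this only demonstrates the tuple shape pep8.py relies on, not any actual pep8 check):

    import io
    import tokenize

    SOURCE = "x = [1, 2]  # a comment\n"

    for token_type, text, start, end, line in tokenize.generate_tokens(
        io.StringIO(SOURCE).readline):
      if token_type == tokenize.COMMENT:
        print('comment %r at %s' % (text, start))
      elif token_type == tokenize.OP:
        print('operator %r' % text)
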
/external/antlr/antlr-3.4/runtime/Ruby/lib/antlr3/tree/ |
wizard.rb | 197 @token_type = tokenizer.next_token 201 case @token_type 205 @token_type == EOF and return node 214 @token_type != :open and return nil 215 @token_type = @tokenizer.next_token 219 case @token_type 229 @token_type == :close or return nil 230 @token_type = @tokenizer.next_token 236 if @token_type == :percent 237 ( @token_type = @tokenizer.next_token ) == :identifier or return nil ...
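The tree wizard's pattern parser advances @token_type through a small recursive descent: an :open token starts a new subtree, :close ends it, identifiers become nodes, and :percent introduces a label. A minimal Python sketch of the same open/identifier/close loop for patterns like "(A B (C D))" (token and node representations here are invented for illustration; labels and wildcards are omitted):

    import re

    class PatternParser(object):
      """Parses tree patterns such as '(A B (C D))' into nested tuples."""

      def __init__(self, pattern):
        self.tokens = re.findall(r'\(|\)|[A-Za-z_]\w*', pattern) + ['<eof>']
        self.pos = 0
        self.token_type = self._classify(self.tokens[0])

      def _classify(self, text):
        return {'(': 'open', ')': 'close', '<eof>': 'eof'}.get(text, 'identifier')

      def _advance(self):
        self.pos += 1
        self.token_type = self._classify(self.tokens[self.pos])

      def parse(self):
        if self.token_type == 'identifier':      # a leaf node
          node = (self.tokens[self.pos], [])
          self._advance()
          return node
        if self.token_type != 'open':
          raise SyntaxError('expected identifier or "("')
        self._advance()                          # consume '('
        root = self.tokens[self.pos]             # first identifier names the root
        self._advance()
        children = []
        while self.token_type != 'close':
          children.append(self.parse())          # recurse for each child subtree
        self._advance()                          # consume ')'
        return (root, children)

    print(PatternParser('(A B (C D))').parse())
    # ('A', [('B', []), ('C', [('D', [])])])
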