    Searched full:start_token (Results 1 - 25 of 26)


  /external/chromium-trace/catapult/third_party/closure_linter/closure_linter/
javascriptstatetracker_test.py 70 start_token = function.start_token
75 function.start_token.type)
77 self.assertEquals('function', start_token.string)
78 self.assertEquals(3, start_token.line_number)
79 self.assertEquals(0, start_token.start_index)
93 start_token = function.start_token
98 function.start_token.type)
100 self.assertEquals('function', start_token.string
    [all...]
aliaspass_test.py 32 def _GetTokenByLineAndString(start_token, string, line_number):
33 for token in start_token:
41 start_token = testutil.TokenizeSourceAndRunEcmaPass(_TEST_SCOPE_SCRIPT)
46 alias_pass.Process(start_token)
57 start_token = testutil.TokenizeSourceAndRunEcmaPass(_TEST_ALIAS_SCRIPT)
59 alias_pass.Process(start_token)
61 alias_token = _GetTokenByLineAndString(start_token, 'Event', 4)
64 my_class_token = _GetTokenByLineAndString(start_token, 'myClass', 9)
67 component_token = _GetTokenByLineAndString(start_token, 'Component', 17)
71 event_token = _GetTokenByLineAndString(start_token, 'Event.Something', 17
    [all...]
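
Editor's note: several of the snippets above iterate the token stream directly from its head (`for token in start_token:`), which implies closure_linter tokens form a singly linked, iterable chain. A minimal sketch of that pattern and of the _GetTokenByLineAndString helper from aliaspass_test.py; the Token class here is a simplified stand-in, not the library's real type:

    # Simplified stand-in for closure_linter's token type (assumption:
    # only the fields the tests above touch are modeled).
    class Token(object):
        def __init__(self, string, line_number):
            self.string = string
            self.line_number = line_number
            self.next = None  # following token in the stream, or None

        def __iter__(self):
            # Iterating from a token yields it and every token after it.
            token = self
            while token is not None:
                yield token
                token = token.next

    def _GetTokenByLineAndString(start_token, string, line_number):
        # Same shape as the aliaspass_test.py helper: scan forward from
        # start_token for the first token matching both fields.
        for token in start_token:
            if token.line_number == line_number and token.string == string:
                return token

    # Usage:
    head = Token('function', 3)
    head.next = Token('Event', 4)
    assert _GetTokenByLineAndString(head, 'Event', 4) is head.next
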
error_fixer_test.py 36 start_token = testutil.TokenizeSourceAndRunEcmaPass(_TEST_SCRIPT)
37 second_token = start_token.next
38 self.error_fixer.HandleFile('test_file', start_token)
40 self.error_fixer._DeleteToken(start_token)
45 start_token = testutil.TokenizeSourceAndRunEcmaPass(_TEST_SCRIPT)
46 fourth_token = start_token.next.next.next
47 self.error_fixer.HandleFile('test_file', start_token)
49 self.error_fixer._DeleteTokens(start_token, 3)
testutil.py 57 start_token = TokenizeSource(source)
59 ecma_pass.Process(start_token)
60 return start_token
73 start_token = TokenizeSourceAndRunEcmaPass(source)
77 tracker.DocFlagPass(start_token, error_handler)
81 for token in start_token:
scopeutil_test.py 31 def _FindContexts(start_token):
38 start_token: First token in the token stream.
47 for token in start_token:
74 start_token = testutil.TokenizeSourceAndRunEcmaPass(script)
76 start_token, ecmametadatapass.EcmaContext.VAR)
129 start_token = testutil.TokenizeSourceAndRunEcmaPass(source)
130 for context in _FindContexts(start_token):
171 self.start_token = testutil.TokenizeSourceAndRunEcmaPass(_TEST_SCRIPT)
175 for context in _FindContexts(self.start_token):
193 for context in _FindContexts(self.start_token)
    [all...]
checker.py 66 def Check(self, start_token, limited_doc_checks=False, is_html=False,
74 start_token: The first token in the token stream.
81 self._state_tracker.DocFlagPass(start_token, self._error_handler)
84 self._alias_pass.Process(start_token)
90 self._ExecutePass(start_token, self._DependencyPass, stop_token)
92 self._ExecutePass(start_token, self._LintPass, stop_token)
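
Editor's note: checker.py's Check drives several passes (doc flags, aliases, dependencies, lint) over the same stream, each bounded by an optional stop token. A hedged sketch of what an _ExecutePass-style driver could look like; everything beyond what the snippet shows is an assumption:

    # Sketch of a per-pass driver in the shape checker.py suggests: each
    # pass visits every token from start_token up to (but not including)
    # stop_token. The pass is a plain callable here; in closure_linter the
    # passes are objects with their own Process methods.
    def execute_pass(start_token, pass_function, stop_token=None):
        token = start_token
        while token is not None and token is not stop_token:
            pass_function(token)
            token = token.next
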
runner.py 51 def _GetLastNonWhiteSpaceToken(start_token):
57 for t in start_token:
78 start_token = tokenizer.TokenizeFile(fileobj)
79 return start_token, tokenizer.mode
151 def RunMetaDataPass(start_token, metadata_pass, error_handler, filename=''):
155 start_token: The first token in a token stream.
165 metadata_pass.Process(start_token)
185 def _RunChecker(start_token, error_handler,
195 style_checker.Check(start_token,
aliaspass.py 100 def Process(self, start_token):
104 start_token: The first token in the stream.
107 if start_token is None:
111 self._CheckGoogScopeCalls(start_token)
115 context = start_token.metadata.context
119 def _CheckGoogScopeCalls(self, start_token):
127 scope_tokens = [t for t in start_token if IsScopeToken(t)]
212 token = context.start_token
tokenutil.py 99 def CustomSearch(start_token, func, end_func=None, distance=None,
104 start_token: The token to start searching from
118 token = start_token
148 def Search(start_token, token_types, distance=None, reverse=False):
152 start_token: The token to start searching from
164 return CustomSearch(start_token, lambda token: token.IsAnyType(token_types),
168 def SearchExcept(start_token, token_types, distance=None, reverse=False):
172 start_token: The token to start searching from
184 return CustomSearch(start_token,
189 def SearchUntil(start_token, token_types, end_types, distance=None
    [all...]
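
Editor's note: tokenutil.py's search helpers all reduce to one walk. CustomSearch advances from start_token (forwards or backwards) until a predicate matches or a distance budget runs out, and Search/SearchExcept just supply type-based predicates. A self-contained sketch of that reduction; the field names mirror the snippet, the bodies are assumptions:

    # Minimal doubly linked token, standing in for the real token type.
    class Tok(object):
        def __init__(self, string, token_type):
            self.string = string
            self.type = token_type
            self.next = None
            self.previous = None

    def CustomSearch(start_token, func, distance=None, reverse=False):
        # Walk from start_token until func matches, honoring the optional
        # distance budget; walk backwards when reverse is set.
        token = start_token
        while token is not None and (distance is None or distance > 0):
            if func(token):
                return token
            token = token.previous if reverse else token.next
            if distance is not None:
                distance -= 1
        return None

    def Search(start_token, token_types, distance=None, reverse=False):
        # Search is CustomSearch with a type-membership predicate
        # (the real code uses token.IsAnyType(token_types)).
        return CustomSearch(start_token, lambda t: t.type in token_types,
                            distance=distance, reverse=reverse)

    def SearchExcept(start_token, token_types, distance=None, reverse=False):
        # SearchExcept inverts the predicate: first token NOT of the types.
        return CustomSearch(start_token, lambda t: t.type not in token_types,
                            distance=distance, reverse=reverse)
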
scopeutil.py 58 last_code_token = parent.start_token.metadata.last_code
82 reversed(block_context.start_token))
137 if context.start_token and context.end_token:
138 statement_tokens = tokenutil.GetTokenRange(context.start_token,
closurizednamespacesinfo.py 447 start_token = tokenutil.GetIdentifierStart(token)
448 if start_token and start_token != token:
450 identifier = tokenutil.GetIdentifierForToken(start_token)
452 start_token = token
453 # If an alias is defined on the start_token, use it instead.
454 if (start_token and
455 start_token.metadata and
456 start_token.metadata.aliased_symbol and
457 not start_token.metadata.is_alias_definition)
    [all...]
error_fixer.py 277 start_token = token.attached_object.type_start_token
279 if start_token.type != Type.DOC_START_BRACE:
281 len(start_token.string) - len(start_token.string.lstrip()))
283 start_token = tokenutil.SplitToken(start_token, leading_space)
285 if token.attached_object.type_end_token == start_token.previous:
286 token.attached_object.type_end_token = start_token
288 new_token = Token('{', Type.DOC_START_BRACE, start_token.line,
289 start_token.line_number
    [all...]
statetracker.py 312 start_token: The token that starts the doc comment.
316 def __init__(self, start_token):
320 start_token: The first token in the doc comment.
323 self.start_token = start_token
420 for token in self.start_token:
625 def _GetNextPartialIdentifierToken(start_token):
633 start_token: The token to start searching after.
638 token = start_token.next
651 def _GetEndTokenAndContents(start_token)
    [all...]
ecmametadatapass.py 51 start_token: The token where this context starts.
118 def __init__(self, context_type, start_token, parent=None):
123 start_token: The token where this context starts.
128 start_token: The token where this context starts.
134 self.start_token = start_token
176 return tokenutil.Compare(context1.start_token, context2.start_token)
337 keyword_token = result.start_token.metadata.last_code
349 start_block_token = pre_keyword_token.metadata.context.start_token
    [all...]
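
Editor's note: ecmametadatapass.py's context objects record the token at which each syntactic context starts, and contexts compare by the stream position of those tokens (line 176 above). A toy rendering with integer positions standing in for tokens; everything beyond the fields shown in the snippet is an assumption:

    # Toy EcmaContext: positions are plain ints standing in for tokens.
    class Context(object):
        def __init__(self, context_type, start_token, parent=None):
            self.type = context_type
            self.start_token = start_token  # where this context starts
            self.parent = parent
            self.children = []
            if parent is not None:
                parent.children.append(self)

    def Compare(context1, context2):
        # Mirrors tokenutil.Compare(context1.start_token,
        # context2.start_token): negative, zero, or positive as context1
        # starts before, at, or after context2.
        return context1.start_token - context2.start_token

    root = Context('root', 0)
    var = Context('var', 5, parent=root)
    assert Compare(root, var) < 0
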
indentation.py 155 start_token = self._PopTo(Type.START_BLOCK)
157 if start_token:
158 goog_scope = tokenutil.GoogScopeOrNoneFromStartBlock(start_token.token)
177 (start_token.line_number)),
451 start_token = token
454 if not token or token.line_number != start_token.line_number:
459 def _AllFunctionPropertyAssignTokens(self, start_token, end_token):
463 start_token: Start of the token range.
467 True if all tokens between start_token and end_token are legal tokens
470 for token in tokenutil.GetTokenRange(start_token, end_token)
    [all...]
checkerbase.py 129 def Check(self, start_token, limited_doc_checks=False, is_html=False,
134 start_token: First token in token stream.
142 self._ExecutePass(start_token, self._LintPass, stop_token=stop_token)
javascriptstatetracker.py 146 start_token = self._block_stack.pop()
147 if tokenutil.GoogScopeOrNoneFromStartBlock(start_token):
javascriptlintrules.py 206 not doc_comment.start_token.previous)
227 block_start = doc_comment.start_token
  /toolchain/binutils/binutils-2.25/binutils/
mclex.c 329 unichar *start_token; local
339 start_token = input_stream_pos;
355 yylval.ustr = get_diff (input_stream_pos, start_token);
371 start_token = input_stream_pos;
378 start_token++;
385 yylval.ustr = get_diff (input_stream_pos, start_token);
397 yylval.ustr = get_diff (input_stream_pos, start_token);
404 ++start_token;
409 yylval.ustr = get_diff (input_stream_pos, start_token);
431 ret = mc_token (start_token, (size_t) (input_stream_pos - start_token))
    [all...]
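
Editor's note: the mclex.c lines show the classic two-pointer lexing idiom: remember input_stream_pos in start_token, advance past the token, and recover its text with get_diff (input_stream_pos, start_token). The same idiom in Python, with integer indices in place of the C pointers (a simplification, not binutils' actual logic):

    def lex_word(text, pos):
        # Remember where the token begins (start_token = input_stream_pos).
        start_token = pos
        # Advance past the token body (here: a run of non-space characters).
        while pos < len(text) and not text[pos].isspace():
            pos += 1
        # get_diff(input_stream_pos, start_token): the text in between.
        return text[start_token:pos], pos

    token, pos = lex_word("MessageId = 0x1", 0)
    assert (token, pos) == ("MessageId", 9)
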
  /external/antlr/antlr-3.4/runtime/Ruby/lib/antlr3/tree/
debug.rb 96 def set_token_boundaries( tree, start_token, stop_token )
97 super( tree, start_token, stop_token )
98 return unless tree && start_token && stop_token
100 start_token.token_index, stop_token.token_index )
  /external/pdfium/core/src/fpdfapi/fpdf_parser/
fpdf_parser_utility.cpp 228 FX_BOOL CPDF_SimpleParser::FindTagPair(const CFX_ByteStringC& start_token,
232 if (!start_token.IsEmpty()) {
233 if (!SkipWord(start_token)) {
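
Editor's note: CPDF_SimpleParser::FindTagPair skips ahead to start_token (when one is given) and then scans for the end token, failing when either is missing. A hedged Python sketch of that contract over a pre-split word list; the real method scans raw PDF bytes, so this captures only the control flow:

    def find_tag_pair(words, start_token, end_token):
        # Return the words strictly between start_token and end_token, or
        # None when either tag cannot be found (FindTagPair's FALSE case).
        # An empty start_token means "scan from the beginning", matching
        # the !start_token.IsEmpty() guard above.
        try:
            begin = words.index(start_token) + 1 if start_token else 0
            end = words.index(end_token, begin)
        except ValueError:
            return None
        return words[begin:end]

    assert find_tag_pair(['<<', '/Type', '/Page', '>>'], '<<', '>>') == \
        ['/Type', '/Page']
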
  /external/mksh/src/
syn.c 29 int start_token; /* token that began nesting (e.g., FOR) */ member in struct:nesting_state
868 if (nesting.start_token) {
869 c = nesting.start_token;
909 nesting.start_token = tok;
938 nesting.start_token = 0;
  /external/protobuf/src/google/protobuf/compiler/
parser.cc 1170 io::Tokenizer::Token start_token; local
    [all...]
  /external/antlr/antlr-3.4/runtime/Ruby/lib/antlr3/
tree.rb 779 def set_token_boundaries( tree, start_token = nil, stop_token = nil )
782 start_token and start = start_token.index
    [all...]
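
Editor's note: both ANTLR snippets set a tree node's boundaries from a start and stop token, and tree.rb shows the tokens defaulting to nil, with each boundary applied only when its token is present. A Python rendering of that guard pattern on a hypothetical node type:

    class Node(object):
        # Hypothetical tree node; ANTLR's real trees carry more state.
        start_index = -1
        stop_index = -1

    def set_token_boundaries(tree, start_token=None, stop_token=None):
        # Mirror tree.rb: only set the boundaries whose tokens were given.
        if tree is None:
            return
        if start_token is not None:
            tree.start_index = start_token.index
        if stop_token is not None:
            tree.stop_index = stop_token.index
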
  /external/pdfium/core/include/fpdfapi/
fpdf_parser.h 212 FX_BOOL FindTagPair(const CFX_ByteStringC& start_token,

