
Lines Matching refs:tokens

24 #  * Tokens should never be exported, need to convert to Nodes
340 # TODO(nnorwitz): bases are tokens, do name comparison.
383 # TODO(nnorwitz): parameters are tokens, do name comparison.
462 def _GetTemplateEnd(self, tokens, start):
466 token = tokens[end]
474 return tokens[start:end-1], end
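From the slice returned at 474, _GetTemplateEnd evidently scans for the '>' that balances an already-consumed '<'. A minimal standalone sketch of that idea (the snake_case name is mine; only the .name token attribute is taken from the listing):

    def _get_template_end(tokens, start):
        # Depth starts at 1: the opening '<' was consumed before the call.
        depth = 1
        end = start
        while depth:
            name = tokens[end].name
            end += 1
            if name == '<':
                depth += 1
            elif name == '>':
                depth -= 1
        # Exclude the closing '>' from the slice, as at line 474.
        return tokens[start:end - 1], end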
476 def ToType(self, tokens):
489 # Partition tokens into name and modifier tokens.
504 end = len(tokens)
506 token = tokens[i]
508 new_tokens, new_end = self._GetTemplateEnd(tokens, i+1)
530 # No '<' in the tokens, just a simple name and no template.
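A highly simplified sketch of the partitioning ToType appears to do between 489 and 530: walk the tokens, recurse into template arguments on '<' via the _get_template_end sketch above, and sort the rest into modifiers and name parts. The MODIFIER_NAMES set and the return shape are assumptions, not the file's actual interface:

    MODIFIER_NAMES = frozenset(['const', 'volatile', 'mutable'])

    def to_type(tokens):
        names, modifiers, templated = [], [], []
        i, end = 0, len(tokens)
        while i < end:
            token = tokens[i]
            if token.name == '<':
                inner, i = _get_template_end(tokens, i + 1)
                templated.append(to_type(inner))
                continue
            if token.name in MODIFIER_NAMES:
                modifiers.append(token.name)
            else:
                names.append(token.name)
            i += 1
        return names, modifiers, templated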
589 def ToParameters(self, tokens):
590 if not tokens:
615 for s in tokens:
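The early return at 590 and the loop at 615 suggest ToParameters walks the token list parameter by parameter. A sketch of the comma-splitting part only (top level only; nested template or parenthesis commas would need the depth tracking shown above):

    def split_parameters(tokens):
        if not tokens:
            return []
        groups, current = [], []
        for token in tokens:
            if token.name == ',':
                groups.append(current)
                current = []
            else:
                current.append(token)
        groups.append(current)
        return groups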
678 self.tokens = token_stream
853 tokens = []
856 tokens.append(last_token)
858 return tokens, last_token
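Lines 853-858 show the accumulate-until-terminator pattern behind _GetTokensUpTo/_GetVarTokensUpTo. A reduced sketch (the real methods also match on token type, which this version omits):

    def get_tokens_up_to(get_next_token, *expected_names):
        tokens = []
        last_token = get_next_token()
        while last_token.name not in expected_names:
            tokens.append(last_token)
            last_token = get_next_token()
        return tokens, last_token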
907 return next(self.tokens)
917 def _AddBackTokens(self, tokens):
918 if tokens:
919 if tokens[-1].whence == tokenize.WHENCE_STREAM:
920 for token in tokens:
922 self.token_queue[:0] = reversed(tokens)
924 assert tokens[-1].whence == tokenize.WHENCE_QUEUE, tokens
925 self.token_queue.extend(reversed(tokens))
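Lines 917-925 distinguish tokens by where they came from: stream tokens are prepended so they are re-read first, queued tokens are appended (the queue is consumed from the opposite end). The listing elides what the per-token loop at 920 does, so this sketch shows only the queue ordering:

    def add_back_tokens(token_queue, tokens):
        # Assumes each token carries a .whence tag, as in the listing.
        if not tokens:
            return
        if tokens[-1].whence == tokenize.WHENCE_STREAM:
            token_queue[:0] = reversed(tokens)
        else:
            assert tokens[-1].whence == tokenize.WHENCE_QUEUE, tokens
            token_queue.extend(reversed(tokens))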
928 """Returns ([tokens], next_token_info)."""
934 tokens = []
944 tokens.append(next_token)
947 tokens.extend(self._GetMatchingChar('<', '>', GetNextToken))
950 return tokens, next_token
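GetName extends a plain name with a full template argument span whenever it meets '<' (line 947). The _GetMatchingChar generator it leans on, also used at 1090 and 1387, can be sketched like this; note it yields the closing character too, which is why line 1388 drops the trailing '>':

    def get_matching_char(open_char, close_char, get_next_token):
        # The opening character is assumed already consumed.
        # Simplification: the real method presumably also checks token type.
        count = 1
        while count:
            token = get_next_token()
            if token.name == open_char:
                count += 1
            elif token.name == close_char:
                count -= 1
            yield token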
1088 # TODO(nnorwitz): store tokens and improve parsing.
1090 tokens = list(self._GetMatchingChar('[', ']'))
1314 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1315 assert tokens
1316 t0 = tokens[0]
1317 return Friend(t0.start, t0.end, tokens, self.namespace_stack)
1335 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1336 assert tokens
1337 return Delete(tokens[0].start, tokens[0].end, tokens)
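The friend (1314-1317) and delete (1335-1337) handlers share one shape: drain tokens up to the next ';' and wrap them in a node carrying the first token's source span. A hypothetical helper capturing the pattern (parse_simple_statement and the node_class parameter are mine; Friend additionally passes self.namespace_stack):

    def parse_simple_statement(parser, node_class):
        tokens = parser._GetTokensUpTo(tokenize.SYNTAX, ';')
        assert tokens
        t0 = tokens[0]
        return node_class(t0.start, t0.end, tokens)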
1346 tokens = [method()]
1349 tokens = [token]
1352 tokens
1355 assert tokens
1356 name = tokens.pop()
1358 if tokens:
1359 indices = tokens[0]
1364 if (len(tokens) >= 4 and
1365 tokens[1].name == '(' and tokens[2].name == '*'):
1366 tokens.append(name)
1367 name = tokens[3]
1370 if len(tokens) >= 2:
1371 tokens.append(name)
1372 name = tokens[1]
1373 new_type = tokens
1374 if tokens and isinstance(tokens[0], tokenize.Token):
1375 new_type = self.converter.ToType(tokens)[0]
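Lines 1355-1367 handle the awkward case where a typedef's declared name is not the last token. A sketch of just that step (names are mine): for `typedef int (*name)(int);` the tokens run `int ( * name ) ( int )`, so after the popped `)` is pushed back, the real name is tokens[3]:

    def extract_typedef_name(tokens):
        name = tokens.pop()
        if (len(tokens) >= 4 and
                tokens[1].name == '(' and tokens[2].name == '*'):
            # Function-pointer typedef: restore the popped token and
            # take the identifier inside the "(*" group instead.
            tokens.append(name)
            name = tokens[3]
        return name, tokens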
1387 tokens = list(self._GetMatchingChar('<', '>'))
1388 len_tokens = len(tokens) - 1 # Ignore trailing '>'.
1391 key = tokens[i].name
1398 if tokens[i-1].name == '=':
1399 assert i < len_tokens, '%s %s' % (i, tokens)
1400 default, unused_next_token = self.GetName(tokens[i:])
1403 if tokens[i-1].name != ',':
1406 key = tokens[i-1].name
1407 type_name = tokens[i-2]
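Lines 1387-1407 scan a template parameter list for names, optional types, and '=' defaults. A simplified re-derivation, not the file's exact algorithm, reusing the split_parameters sketch above: each comma-separated group is "name", "Type name", or "name = default":

    def template_params(tokens):
        params = {}
        for group in split_parameters(tokens[:-1]):  # drop trailing '>'
            names = [t.name for t in group]
            if '=' in names:
                eq = names.index('=')
                params[names[eq - 1]] = group[eq + 1:]  # default tokens
            elif names:
                params[names[-1]] = None
        return params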
1427 tokens, last = self._GetVarTokensUpTo(tokenize.SYNTAX, '(', ';')
1428 tokens.append(last)
1429 self._AddBackTokens(tokens)
1514 tokens = (class_token, token, name_token, next_token)
1515 self._AddBackTokens(tokens)
1571 tokens = list(self.GetScope())
1573 tokens[-1] = internal_token
1575 self._AddBackTokens(tokens)
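Lines 1427-1429, 1514-1515, and 1571-1575 all use the same peek-and-replay idiom: read ahead far enough to classify a construct, then push every consumed token back so the dedicated handler re-reads the stream. In hypothetical usage (parser stands in for the builder object):

    tokens, last = parser._GetVarTokensUpTo(tokenize.SYNTAX, '(', ';')
    tokens.append(last)            # keep the terminator in stream order
    parser._AddBackTokens(tokens)  # next read sees tokens[0] first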
1579 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1580 assert tokens
1581 return Using(tokens[0].start, tokens[0].end, tokens)
1618 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1619 if not tokens:
1621 return Return(tokens[0].start, tokens[0].end, tokens)
1624 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1625 assert len(tokens) == 1, str(tokens)
1626 return Goto(tokens[0].start, tokens[0].end, tokens[0].name)
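With the parse_simple_statement sketch from above, using (1579-1581) fits the shared pattern directly, while return tolerates an empty body (1619) and goto stores only the label's name rather than the token list. Hypothetical usage:

    using_node = parse_simple_statement(parser, Using)
    # goto: exactly one token, the target label.
    tokens = parser._GetTokensUpTo(tokenize.SYNTAX, ';')
    assert len(tokens) == 1, str(tokens)
    goto_node = Goto(tokens[0].start, tokens[0].end, tokens[0].name)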