
Lines Matching refs:tokenize

46 from cpp import tokenize
549 if parts[-1].token_type == tokenize.NAME:
579 if (type_name and type_name[-1].token_type == tokenize.NAME and
580 p.token_type == tokenize.NAME):
581 type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
738 if token.token_type == tokenize.NAME:
749 if next.token_type == tokenize.SYNTAX and next.name == '(':
754 syntax = tokenize.SYNTAX
763 new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';')
766 last_token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0)
771 # unused_size = self._GetTokensUpTo(tokenize.SYNTAX, ']')
777 self._GetVarTokensUpTo(tokenize.SYNTAX, ';')
805 elif token.token_type == tokenize.SYNTAX:
811 if (token.token_type == tokenize.NAME and
815 elif token.token_type == tokenize.PREPROCESSOR:
868 if token.token_type != tokenize.PREPROCESSOR:
887 if token.token_type == tokenize.SYNTAX:
910 if token.whence == tokenize.WHENCE_STREAM:
911 token.whence = tokenize.WHENCE_QUEUE
914 assert token.whence == tokenize.WHENCE_QUEUE, token
919 if tokens[-1].whence == tokenize.WHENCE_STREAM:
921 token.whence = tokenize.WHENCE_QUEUE
924 assert tokens[-1].whence == tokenize.WHENCE_QUEUE, tokens
936 while (next_token.token_type == tokenize.NAME or
937 (next_token.token_type == tokenize.SYNTAX and
941 if last_token_was_name and next_token.token_type == tokenize.NAME:
943 tokenize.NAME
953 return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(')
963 assert token.token_type == tokenize.SYNTAX, token
969 assert token.token_type == tokenize.SYNTAX, token
987 name = tokenize.Token(tokenize.NAME, 'operator[]',
1012 while token.token_type == tokenize.NAME:
1037 assert token.token_type == tokenize.SYNTAX, token
1057 assert token.token_type == tokenize.SYNTAX, token
1082 assert token.token_type == tokenize.CONSTANT, token
1128 seq_copy.append(tokenize.Token(tokenize.SYNTAX, '', 0, 0))
1135 if next and next.token_type == tokenize.SYNTAX:
1190 if token.token_type == tokenize.SYNTAX and token.name == ';':
1194 if token.token_type == tokenize.NAME and self._handling_typedef:
1202 if token.token_type == tokenize.SYNTAX and token.name == '{':
1208 if next.token_type != tokenize.NAME:
1214 assert token.token_type == tokenize.NAME, token
1223 is_syntax = (var_token.token_type == tokenize.SYNTAX and
1225 is_variable = (var_token.token_type == tokenize.NAME and
1231 if temp.token_type == tokenize.SYNTAX and temp.name == '(':
1234 struct = tokenize.Token(tokenize.NAME, 'struct',
1285 if token2.token_type == tokenize.SYNTAX and token2.name == '~':
1287 assert token.token_type == tokenize.NAME or token.name == '::', token
1288 return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(')
1314 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1335 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1341 if (token.token_type == tokenize.NAME and
1352 tokenize.SYNTAX, ';'))
1374 if tokens and isinstance(tokens[0], tokenize.Token):
1414 assert token.token_type == tokenize.SYNTAX, token
1419 if token.token_type == tokenize.NAME:
1427 tokens, last = self._GetVarTokensUpTo(tokenize.SYNTAX, '(', ';')
1452 assert token.token_type == tokenize.NAME, token
1471 assert next_token.token_type == tokenize.SYNTAX, next_token
1482 if class_token.token_type != tokenize.NAME:
1483 assert class_token.token_type == tokenize.SYNTAX, class_token
1489 if next_token.token_type == tokenize.NAME:
1496 if token.token_type == tokenize.SYNTAX:
1521 if token.token_type == tokenize.SYNTAX and token.name == '{':
1522 assert token.token_type == tokenize.SYNTAX, token
1531 if token.token_type != tokenize.NAME:
1532 assert token.token_type == tokenize.SYNTAX, token
1555 if token.token_type == tokenize.NAME:
1559 assert token.token_type == tokenize.SYNTAX, token
1561 internal_token = tokenize.Token(_INTERNAL_TOKEN, _NAMESPACE_POP,
1579 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1608 assert token.token_type == tokenize.SYNTAX
1618 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1624 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1647 self._IgnoreUpTo(tokenize.SYNTAX, ';')
1650 self._IgnoreUpTo(tokenize.SYNTAX, ';')
1663 return AstBuilder(tokenize.GetTokens(source), filename)
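
All of the references above go through the small token API exposed by the cpp.tokenize module: GetTokens(source) yields Token objects whose token_type is one of the constants seen here (tokenize.NAME, tokenize.SYNTAX, tokenize.CONSTANT, tokenize.PREPROCESSOR) and whose text is in token.name. The following is a minimal usage sketch, not part of ast.py; it assumes the cpp package from this tree is importable and that a trailing newline on the source is acceptable to the tokenizer.

# Hypothetical driver script illustrating the tokenize API referenced above.
from cpp import tokenize

source = 'int main() { return 0; }\n'  # trailing newline kept as a precaution
for token in tokenize.GetTokens(source):
    # token_type is e.g. tokenize.NAME for identifiers/keywords and
    # tokenize.SYNTAX for punctuation such as '(', ')', '{', ';'.
    print('%s %r' % (token.token_type, token.name))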