Lines Matching refs:Tokenizer
55 #include <google/protobuf/io/tokenizer.h>
205 // This class makes use of the Protocol Message compiler's tokenizer found
206 // in //google/protobuf/io/tokenizer.h. Note that class's Parse
255 tokenizer_.set_comment_style(io::Tokenizer::SH_COMMENT_STYLE);
274 if (LookingAtType(io::Tokenizer::TYPE_END)) {
289 return suc && LookingAtType(io::Tokenizer::TYPE_END);
369 // Consumes the current field (as returned by the tokenizer) on the
684 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) {
709 if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) {
715 LookingAtType(io::Tokenizer::TYPE_INTEGER)) {
753 if (LookingAtType(io::Tokenizer::TYPE_STRING)) {
754 while (LookingAtType(io::Tokenizer::TYPE_STRING)) {
781 if (!LookingAtType(io::Tokenizer::TYPE_INTEGER) &&
782 !LookingAtType(io::Tokenizer::TYPE_FLOAT) &&
783 !LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) {
792 if (has_minus && LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) {
812 bool LookingAtType(io::Tokenizer::TokenType token_type) {
819 if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) {
828 && LookingAtType(io::Tokenizer::TYPE_INTEGER)) {
853 if (!LookingAtType(io::Tokenizer::TYPE_STRING)) {
859 while (LookingAtType(io::Tokenizer::TYPE_STRING)) {
860 io::Tokenizer::ParseStringAppend(tokenizer_.current().text, text);
871 if (!LookingAtType(io::Tokenizer::TYPE_INTEGER)) {
876 if (!io::Tokenizer::ParseInteger(tokenizer_.current().text,
887 // Note that since the tokenizer does not support negative numbers,
917 if (!LookingAtType(io::Tokenizer::TYPE_INTEGER)) {
928 if (!io::Tokenizer::ParseInteger(text, max_value, value)) {
938 // Note that since the tokenizer does not support negative numbers,
949 // A double can actually be an integer, according to the tokenizer.
951 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) {
957 } else if (LookingAtType(io::Tokenizer::TYPE_FLOAT)) {
959 *value = io::Tokenizer::ParseFloat(tokenizer_.current().text);
963 } else if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) {
1068 // An internal instance of the Tokenizer's error collector, used to
1094 io::Tokenizer tokenizer_;
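
The matches above trace the io::Tokenizer usage pattern in this file: switch the comment style to SH_COMMENT_STYLE for '#' comments (line 255), test the current token's type against TYPE_INTEGER, TYPE_FLOAT, TYPE_STRING, TYPE_IDENTIFIER, or TYPE_END, and decode the token text with the static helpers ParseInteger, ParseFloat, and ParseStringAppend. The sketch below is a minimal standalone illustration of that pattern, not code from this file: the ArrayInputStream setup, the SimpleErrorCollector class, and the sample input are assumptions added for the example, and the ErrorCollector virtual is spelled AddError in older protobuf releases but RecordError in newer ones.

// Minimal sketch of the io::Tokenizer pattern reflected in the listing above.
// SimpleErrorCollector, the input text, and the stream setup are illustrative
// assumptions, not part of the matched file.
#include <cstdint>
#include <iostream>
#include <limits>
#include <string>

#include <google/protobuf/io/tokenizer.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

namespace io = google::protobuf::io;

// Hypothetical collector for illustration; it just prints syntax errors.
// (Older protobuf declares AddError as the pure virtual; newer releases use
// RecordError instead.)
class SimpleErrorCollector : public io::ErrorCollector {
 public:
  void AddError(int line, int column, const std::string& message) override {
    std::cerr << "error at " << line << ":" << column << ": " << message << "\n";
  }
};

int main() {
  const std::string input =
      "count: 42  # shell-style comment\npi: 3.14\nname: \"foo\" \"bar\"\n";
  io::ArrayInputStream stream(input.data(), static_cast<int>(input.size()));
  SimpleErrorCollector errors;
  io::Tokenizer tokenizer(&stream, &errors);
  // '#' comments, as set on line 255 of the listing.
  tokenizer.set_comment_style(io::Tokenizer::SH_COMMENT_STYLE);

  // Next() advances past the initial TYPE_START token and returns false once
  // TYPE_END is reached, so this loop visits every real token in the input.
  while (tokenizer.Next()) {
    const io::Tokenizer::Token& t = tokenizer.current();
    if (t.type == io::Tokenizer::TYPE_INTEGER) {
      uint64_t value = 0;
      if (io::Tokenizer::ParseInteger(t.text,
                                      std::numeric_limits<uint64_t>::max(),
                                      &value)) {
        std::cout << "integer: " << value << "\n";
      }
    } else if (t.type == io::Tokenizer::TYPE_FLOAT) {
      std::cout << "float: " << io::Tokenizer::ParseFloat(t.text) << "\n";
    } else if (t.type == io::Tokenizer::TYPE_STRING) {
      // ParseStringAppend unescapes and appends, which is how adjacent string
      // literals get concatenated (listing lines 754 and 859-860).
      std::string text;
      io::Tokenizer::ParseStringAppend(t.text, &text);
      std::cout << "string: " << text << "\n";
    } else if (t.type == io::Tokenizer::TYPE_IDENTIFIER) {
      std::cout << "identifier: " << t.text << "\n";
    }
  }
  return 0;
}

As the comments matched at lines 887 and 938 note, the tokenizer itself never produces negative numbers; callers consume a leading '-' as a separate symbol token and negate the parsed magnitude themselves.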