Home | Sort by relevance | Sort by last modified time
    Searched defs:Tokenize (Results 1 - 13 of 13) sorted by null

  /external/google-breakpad/src/processor/
tokenize.cc 45 bool Tokenize(char *line,
  /external/chromium-trace/catapult/common/py_utils/py_utils/refactor/
offset_token.py 8 import tokenize
55 def Tokenize(f):
66 tokenize_tokens = tokenize.generate_tokens(f.readline)
90 while offset_tokens[0].type == tokenize.NL:
100 # Convert OffsetTokens to tokenize tokens.
113 # tokenize can't handle whitespace before line continuations.
115 return tokenize.untokenize(tokenize_tokens).replace('\\\n', ' \\\n')
  /external/libtextclassifier/annotator/duration/
duration_test.cc 108 std::vector<Token> Tokenize(const UnicodeText& text) {
109 return feature_processor_.Tokenize(text);
141 std::vector<Token> tokens = Tokenize(text);
160 std::vector<Token> tokens = Tokenize(text);
179 std::vector<Token> tokens = Tokenize(text);
197 std::vector<Token> tokens = Tokenize(text);
216 std::vector<Token> tokens = Tokenize(text);
235 std::vector<Token> tokens = Tokenize(text);
255 std::vector<Token> tokens = Tokenize(text);
274 std::vector<Token> tokens = Tokenize(text)
    [all...]
  /external/libtextclassifier/lang_id/
custom-tokenizer.cc 102 void TokenizerForLangId::Tokenize(StringPiece text,
  /external/tensorflow/tensorflow/lite/testing/nnapi_tflite_zip_tests/
tokenize.cc 18 #include "tokenize.h"
26 void Tokenize(std::istream* input, TokenProcessor* processor) {
  /external/tensorflow/tensorflow/lite/testing/
tokenize.cc 15 #include "tensorflow/lite/testing/tokenize.h"
23 void Tokenize(std::istream* input, TokenProcessor* processor) {
  /external/libtextclassifier/utils/
tokenizer_test.cc 92 std::vector<Token> Tokenize(const std::string& utf8_text) const {
93 return tokenizer_->Tokenize(utf8_text);
174 std::vector<Token> tokens = tokenizer.Tokenize("Hello world!");
208 EXPECT_THAT(tokenizer.Tokenize("???? ? ???(123) 456-789?????"),
345 tokens = tokenizer.Tokenize(
349 tokens = tokenizer.Tokenize("??? hello ???????");
372 std::vector<Token> tokens = tokenizer.Tokenize("???????????????????");
387 std::vector<Token> tokens = tokenizer.Tokenize("?????? ?????? ??? ?? ??");
440 std::vector<Token> tokens = tokenizer.Tokenize(
468 EXPECT_EQ(tokenizer.Tokenize("????123????")
    [all...]
tokenizer.cc 90 std::vector<Token> Tokenizer::Tokenize(const std::string& text) const {
92 return Tokenize(text_unicode);
95 std::vector<Token> Tokenizer::Tokenize(const UnicodeText& text_unicode) const {
164 // There is no span to tokenize.
  /external/perfetto/src/trace_processor/
proto_trace_parser_unittest.cc 165 void Tokenize() {
211 Tokenize();
247 Tokenize();
296 Tokenize();
359 Tokenize();
404 Tokenize();
426 Tokenize();
446 Tokenize();
461 Tokenize();
476 Tokenize();
    [all...]
  /external/google-breakpad/src/testing/gtest/scripts/
pump.py 382 def Tokenize(s):
579 tokens = list(Tokenize(pump_src_text))
  /external/googletest/googletest/scripts/
pump.py 382 def Tokenize(s):
579 tokens = list(Tokenize(pump_src_text))
  /external/libtextclassifier/annotator/
feature-processor.cc 191 std::vector<Token> FeatureProcessor::Tokenize(const std::string& text) const {
192 return tokenizer_.Tokenize(text);
195 std::vector<Token> FeatureProcessor::Tokenize(
197 return tokenizer_.Tokenize(text_unicode);
  /external/libtextclassifier/actions/
actions-suggestions.cc 590 std::vector<std::vector<Token>> ActionsSuggestions::Tokenize(
595 tokens.push_back(feature_processor_->tokenizer()->Tokenize(message));
779 // Tokenize the messages in the conversation.
780 tokens = Tokenize(context);
    [all...]

Completed in 910 milliseconds