Searched defs:tokenize (Results 1 - 25 of 174)


  /prebuilts/go/darwin-x86/src/cmd/asm/internal/asm/
pseudo_test.go 15 func tokenize(s string) [][]lex.Token { func
21 res = append(res, lex.Tokenize(o))
59 if !parser.pseudo(test.pseudo, tokenize(test.operands)) {
  /prebuilts/go/linux-x86/src/cmd/asm/internal/asm/
pseudo_test.go 15 func tokenize(s string) [][]lex.Token { func
21 res = append(res, lex.Tokenize(o))
59 if !parser.pseudo(test.pseudo, tokenize(test.operands)) {
  /external/swiftshader/third_party/subzero/src/
IceRangeSpec.cpp 51 auto Tokens = RangeSpec::tokenize(Token, RangeSpec::DELIM_RANGE);
97 std::vector<std::string> RangeSpec::tokenize(const std::string &Spec, function in class:Ice::RangeSpec
115 auto Tokens = tokenize(Spec, DELIM_LIST);
  /hardware/libhardware/modules/sensors/dynamic_sensor/HidUtils/
HidItem.cpp 49 std::vector<HidItem> HidItem::tokenize(const uint8_t *begin, size_t size) { function in class:HidUtil::HidItem
53 return tokenize(is);
56 std::vector<HidItem> HidItem::tokenize(const std::vector<uint8_t> &descriptor) { function in class:HidUtil::HidItem
60 return tokenize(is);
63 std::vector<HidItem> HidItem::tokenize(std::istream &is) { function in class:HidUtil::HidItem
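
A rough illustration of what these HidItem::tokenize overloads are doing: a HID report descriptor is split into items, and for short items the prefix byte encodes the payload size in bits 0-1, the type in bits 2-3, and the tag in bits 4-7. The sketch below follows that USB HID spec layout only; the function name and the sample descriptor bytes are hypothetical, and it is not the HidUtil implementation (which, per the snippets, also accepts streams and byte vectors).

    # Hypothetical sketch of HID short-item tokenization (USB HID spec prefix layout),
    # not the HidUtil::HidItem code indexed above.
    def tokenize_hid(descriptor):
        SIZE = {0: 0, 1: 1, 2: 2, 3: 4}          # bSize value 3 means a 4-byte payload
        items, i = [], 0
        while i < len(descriptor):
            prefix = descriptor[i]
            size = SIZE[prefix & 0x03]
            tag, typ = (prefix >> 4) & 0x0F, (prefix >> 2) & 0x03
            items.append((tag, typ, descriptor[i + 1:i + 1 + size]))
            i += 1 + size
        return items

    # Made-up fragment: Usage Page (Generic Desktop), Usage (Keyboard).
    print(tokenize_hid(bytes([0x05, 0x01, 0x09, 0x06])))
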
  /prebuilts/go/darwin-x86/src/cmd/link/internal/ld/
util.go 26 func tokenize(s string) []string { func
  /prebuilts/go/linux-x86/src/cmd/link/internal/ld/
util.go 26 func tokenize(s string) []string { func
  /external/deqp/framework/randomshaders/
rsgShader.cpp 93 void Shader::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::Shader
99 // Tokenize global declaration statements
101 m_globalStatements[ndx]->tokenize(state, str);
103 // Tokenize all functions
107 m_functions[ndx]->tokenize(state, str);
110 // Tokenize main
112 m_mainFunction.tokenize(state, str);
125 void Function::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::Function
146 // Tokenize body
147 m_functionBlock.tokenize(state, str)
    [all...]
rsgBuiltinFunctions.hpp 43 void tokenize (GeneratorState& state, TokenStream& str) const;
95 void UnaryBuiltinVecFunc<GetValueRangeWeight, ComputeValueRange, Evaluate>::tokenize (GeneratorState& state, TokenStream& str) const function in class:rsg::UnaryBuiltinVecFunc
98 m_child->tokenize(state, str);
rsgExpression.hpp 57 virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
74 void tokenize (GeneratorState& state, TokenStream& str) const { DE_UNREF(state); str << Token(m_variable->getName()); } function in class:rsg::VariableAccess
113 void tokenize (GeneratorState& state, TokenStream& str) const;
131 void tokenize (GeneratorState& state, TokenStream& str) const;
149 void tokenize (GeneratorState& state, TokenStream& str) const;
167 void tokenize (GeneratorState& state, TokenStream& str) const;
189 void tokenize (GeneratorState& state, TokenStream& str) const;
214 void tokenize (GeneratorState& state, TokenStream& str) const;
233 void tokenize (GeneratorState& state, TokenStream& str) const;
255 void tokenize (GeneratorState& state, TokenStream& str) const
    [all...]
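
In the deqp random shader generator entries above, tokenize is a recursive emit step: each node of the generated program writes its own tokens into a TokenStream and forwards to its children (the Shader to its global statements, functions and main; a built-in call to its child expression). A minimal sketch of that pattern in Python, with hypothetical node classes rather than the rsg classes:

    # Hypothetical sketch of the recursive tokenize-into-a-stream pattern.
    class Literal:
        def __init__(self, value):
            self.value = value
        def tokenize(self, out):
            out.append(str(self.value))

    class BinaryOp:
        def __init__(self, op, left, right):
            self.op, self.left, self.right = op, left, right
        def tokenize(self, out):
            out.append("(")
            self.left.tokenize(out)      # each child emits its own tokens
            out.append(self.op)
            self.right.tokenize(out)
            out.append(")")

    stream = []
    BinaryOp("+", Literal(1), BinaryOp("*", Literal(2), Literal(3))).tokenize(stream)
    print(" ".join(stream))              # ( 1 + ( 2 * 3 ) )
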
  /external/mesa3d/src/gallium/state_trackers/clover/llvm/
util.hpp 44 tokenize(const std::string &s) { function in namespace:clover::llvm
  /external/swiftshader/third_party/LLVM/lib/MC/MCDisassembler/
EDInst.cpp 165 int EDInst::tokenize() { function in class:EDInst
175 return TokenizeResult.setResult(EDToken::tokenize(Tokens,
183 if (tokenize())
189 if (tokenize())
197 if (tokenize())
EDToken.cpp 88 int EDToken::tokenize(std::vector<EDToken*> &tokens, function in class:EDToken
  /external/tensorflow/tensorflow/contrib/eager/python/examples/rnn_ptb/
rnn_ptb.py 225 self.train = self.tokenize(os.path.join(path, "ptb.train.txt"))
226 self.valid = self.tokenize(os.path.join(path, "ptb.valid.txt"))
236 def tokenize(self, path): member in class:Datasets
246 # Tokenize file content
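
Datasets.tokenize above reads the PTB text files and turns their contents into token ids. As a hedged, simplified sketch of that general word-to-id step (hypothetical tokenize_words helper, not the rnn_ptb code):

    # Simplified sketch: split each line into words, append an end-of-sentence
    # marker, and map every word to an integer id.
    def tokenize_words(lines):
        vocab, ids = {}, []
        for line in lines:
            for word in line.split() + ["<eos>"]:
                ids.append(vocab.setdefault(word, len(vocab)))
        return vocab, ids

    vocab, ids = tokenize_words(["the cat sat", "the dog ran"])
    print(len(vocab), ids)               # 6 [0, 1, 2, 3, 0, 4, 5, 3]
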
  /frameworks/base/core/java/android/text/util/
Rfc822Tokenizer.java 45 public static void tokenize(CharSequence text, Collection<Rfc822Token> out) { method in class:Rfc822Tokenizer
171 public static Rfc822Token[] tokenize(CharSequence text) { method in class:Rfc822Tokenizer
173 tokenize(text, out); method
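
Rfc822Tokenizer.tokenize splits an RFC 822 address list such as "Alice <alice@example.com>, bob@example.com" into tokens that carry a display name and an address (plus a comment, which this sketch ignores). A rough Python analogue using the standard library's email.utils, not the Android API:

    # Rough analogue of RFC 822 address-list tokenization via the Python stdlib.
    from email.utils import getaddresses

    for name, addr in getaddresses(["Alice <alice@example.com>, bob@example.com"]):
        print(repr(name), repr(addr))    # ('Alice', 'alice@example.com'), ('', 'bob@example.com')
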
  /prebuilts/ndk/r16/sources/third_party/shaderc/third_party/glslang/hlsl/
hlslScanContext.cpp 457 void HlslScanContext::tokenize(HlslToken& token) function in class:glslang::HlslScanContext
482 int token = ppContext.tokenize(ppToken);
    [all...]
  /external/minijail/
util.c 179 while ((group = tokenize(&constant_str, "|")) != NULL) {
256 char *tokenize(char **stringp, const char *delim) function
  /external/selinux/libsepol/src/
util.c 196 * The tokenize and tokenize_str functions may be used to
240 * line_buf - Buffer containing string to tokenize.
241 * delim - The delimiter used to tokenize line_buf. A whitespace delimiter will
249 * function will not tokenize more than num_args and the last argument will
253 int hidden tokenize(char *line_buf, char delim, int num_args, ...) function
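
The comments quoted above describe a variadic tokenizer that extracts at most num_args tokens, with the cut-off line suggesting the last argument receives whatever remains. If that reading is right, the behaviour is analogous to Python's str.split with a maxsplit cap:

    # Analogy only (not the libsepol API): split at most 3 times, leaving the tail whole.
    line = "user_u user_r user_t s0 and the rest"
    print(line.split(" ", 3))            # ['user_u', 'user_r', 'user_t', 's0 and the rest']
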
  /packages/apps/QuickSearchBox/src/com/android/quicksearchbox/
LevenshteinSuggestionFormatter.java 43 final Token[] queryTokens = tokenize(query);
44 final Token[] suggestionTokens = tokenize(suggestion);
99 Token[] tokenize(final String seq) { method in class:LevenshteinSuggestionFormatter
  /packages/providers/ContactsProvider/src/com/android/providers/contacts/
HanziToPinyin.java 98 private void tokenize(char character, Token token) { method in class:HanziToPinyin
160 tokenize(character, token); method
  /cts/tests/tests/text/src/android/text/util/cts/
Rfc822TokenizerTest.java 130 Rfc822Token[] tokens = Rfc822Tokenizer.tokenize("");
134 tokens = Rfc822Tokenizer.tokenize(text);
140 tokens = Rfc822Tokenizer.tokenize(text);
148 Rfc822Tokenizer.tokenize(null);
154 Rfc822Tokenizer.tokenize("", list);
158 Rfc822Tokenizer.tokenize(text, list);
165 Rfc822Tokenizer.tokenize(text, list);
174 Rfc822Tokenizer.tokenize(null);
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.10/Lib/
tokenize.py 20 tokenize(readline, tokeneater=printtoken)
35 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
156 def tokenize(readline, tokeneater=printtoken): function
158 The tokenize() function accepts two parameters: one representing the
159 input stream, and one providing an output mechanism for tokenize().
260 # Output text will tokenize the back to the input
357 ("<tokenize>", lnum, pos, line))
430 tokenize(open(sys.argv[1]).readline)
432 tokenize(sys.stdin.readline)
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/lib2to3/pgen2/
tokenize.py 23 tokenize(readline, tokeneater=printtoken)
37 __all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize",
160 def tokenize(readline, tokeneater=printtoken): function
162 The tokenize() function accepts two parameters: one representing the
163 input stream, and one providing an output mechanism for tokenize().
256 in the same way as the tokenize() generator.
335 # Output text will tokenize the back to the input
427 ("<tokenize>", lnum, pos, line))
499 if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline)
500 else: tokenize(sys.stdin.readline)
    [all...]
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/
tokenize.py 20 tokenize(readline, tokeneater=printtoken)
34 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
155 def tokenize(readline, tokeneater=printtoken): function
157 The tokenize() function accepts two parameters: one representing the
158 input stream, and one providing an output mechanism for tokenize().
254 # Output text will tokenize the back to the input
351 ("<tokenize>", lnum, pos, line))
422 tokenize(open(sys.argv[1]).readline)
424 tokenize(sys.stdin.readline)
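
The three copies of the Python 2.7 tokenize module above (including the lib2to3 pgen2 variant) expose tokenize(readline, tokeneater) plus the generator form generate_tokens(readline); both produce (type, string, start, end, line) 5-tuples. A minimal usage sketch, not taken from the indexed sources, mirroring the tokenize(open(...).readline) calls in the snippets:

    # Drive the stdlib tokenizer with a readline callable.
    import io
    from token import tok_name
    from tokenize import generate_tokens

    readline = io.StringIO(u"x = 1 + 2  # comment\n").readline
    for tok_type, tok_string, start, end, line in generate_tokens(readline):
        print(tok_name[tok_type], repr(tok_string))
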
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Parser/
spark.py 65 def tokenize(self, s): member in class:GenericScanner
  /external/apache-xml/src/main/java/org/apache/xpath/compiler/
Lexer.java 96 void tokenize(String pat) throws javax.xml.transform.TransformerException method in class:Lexer
98 tokenize(pat, null); method
109 void tokenize(String pat, Vector targetStrings) method in class:Lexer
