    Searched refs: Whitespace (Results 1 - 25 of 57)


  /external/selinux/policycoreutils/mcstrans/share/examples/pipes/setrans.d/
pipes.conf 5 Whitespace=,
  /external/snakeyaml/src/main/java/org/yaml/snakeyaml/tokens/
WhitespaceToken.java 27 return ID.Whitespace;
Token.java 23 Alias, Anchor, BlockEnd, BlockEntry, BlockMappingStart, BlockSequenceStart, Directive, DocumentEnd, DocumentStart, FlowEntry, FlowMappingEnd, FlowMappingStart, FlowSequenceEnd, FlowSequenceStart, Key, Scalar, StreamEnd, StreamStart, Tag, Value, Whitespace, Comment, Error
  /prebuilts/gdb/darwin-x86/lib/python2.7/
tabnanny.py 80 checked for whitespace related problems. The diagnostic messages are
132 class Whitespace:
140 # the number of leading whitespace characters in raw
157 S, T = Whitespace.S, Whitespace.T
278 indents = [Whitespace("")]
292 thisguy = Whitespace(token)
316 # ENDMARKER; the "line" argument exposes the leading whitespace
321 thisguy = Whitespace(line)
tokenize.py 48 Whitespace = r'[ \f\t]*'
50 Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
99 PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
318 while pos < max: # measure leading whitespace
  /prebuilts/gdb/linux-x86/lib/python2.7/
tabnanny.py 80 checked for whitespace related problems. The diagnostic messages are
132 class Whitespace:
140 # the number of leading whitespace characters in raw
157 S, T = Whitespace.S, Whitespace.T
278 indents = [Whitespace("")]
292 thisguy = Whitespace(token)
316 # ENDMARKER; the "line" argument exposes the leading whitespace
321 thisguy = Whitespace(line)
tokenize.py 48 Whitespace = r'[ \f\t]*'
50 Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
99 PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
318 while pos < max: # measure leading whitespace
  /prebuilts/python/darwin-x86/2.7.5/lib/python2.7/
tabnanny.py 80 checked for whitespace related problems. The diagnostic messages are
132 class Whitespace:
140 # the number of leading whitespace characters in raw
157 S, T = Whitespace.S, Whitespace.T
278 indents = [Whitespace("")]
292 thisguy = Whitespace(token)
316 # ENDMARKER; the "line" argument exposes the leading whitespace
321 thisguy = Whitespace(line)
tokenize.py 48 Whitespace = r'[ \f\t]*'
50 Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
99 PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
318 while pos < max: # measure leading whitespace
  /prebuilts/python/linux-x86/2.7.5/lib/python2.7/
tabnanny.py 80 checked for whitespace related problems. The diagnostic messages are
132 class Whitespace:
140 # the number of leading whitespace characters in raw
157 S, T = Whitespace.S, Whitespace.T
278 indents = [Whitespace("")]
292 thisguy = Whitespace(token)
316 # ENDMARKER; the "line" argument exposes the leading whitespace
321 thisguy = Whitespace(line)
tokenize.py 48 Whitespace = r'[ \f\t]*'
50 Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
99 PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
318 while pos < max: # measure leading whitespace
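
Across the four prebuilt copies above, tokenize.py builds its Whitespace pattern from horizontal whitespace only (r'[ \f\t]*'), so the derived Ignore and PseudoToken patterns can skip indentation and backslash line-continuations without ever consuming a newline. A minimal Go mirror of that character class, as a sketch only (the pattern is taken from the hits above; the sample line is made up):

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        // Same character class as tokenize.py's Whitespace pattern:
        // space, form feed, and tab -- never '\n', so leading indentation
        // can be measured without consuming the line break.
        leading := regexp.MustCompile(`^[ \f\t]*`)

        line := "    \tx = 1\n"                       // made-up example line
        fmt.Printf("%q\n", leading.FindString(line))  // prints "    \t"
    }
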
  /external/chromium-trace/catapult/third_party/webapp2/docs/_themes/webapp2/
pygapp2.py 11 Whitespace: "nobold noitalic #FFF",
  /external/opencv3/3rdparty/jinja2/markupsafe/
__init__.py 147 also resolves known HTML4 and XHTML entities. Whitespace is
  /prebuilts/gdb/darwin-x86/lib/python2.7/lib2to3/pgen2/
tokenize.py 52 Whitespace = r'[ \f\t]*'
54 Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
103 PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
399 while pos < max: # measure leading whitespace
  /prebuilts/gdb/linux-x86/lib/python2.7/lib2to3/pgen2/
tokenize.py 52 Whitespace = r'[ \f\t]*'
54 Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
103 PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
399 while pos < max: # measure leading whitespace
  /prebuilts/python/darwin-x86/2.7.5/lib/python2.7/lib2to3/pgen2/
tokenize.py 52 Whitespace = r'[ \f\t]*'
54 Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
103 PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
399 while pos < max: # measure leading whitespace
  /prebuilts/python/linux-x86/2.7.5/lib/python2.7/lib2to3/pgen2/
tokenize.py 52 Whitespace = r'[ \f\t]*'
54 Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
103 PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
399 while pos < max: # measure leading whitespace
  /prebuilts/go/darwin-x86/src/text/scanner/
scanner.go 108 // GoWhitespace is the default value for the Scanner's Whitespace field.
154 // The Whitespace field controls which characters are recognized
156 // set the ch'th bit in Whitespace (the Scanner's behavior is undefined
158 Whitespace uint64
178 // and Whitespace is set to GoWhitespace.
206 s.Whitespace = GoWhitespace
550 for s.Whitespace&(1<<uint(ch)) != 0 {
scanner_test.go 316 s.Whitespace = 0 // don't skip any whitespace
419 s.Whitespace = ws
600 s.Whitespace = 0
  /prebuilts/go/linux-x86/src/text/scanner/
scanner.go 108 // GoWhitespace is the default value for the Scanner's Whitespace field.
154 // The Whitespace field controls which characters are recognized
156 // set the ch'th bit in Whitespace (the Scanner's behavior is undefined
158 Whitespace uint64
178 // and Whitespace is set to GoWhitespace.
206 s.Whitespace = GoWhitespace
550 for s.Whitespace&(1<<uint(ch)) != 0 {
scanner_test.go 316 s.Whitespace = 0 // don't skip any whitespace
419 s.Whitespace = ws
600 s.Whitespace = 0
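
The scanner.go hits describe Whitespace as a 64-bit set: for a character ch <= ' ', bit ch is set when that character should be skipped, and Init resets the field to GoWhitespace. A minimal sketch of that behaviour using only the exported text/scanner API (the input string is made up); setting the field to 0, as scanner_test.go does, turns off skipping entirely:

    package main

    import (
        "fmt"
        "strings"
        "text/scanner"
    )

    func main() {
        var s scanner.Scanner
        s.Init(strings.NewReader("a b\tc")) // made-up input

        // After Init the field holds GoWhitespace ('\t', '\n', '\r', ' ' bits set).
        fmt.Println(s.Whitespace == scanner.GoWhitespace) // true

        // With the mask cleared, nothing is skipped: every character that does
        // not start a recognized token comes back from Scan as its own token.
        s.Whitespace = 0
        for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
            fmt.Printf("%q ", s.TokenText())
        }
        fmt.Println() // prints: "a" " " "b" "\t" "c"
    }
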
  /prebuilts/go/darwin-x86/pkg/bootstrap/src/bootstrap/asm/internal/lex/
tokenizer.go 33 s.Whitespace = 1<<'\t' | 1<<'\r' | 1<<' '
  /prebuilts/go/darwin-x86/src/cmd/asm/internal/lex/
tokenizer.go 30 s.Whitespace = 1<<'\t' | 1<<'\r' | 1<<' '
  /prebuilts/go/linux-x86/pkg/bootstrap/src/bootstrap/asm/internal/lex/
tokenizer.go 33 s.Whitespace = 1<<'\t' | 1<<'\r' | 1<<' '
  /prebuilts/go/linux-x86/src/cmd/asm/internal/lex/
tokenizer.go 30 s.Whitespace = 1<<'\t' | 1<<'\r' | 1<<' '
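
All four tokenizer.go hits install the same mask, which differs from GoWhitespace only in leaving the '\n' bit clear, presumably so the assembler can see end-of-line as its own token. A sketch of that effect on a made-up two-line input, again using only the exported text/scanner API:

    package main

    import (
        "fmt"
        "strings"
        "text/scanner"
    )

    func main() {
        var s scanner.Scanner
        s.Init(strings.NewReader("MOVQ AX, BX\nRET\n")) // made-up input

        // Same mask as the tokenizer.go hits: skip tab, CR, and space, but not
        // '\n', so each newline is returned by Scan as a token in its own right.
        s.Whitespace = 1<<'\t' | 1<<'\r' | 1<<' '

        for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
            fmt.Printf("%q ", s.TokenText())
        }
        fmt.Println() // prints: "MOVQ" "AX" "," "BX" "\n" "RET" "\n"
    }
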

Completed in 8564 milliseconds
