      1 #!/usr/bin/env python
      2 #
      3 # Copyright (c) 2009 Google Inc. All rights reserved.
      4 #
      5 # Redistribution and use in source and binary forms, with or without
      6 # modification, are permitted provided that the following conditions are
      7 # met:
      8 #
      9 #    * Redistributions of source code must retain the above copyright
     10 # notice, this list of conditions and the following disclaimer.
     11 #    * Redistributions in binary form must reproduce the above
     12 # copyright notice, this list of conditions and the following disclaimer
     13 # in the documentation and/or other materials provided with the
     14 # distribution.
     15 #    * Neither the name of Google Inc. nor the names of its
     16 # contributors may be used to endorse or promote products derived from
     17 # this software without specific prior written permission.
     18 #
     19 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     20 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     21 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     22 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     23 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     24 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     25 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     26 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     27 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     28 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     29 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     30 
     31 """Does google-lint on c++ files.
     32 
     33 The goal of this script is to identify places in the code that *may*
     34 be in non-compliance with google style.  It does not attempt to fix
      35 up these problems -- the point is to educate.  It also does not
     36 attempt to find all problems, or to ensure that everything it does
     37 find is legitimately a problem.
     38 
     39 In particular, we can get very confused by /* and // inside strings!
     40 We do a small hack, which is to ignore //'s with "'s after them on the
     41 same line, but it is far from perfect (in either direction).
     42 """
     43 
     44 import codecs
     45 import copy
     46 import getopt
     47 import math  # for log
     48 import os
     49 import re
     50 import sre_compile
     51 import string
     52 import sys
     53 import unicodedata
     54 
     55 
     56 _USAGE = """
     57 Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...]
     58                    [--counting=total|toplevel|detailed] [--root=subdir]
     59                    [--linelength=digits] [--headers=x,y,...]
     60                    [--quiet]
     61         <file> [file] ...
     62 
     63   The style guidelines this tries to follow are those in
     64     https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml
     65 
     66   Every problem is given a confidence score from 1-5, with 5 meaning we are
     67   certain of the problem, and 1 meaning it could be a legitimate construct.
     68   This will miss some errors, and is not a substitute for a code review.
     69 
     70   To suppress false-positive errors of a certain category, add a
     71   'NOLINT(category)' comment to the line.  NOLINT or NOLINT(*)
     72   suppresses errors of all categories on that line.
     73 
     74   The files passed in will be linted; at least one file must be provided.
     75   Default linted extensions are .cc, .cpp, .cu, .cuh and .h.  Change the
     76   extensions with the --extensions flag.
     77 
     78   Flags:
     79 
     80     output=vs7
     81       By default, the output is formatted to ease emacs parsing.  Visual Studio
     82       compatible output (vs7) may also be used.  Other formats are unsupported.
     83 
     84     verbose=#
     85       Specify a number 0-5 to restrict errors to certain verbosity levels.
     86 
     87     quiet
     88       Don't print anything if no errors are found.
     89 
     90     filter=-x,+y,...
     91       Specify a comma-separated list of category-filters to apply: only
     92       error messages whose category names pass the filters will be printed.
     93       (Category names are printed with the message and look like
     94       "[whitespace/indent]".)  Filters are evaluated left to right.
      95       "-FOO" and "FOO" mean "do not print categories that start with FOO".
     96       "+FOO" means "do print categories that start with FOO".
     97 
     98       Examples: --filter=-whitespace,+whitespace/braces
     99                 --filter=whitespace,runtime/printf,+runtime/printf_format
    100                 --filter=-,+build/include_what_you_use
    101 
    102       To see a list of all the categories used in cpplint, pass no arg:
    103          --filter=
    104 
    105     counting=total|toplevel|detailed
    106       The total number of errors found is always printed. If
    107       'toplevel' is provided, then the count of errors in each of
    108       the top-level categories like 'build' and 'whitespace' will
    109       also be printed. If 'detailed' is provided, then a count
    110       is provided for each category like 'build/class'.
    111 
    112     root=subdir
     113       The root directory used for deriving the header guard CPP variable.
    114       By default, the header guard CPP variable is calculated as the relative
    115       path to the directory that contains .git, .hg, or .svn.  When this flag
    116       is specified, the relative path is calculated from the specified
    117       directory. If the specified directory does not exist, this flag is
    118       ignored.
    119 
    120       Examples:
    121         Assuming that top/src/.git exists (and cwd=top/src), the header guard
    122         CPP variables for top/src/chrome/browser/ui/browser.h are:
    123 
    124         No flag => CHROME_BROWSER_UI_BROWSER_H_
    125         --root=chrome => BROWSER_UI_BROWSER_H_
    126         --root=chrome/browser => UI_BROWSER_H_
    127         --root=.. => SRC_CHROME_BROWSER_UI_BROWSER_H_
    128 
    129     linelength=digits
    130       This is the allowed line length for the project. The default value is
    131       80 characters.
    132 
    133       Examples:
    134         --linelength=120
    135 
    136     extensions=extension,extension,...
    137       The allowed file extensions that cpplint will check
    138 
    139       Examples:
    140         --extensions=hpp,cpp
    141 
    142     headers=x,y,...
    143       The header extensions that cpplint will treat as .h in checks. Values are
     144       automatically added to the --extensions list.
    145 
    146       Examples:
    147         --headers=hpp,hxx
    148         --headers=hpp
    149 
    150     cpplint.py supports per-directory configurations specified in CPPLINT.cfg
     151     files. A CPPLINT.cfg file can contain a number of key=value pairs.
    152     Currently the following options are supported:
    153 
    154       set noparent
    155       filter=+filter1,-filter2,...
    156       exclude_files=regex
    157       linelength=80
    158       root=subdir
    159       headers=x,y,...
    160 
     161     The "set noparent" option prevents cpplint from traversing the directory tree
    162     upwards looking for more .cfg files in parent directories. This option
    163     is usually placed in the top-level project directory.
    164 
     165     The "filter" option is similar in function to the --filter flag. It specifies
     166     message filters in addition to the |_DEFAULT_FILTERS| and those specified
     167     through the --filter command-line flag.
    168 
     169     The "exclude_files" option allows you to specify a regular expression to be
     170     matched against a file name. If the expression matches, the file is skipped
     171     and not run through the linter.
    172 
     173     The "linelength" option specifies the allowed line length for the project.
    174 
    175     The "root" option is similar in function to the --root flag (see example
    176     above). Paths are relative to the directory of the CPPLINT.cfg.
    177 
    178     The "headers" option is similar in function to the --headers flag
    179     (see example above).
    180 
    181     CPPLINT.cfg has an effect on files in the same directory and all
    182     sub-directories, unless overridden by a nested configuration file.
    183 
    184       Example file:
    185         filter=-build/include_order,+build/include_alpha
    186         exclude_files=.*\.cc
    187 
     188     The above example disables the build/include_order warning, enables the
     189     build/include_alpha warning, and excludes all .cc files from being
     190     processed by the linter, in the current directory (where the .cfg
     191     file is located) and all sub-directories.
    192 """
    193 
    194 # We categorize each error message we print.  Here are the categories.
    195 # We want an explicit list so we can list them all in cpplint --filter=.
    196 # If you add a new error message with a new category, add it to the list
    197 # here!  cpplint_unittest.py should tell you if you forget to do this.
    198 _ERROR_CATEGORIES = [
    199     'build/class',
    200     'build/c++11',
    201     'build/c++14',
    202     'build/c++tr1',
    203     'build/deprecated',
    204     'build/endif_comment',
    205     'build/explicit_make_pair',
    206     'build/forward_decl',
    207     'build/header_guard',
    208     'build/include',
    209     'build/include_alpha',
    210     'build/include_order',
    211     'build/include_what_you_use',
    212     'build/namespaces',
    213     'build/printf_format',
    214     'build/storage_class',
    215     'legal/copyright',
    216     'readability/alt_tokens',
    217     'readability/braces',
    218     'readability/casting',
    219     'readability/check',
    220     'readability/constructors',
    221     'readability/fn_size',
    222     'readability/inheritance',
    223     'readability/multiline_comment',
    224     'readability/multiline_string',
    225     'readability/namespace',
    226     'readability/nolint',
    227     'readability/nul',
    228     'readability/strings',
    229     'readability/todo',
    230     'readability/utf8',
    231     'runtime/arrays',
    232     'runtime/casting',
    233     'runtime/explicit',
    234     'runtime/int',
    235     'runtime/init',
    236     'runtime/invalid_increment',
    237     'runtime/member_string_references',
    238     'runtime/memset',
    239     'runtime/indentation_namespace',
    240     'runtime/operator',
    241     'runtime/printf',
    242     'runtime/printf_format',
    243     'runtime/references',
    244     'runtime/string',
    245     'runtime/threadsafe_fn',
    246     'runtime/vlog',
    247     'whitespace/blank_line',
    248     'whitespace/braces',
    249     'whitespace/comma',
    250     'whitespace/comments',
    251     'whitespace/empty_conditional_body',
    252     'whitespace/empty_if_body',
    253     'whitespace/empty_loop_body',
    254     'whitespace/end_of_line',
    255     'whitespace/ending_newline',
    256     'whitespace/forcolon',
    257     'whitespace/indent',
    258     'whitespace/line_length',
    259     'whitespace/newline',
    260     'whitespace/operators',
    261     'whitespace/parens',
    262     'whitespace/semicolon',
    263     'whitespace/tab',
    264     'whitespace/todo',
    265     ]
    266 
    267 # These error categories are no longer enforced by cpplint, but for backwards-
    268 # compatibility they may still appear in NOLINT comments.
    269 _LEGACY_ERROR_CATEGORIES = [
    270     'readability/streams',
    271     'readability/function',
    272     ]
    273 
    274 # The default state of the category filter. This is overridden by the --filter=
    275 # flag. By default all errors are on, so only add here categories that should be
    276 # off by default (i.e., categories that must be enabled by the --filter= flags).
    277 # All entries here should start with a '-' or '+', as in the --filter= flag.
    278 _DEFAULT_FILTERS = ['-build/include_alpha']
    279 
    280 # The default list of categories suppressed for C (not C++) files.
    281 _DEFAULT_C_SUPPRESSED_CATEGORIES = [
    282     'readability/casting',
    283     ]
    284 
    285 # The default list of categories suppressed for Linux Kernel files.
    286 _DEFAULT_KERNEL_SUPPRESSED_CATEGORIES = [
    287     'whitespace/tab',
    288     ]
    289 
    290 # We used to check for high-bit characters, but after much discussion we
    291 # decided those were OK, as long as they were in UTF-8 and didn't represent
    292 # hard-coded international strings, which belong in a separate i18n file.
    293 
    294 # C++ headers
    295 _CPP_HEADERS = frozenset([
    296     # Legacy
    297     'algobase.h',
    298     'algo.h',
    299     'alloc.h',
    300     'builtinbuf.h',
    301     'bvector.h',
    302     'complex.h',
    303     'defalloc.h',
    304     'deque.h',
    305     'editbuf.h',
    306     'fstream.h',
    307     'function.h',
    308     'hash_map',
    309     'hash_map.h',
    310     'hash_set',
    311     'hash_set.h',
    312     'hashtable.h',
    313     'heap.h',
    314     'indstream.h',
    315     'iomanip.h',
    316     'iostream.h',
    317     'istream.h',
    318     'iterator.h',
    319     'list.h',
    320     'map.h',
    321     'multimap.h',
    322     'multiset.h',
    323     'ostream.h',
    324     'pair.h',
    325     'parsestream.h',
    326     'pfstream.h',
    327     'procbuf.h',
    328     'pthread_alloc',
    329     'pthread_alloc.h',
    330     'rope',
    331     'rope.h',
    332     'ropeimpl.h',
    333     'set.h',
    334     'slist',
    335     'slist.h',
    336     'stack.h',
    337     'stdiostream.h',
    338     'stl_alloc.h',
    339     'stl_relops.h',
    340     'streambuf.h',
    341     'stream.h',
    342     'strfile.h',
    343     'strstream.h',
    344     'tempbuf.h',
    345     'tree.h',
    346     'type_traits.h',
    347     'vector.h',
    348     # 17.6.1.2 C++ library headers
    349     'algorithm',
    350     'array',
    351     'atomic',
    352     'bitset',
    353     'chrono',
    354     'codecvt',
    355     'complex',
    356     'condition_variable',
    357     'deque',
    358     'exception',
    359     'forward_list',
    360     'fstream',
    361     'functional',
    362     'future',
    363     'initializer_list',
    364     'iomanip',
    365     'ios',
    366     'iosfwd',
    367     'iostream',
    368     'istream',
    369     'iterator',
    370     'limits',
    371     'list',
    372     'locale',
    373     'map',
    374     'memory',
    375     'mutex',
    376     'new',
    377     'numeric',
    378     'ostream',
    379     'queue',
    380     'random',
    381     'ratio',
    382     'regex',
    383     'scoped_allocator',
    384     'set',
    385     'sstream',
    386     'stack',
    387     'stdexcept',
    388     'streambuf',
    389     'string',
    390     'strstream',
    391     'system_error',
    392     'thread',
    393     'tuple',
    394     'typeindex',
    395     'typeinfo',
    396     'type_traits',
    397     'unordered_map',
    398     'unordered_set',
    399     'utility',
    400     'valarray',
    401     'vector',
    402     # 17.6.1.2 C++ headers for C library facilities
    403     'cassert',
    404     'ccomplex',
    405     'cctype',
    406     'cerrno',
    407     'cfenv',
    408     'cfloat',
    409     'cinttypes',
    410     'ciso646',
    411     'climits',
    412     'clocale',
    413     'cmath',
    414     'csetjmp',
    415     'csignal',
    416     'cstdalign',
    417     'cstdarg',
    418     'cstdbool',
    419     'cstddef',
    420     'cstdint',
    421     'cstdio',
    422     'cstdlib',
    423     'cstring',
    424     'ctgmath',
    425     'ctime',
    426     'cuchar',
    427     'cwchar',
    428     'cwctype',
    429     ])
    430 
    431 # Type names
    432 _TYPES = re.compile(
    433     r'^(?:'
    434     # [dcl.type.simple]
    435     r'(char(16_t|32_t)?)|wchar_t|'
    436     r'bool|short|int|long|signed|unsigned|float|double|'
    437     # [support.types]
    438     r'(ptrdiff_t|size_t|max_align_t|nullptr_t)|'
    439     # [cstdint.syn]
    440     r'(u?int(_fast|_least)?(8|16|32|64)_t)|'
    441     r'(u?int(max|ptr)_t)|'
    442     r')$')
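# For example (illustrative), _TYPES matches 'size_t', 'unsigned' and
# 'uint64_t', but not 'string' or 'MyType'.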
    443 
    444 
    445 # These headers are excluded from [build/include] and [build/include_order]
    446 # checks:
    447 # - Anything not following google file name conventions (containing an
    448 #   uppercase character, such as Python.h or nsStringAPI.h, for example).
    449 # - Lua headers.
    450 _THIRD_PARTY_HEADERS_PATTERN = re.compile(
    451     r'^(?:[^/]*[A-Z][^/]*\.h|lua\.h|lauxlib\.h|lualib\.h)$')
    452 
    453 # Pattern for matching FileInfo.BaseName() against test file name
    454 _TEST_FILE_SUFFIX = r'(_test|_unittest|_regtest)$'
    455 
    456 # Pattern that matches only complete whitespace, possibly across multiple lines.
    457 _EMPTY_CONDITIONAL_BODY_PATTERN = re.compile(r'^\s*$', re.DOTALL)
    458 
    459 # Assertion macros.  These are defined in base/logging.h and
    460 # testing/base/public/gunit.h.
    461 _CHECK_MACROS = [
    462     'DCHECK', 'CHECK',
    463     'EXPECT_TRUE', 'ASSERT_TRUE',
    464     'EXPECT_FALSE', 'ASSERT_FALSE',
    465     ]
    466 
    467 # Replacement macros for CHECK/DCHECK/EXPECT_TRUE/EXPECT_FALSE
    468 _CHECK_REPLACEMENT = dict([(m, {}) for m in _CHECK_MACROS])
    469 
    470 for op, replacement in [('==', 'EQ'), ('!=', 'NE'),
    471                         ('>=', 'GE'), ('>', 'GT'),
    472                         ('<=', 'LE'), ('<', 'LT')]:
    473   _CHECK_REPLACEMENT['DCHECK'][op] = 'DCHECK_%s' % replacement
    474   _CHECK_REPLACEMENT['CHECK'][op] = 'CHECK_%s' % replacement
    475   _CHECK_REPLACEMENT['EXPECT_TRUE'][op] = 'EXPECT_%s' % replacement
    476   _CHECK_REPLACEMENT['ASSERT_TRUE'][op] = 'ASSERT_%s' % replacement
    477 
    478 for op, inv_replacement in [('==', 'NE'), ('!=', 'EQ'),
    479                             ('>=', 'LT'), ('>', 'LE'),
    480                             ('<=', 'GT'), ('<', 'GE')]:
    481   _CHECK_REPLACEMENT['EXPECT_FALSE'][op] = 'EXPECT_%s' % inv_replacement
    482   _CHECK_REPLACEMENT['ASSERT_FALSE'][op] = 'ASSERT_%s' % inv_replacement
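# For example (illustrative), after the loops above:
#   _CHECK_REPLACEMENT['CHECK']['==']        == 'CHECK_EQ'
#   _CHECK_REPLACEMENT['EXPECT_TRUE']['>=']  == 'EXPECT_GE'
#   _CHECK_REPLACEMENT['EXPECT_FALSE']['=='] == 'EXPECT_NE'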
    483 
    484 # Alternative tokens and their replacements.  For full list, see section 2.5
    485 # Alternative tokens [lex.digraph] in the C++ standard.
    486 #
    487 # Digraphs (such as '%:') are not included here since it's a mess to
    488 # match those on a word boundary.
    489 _ALT_TOKEN_REPLACEMENT = {
    490     'and': '&&',
    491     'bitor': '|',
    492     'or': '||',
    493     'xor': '^',
    494     'compl': '~',
    495     'bitand': '&',
    496     'and_eq': '&=',
    497     'or_eq': '|=',
    498     'xor_eq': '^=',
    499     'not': '!',
    500     'not_eq': '!='
    501     }
    502 
    503 # Compile regular expression that matches all the above keywords.  The "[ =()]"
    504 # bit is meant to avoid matching these keywords outside of boolean expressions.
    505 #
    506 # False positives include C-style multi-line comments and multi-line strings
    507 # but those have always been troublesome for cpplint.
    508 _ALT_TOKEN_REPLACEMENT_PATTERN = re.compile(
    509     r'[ =()](' + ('|'.join(_ALT_TOKEN_REPLACEMENT.keys())) + r')(?=[ (]|$)')
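# For example (illustrative), in the line "if (a and not b)" this pattern
# matches "and" and "not", which _ALT_TOKEN_REPLACEMENT maps to '&&' and '!'.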
    510 
    511 
    512 # These constants define types of headers for use with
    513 # _IncludeState.CheckNextIncludeOrder().
    514 _C_SYS_HEADER = 1
    515 _CPP_SYS_HEADER = 2
    516 _LIKELY_MY_HEADER = 3
    517 _POSSIBLE_MY_HEADER = 4
    518 _OTHER_HEADER = 5
    519 
    520 # These constants define the current inline assembly state
    521 _NO_ASM = 0       # Outside of inline assembly block
    522 _INSIDE_ASM = 1   # Inside inline assembly block
    523 _END_ASM = 2      # Last line of inline assembly block
    524 _BLOCK_ASM = 3    # The whole block is an inline assembly block
    525 
    526 # Match start of assembly blocks
    527 _MATCH_ASM = re.compile(r'^\s*(?:asm|_asm|__asm|__asm__)'
    528                         r'(?:\s+(volatile|__volatile__))?'
    529                         r'\s*[{(]')
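# For example (illustrative), _MATCH_ASM matches lines such as
# '  asm volatile (' and '__asm {', but not 'disasm(buffer);'.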
    530 
    531 # Match strings that indicate we're working on a C (not C++) file.
    532 _SEARCH_C_FILE = re.compile(r'\b(?:LINT_C_FILE|'
    533                             r'vim?:\s*.*(\s*|:)filetype=c(\s*|:|$))')
    534 
    535 # Match string that indicates we're working on a Linux Kernel file.
    536 _SEARCH_KERNEL_FILE = re.compile(r'\b(?:LINT_KERNEL_FILE)')
    537 
    538 _regexp_compile_cache = {}
    539 
     540 # {str, set(int)}: a map from error categories to sets of line numbers
    541 # on which those errors are expected and should be suppressed.
    542 _error_suppressions = {}
    543 
     544 # The root directory used for deriving the header guard CPP variable.
    545 # This is set by --root flag.
    546 _root = None
    547 _root_debug = False
    548 
    549 # The allowed line length of files.
    550 # This is set by --linelength flag.
    551 _line_length = 80
    552 
    553 # The allowed extensions for file names
    554 # This is set by --extensions flag.
    555 _valid_extensions = set(['cc', 'h', 'cpp', 'cu', 'cuh'])
    556 
    557 # Treat all headers starting with 'h' equally: .h, .hpp, .hxx etc.
    558 # This is set by --headers flag.
    559 _hpp_headers = set(['h'])
    560 
    561 # {str, bool}: a map from error categories to booleans which indicate if the
    562 # category should be suppressed for every line.
    563 _global_error_suppressions = {}
    564 
    565 def ProcessHppHeadersOption(val):
    566   global _hpp_headers
    567   try:
    568     _hpp_headers = set(val.split(','))
     569     # Automatically append to the extensions list so it does not have to be set twice.
    570     _valid_extensions.update(_hpp_headers)
    571   except ValueError:
     572     PrintUsage('Header extensions must be a comma-separated list.')
    573 
    574 def IsHeaderExtension(file_extension):
    575   return file_extension in _hpp_headers
    576 
    577 def ParseNolintSuppressions(filename, raw_line, linenum, error):
    578   """Updates the global list of line error-suppressions.
    579 
    580   Parses any NOLINT comments on the current line, updating the global
    581   error_suppressions store.  Reports an error if the NOLINT comment
    582   was malformed.
    583 
    584   Args:
    585     filename: str, the name of the input file.
    586     raw_line: str, the line of input text, with comments.
    587     linenum: int, the number of the current line.
    588     error: function, an error handler.
    589   """
    590   matched = Search(r'\bNOLINT(NEXTLINE)?\b(\([^)]+\))?', raw_line)
    591   if matched:
    592     if matched.group(1):
    593       suppressed_line = linenum + 1
    594     else:
    595       suppressed_line = linenum
    596     category = matched.group(2)
    597     if category in (None, '(*)'):  # => "suppress all"
    598       _error_suppressions.setdefault(None, set()).add(suppressed_line)
    599     else:
    600       if category.startswith('(') and category.endswith(')'):
    601         category = category[1:-1]
    602         if category in _ERROR_CATEGORIES:
    603           _error_suppressions.setdefault(category, set()).add(suppressed_line)
    604         elif category not in _LEGACY_ERROR_CATEGORIES:
    605           error(filename, linenum, 'readability/nolint', 5,
    606                 'Unknown NOLINT error category: %s' % category)
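# Examples of the suppression comments parsed above (illustrative):
#   long long x = 0;  // NOLINT(runtime/int)  suppresses runtime/int on this line
#   // NOLINTNEXTLINE(whitespace/tab)  suppresses whitespace/tab on the next line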
    607 
    608 
    609 def ProcessGlobalSuppresions(lines):
    610   """Updates the list of global error suppressions.
    611 
    612   Parses any lint directives in the file that have global effect.
    613 
    614   Args:
    615     lines: An array of strings, each representing a line of the file, with the
    616            last element being empty if the file is terminated with a newline.
    617   """
    618   for line in lines:
    619     if _SEARCH_C_FILE.search(line):
    620       for category in _DEFAULT_C_SUPPRESSED_CATEGORIES:
    621         _global_error_suppressions[category] = True
    622     if _SEARCH_KERNEL_FILE.search(line):
    623       for category in _DEFAULT_KERNEL_SUPPRESSED_CATEGORIES:
    624         _global_error_suppressions[category] = True
    625 
    626 
    627 def ResetNolintSuppressions():
    628   """Resets the set of NOLINT suppressions to empty."""
    629   _error_suppressions.clear()
    630   _global_error_suppressions.clear()
    631 
    632 
    633 def IsErrorSuppressedByNolint(category, linenum):
    634   """Returns true if the specified error category is suppressed on this line.
    635 
    636   Consults the global error_suppressions map populated by
    637   ParseNolintSuppressions/ProcessGlobalSuppresions/ResetNolintSuppressions.
    638 
    639   Args:
    640     category: str, the category of the error.
    641     linenum: int, the current line number.
    642   Returns:
    643     bool, True iff the error should be suppressed due to a NOLINT comment or
    644     global suppression.
    645   """
    646   return (_global_error_suppressions.get(category, False) or
    647           linenum in _error_suppressions.get(category, set()) or
    648           linenum in _error_suppressions.get(None, set()))
    649 
    650 
    651 def Match(pattern, s):
    652   """Matches the string with the pattern, caching the compiled regexp."""
    653   # The regexp compilation caching is inlined in both Match and Search for
    654   # performance reasons; factoring it out into a separate function turns out
    655   # to be noticeably expensive.
    656   if pattern not in _regexp_compile_cache:
    657     _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
    658   return _regexp_compile_cache[pattern].match(s)
    659 
    660 
    661 def ReplaceAll(pattern, rep, s):
    662   """Replaces instances of pattern in a string with a replacement.
    663 
    664   The compiled regex is kept in a cache shared by Match and Search.
    665 
    666   Args:
    667     pattern: regex pattern
    668     rep: replacement text
    669     s: search string
    670 
    671   Returns:
    672     string with replacements made (or original string if no replacements)
    673   """
    674   if pattern not in _regexp_compile_cache:
    675     _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
    676   return _regexp_compile_cache[pattern].sub(rep, s)
    677 
    678 
    679 def Search(pattern, s):
    680   """Searches the string for the pattern, caching the compiled regexp."""
    681   if pattern not in _regexp_compile_cache:
    682     _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
    683   return _regexp_compile_cache[pattern].search(s)
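# For example (illustrative):
#   Match(r'^\s*#\s*include', '  #include "foo.h"')  # matches at line start
#   Search(r'\bNOLINT\b', 'int x;  // NOLINT')        # finds NOLINT anywhere
#   ReplaceAll(r'\s+', ' ', 'a   b')                  # returns 'a b'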
    684 
    685 
    686 def _IsSourceExtension(s):
    687   """File extension (excluding dot) matches a source file extension."""
    688   return s in ('c', 'cc', 'cpp', 'cxx')
    689 
    690 
    691 class _IncludeState(object):
    692   """Tracks line numbers for includes, and the order in which includes appear.
    693 
     694   include_list contains a list of lists of (header, line number) pairs.
     695   It's a list of lists rather than just one flat list to make it
    696   easier to update across preprocessor boundaries.
    697 
    698   Call CheckNextIncludeOrder() once for each header in the file, passing
    699   in the type constants defined above. Calls in an illegal order will
    700   raise an _IncludeError with an appropriate error message.
    701 
    702   """
    703   # self._section will move monotonically through this set. If it ever
    704   # needs to move backwards, CheckNextIncludeOrder will raise an error.
    705   _INITIAL_SECTION = 0
    706   _MY_H_SECTION = 1
    707   _C_SECTION = 2
    708   _CPP_SECTION = 3
    709   _OTHER_H_SECTION = 4
    710 
    711   _TYPE_NAMES = {
    712       _C_SYS_HEADER: 'C system header',
    713       _CPP_SYS_HEADER: 'C++ system header',
    714       _LIKELY_MY_HEADER: 'header this file implements',
    715       _POSSIBLE_MY_HEADER: 'header this file may implement',
    716       _OTHER_HEADER: 'other header',
    717       }
    718   _SECTION_NAMES = {
    719       _INITIAL_SECTION: "... nothing. (This can't be an error.)",
    720       _MY_H_SECTION: 'a header this file implements',
    721       _C_SECTION: 'C system header',
    722       _CPP_SECTION: 'C++ system header',
    723       _OTHER_H_SECTION: 'other header',
    724       }
    725 
    726   def __init__(self):
    727     self.include_list = [[]]
    728     self.ResetSection('')
    729 
    730   def FindHeader(self, header):
    731     """Check if a header has already been included.
    732 
    733     Args:
    734       header: header to check.
    735     Returns:
    736       Line number of previous occurrence, or -1 if the header has not
    737       been seen before.
    738     """
    739     for section_list in self.include_list:
    740       for f in section_list:
    741         if f[0] == header:
    742           return f[1]
    743     return -1
    744 
    745   def ResetSection(self, directive):
    746     """Reset section checking for preprocessor directive.
    747 
    748     Args:
    749       directive: preprocessor directive (e.g. "if", "else").
    750     """
    751     # The name of the current section.
    752     self._section = self._INITIAL_SECTION
    753     # The path of last found header.
    754     self._last_header = ''
    755 
    756     # Update list of includes.  Note that we never pop from the
    757     # include list.
    758     if directive in ('if', 'ifdef', 'ifndef'):
    759       self.include_list.append([])
    760     elif directive in ('else', 'elif'):
    761       self.include_list[-1] = []
    762 
    763   def SetLastHeader(self, header_path):
    764     self._last_header = header_path
    765 
    766   def CanonicalizeAlphabeticalOrder(self, header_path):
    767     """Returns a path canonicalized for alphabetical comparison.
    768 
     769     - replaces "-" with "_" so they both compare the same.
    770     - removes '-inl' since we don't require them to be after the main header.
    771     - lowercase everything, just in case.
    772 
    773     Args:
    774       header_path: Path to be canonicalized.
    775 
    776     Returns:
    777       Canonicalized path.
    778     """
    779     return header_path.replace('-inl.h', '.h').replace('-', '_').lower()
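    # For example (illustrative), 'Foo-Bar-inl.h' canonicalizes to 'foo_bar.h'.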
    780 
    781   def IsInAlphabeticalOrder(self, clean_lines, linenum, header_path):
    782     """Check if a header is in alphabetical order with the previous header.
    783 
    784     Args:
    785       clean_lines: A CleansedLines instance containing the file.
    786       linenum: The number of the line to check.
    787       header_path: Canonicalized header to be checked.
    788 
    789     Returns:
    790       Returns true if the header is in alphabetical order.
    791     """
    792     # If previous section is different from current section, _last_header will
    793     # be reset to empty string, so it's always less than current header.
    794     #
    795     # If previous line was a blank line, assume that the headers are
    796     # intentionally sorted the way they are.
    797     if (self._last_header > header_path and
    798         Match(r'^\s*#\s*include\b', clean_lines.elided[linenum - 1])):
    799       return False
    800     return True
    801 
    802   def CheckNextIncludeOrder(self, header_type):
    803     """Returns a non-empty error message if the next header is out of order.
    804 
    805     This function also updates the internal state to be ready to check
    806     the next include.
    807 
    808     Args:
    809       header_type: One of the _XXX_HEADER constants defined above.
    810 
    811     Returns:
    812       The empty string if the header is in the right order, or an
    813       error message describing what's wrong.
    814 
    815     """
    816     error_message = ('Found %s after %s' %
    817                      (self._TYPE_NAMES[header_type],
    818                       self._SECTION_NAMES[self._section]))
    819 
    820     last_section = self._section
    821 
    822     if header_type == _C_SYS_HEADER:
    823       if self._section <= self._C_SECTION:
    824         self._section = self._C_SECTION
    825       else:
    826         self._last_header = ''
    827         return error_message
    828     elif header_type == _CPP_SYS_HEADER:
    829       if self._section <= self._CPP_SECTION:
    830         self._section = self._CPP_SECTION
    831       else:
    832         self._last_header = ''
    833         return error_message
    834     elif header_type == _LIKELY_MY_HEADER:
    835       if self._section <= self._MY_H_SECTION:
    836         self._section = self._MY_H_SECTION
    837       else:
    838         self._section = self._OTHER_H_SECTION
    839     elif header_type == _POSSIBLE_MY_HEADER:
    840       if self._section <= self._MY_H_SECTION:
    841         self._section = self._MY_H_SECTION
    842       else:
    843         # This will always be the fallback because we're not sure
    844         # enough that the header is associated with this file.
    845         self._section = self._OTHER_H_SECTION
    846     else:
    847       assert header_type == _OTHER_HEADER
    848       self._section = self._OTHER_H_SECTION
    849 
    850     if last_section != self._section:
    851       self._last_header = ''
    852 
    853     return ''
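    # For example (illustrative): after an _OTHER_HEADER has been seen, a
    # subsequent _CPP_SYS_HEADER is out of order and yields
    # 'Found C++ system header after other header'.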
    854 
    855 
    856 class _CppLintState(object):
     857   """Maintains module-wide state."""
    858 
    859   def __init__(self):
    860     self.verbose_level = 1  # global setting.
    861     self.error_count = 0    # global count of reported errors
    862     # filters to apply when emitting error messages
    863     self.filters = _DEFAULT_FILTERS[:]
    864     # backup of filter list. Used to restore the state after each file.
    865     self._filters_backup = self.filters[:]
    866     self.counting = 'total'  # In what way are we counting errors?
    867     self.errors_by_category = {}  # string to int dict storing error counts
     868     self.quiet = False  # Suppress non-error messages?
    869 
    870     # output format:
    871     # "emacs" - format that emacs can parse (default)
    872     # "vs7" - format that Microsoft Visual Studio 7 can parse
    873     self.output_format = 'emacs'
    874 
    875   def SetOutputFormat(self, output_format):
    876     """Sets the output format for errors."""
    877     self.output_format = output_format
    878 
    879   def SetQuiet(self, quiet):
    880     """Sets the module's quiet settings, and returns the previous setting."""
    881     last_quiet = self.quiet
    882     self.quiet = quiet
    883     return last_quiet
    884 
    885   def SetVerboseLevel(self, level):
    886     """Sets the module's verbosity, and returns the previous setting."""
    887     last_verbose_level = self.verbose_level
    888     self.verbose_level = level
    889     return last_verbose_level
    890 
    891   def SetCountingStyle(self, counting_style):
    892     """Sets the module's counting options."""
    893     self.counting = counting_style
    894 
    895   def SetFilters(self, filters):
    896     """Sets the error-message filters.
    897 
    898     These filters are applied when deciding whether to emit a given
    899     error message.
    900 
    901     Args:
    902       filters: A string of comma-separated filters (eg "+whitespace/indent").
    903                Each filter should start with + or -; else we die.
    904 
    905     Raises:
    906       ValueError: The comma-separated filters did not all start with '+' or '-'.
    907                   E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter"
    908     """
    909     # Default filters always have less priority than the flag ones.
    910     self.filters = _DEFAULT_FILTERS[:]
    911     self.AddFilters(filters)
    912 
    913   def AddFilters(self, filters):
    914     """ Adds more filters to the existing list of error-message filters. """
    915     for filt in filters.split(','):
    916       clean_filt = filt.strip()
    917       if clean_filt:
    918         self.filters.append(clean_filt)
    919     for filt in self.filters:
    920       if not (filt.startswith('+') or filt.startswith('-')):
    921         raise ValueError('Every filter in --filters must start with + or -'
    922                          ' (%s does not)' % filt)
    923 
    924   def BackupFilters(self):
    925     """ Saves the current filter list to backup storage."""
    926     self._filters_backup = self.filters[:]
    927 
    928   def RestoreFilters(self):
    929     """ Restores filters previously backed up."""
    930     self.filters = self._filters_backup[:]
    931 
    932   def ResetErrorCounts(self):
    933     """Sets the module's error statistic back to zero."""
    934     self.error_count = 0
    935     self.errors_by_category = {}
    936 
    937   def IncrementErrorCount(self, category):
    938     """Bumps the module's error statistic."""
    939     self.error_count += 1
    940     if self.counting in ('toplevel', 'detailed'):
    941       if self.counting != 'detailed':
    942         category = category.split('/')[0]
    943       if category not in self.errors_by_category:
    944         self.errors_by_category[category] = 0
    945       self.errors_by_category[category] += 1
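    # For example (illustrative): with counting == 'toplevel', an error in
    # 'whitespace/indent' is tallied under errors_by_category['whitespace'];
    # with 'detailed' it is tallied under the full category name.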
    946 
    947   def PrintErrorCounts(self):
    948     """Print a summary of errors by category, and the total."""
    949     for category, count in self.errors_by_category.iteritems():
    950       sys.stderr.write('Category \'%s\' errors found: %d\n' %
    951                        (category, count))
    952     sys.stdout.write('Total errors found: %d\n' % self.error_count)
    953 
    954 _cpplint_state = _CppLintState()
    955 
    956 
    957 def _OutputFormat():
    958   """Gets the module's output format."""
    959   return _cpplint_state.output_format
    960 
    961 
    962 def _SetOutputFormat(output_format):
    963   """Sets the module's output format."""
    964   _cpplint_state.SetOutputFormat(output_format)
    965 
    966 def _Quiet():
     967   """Returns the module's quiet setting."""
    968   return _cpplint_state.quiet
    969 
    970 def _SetQuiet(quiet):
    971   """Set the module's quiet status, and return previous setting."""
    972   return _cpplint_state.SetQuiet(quiet)
    973 
    974 
    975 def _VerboseLevel():
    976   """Returns the module's verbosity setting."""
    977   return _cpplint_state.verbose_level
    978 
    979 
    980 def _SetVerboseLevel(level):
    981   """Sets the module's verbosity, and returns the previous setting."""
    982   return _cpplint_state.SetVerboseLevel(level)
    983 
    984 
    985 def _SetCountingStyle(level):
    986   """Sets the module's counting options."""
    987   _cpplint_state.SetCountingStyle(level)
    988 
    989 
    990 def _Filters():
    991   """Returns the module's list of output filters, as a list."""
    992   return _cpplint_state.filters
    993 
    994 
    995 def _SetFilters(filters):
    996   """Sets the module's error-message filters.
    997 
    998   These filters are applied when deciding whether to emit a given
    999   error message.
   1000 
   1001   Args:
   1002     filters: A string of comma-separated filters (eg "whitespace/indent").
   1003              Each filter should start with + or -; else we die.
   1004   """
   1005   _cpplint_state.SetFilters(filters)
   1006 
   1007 def _AddFilters(filters):
   1008   """Adds more filter overrides.
   1009 
   1010   Unlike _SetFilters, this function does not reset the current list of filters
   1011   available.
   1012 
   1013   Args:
   1014     filters: A string of comma-separated filters (eg "whitespace/indent").
   1015              Each filter should start with + or -; else we die.
   1016   """
   1017   _cpplint_state.AddFilters(filters)
   1018 
   1019 def _BackupFilters():
   1020   """ Saves the current filter list to backup storage."""
   1021   _cpplint_state.BackupFilters()
   1022 
   1023 def _RestoreFilters():
   1024   """ Restores filters previously backed up."""
   1025   _cpplint_state.RestoreFilters()
   1026 
   1027 class _FunctionState(object):
   1028   """Tracks current function name and the number of lines in its body."""
   1029 
   1030   _NORMAL_TRIGGER = 250  # for --v=0, 500 for --v=1, etc.
    1031   _TEST_TRIGGER = 400    # about 60% more than _NORMAL_TRIGGER.
   1032 
   1033   def __init__(self):
   1034     self.in_a_function = False
   1035     self.lines_in_function = 0
   1036     self.current_function = ''
   1037 
   1038   def Begin(self, function_name):
   1039     """Start analyzing function body.
   1040 
   1041     Args:
   1042       function_name: The name of the function being tracked.
   1043     """
   1044     self.in_a_function = True
   1045     self.lines_in_function = 0
   1046     self.current_function = function_name
   1047 
   1048   def Count(self):
   1049     """Count line in current function body."""
   1050     if self.in_a_function:
   1051       self.lines_in_function += 1
   1052 
   1053   def Check(self, error, filename, linenum):
   1054     """Report if too many lines in function body.
   1055 
   1056     Args:
   1057       error: The function to call with any errors found.
   1058       filename: The name of the current file.
   1059       linenum: The number of the line to check.
   1060     """
   1061     if not self.in_a_function:
   1062       return
   1063 
   1064     if Match(r'T(EST|est)', self.current_function):
   1065       base_trigger = self._TEST_TRIGGER
   1066     else:
   1067       base_trigger = self._NORMAL_TRIGGER
   1068     trigger = base_trigger * 2**_VerboseLevel()
   1069 
   1070     if self.lines_in_function > trigger:
   1071       error_level = int(math.log(self.lines_in_function / base_trigger, 2))
    1072       # e.g. 2x base_trigger => 1, 4x => 2, 8x => 3, ... (capped at 5 below)
   1073       if error_level > 5:
   1074         error_level = 5
   1075       error(filename, linenum, 'readability/fn_size', error_level,
   1076             'Small and focused functions are preferred:'
   1077             ' %s has %d non-comment lines'
   1078             ' (error triggered by exceeding %d lines).'  % (
   1079                 self.current_function, self.lines_in_function, trigger))
   1080 
   1081   def End(self):
   1082     """Stop analyzing function body."""
   1083     self.in_a_function = False
   1084 
   1085 
   1086 class _IncludeError(Exception):
   1087   """Indicates a problem with the include order in a file."""
   1088   pass
   1089 
   1090 
   1091 class FileInfo(object):
   1092   """Provides utility functions for filenames.
   1093 
   1094   FileInfo provides easy access to the components of a file's path
   1095   relative to the project root.
   1096   """
   1097 
   1098   def __init__(self, filename):
   1099     self._filename = filename
   1100 
   1101   def FullName(self):
   1102     """Make Windows paths like Unix."""
   1103     return os.path.abspath(self._filename).replace('\\', '/')
   1104 
   1105   def RepositoryName(self):
   1106     """FullName after removing the local path to the repository.
   1107 
   1108     If we have a real absolute path name here we can try to do something smart:
   1109     detecting the root of the checkout and truncating /path/to/checkout from
   1110     the name so that we get header guards that don't include things like
   1111     "C:\Documents and Settings\..." or "/home/username/..." in them and thus
   1112     people on different computers who have checked the source out to different
   1113     locations won't see bogus errors.
   1114     """
   1115     fullname = self.FullName()
   1116 
   1117     if os.path.exists(fullname):
   1118       project_dir = os.path.dirname(fullname)
   1119 
   1120       if os.path.exists(os.path.join(project_dir, ".svn")):
   1121         # If there's a .svn file in the current directory, we recursively look
   1122         # up the directory tree for the top of the SVN checkout
   1123         root_dir = project_dir
   1124         one_up_dir = os.path.dirname(root_dir)
   1125         while os.path.exists(os.path.join(one_up_dir, ".svn")):
   1126           root_dir = os.path.dirname(root_dir)
   1127           one_up_dir = os.path.dirname(one_up_dir)
   1128 
   1129         prefix = os.path.commonprefix([root_dir, project_dir])
   1130         return fullname[len(prefix) + 1:]
   1131 
   1132       # Not SVN <= 1.6? Try to find a git, hg, or svn top level directory by
   1133       # searching up from the current path.
   1134       root_dir = current_dir = os.path.dirname(fullname)
   1135       while current_dir != os.path.dirname(current_dir):
   1136         if (os.path.exists(os.path.join(current_dir, ".git")) or
   1137             os.path.exists(os.path.join(current_dir, ".hg")) or
   1138             os.path.exists(os.path.join(current_dir, ".svn"))):
   1139           root_dir = current_dir
   1140         current_dir = os.path.dirname(current_dir)
   1141 
   1142       if (os.path.exists(os.path.join(root_dir, ".git")) or
   1143           os.path.exists(os.path.join(root_dir, ".hg")) or
   1144           os.path.exists(os.path.join(root_dir, ".svn"))):
   1145         prefix = os.path.commonprefix([root_dir, project_dir])
   1146         return fullname[len(prefix) + 1:]
   1147 
   1148     # Don't know what to do; header guard warnings may be wrong...
   1149     return fullname
   1150 
   1151   def Split(self):
   1152     """Splits the file into the directory, basename, and extension.
   1153 
   1154     For 'chrome/browser/browser.cc', Split() would
   1155     return ('chrome/browser', 'browser', '.cc')
   1156 
   1157     Returns:
   1158       A tuple of (directory, basename, extension).
   1159     """
   1160 
   1161     googlename = self.RepositoryName()
   1162     project, rest = os.path.split(googlename)
   1163     return (project,) + os.path.splitext(rest)
   1164 
   1165   def BaseName(self):
   1166     """File base name - text after the final slash, before the final period."""
   1167     return self.Split()[1]
   1168 
   1169   def Extension(self):
   1170     """File extension - text following the final period."""
   1171     return self.Split()[2]
   1172 
   1173   def NoExtension(self):
   1174     """File has no source file extension."""
   1175     return '/'.join(self.Split()[0:2])
   1176 
   1177   def IsSource(self):
   1178     """File has a source file extension."""
   1179     return _IsSourceExtension(self.Extension()[1:])
   1180 
   1181 
   1182 def _ShouldPrintError(category, confidence, linenum):
   1183   """If confidence >= verbose, category passes filter and is not suppressed."""
   1184 
   1185   # There are three ways we might decide not to print an error message:
   1186   # a "NOLINT(category)" comment appears in the source,
   1187   # the verbosity level isn't high enough, or the filters filter it out.
   1188   if IsErrorSuppressedByNolint(category, linenum):
   1189     return False
   1190 
   1191   if confidence < _cpplint_state.verbose_level:
   1192     return False
   1193 
   1194   is_filtered = False
   1195   for one_filter in _Filters():
   1196     if one_filter.startswith('-'):
   1197       if category.startswith(one_filter[1:]):
   1198         is_filtered = True
   1199     elif one_filter.startswith('+'):
   1200       if category.startswith(one_filter[1:]):
   1201         is_filtered = False
   1202     else:
   1203       assert False  # should have been checked for in SetFilter.
   1204   if is_filtered:
   1205     return False
   1206 
   1207   return True
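# For example (illustrative), with filters ['-whitespace', '+whitespace/braces']
# a 'whitespace/indent' error is filtered out while a 'whitespace/braces' error
# is still printed, provided its confidence reaches the verbosity level and no
# NOLINT suppression applies.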
   1208 
   1209 
   1210 def Error(filename, linenum, category, confidence, message):
   1211   """Logs the fact we've found a lint error.
   1212 
   1213   We log where the error was found, and also our confidence in the error,
   1214   that is, how certain we are this is a legitimate style regression, and
   1215   not a misidentification or a use that's sometimes justified.
   1216 
   1217   False positives can be suppressed by the use of
    1218   "NOLINT(category)" comments on the offending line.  These are
   1219   parsed into _error_suppressions.
   1220 
   1221   Args:
   1222     filename: The name of the file containing the error.
   1223     linenum: The number of the line containing the error.
   1224     category: A string used to describe the "category" this bug
   1225       falls under: "whitespace", say, or "runtime".  Categories
   1226       may have a hierarchy separated by slashes: "whitespace/indent".
   1227     confidence: A number from 1-5 representing a confidence score for
   1228       the error, with 5 meaning that we are certain of the problem,
   1229       and 1 meaning that it could be a legitimate construct.
   1230     message: The error message.
   1231   """
   1232   if _ShouldPrintError(category, confidence, linenum):
   1233     _cpplint_state.IncrementErrorCount(category)
   1234     if _cpplint_state.output_format == 'vs7':
   1235       sys.stderr.write('%s(%s): error cpplint: [%s] %s [%d]\n' % (
   1236           filename, linenum, category, message, confidence))
   1237     elif _cpplint_state.output_format == 'eclipse':
   1238       sys.stderr.write('%s:%s: warning: %s  [%s] [%d]\n' % (
   1239           filename, linenum, message, category, confidence))
   1240     else:
   1241       sys.stderr.write('%s:%s:  %s  [%s] [%d]\n' % (
   1242           filename, linenum, message, category, confidence))
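# For example (illustrative), in the default 'emacs' output format an error is
# reported to stderr as:
#   foo.cc:42:  Missing space before {  [whitespace/braces] [5]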
   1243 
   1244 
   1245 # Matches standard C++ escape sequences per 2.13.2.3 of the C++ standard.
   1246 _RE_PATTERN_CLEANSE_LINE_ESCAPES = re.compile(
   1247     r'\\([abfnrtv?"\\\']|\d+|x[0-9a-fA-F]+)')
   1248 # Match a single C style comment on the same line.
   1249 _RE_PATTERN_C_COMMENTS = r'/\*(?:[^*]|\*(?!/))*\*/'
   1250 # Matches multi-line C style comments.
   1251 # This RE is a little bit more complicated than one might expect, because we
    1252 # have to take care of space removal so we can handle comments inside
   1253 # statements better.
   1254 # The current rule is: We only clear spaces from both sides when we're at the
   1255 # end of the line. Otherwise, we try to remove spaces from the right side,
    1256 # if this doesn't work we try the left side, but only if there's a non-word
    1257 # character on the right.
   1258 _RE_PATTERN_CLEANSE_LINE_C_COMMENTS = re.compile(
   1259     r'(\s*' + _RE_PATTERN_C_COMMENTS + r'\s*$|' +
   1260     _RE_PATTERN_C_COMMENTS + r'\s+|' +
   1261     r'\s+' + _RE_PATTERN_C_COMMENTS + r'(?=\W)|' +
   1262     _RE_PATTERN_C_COMMENTS + r')')
   1263 
   1264 
   1265 def IsCppString(line):
    1266   """Does the line end such that the next symbol is inside a string constant.
   1267 
   1268   This function does not consider single-line nor multi-line comments.
   1269 
   1270   Args:
    1271     line: a partial line of code, starting from character 0 to n.
   1272 
   1273   Returns:
   1274     True, if next character appended to 'line' is inside a
   1275     string constant.
   1276   """
   1277 
   1278   line = line.replace(r'\\', 'XX')  # after this, \\" does not match to \"
   1279   return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1
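# For example (illustrative):
#   IsCppString('printf("unterminated')  returns True (odd number of quotes)
#   IsCppString('s = "a" + "b";')        returns False (quotes are balanced)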
   1280 
   1281 
   1282 def CleanseRawStrings(raw_lines):
   1283   """Removes C++11 raw strings from lines.
   1284 
   1285     Before:
   1286       static const char kData[] = R"(
   1287           multi-line string
   1288           )";
   1289 
   1290     After:
   1291       static const char kData[] = ""
   1292           (replaced by blank line)
   1293           "";
   1294 
   1295   Args:
   1296     raw_lines: list of raw lines.
   1297 
   1298   Returns:
   1299     list of lines with C++11 raw strings replaced by empty strings.
   1300   """
   1301 
   1302   delimiter = None
   1303   lines_without_raw_strings = []
   1304   for line in raw_lines:
   1305     if delimiter:
   1306       # Inside a raw string, look for the end
   1307       end = line.find(delimiter)
   1308       if end >= 0:
   1309         # Found the end of the string, match leading space for this
   1310         # line and resume copying the original lines, and also insert
   1311         # a "" on the last line.
   1312         leading_space = Match(r'^(\s*)\S', line)
   1313         line = leading_space.group(1) + '""' + line[end + len(delimiter):]
   1314         delimiter = None
   1315       else:
   1316         # Haven't found the end yet, append a blank line.
   1317         line = '""'
   1318 
   1319     # Look for beginning of a raw string, and replace them with
   1320     # empty strings.  This is done in a loop to handle multiple raw
   1321     # strings on the same line.
   1322     while delimiter is None:
   1323       # Look for beginning of a raw string.
   1324       # See 2.14.15 [lex.string] for syntax.
   1325       #
   1326       # Once we have matched a raw string, we check the prefix of the
   1327       # line to make sure that the line is not part of a single line
   1328       # comment.  It's done this way because we remove raw strings
   1329       # before removing comments as opposed to removing comments
   1330       # before removing raw strings.  This is because there are some
   1331       # cpplint checks that requires the comments to be preserved, but
   1332       # we don't want to check comments that are inside raw strings.
   1333       matched = Match(r'^(.*?)\b(?:R|u8R|uR|UR|LR)"([^\s\\()]*)\((.*)$', line)
   1334       if (matched and
   1335           not Match(r'^([^\'"]|\'(\\.|[^\'])*\'|"(\\.|[^"])*")*//',
   1336                     matched.group(1))):
   1337         delimiter = ')' + matched.group(2) + '"'
   1338 
   1339         end = matched.group(3).find(delimiter)
   1340         if end >= 0:
   1341           # Raw string ended on same line
   1342           line = (matched.group(1) + '""' +
   1343                   matched.group(3)[end + len(delimiter):])
   1344           delimiter = None
   1345         else:
   1346           # Start of a multi-line raw string
   1347           line = matched.group(1) + '""'
   1348       else:
   1349         break
   1350 
   1351     lines_without_raw_strings.append(line)
   1352 
   1353   # TODO(unknown): if delimiter is not None here, we might want to
   1354   # emit a warning for unterminated string.
   1355   return lines_without_raw_strings
   1356 
   1357 
   1358 def FindNextMultiLineCommentStart(lines, lineix):
   1359   """Find the beginning marker for a multiline comment."""
   1360   while lineix < len(lines):
   1361     if lines[lineix].strip().startswith('/*'):
   1362       # Only return this marker if the comment goes beyond this line
   1363       if lines[lineix].strip().find('*/', 2) < 0:
   1364         return lineix
   1365     lineix += 1
   1366   return len(lines)
   1367 
   1368 
   1369 def FindNextMultiLineCommentEnd(lines, lineix):
   1370   """We are inside a comment, find the end marker."""
   1371   while lineix < len(lines):
   1372     if lines[lineix].strip().endswith('*/'):
   1373       return lineix
   1374     lineix += 1
   1375   return len(lines)
   1376 
   1377 
   1378 def RemoveMultiLineCommentsFromRange(lines, begin, end):
   1379   """Clears a range of lines for multi-line comments."""
    1380   # Having /**/ dummy comments makes the lines non-empty, so we will not get
   1381   # unnecessary blank line warnings later in the code.
   1382   for i in range(begin, end):
   1383     lines[i] = '/**/'
   1384 
   1385 
   1386 def RemoveMultiLineComments(filename, lines, error):
   1387   """Removes multiline (c-style) comments from lines."""
   1388   lineix = 0
   1389   while lineix < len(lines):
   1390     lineix_begin = FindNextMultiLineCommentStart(lines, lineix)
   1391     if lineix_begin >= len(lines):
   1392       return
   1393     lineix_end = FindNextMultiLineCommentEnd(lines, lineix_begin)
   1394     if lineix_end >= len(lines):
   1395       error(filename, lineix_begin + 1, 'readability/multiline_comment', 5,
   1396             'Could not find end of multi-line comment')
   1397       return
   1398     RemoveMultiLineCommentsFromRange(lines, lineix_begin, lineix_end + 1)
   1399     lineix = lineix_end + 1
   1400 
   1401 
   1402 def CleanseComments(line):
   1403   """Removes //-comments and single-line C-style /* */ comments.
   1404 
   1405   Args:
   1406     line: A line of C++ source.
   1407 
   1408   Returns:
   1409     The line with single-line comments removed.
   1410   """
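          # Illustrative examples (added for exposition; not part of the original
          # cpplint source):
          #   CleanseComments('int x = 1;  // set x')  ->  'int x = 1;'
          #   CleanseComments('f(a /* arg */, b);')    ->  roughly 'f(a, b);'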
   1411   commentpos = line.find('//')
   1412   if commentpos != -1 and not IsCppString(line[:commentpos]):
   1413     line = line[:commentpos].rstrip()
   1414   # get rid of /* ... */
   1415   return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line)
   1416 
   1417 
   1418 class CleansedLines(object):
   1419   """Holds 4 copies of all lines with different preprocessing applied to them.
   1420 
   1421   1) elided member contains lines without strings and comments.
   1422   2) lines member contains lines without comments.
   1423   3) raw_lines member contains all the lines without processing.
   1424   4) lines_without_raw_strings member is the same as raw_lines, but with
   1425      C++11 raw strings removed.
   1426   All these members are of <type 'list'>, and of the same length.
   1427   """
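          # Illustrative example (added for exposition; not part of the original
          # cpplint source): for the raw line
          #     'printf("%s", p);  // print'
          # the 'lines' member holds 'printf("%s", p);' and the 'elided' member
          # holds 'printf("", p);'.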
   1428 
   1429   def __init__(self, lines):
   1430     self.elided = []
   1431     self.lines = []
   1432     self.raw_lines = lines
   1433     self.num_lines = len(lines)
   1434     self.lines_without_raw_strings = CleanseRawStrings(lines)
   1435     for linenum in range(len(self.lines_without_raw_strings)):
   1436       self.lines.append(CleanseComments(
   1437           self.lines_without_raw_strings[linenum]))
   1438       elided = self._CollapseStrings(self.lines_without_raw_strings[linenum])
   1439       self.elided.append(CleanseComments(elided))
   1440 
   1441   def NumLines(self):
   1442     """Returns the number of lines represented."""
   1443     return self.num_lines
   1444 
   1445   @staticmethod
   1446   def _CollapseStrings(elided):
   1447     """Collapses strings and chars on a line to simple "" or '' blocks.
   1448 
   1449     We nix strings first so we're not fooled by text like '"http://"'
   1450 
   1451     Args:
   1452       elided: The line being processed.
   1453 
   1454     Returns:
   1455       The line with collapsed strings.
   1456     """
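            # Illustrative examples (added for exposition; not part of the original
            # cpplint source):
            #   'printf("%d", 'x');'  ->  'printf("", '');'
            #   "sum = 1'000'000;"    ->  "sum = 1000000;"  (digit separators dropped)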
   1457     if _RE_PATTERN_INCLUDE.match(elided):
   1458       return elided
   1459 
   1460     # Remove escaped characters first to make quote/single quote collapsing
   1461     # simpler.  Things that look like escaped characters shouldn't occur
   1462     # outside of strings and chars.
   1463     elided = _RE_PATTERN_CLEANSE_LINE_ESCAPES.sub('', elided)
   1464 
   1465     # Replace quoted strings and digit separators.  Both single quotes
   1466     # and double quotes are processed in the same loop, otherwise
   1467     # nested quotes wouldn't work.
   1468     collapsed = ''
   1469     while True:
   1470       # Find the first quote character
   1471       match = Match(r'^([^\'"]*)([\'"])(.*)$', elided)
   1472       if not match:
   1473         collapsed += elided
   1474         break
   1475       head, quote, tail = match.groups()
   1476 
   1477       if quote == '"':
   1478         # Collapse double quoted strings
   1479         second_quote = tail.find('"')
   1480         if second_quote >= 0:
   1481           collapsed += head + '""'
   1482           elided = tail[second_quote + 1:]
   1483         else:
   1484           # Unmatched double quote, don't bother processing the rest
   1485           # of the line since this is probably a multiline string.
   1486           collapsed += elided
   1487           break
   1488       else:
   1489         # Found single quote, check nearby text to eliminate digit separators.
   1490         #
   1491         # There is no special handling for floating point here, because
   1492         # the integer/fractional/exponent parts would all be parsed
   1493         # correctly as long as there are digits on both sides of the
   1494         # separator.  So we are fine as long as we don't see something
   1495         # like "0.'3" (gcc 4.9.0 will not allow this literal).
   1496         if Search(r'\b(?:0[bBxX]?|[1-9])[0-9a-fA-F]*$', head):
   1497           match_literal = Match(r'^((?:\'?[0-9a-zA-Z_])*)(.*)$', "'" + tail)
   1498           collapsed += head + match_literal.group(1).replace("'", '')
   1499           elided = match_literal.group(2)
   1500         else:
   1501           second_quote = tail.find('\'')
   1502           if second_quote >= 0:
   1503             collapsed += head + "''"
   1504             elided = tail[second_quote + 1:]
   1505           else:
   1506             # Unmatched single quote
   1507             collapsed += elided
   1508             break
   1509 
   1510     return collapsed
   1511 
   1512 
   1513 def FindEndOfExpressionInLine(line, startpos, stack):
   1514   """Find the position just after the end of current parenthesized expression.
   1515 
   1516   Args:
   1517     line: a CleansedLines line.
   1518     startpos: start searching at this position.
   1519     stack: nesting stack at startpos.
   1520 
   1521   Returns:
   1522     On finding matching end: (index just after matching end, None)
   1523     On finding an unclosed expression: (-1, None)
   1524     Otherwise: (-1, new stack at end of this line)
   1525   """
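          # Illustrative examples (added for exposition; not part of the original
          # cpplint source):
          #   FindEndOfExpressionInLine('f(a, b)', 1, [])  ->  (7, None)
          #   FindEndOfExpressionInLine('f(a,', 1, [])     ->  (-1, ['('])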
   1526   for i in xrange(startpos, len(line)):
   1527     char = line[i]
   1528     if char in '([{':
   1529       # Found start of parenthesized expression, push to expression stack
   1530       stack.append(char)
   1531     elif char == '<':
   1532       # Found potential start of template argument list
   1533       if i > 0 and line[i - 1] == '<':
   1534         # Left shift operator
   1535         if stack and stack[-1] == '<':
   1536           stack.pop()
   1537           if not stack:
   1538             return (-1, None)
   1539       elif i > 0 and Search(r'\boperator\s*$', line[0:i]):
   1540         # operator<, don't add to stack
   1541         continue
   1542       else:
   1543         # Tentative start of template argument list
   1544         stack.append('<')
   1545     elif char in ')]}':
   1546       # Found end of parenthesized expression.
   1547       #
   1548       # If we are currently expecting a matching '>', the pending '<'
   1549       # must have been an operator.  Remove them from expression stack.
   1550       while stack and stack[-1] == '<':
   1551         stack.pop()
   1552       if not stack:
   1553         return (-1, None)
   1554       if ((stack[-1] == '(' and char == ')') or
   1555           (stack[-1] == '[' and char == ']') or
   1556           (stack[-1] == '{' and char == '}')):
   1557         stack.pop()
   1558         if not stack:
   1559           return (i + 1, None)
   1560       else:
   1561         # Mismatched parentheses
   1562         return (-1, None)
   1563     elif char == '>':
   1564       # Found potential end of template argument list.
   1565 
   1566       # Ignore "->" and operator functions
   1567       if (i > 0 and
   1568           (line[i - 1] == '-' or Search(r'\boperator\s*$', line[0:i - 1]))):
   1569         continue
   1570 
   1571       # Pop the stack if there is a matching '<'.  Otherwise, ignore
   1572       # this '>' since it must be an operator.
   1573       if stack:
   1574         if stack[-1] == '<':
   1575           stack.pop()
   1576           if not stack:
   1577             return (i + 1, None)
   1578     elif char == ';':
   1579       # Found something that looks like the end of a statement.  If we are
   1580       # currently expecting a '>', the matching '<' must have been an
   1581       # operator, since template argument lists should not contain statements.
   1582       while stack and stack[-1] == '<':
   1583         stack.pop()
   1584       if not stack:
   1585         return (-1, None)
   1586 
   1587   # Did not find end of expression or unbalanced parentheses on this line
   1588   return (-1, stack)
   1589 
   1590 
   1591 def CloseExpression(clean_lines, linenum, pos):
   1592   """If input points to ( or { or [ or <, finds the position that closes it.
   1593 
   1594   If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the
   1595   linenum/pos that correspond to the closing of the expression.
   1596 
   1597   TODO(unknown): cpplint spends a fair bit of time matching parentheses.
   1598   Ideally we would want to index all opening and closing parentheses once
   1599   and have CloseExpression be just a simple lookup, but due to preprocessor
   1600   tricks, this is not so easy.
   1601 
   1602   Args:
   1603     clean_lines: A CleansedLines instance containing the file.
   1604     linenum: The number of the line to check.
   1605     pos: A position on the line.
   1606 
   1607   Returns:
   1608     A tuple (line, linenum, pos) pointer *past* the closing brace, or
   1609     (line, len(lines), -1) if we never find a close.  Note we ignore
   1610     strings and comments when matching; and the line we return is the
   1611     'cleansed' line at linenum.
   1612   """
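          # Illustrative example (added for exposition; not part of the original
          # cpplint source): if the elided line is 'if (foo(bar))' and pos points
          # at the first '(', the returned position is 13, i.e. just past the
          # matching final ')'.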
   1613 
   1614   line = clean_lines.elided[linenum]
   1615   if (line[pos] not in '({[<') or Match(r'<[<=]', line[pos:]):
   1616     return (line, clean_lines.NumLines(), -1)
   1617 
   1618   # Check first line
   1619   (end_pos, stack) = FindEndOfExpressionInLine(line, pos, [])
   1620   if end_pos > -1:
   1621     return (line, linenum, end_pos)
   1622 
   1623   # Continue scanning forward
   1624   while stack and linenum < clean_lines.NumLines() - 1:
   1625     linenum += 1
   1626     line = clean_lines.elided[linenum]
   1627     (end_pos, stack) = FindEndOfExpressionInLine(line, 0, stack)
   1628     if end_pos > -1:
   1629       return (line, linenum, end_pos)
   1630 
   1631   # Did not find end of expression before end of file, give up
   1632   return (line, clean_lines.NumLines(), -1)
   1633 
   1634 
   1635 def FindStartOfExpressionInLine(line, endpos, stack):
   1636   """Find position at the matching start of current expression.
   1637 
   1638   This is almost the reverse of FindEndOfExpressionInLine, but note
   1639   that the input position and returned position differ by 1.
   1640 
   1641   Args:
   1642     line: a CleansedLines line.
   1643     endpos: start searching at this position.
   1644     stack: nesting stack at endpos.
   1645 
   1646   Returns:
   1647     On finding matching start: (index at matching start, None)
   1648     On finding an unclosed expression: (-1, None)
   1649     Otherwise: (-1, new stack at beginning of this line)
   1650   """
   1651   i = endpos
   1652   while i >= 0:
   1653     char = line[i]
   1654     if char in ')]}':
   1655       # Found end of expression, push to expression stack
   1656       stack.append(char)
   1657     elif char == '>':
   1658       # Found potential end of template argument list.
   1659       #
   1660       # Ignore it if it's a "->" or ">=" or "operator>"
   1661       if (i > 0 and
   1662           (line[i - 1] == '-' or
   1663            Match(r'\s>=\s', line[i - 1:]) or
   1664            Search(r'\boperator\s*$', line[0:i]))):
   1665         i -= 1
   1666       else:
   1667         stack.append('>')
   1668     elif char == '<':
   1669       # Found potential start of template argument list
   1670       if i > 0 and line[i - 1] == '<':
   1671         # Left shift operator
   1672         i -= 1
   1673       else:
   1674         # If there is a matching '>', we can pop the expression stack.
   1675         # Otherwise, ignore this '<' since it must be an operator.
   1676         if stack and stack[-1] == '>':
   1677           stack.pop()
   1678           if not stack:
   1679             return (i, None)
   1680     elif char in '([{':
   1681       # Found start of expression.
   1682       #
   1683       # If there are any unmatched '>' on the stack, they must be
   1684       # operators.  Remove those.
   1685       while stack and stack[-1] == '>':
   1686         stack.pop()
   1687       if not stack:
   1688         return (-1, None)
   1689       if ((char == '(' and stack[-1] == ')') or
   1690           (char == '[' and stack[-1] == ']') or
   1691           (char == '{' and stack[-1] == '}')):
   1692         stack.pop()
   1693         if not stack:
   1694           return (i, None)
   1695       else:
   1696         # Mismatched parentheses
   1697         return (-1, None)
   1698     elif char == ';':
   1699       # Found something that looks like the end of a statement.  If we are
   1700       # currently expecting a '<', the matching '>' must have been an
   1701       # operator, since template argument lists should not contain statements.
   1702       while stack and stack[-1] == '>':
   1703         stack.pop()
   1704       if not stack:
   1705         return (-1, None)
   1706 
   1707     i -= 1
   1708 
   1709   return (-1, stack)
   1710 
   1711 
   1712 def ReverseCloseExpression(clean_lines, linenum, pos):
   1713   """If input points to ) or } or ] or >, finds the position that opens it.
   1714 
   1715   If lines[linenum][pos] points to a ')' or '}' or ']' or '>', finds the
   1716   linenum/pos that correspond to the opening of the expression.
   1717 
   1718   Args:
   1719     clean_lines: A CleansedLines instance containing the file.
   1720     linenum: The number of the line to check.
   1721     pos: A position on the line.
   1722 
   1723   Returns:
   1724     A tuple (line, linenum, pos) pointer *at* the opening brace, or
   1725     (line, 0, -1) if we never find the matching opening brace.  Note
   1726     we ignore strings and comments when matching; and the line we
   1727     return is the 'cleansed' line at linenum.
   1728   """
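          # Illustrative example (added for exposition; not part of the original
          # cpplint source): if the elided line is 'if (foo(bar))' and pos points
          # at the final ')', the returned position is 3, i.e. the matching '('.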
   1729   line = clean_lines.elided[linenum]
   1730   if line[pos] not in ')}]>':
   1731     return (line, 0, -1)
   1732 
   1733   # Check last line
   1734   (start_pos, stack) = FindStartOfExpressionInLine(line, pos, [])
   1735   if start_pos > -1:
   1736     return (line, linenum, start_pos)
   1737 
   1738   # Continue scanning backward
   1739   while stack and linenum > 0:
   1740     linenum -= 1
   1741     line = clean_lines.elided[linenum]
   1742     (start_pos, stack) = FindStartOfExpressionInLine(line, len(line) - 1, stack)
   1743     if start_pos > -1:
   1744       return (line, linenum, start_pos)
   1745 
   1746   # Did not find start of expression before beginning of file, give up
   1747   return (line, 0, -1)
   1748 
   1749 
   1750 def CheckForCopyright(filename, lines, error):
   1751   """Logs an error if no Copyright message appears at the top of the file."""
   1752 
   1753   # We'll say it should occur by line 10. Don't forget there's a
   1754   # dummy line at the front.
   1755   for line in xrange(1, min(len(lines), 11)):
   1756     if re.search(r'Copyright', lines[line], re.I): break
   1757   else:                       # means no copyright line was found
   1758     error(filename, 0, 'legal/copyright', 5,
   1759           'No copyright message found.  '
   1760           'You should have a line: "Copyright [year] <Copyright Owner>"')
   1761 
   1762 
   1763 def GetIndentLevel(line):
   1764   """Return the number of leading spaces in line.
   1765 
   1766   Args:
   1767     line: A string to check.
   1768 
   1769   Returns:
   1770     An integer count of leading spaces, possibly zero.
   1771   """
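          # Illustrative examples (added for exposition; not part of the original
          # cpplint source):
          #   GetIndentLevel('    int x;')  ->  4
          #   GetIndentLevel('\tint x;')    ->  0  (only spaces are counted)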
   1772   indent = Match(r'^( *)\S', line)
   1773   if indent:
   1774     return len(indent.group(1))
   1775   else:
   1776     return 0
   1777 
   1778 def PathSplitToList(path):
   1779   """Returns the path split into a list by the separator.
   1780 
   1781   Args:
   1782     path: An absolute or relative path (e.g. '/a/b/c/' or '../a')
   1783 
   1784   Returns:
   1785     A list of path components (e.g. ['a', 'b', 'c']).
   1786   """
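          # Illustrative examples (added for exposition; not part of the original
          # cpplint source):
          #   PathSplitToList('a/b/c')  ->  ['a', 'b', 'c']
          #   PathSplitToList('/a/b')   ->  ['/', 'a', 'b']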
   1787   lst = []
   1788   while True:
   1789     (head, tail) = os.path.split(path)
   1790     if head == path: # absolute paths end
   1791       lst.append(head)
   1792       break
   1793     if tail == path: # relative paths end
   1794       lst.append(tail)
   1795       break
   1796 
   1797     path = head
   1798     lst.append(tail)
   1799 
   1800   lst.reverse()
   1801   return lst
   1802 
   1803 def GetHeaderGuardCPPVariable(filename):
   1804   """Returns the CPP variable that should be used as a header guard.
   1805 
   1806   Args:
   1807     filename: The name of a C++ header file.
   1808 
   1809   Returns:
   1810     The CPP variable that should be used as a header guard in the
   1811     named file.
   1812 
   1813   """
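          # Illustrative example (added for exposition; not part of the original
          # cpplint source): assuming RepositoryName() resolves to
          # 'chrome/browser/ui/browser.h' and --root is unset, the guard variable
          # is 'CHROME_BROWSER_UI_BROWSER_H_'.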
   1814 
   1815   # Restores the original filename in case cpplint is invoked from Emacs's
   1816   # flymake.
   1817   filename = re.sub(r'_flymake\.h$', '.h', filename)
   1818   filename = re.sub(r'/\.flymake/([^/]*)$', r'/\1', filename)
   1819   # Replace 'c++' with 'cpp'.
   1820   filename = filename.replace('C++', 'cpp').replace('c++', 'cpp')
   1821 
   1822   fileinfo = FileInfo(filename)
   1823   file_path_from_root = fileinfo.RepositoryName()
   1824 
   1825   def FixupPathFromRoot():
   1826     if _root_debug:
   1827       sys.stderr.write("\n_root fixup, _root = '%s', repository name = '%s'\n"
   1828           %(_root, fileinfo.RepositoryName()))
   1829 
   1830     # Process the file path with the --root flag if it was set.
   1831     if not _root:
   1832       if _root_debug:
   1833         sys.stderr.write("_root unspecified\n")
   1834       return file_path_from_root
   1835 
   1836     def StripListPrefix(lst, prefix):
   1837       # f(['x', 'y'], ['w', 'z']) -> None  (not a valid prefix)
   1838       if lst[:len(prefix)] != prefix:
   1839         return None
   1840       # f(['a', 'b', 'c', 'd'], ['a', 'b']) -> ['c', 'd']
   1841       return lst[(len(prefix)):]
   1842 
   1843     # root behavior:
   1844     #   --root=subdir , lstrips subdir from the header guard
   1845     maybe_path = StripListPrefix(PathSplitToList(file_path_from_root),
   1846                                  PathSplitToList(_root))
   1847 
   1848     if _root_debug:
   1849       sys.stderr.write("_root lstrip (maybe_path=%s, file_path_from_root=%s,"
   1850           " _root=%s)\n" % (maybe_path, file_path_from_root, _root))
   1851 
   1852     if maybe_path:
   1853       return os.path.join(*maybe_path)
   1854 
   1855     #   --root=.. , will prepend the outer directory to the header guard
   1856     full_path = fileinfo.FullName()
   1857     root_abspath = os.path.abspath(_root)
   1858 
   1859     maybe_path = StripListPrefix(PathSplitToList(full_path),
   1860                                  PathSplitToList(root_abspath))
   1861 
   1862     if _root_debug:
   1863       sys.stderr.write("_root prepend (maybe_path=%s, full_path=%s, "
   1864           "root_abspath=%s)\n" % (maybe_path, full_path, root_abspath))
   1865 
   1866     if maybe_path:
   1867       return os.path.join(*maybe_path)
   1868 
   1869     if _root_debug:
   1870       sys.stderr.write("_root ignore, returning %s\n" %(file_path_from_root))
   1871 
   1872     #   --root=FAKE_DIR is ignored
   1873     return file_path_from_root
   1874 
   1875   file_path_from_root = FixupPathFromRoot()
   1876   return re.sub(r'[^a-zA-Z0-9]', '_', file_path_from_root).upper() + '_'
   1877 
   1878 
   1879 def CheckForHeaderGuard(filename, clean_lines, error):
   1880   """Checks that the file contains a header guard.
   1881 
   1882   Logs an error if no #ifndef header guard is present.  For headers that do
   1883   have a guard, checks that it is based on the full pathname of the file.
   1884 
   1885   Args:
   1886     filename: The name of the C++ header file.
   1887     clean_lines: A CleansedLines instance containing the file.
   1888     error: The function to call with any errors found.
   1889   """
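          # For a header whose guard variable is FOO_BAR_H_, the expected pattern
          # is (illustrative sketch, added for exposition; not part of the
          # original cpplint source):
          #
          #   #ifndef FOO_BAR_H_
          #   #define FOO_BAR_H_
          #   ...
          #   #endif  // FOO_BAR_H_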
   1890 
   1891   # Don't check for header guards if there are error suppression
   1892   # comments somewhere in this file.
   1893   #
   1894   # Because this is silencing a warning for a nonexistent line, we
   1895   # only support the very specific NOLINT(build/header_guard) syntax,
   1896   # and not the general NOLINT or NOLINT(*) syntax.
   1897   raw_lines = clean_lines.lines_without_raw_strings
   1898   for i in raw_lines:
   1899     if Search(r'//\s*NOLINT\(build/header_guard\)', i):
   1900       return
   1901 
   1902   cppvar = GetHeaderGuardCPPVariable(filename)
   1903 
   1904   ifndef = ''
   1905   ifndef_linenum = 0
   1906   define = ''
   1907   endif = ''
   1908   endif_linenum = 0
   1909   for linenum, line in enumerate(raw_lines):
   1910     linesplit = line.split()
   1911     if len(linesplit) >= 2:
   1912       # find the first occurrence of #ifndef and #define, save arg
   1913       if not ifndef and linesplit[0] == '#ifndef':
   1914         # set ifndef to the header guard presented on the #ifndef line.
   1915         ifndef = linesplit[1]
   1916         ifndef_linenum = linenum
   1917       if not define and linesplit[0] == '#define':
   1918         define = linesplit[1]
   1919     # find the last occurrence of #endif, save entire line
   1920     if line.startswith('#endif'):
   1921       endif = line
   1922       endif_linenum = linenum
   1923 
   1924   if not ifndef or not define or ifndef != define:
   1925     error(filename, 0, 'build/header_guard', 5,
   1926           'No #ifndef header guard found, suggested CPP variable is: %s' %
   1927           cppvar)
   1928     return
   1929 
   1930   # The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__
   1931   # for backward compatibility.
   1932   if ifndef != cppvar:
   1933     error_level = 0
   1934     if ifndef != cppvar + '_':
   1935       error_level = 5
   1936 
   1937     ParseNolintSuppressions(filename, raw_lines[ifndef_linenum], ifndef_linenum,
   1938                             error)
   1939     error(filename, ifndef_linenum, 'build/header_guard', error_level,
   1940           '#ifndef header guard has wrong style, please use: %s' % cppvar)
   1941 
   1942   # Check for "//" comments on endif line.
   1943   ParseNolintSuppressions(filename, raw_lines[endif_linenum], endif_linenum,
   1944                           error)
   1945   match = Match(r'#endif\s*//\s*' + cppvar + r'(_)?\b', endif)
   1946   if match:
   1947     if match.group(1) == '_':
   1948       # Issue low severity warning for deprecated double trailing underscore
   1949       error(filename, endif_linenum, 'build/header_guard', 0,
   1950             '#endif line should be "#endif  // %s"' % cppvar)
   1951     return
   1952 
   1953   # Didn't find the corresponding "//" comment.  If this file does not
   1954   # contain any "//" comments at all, it could be that the compiler
   1955   # only wants "/**/" comments; look for those instead.
   1956   no_single_line_comments = True
   1957   for i in xrange(1, len(raw_lines) - 1):
   1958     line = raw_lines[i]
   1959     if Match(r'^(?:(?:\'(?:\.|[^\'])*\')|(?:"(?:\.|[^"])*")|[^\'"])*//', line):
   1960       no_single_line_comments = False
   1961       break
   1962 
   1963   if no_single_line_comments:
   1964     match = Match(r'#endif\s*/\*\s*' + cppvar + r'(_)?\s*\*/', endif)
   1965     if match:
   1966       if match.group(1) == '_':
   1967         # Low severity warning for double trailing underscore
   1968         error(filename, endif_linenum, 'build/header_guard', 0,
   1969               '#endif line should be "#endif  /* %s */"' % cppvar)
   1970       return
   1971 
   1972   # Didn't find anything
   1973   error(filename, endif_linenum, 'build/header_guard', 5,
   1974         '#endif line should be "#endif  // %s"' % cppvar)
   1975 
   1976 
   1977 def CheckHeaderFileIncluded(filename, include_state, error):
   1978   """Logs an error if a .cc file does not include its header."""
   1979 
   1980   # Do not check test files
   1981   fileinfo = FileInfo(filename)
   1982   if Search(_TEST_FILE_SUFFIX, fileinfo.BaseName()):
   1983     return
   1984 
   1985   headerfile = filename[0:len(filename) - len(fileinfo.Extension())] + '.h'
   1986   if not os.path.exists(headerfile):
   1987     return
   1988   headername = FileInfo(headerfile).RepositoryName()
   1989   first_include = 0
   1990   for section_list in include_state.include_list:
   1991     for f in section_list:
   1992       if headername in f[0] or f[0] in headername:
   1993         return
   1994       if not first_include:
   1995         first_include = f[1]
   1996 
   1997   error(filename, first_include, 'build/include', 5,
   1998         '%s should include its header file %s' % (fileinfo.RepositoryName(),
   1999                                                   headername))
   2000 
   2001 
   2002 def CheckForBadCharacters(filename, lines, error):
   2003   """Logs an error for each line containing bad characters.
   2004 
   2005   Two kinds of bad characters:
   2006 
   2007   1. Unicode replacement characters: These indicate that either the file
   2008   contained invalid UTF-8 (likely) or Unicode replacement characters (which
   2009   it shouldn't).  Note that it's possible for this to throw off line
   2010   numbering if the invalid UTF-8 occurred adjacent to a newline.
   2011 
   2012   2. NUL bytes.  These are problematic for some tools.
   2013 
   2014   Args:
   2015     filename: The name of the current file.
   2016     lines: An array of strings, each representing a line of the file.
   2017     error: The function to call with any errors found.
   2018   """
   2019   for linenum, line in enumerate(lines):
   2020     if u'\ufffd' in line:
   2021       error(filename, linenum, 'readability/utf8', 5,
   2022             'Line contains invalid UTF-8 (or Unicode replacement character).')
   2023     if '\0' in line:
   2024       error(filename, linenum, 'readability/nul', 5, 'Line contains NUL byte.')
   2025 
   2026 
   2027 def CheckForNewlineAtEOF(filename, lines, error):
   2028   """Logs an error if there is no newline char at the end of the file.
   2029 
   2030   Args:
   2031     filename: The name of the current file.
   2032     lines: An array of strings, each representing a line of the file.
   2033     error: The function to call with any errors found.
   2034   """
   2035 
   2036   # The array lines() was created by adding two newlines to the
   2037   # original file (go figure), then splitting on \n.
   2038   # To verify that the file ends in \n, we just have to make sure the
   2039   # second-to-last element of lines() exists and is empty.
   2040   if len(lines) < 3 or lines[-2]:
   2041     error(filename, len(lines) - 2, 'whitespace/ending_newline', 5,
   2042           'Could not find a newline character at the end of the file.')
   2043 
   2044 
   2045 def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error):
   2046   """Logs an error if we see /* ... */ or "..." that extend past one line.
   2047 
   2048   /* ... */ comments are legit inside macros, for one line.
   2049   Otherwise, we prefer // comments, so it's ok to warn about the
   2050   other.  Likewise, it's ok for strings to extend across multiple
   2051   lines, as long as a line continuation character (backslash)
   2052   terminates each line. Although not currently prohibited by the C++
   2053   style guide, it's ugly and unnecessary. We don't do well with either
   2054   in this lint program, so we warn about both.
   2055 
   2056   Args:
   2057     filename: The name of the current file.
   2058     clean_lines: A CleansedLines instance containing the file.
   2059     linenum: The number of the line to check.
   2060     error: The function to call with any errors found.
   2061   """
   2062   line = clean_lines.elided[linenum]
   2063 
   2064   # Remove all \\ (escaped backslashes) from the line. They are OK, and the
   2065   # second (escaped) backslash may trigger later \" detection erroneously.
   2066   line = line.replace('\\\\', '')
   2067 
   2068   if line.count('/*') > line.count('*/'):
   2069     error(filename, linenum, 'readability/multiline_comment', 5,
   2070           'Complex multi-line /*...*/-style comment found. '
   2071           'Lint may give bogus warnings.  '
   2072           'Consider replacing these with //-style comments, '
   2073           'with #if 0...#endif, '
   2074           'or with more clearly structured multi-line comments.')
   2075 
   2076   if (line.count('"') - line.count('\\"')) % 2:
   2077     error(filename, linenum, 'readability/multiline_string', 5,
   2078           'Multi-line string ("...") found.  This lint script doesn\'t '
   2079           'do well with such strings, and may give bogus warnings.  '
   2080           'Use C++11 raw strings or concatenation instead.')
   2081 
   2082 
   2083 # (non-threadsafe name, thread-safe alternative, validation pattern)
   2084 #
   2085 # The validation pattern is used to eliminate false positives such as:
   2086 #  _rand();               // false positive due to substring match.
   2087 #  ->rand();              // some member function rand().
   2088 #  ACMRandom rand(seed);  // some variable named rand.
   2089 #  ISAACRandom rand();    // another variable named rand.
   2090 #
   2091 # Basically we require the return value of these functions to be used
   2092 # in some expression context on the same line by matching on some
   2093 # operator before the function name.  This eliminates constructors and
   2094 # member function calls.
   2095 _UNSAFE_FUNC_PREFIX = r'(?:[-+*/=%^&|(<]\s*|>\s+)'
   2096 _THREADING_LIST = (
   2097     ('asctime(', 'asctime_r(', _UNSAFE_FUNC_PREFIX + r'asctime\([^)]+\)'),
   2098     ('ctime(', 'ctime_r(', _UNSAFE_FUNC_PREFIX + r'ctime\([^)]+\)'),
   2099     ('getgrgid(', 'getgrgid_r(', _UNSAFE_FUNC_PREFIX + r'getgrgid\([^)]+\)'),
   2100     ('getgrnam(', 'getgrnam_r(', _UNSAFE_FUNC_PREFIX + r'getgrnam\([^)]+\)'),
   2101     ('getlogin(', 'getlogin_r(', _UNSAFE_FUNC_PREFIX + r'getlogin\(\)'),
   2102     ('getpwnam(', 'getpwnam_r(', _UNSAFE_FUNC_PREFIX + r'getpwnam\([^)]+\)'),
   2103     ('getpwuid(', 'getpwuid_r(', _UNSAFE_FUNC_PREFIX + r'getpwuid\([^)]+\)'),
   2104     ('gmtime(', 'gmtime_r(', _UNSAFE_FUNC_PREFIX + r'gmtime\([^)]+\)'),
   2105     ('localtime(', 'localtime_r(', _UNSAFE_FUNC_PREFIX + r'localtime\([^)]+\)'),
   2106     ('rand(', 'rand_r(', _UNSAFE_FUNC_PREFIX + r'rand\(\)'),
   2107     ('strtok(', 'strtok_r(',
   2108      _UNSAFE_FUNC_PREFIX + r'strtok\([^)]+\)'),
   2109     ('ttyname(', 'ttyname_r(', _UNSAFE_FUNC_PREFIX + r'ttyname\([^)]+\)'),
   2110     )
   2111 
   2112 
   2113 def CheckPosixThreading(filename, clean_lines, linenum, error):
   2114   """Checks for calls to thread-unsafe functions.
   2115 
   2116   Much code was originally written without multi-threading in mind, and
   2117   many engineers learned posix before its threading extensions were added.
   2118   These checks steer engineers toward the thread-safe variants of such
   2119   functions (when using posix directly).
   2121 
   2122   Args:
   2123     filename: The name of the current file.
   2124     clean_lines: A CleansedLines instance containing the file.
   2125     linenum: The number of the line to check.
   2126     error: The function to call with any errors found.
   2127   """
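          # Illustrative examples (added for exposition; not part of the original
          # cpplint source):
          #   'int r = rand();'        ->  flagged, rand_r(...) is suggested
          #   'ACMRandom rand(seed);'  ->  not flagged (constructor, no preceding
          #                                operator)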
   2128   line = clean_lines.elided[linenum]
   2129   for single_thread_func, multithread_safe_func, pattern in _THREADING_LIST:
   2130     # Additional pattern matching check to confirm that this is the
   2131     # function we are looking for
   2132     if Search(pattern, line):
   2133       error(filename, linenum, 'runtime/threadsafe_fn', 2,
   2134             'Consider using ' + multithread_safe_func +
   2135             '...) instead of ' + single_thread_func +
   2136             '...) for improved thread safety.')
   2137 
   2138 
   2139 def CheckVlogArguments(filename, clean_lines, linenum, error):
   2140   """Checks that VLOG() is only used for defining a logging level.
   2141 
   2142   For example, VLOG(2) is correct. VLOG(INFO), VLOG(WARNING), VLOG(ERROR), and
   2143   VLOG(FATAL) are not.
   2144 
   2145   Args:
   2146     filename: The name of the current file.
   2147     clean_lines: A CleansedLines instance containing the file.
   2148     linenum: The number of the line to check.
   2149     error: The function to call with any errors found.
   2150   """
   2151   line = clean_lines.elided[linenum]
   2152   if Search(r'\bVLOG\((INFO|ERROR|WARNING|DFATAL|FATAL)\)', line):
   2153     error(filename, linenum, 'runtime/vlog', 5,
   2154           'VLOG() should be used with numeric verbosity level.  '
   2155           'Use LOG() if you want symbolic severity levels.')
   2156 
   2157 # Matches invalid increment: *count++, which moves the pointer instead of
   2158 # incrementing the pointed-to value.
   2159 _RE_PATTERN_INVALID_INCREMENT = re.compile(
   2160     r'^\s*\*\w+(\+\+|--);')
   2161 
   2162 
   2163 def CheckInvalidIncrement(filename, clean_lines, linenum, error):
   2164   """Checks for invalid increment *count++.
   2165 
   2166   For example, the following function:
   2167     void increment_counter(int* count) {
   2168       *count++;
   2169     }
   2170   is invalid, because it effectively does count++, moving the pointer, and
   2171   should be replaced with ++*count, (*count)++ or *count += 1.
   2172 
   2173   Args:
   2174     filename: The name of the current file.
   2175     clean_lines: A CleansedLines instance containing the file.
   2176     linenum: The number of the line to check.
   2177     error: The function to call with any errors found.
   2178   """
   2179   line = clean_lines.elided[linenum]
   2180   if _RE_PATTERN_INVALID_INCREMENT.match(line):
   2181     error(filename, linenum, 'runtime/invalid_increment', 5,
   2182           'Changing pointer instead of value (or unused value of operator*).')
   2183 
   2184 
   2185 def IsMacroDefinition(clean_lines, linenum):
   2186   if Search(r'^#define', clean_lines[linenum]):
   2187     return True
   2188 
   2189   if linenum > 0 and Search(r'\\$', clean_lines[linenum - 1]):
   2190     return True
   2191 
   2192   return False
   2193 
   2194 
   2195 def IsForwardClassDeclaration(clean_lines, linenum):
   2196   return Match(r'^\s*(\btemplate\b)*.*class\s+\w+;\s*$', clean_lines[linenum])
   2197 
   2198 
   2199 class _BlockInfo(object):
   2200   """Stores information about a generic block of code."""
   2201 
   2202   def __init__(self, linenum, seen_open_brace):
   2203     self.starting_linenum = linenum
   2204     self.seen_open_brace = seen_open_brace
   2205     self.open_parentheses = 0
   2206     self.inline_asm = _NO_ASM
   2207     self.check_namespace_indentation = False
   2208 
   2209   def CheckBegin(self, filename, clean_lines, linenum, error):
   2210     """Run checks that apply to text up to the opening brace.
   2211 
   2212     This is mostly for checking the text after the class identifier
   2213     and the "{", usually where the base class is specified.  For other
   2214     blocks, there isn't much to check, so we always pass.
   2215 
   2216     Args:
   2217       filename: The name of the current file.
   2218       clean_lines: A CleansedLines instance containing the file.
   2219       linenum: The number of the line to check.
   2220       error: The function to call with any errors found.
   2221     """
   2222     pass
   2223 
   2224   def CheckEnd(self, filename, clean_lines, linenum, error):
   2225     """Run checks that apply to text after the closing brace.
   2226 
   2227     This is mostly used for checking end of namespace comments.
   2228 
   2229     Args:
   2230       filename: The name of the current file.
   2231       clean_lines: A CleansedLines instance containing the file.
   2232       linenum: The number of the line to check.
   2233       error: The function to call with any errors found.
   2234     """
   2235     pass
   2236 
   2237   def IsBlockInfo(self):
   2238     """Returns true if this block is a _BlockInfo.
   2239 
   2240     This is convenient for verifying that an object is an instance of
   2241     a _BlockInfo, but not an instance of any of the derived classes.
   2242 
   2243     Returns:
   2244       True for this class, False for derived classes.
   2245     """
   2246     return self.__class__ == _BlockInfo
   2247 
   2248 
   2249 class _ExternCInfo(_BlockInfo):
   2250   """Stores information about an 'extern "C"' block."""
   2251 
   2252   def __init__(self, linenum):
   2253     _BlockInfo.__init__(self, linenum, True)
   2254 
   2255 
   2256 class _ClassInfo(_BlockInfo):
   2257   """Stores information about a class."""
   2258 
   2259   def __init__(self, name, class_or_struct, clean_lines, linenum):
   2260     _BlockInfo.__init__(self, linenum, False)
   2261     self.name = name
   2262     self.is_derived = False
   2263     self.check_namespace_indentation = True
   2264     if class_or_struct == 'struct':
   2265       self.access = 'public'
   2266       self.is_struct = True
   2267     else:
   2268       self.access = 'private'
   2269       self.is_struct = False
   2270 
   2271     # Remember initial indentation level for this class.  Using raw_lines here
   2272     # instead of elided to account for leading comments.
   2273     self.class_indent = GetIndentLevel(clean_lines.raw_lines[linenum])
   2274 
   2275     # Try to find the end of the class.  This will be confused by things like:
   2276     #   class A {
   2277     #   } *x = { ...
   2278     #
   2279     # But it's still good enough for CheckSectionSpacing.
   2280     self.last_line = 0
   2281     depth = 0
   2282     for i in range(linenum, clean_lines.NumLines()):
   2283       line = clean_lines.elided[i]
   2284       depth += line.count('{') - line.count('}')
   2285       if not depth:
   2286         self.last_line = i
   2287         break
   2288 
   2289   def CheckBegin(self, filename, clean_lines, linenum, error):
   2290     # Look for a bare ':'
   2291     if Search('(^|[^:]):($|[^:])', clean_lines.elided[linenum]):
   2292       self.is_derived = True
   2293 
   2294   def CheckEnd(self, filename, clean_lines, linenum, error):
   2295     # If there is a DISALLOW macro, it should appear near the end of
   2296     # the class.
   2297     seen_last_thing_in_class = False
   2298     for i in xrange(linenum - 1, self.starting_linenum, -1):
   2299       match = Search(
   2300           r'\b(DISALLOW_COPY_AND_ASSIGN|DISALLOW_IMPLICIT_CONSTRUCTORS)\(' +
   2301           self.name + r'\)',
   2302           clean_lines.elided[i])
   2303       if match:
   2304         if seen_last_thing_in_class:
   2305           error(filename, i, 'readability/constructors', 3,
   2306                 match.group(1) + ' should be the last thing in the class')
   2307         break
   2308 
   2309       if not Match(r'^\s*$', clean_lines.elided[i]):
   2310         seen_last_thing_in_class = True
   2311 
   2312     # Check that closing brace is aligned with beginning of the class.
   2313     # Only do this if the closing brace is indented by only whitespaces.
   2314     # This means we will not check single-line class definitions.
   2315     indent = Match(r'^( *)\}', clean_lines.elided[linenum])
   2316     if indent and len(indent.group(1)) != self.class_indent:
   2317       if self.is_struct:
   2318         parent = 'struct ' + self.name
   2319       else:
   2320         parent = 'class ' + self.name
   2321       error(filename, linenum, 'whitespace/indent', 3,
   2322             'Closing brace should be aligned with beginning of %s' % parent)
   2323 
   2324 
   2325 class _NamespaceInfo(_BlockInfo):
   2326   """Stores information about a namespace."""
   2327 
   2328   def __init__(self, name, linenum):
   2329     _BlockInfo.__init__(self, linenum, False)
   2330     self.name = name or ''
   2331     self.check_namespace_indentation = True
   2332 
   2333   def CheckEnd(self, filename, clean_lines, linenum, error):
   2334     """Check end of namespace comments."""
   2335     line = clean_lines.raw_lines[linenum]
   2336 
   2337     # Check how many lines are enclosed in this namespace.  Don't issue
   2338     # warning for missing namespace comments if there aren't enough
   2339     # lines.  However, do apply checks if there is already an end of
   2340     # namespace comment and it's incorrect.
   2341     #
   2342     # TODO(unknown): We always want to check end of namespace comments
   2343     # if a namespace is large, but sometimes we also want to apply the
   2344     # check if a short namespace contained nontrivial things (something
   2345     # other than forward declarations).  There is currently no logic on
   2346     # deciding what these nontrivial things are, so this check is
   2347     # triggered by namespace size only, which works most of the time.
   2348     if (linenum - self.starting_linenum < 10
   2349         and not Match(r'^\s*};*\s*(//|/\*).*\bnamespace\b', line)):
   2350       return
   2351 
   2352     # Look for matching comment at end of namespace.
   2353     #
   2354     # Note that we accept C style "/* */" comments for terminating
   2355     # namespaces, so that code that terminates namespaces inside
   2356     # preprocessor macros can be cpplint clean.
   2357     #
   2358     # We also accept stuff like "// end of namespace <name>." with the
   2359     # period at the end.
   2360     #
   2361     # Besides these, we don't accept anything else, otherwise we might
   2362     # get false negatives when the existing comment is a substring of the
   2363     # expected namespace.
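            # Illustrative examples for a namespace named 'mynamespace' (added for
            # exposition; not part of the original cpplint source):
            #   '}  // namespace mynamespace'  ->  accepted
            #   '}  // end of mynamespace'     ->  flagged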
   2364     if self.name:
   2365       # Named namespace
   2366       if not Match((r'^\s*};*\s*(//|/\*).*\bnamespace\s+' +
   2367                     re.escape(self.name) + r'[\*/\.\\\s]*$'),
   2368                    line):
   2369         error(filename, linenum, 'readability/namespace', 5,
   2370               'Namespace should be terminated with "// namespace %s"' %
   2371               self.name)
   2372     else:
   2373       # Anonymous namespace
   2374       if not Match(r'^\s*};*\s*(//|/\*).*\bnamespace[\*/\.\\\s]*$', line):
   2375         # If "// namespace anonymous" or "// anonymous namespace (more text)",
   2376         # mention "// anonymous namespace" as an acceptable form
   2377         if Match(r'^\s*}.*\b(namespace anonymous|anonymous namespace)\b', line):
   2378           error(filename, linenum, 'readability/namespace', 5,
   2379                 'Anonymous namespace should be terminated with "// namespace"'
   2380                 ' or "// anonymous namespace"')
   2381         else:
   2382           error(filename, linenum, 'readability/namespace', 5,
   2383                 'Anonymous namespace should be terminated with "// namespace"')
   2384 
   2385 
   2386 class _PreprocessorInfo(object):
   2387   """Stores checkpoints of nesting stacks when #if/#else is seen."""
   2388 
   2389   def __init__(self, stack_before_if):
   2390     # The entire nesting stack before #if
   2391     self.stack_before_if = stack_before_if
   2392 
   2393     # The entire nesting stack up to #else
   2394     self.stack_before_else = []
   2395 
   2396     # Whether we have already seen #else or #elif
   2397     self.seen_else = False
   2398 
   2399 
   2400 class NestingState(object):
   2401   """Holds states related to parsing braces."""
   2402 
   2403   def __init__(self):
   2404     # Stack for tracking all braces.  An object is pushed whenever we
   2405     # see a "{", and popped when we see a "}".  Only 3 types of
   2406     # objects are possible:
   2407     # - _ClassInfo: a class or struct.
   2408     # - _NamespaceInfo: a namespace.
   2409     # - _BlockInfo: some other type of block.
   2410     self.stack = []
   2411 
   2412     # Top of the previous stack before each Update().
   2413     #
   2414     # Because the nesting_stack is updated at the end of each line, we
   2415     # had to do some convoluted checks to find out what the current scope
   2416     # is at the beginning of the line.  This check is simplified by
   2417     # saving the previous top of nesting stack.
   2418     #
   2419     # We could save the full stack, but we only need the top.  Copying
   2420     # the full nesting stack would slow down cpplint by ~10%.
   2421     self.previous_stack_top = []
   2422 
   2423     # Stack of _PreprocessorInfo objects.
   2424     self.pp_stack = []
   2425 
   2426   def SeenOpenBrace(self):
   2427     """Check if we have seen the opening brace for the innermost block.
   2428 
   2429     Returns:
   2430       True if we have seen the opening brace, False if the innermost
   2431       block is still expecting an opening brace.
   2432     """
   2433     return (not self.stack) or self.stack[-1].seen_open_brace
   2434 
   2435   def InNamespaceBody(self):
   2436     """Check if we are currently one level inside a namespace body.
   2437 
   2438     Returns:
   2439       True if top of the stack is a namespace block, False otherwise.
   2440     """
   2441     return self.stack and isinstance(self.stack[-1], _NamespaceInfo)
   2442 
   2443   def InExternC(self):
   2444     """Check if we are currently one level inside an 'extern "C"' block.
   2445 
   2446     Returns:
   2447       True if top of the stack is an extern block, False otherwise.
   2448     """
   2449     return self.stack and isinstance(self.stack[-1], _ExternCInfo)
   2450 
   2451   def InClassDeclaration(self):
   2452     """Check if we are currently one level inside a class or struct declaration.
   2453 
   2454     Returns:
   2455       True if top of the stack is a class/struct, False otherwise.
   2456     """
   2457     return self.stack and isinstance(self.stack[-1], _ClassInfo)
   2458 
   2459   def InAsmBlock(self):
   2460     """Check if we are currently one level inside an inline ASM block.
   2461 
   2462     Returns:
   2463       True if the top of the stack is a block containing inline ASM.
   2464     """
   2465     return self.stack and self.stack[-1].inline_asm != _NO_ASM
   2466 
   2467   def InTemplateArgumentList(self, clean_lines, linenum, pos):
   2468     """Check if current position is inside template argument list.
   2469 
   2470     Args:
   2471       clean_lines: A CleansedLines instance containing the file.
   2472       linenum: The number of the line to check.
   2473       pos: position just after the suspected template argument.
   2474     Returns:
   2475       True if (linenum, pos) is inside template arguments.
   2476     """
   2477     while linenum < clean_lines.NumLines():
   2478       # Find the earliest character that might indicate a template argument
   2479       line = clean_lines.elided[linenum]
   2480       match = Match(r'^[^{};=\[\]\.<>]*(.)', line[pos:])
   2481       if not match:
   2482         linenum += 1
   2483         pos = 0
   2484         continue
   2485       token = match.group(1)
   2486       pos += len(match.group(0))
   2487 
   2488       # These things do not look like template argument list:
   2489       #   class Suspect {
   2490       #   class Suspect x; }
   2491       if token in ('{', '}', ';'): return False
   2492 
   2493       # These things look like template argument list:
   2494       #   template <class Suspect>
   2495       #   template <class Suspect = default_value>
   2496       #   template <class Suspect[]>
   2497       #   template <class Suspect...>
   2498       if token in ('>', '=', '[', ']', '.'): return True
   2499 
   2500       # Check if token is an unmatched '<'.
   2501       # If not, move on to the next character.
   2502       if token != '<':
   2503         pos += 1
   2504         if pos >= len(line):
   2505           linenum += 1
   2506           pos = 0
   2507         continue
   2508 
   2509       # If all we found is a single '<', we can't yet tell whether it starts
   2510       # a template argument list; we need to find the matching '>'.
   2511       (_, end_line, end_pos) = CloseExpression(clean_lines, linenum, pos - 1)
   2512       if end_pos < 0:
   2513         # Not sure if template argument list or syntax error in file
   2514         return False
   2515       linenum = end_line
   2516       pos = end_pos
   2517     return False
   2518 
   2519   def UpdatePreprocessor(self, line):
   2520     """Update preprocessor stack.
   2521 
   2522     We need to handle preprocessors due to classes like this:
   2523       #ifdef SWIG
   2524       struct ResultDetailsPageElementExtensionPoint {
   2525       #else
   2526       struct ResultDetailsPageElementExtensionPoint : public Extension {
   2527       #endif
   2528 
   2529     We make the following assumptions (good enough for most files):
   2530     - Preprocessor condition evaluates to true from #if up to first
   2531       #else/#elif/#endif.
   2532 
   2533     - Preprocessor condition evaluates to false from #else/#elif up
   2534       to #endif.  We still perform lint checks on these lines, but
   2535       these do not affect nesting stack.
   2536 
   2537     Args:
   2538       line: current line to check.
   2539     """
   2540     if Match(r'^\s*#\s*(if|ifdef|ifndef)\b', line):
   2541       # Beginning of #if block, save the nesting stack here.  The saved
   2542       # stack will allow us to restore the parsing state in the #else case.
   2543       self.pp_stack.append(_PreprocessorInfo(copy.deepcopy(self.stack)))
   2544     elif Match(r'^\s*#\s*(else|elif)\b', line):
   2545       # Beginning of #else block
   2546       if self.pp_stack:
   2547         if not self.pp_stack[-1].seen_else:
   2548           # This is the first #else or #elif block.  Remember the
   2549           # whole nesting stack up to this point.  This is what we
   2550           # keep after the #endif.
   2551           self.pp_stack[-1].seen_else = True
   2552           self.pp_stack[-1].stack_before_else = copy.deepcopy(self.stack)
   2553 
   2554         # Restore the stack to how it was before the #if
   2555         self.stack = copy.deepcopy(self.pp_stack[-1].stack_before_if)
   2556       else:
   2557         # TODO(unknown): unexpected #else, issue warning?
   2558         pass
   2559     elif Match(r'^\s*#\s*endif\b', line):
   2560       # End of #if or #else blocks.
   2561       if self.pp_stack:
   2562         # If we saw an #else, we will need to restore the nesting
   2563         # stack to its former state before the #else, otherwise we
   2564         # will just continue from where we left off.
   2565         if self.pp_stack[-1].seen_else:
   2566           # Here we can just use a shallow copy since we are the last
   2567           # reference to it.
   2568           self.stack = self.pp_stack[-1].stack_before_else
   2569         # Drop the corresponding #if
   2570         self.pp_stack.pop()
   2571       else:
   2572         # TODO(unknown): unexpected #endif, issue warning?
   2573         pass
   2574 
   2575   # TODO(unknown): Update() is too long, but we will refactor later.
   2576   def Update(self, filename, clean_lines, linenum, error):
   2577     """Update nesting state with current line.
   2578 
   2579     Args:
   2580       filename: The name of the current file.
   2581       clean_lines: A CleansedLines instance containing the file.
   2582       linenum: The number of the line to check.
   2583       error: The function to call with any errors found.
   2584     """
   2585     line = clean_lines.elided[linenum]
   2586 
   2587     # Remember top of the previous nesting stack.
   2588     #
   2589     # The stack is always pushed/popped and not modified in place, so
   2590     # we can just do a shallow copy instead of copy.deepcopy.  Using
   2591     # deepcopy would slow down cpplint by ~28%.
   2592     if self.stack:
   2593       self.previous_stack_top = self.stack[-1]
   2594     else:
   2595       self.previous_stack_top = None
   2596 
   2597     # Update pp_stack
   2598     self.UpdatePreprocessor(line)
   2599 
   2600     # Count parentheses.  This is to avoid adding struct arguments to
   2601     # the nesting stack.
   2602     if self.stack:
   2603       inner_block = self.stack[-1]
   2604       depth_change = line.count('(') - line.count(')')
   2605       inner_block.open_parentheses += depth_change
   2606 
   2607       # Also check if we are starting or ending an inline assembly block.
   2608       if inner_block.inline_asm in (_NO_ASM, _END_ASM):
   2609         if (depth_change != 0 and
   2610             inner_block.open_parentheses == 1 and
   2611             _MATCH_ASM.match(line)):
   2612           # Enter assembly block
   2613           inner_block.inline_asm = _INSIDE_ASM
   2614         else:
   2615           # Not entering assembly block.  If previous line was _END_ASM,
   2616           # we will now shift to _NO_ASM state.
   2617           inner_block.inline_asm = _NO_ASM
   2618       elif (inner_block.inline_asm == _INSIDE_ASM and
   2619             inner_block.open_parentheses == 0):
   2620         # Exit assembly block
   2621         inner_block.inline_asm = _END_ASM
   2622 
   2623     # Consume namespace declaration at the beginning of the line.  Do
   2624     # this in a loop so that we catch same line declarations like this:
   2625     #   namespace proto2 { namespace bridge { class MessageSet; } }
   2626     while True:
   2627       # Match start of namespace.  The "\b\s*" below catches namespace
   2628       # declarations even if it weren't followed by a whitespace, this
   2629       # declarations even if they aren't followed by whitespace; this
   2630       # missing spaces will be flagged by CheckSpacing.
   2631       namespace_decl_match = Match(r'^\s*namespace\b\s*([:\w]+)?(.*)$', line)
   2632       if not namespace_decl_match:
   2633         break
   2634 
   2635       new_namespace = _NamespaceInfo(namespace_decl_match.group(1), linenum)
   2636       self.stack.append(new_namespace)
   2637 
   2638       line = namespace_decl_match.group(2)
   2639       if line.find('{') != -1:
   2640         new_namespace.seen_open_brace = True
   2641         line = line[line.find('{') + 1:]
   2642 
   2643     # Look for a class declaration in whatever is left of the line
   2644     # after parsing namespaces.  The regexp accounts for decorated classes
   2645     # such as in:
   2646     #   class LOCKABLE API Object {
   2647     #   };
   2648     class_decl_match = Match(
   2649         r'^(\s*(?:template\s*<[\w\s<>,:]*>\s*)?'
   2650         r'(class|struct)\s+(?:[A-Z_]+\s+)*(\w+(?:::\w+)*))'
   2651         r'(.*)$', line)
   2652     if (class_decl_match and
   2653         (not self.stack or self.stack[-1].open_parentheses == 0)):
   2654       # We do not want to accept classes that are actually template arguments:
   2655       #   template <class Ignore1,
   2656       #             class Ignore2 = Default<Args>,
   2657       #             template <Args> class Ignore3>
   2658       #   void Function() {};
   2659       #
   2660       # To avoid template argument cases, we scan forward and look for
   2661       # an unmatched '>'.  If we see one, assume we are inside a
   2662       # template argument list.
   2663       end_declaration = len(class_decl_match.group(1))
   2664       if not self.InTemplateArgumentList(clean_lines, linenum, end_declaration):
   2665         self.stack.append(_ClassInfo(
   2666             class_decl_match.group(3), class_decl_match.group(2),
   2667             clean_lines, linenum))
   2668         line = class_decl_match.group(4)
   2669 
   2670     # If we have not yet seen the opening brace for the innermost block,
   2671     # run checks here.
   2672     if not self.SeenOpenBrace():
   2673       self.stack[-1].CheckBegin(filename, clean_lines, linenum, error)
   2674 
   2675     # Update access control if we are inside a class/struct
   2676     if self.stack and isinstance(self.stack[-1], _ClassInfo):
   2677       classinfo = self.stack[-1]
   2678       access_match = Match(
   2679           r'^(.*)\b(public|private|protected|signals)(\s+(?:slots\s*)?)?'
   2680           r':(?:[^:]|$)',
   2681           line)
   2682       if access_match:
   2683         classinfo.access = access_match.group(2)
   2684 
   2685         # Check that access keywords are indented +1 space.  Skip this
   2686         # check if the keywords are not preceded by whitespace.
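                # For illustration (hypothetical snippet), assuming "class Foo"
                # itself starts at column 0:
                #   " public:"    <- OK, indented +1 space
                #   "  public:"   <- flagged, indented +2 spaces
                #   "public:"     <- flagged, not indented at all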
   2687         indent = access_match.group(1)
   2688         if (len(indent) != classinfo.class_indent + 1 and
   2689             Match(r'^\s*$', indent)):
   2690           if classinfo.is_struct:
   2691             parent = 'struct ' + classinfo.name
   2692           else:
   2693             parent = 'class ' + classinfo.name
   2694           slots = ''
   2695           if access_match.group(3):
   2696             slots = access_match.group(3)
   2697           error(filename, linenum, 'whitespace/indent', 3,
   2698                 '%s%s: should be indented +1 space inside %s' % (
   2699                     access_match.group(2), slots, parent))
   2700 
   2701     # Consume braces or semicolons from what's left of the line
   2702     while True:
   2703       # Match first brace, semicolon, or closed parenthesis.
   2704       matched = Match(r'^[^{;)}]*([{;)}])(.*)$', line)
   2705       if not matched:
   2706         break
   2707 
   2708       token = matched.group(1)
   2709       if token == '{':
   2710         # If namespace or class hasn't seen an opening brace yet, mark
   2711         # namespace/class head as complete.  Push a new block onto the
   2712         # stack otherwise.
   2713         if not self.SeenOpenBrace():
   2714           self.stack[-1].seen_open_brace = True
   2715         elif Match(r'^extern\s*"[^"]*"\s*\{', line):
   2716           self.stack.append(_ExternCInfo(linenum))
   2717         else:
   2718           self.stack.append(_BlockInfo(linenum, True))
   2719           if _MATCH_ASM.match(line):
   2720             self.stack[-1].inline_asm = _BLOCK_ASM
   2721 
   2722       elif token == ';' or token == ')':
   2723         # If we haven't seen an opening brace yet, but we already saw
   2724         # a semicolon, this is probably a forward declaration.  Pop
   2725         # the stack for these.
   2726         #
   2727         # Similarly, if we haven't seen an opening brace yet, but we
   2728         # already saw a closing parenthesis, then these are probably
   2729         # function arguments with extra "class" or "struct" keywords.
   2730         # Also pop the stack for these.
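                # For illustration, two hypothetical cases that reach this branch:
                #   class Foo;                 <- forward declaration, popped on ';'
                #   void Bar(int x,
                #            class Foo* f);    <- "class Foo" popped on ')'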
   2731         if not self.SeenOpenBrace():
   2732           self.stack.pop()
   2733       else:  # token == '}'
   2734         # Perform end of block checks and pop the stack.
   2735         if self.stack:
   2736           self.stack[-1].CheckEnd(filename, clean_lines, linenum, error)
   2737           self.stack.pop()
   2738       line = matched.group(2)
   2739 
   2740   def InnermostClass(self):
   2741     """Get class info on the top of the stack.
   2742 
   2743     Returns:
   2744       A _ClassInfo object if we are inside a class, or None otherwise.
   2745     """
   2746     for i in range(len(self.stack), 0, -1):
   2747       classinfo = self.stack[i - 1]
   2748       if isinstance(classinfo, _ClassInfo):
   2749         return classinfo
   2750     return None
   2751 
   2752   def CheckCompletedBlocks(self, filename, error):
   2753     """Checks that all classes and namespaces have been completely parsed.
   2754 
   2755     Call this when all lines in a file have been processed.
   2756     Args:
   2757       filename: The name of the current file.
   2758       error: The function to call with any errors found.
   2759     """
   2760     # Note: This test can result in false positives if #ifdef constructs
   2761     # get in the way of brace matching. See the testBuildClass test in
   2762     # cpplint_unittest.py for an example of this.
   2763     for obj in self.stack:
   2764       if isinstance(obj, _ClassInfo):
   2765         error(filename, obj.starting_linenum, 'build/class', 5,
   2766               'Failed to find complete declaration of class %s' %
   2767               obj.name)
   2768       elif isinstance(obj, _NamespaceInfo):
   2769         error(filename, obj.starting_linenum, 'build/namespaces', 5,
   2770               'Failed to find complete declaration of namespace %s' %
   2771               obj.name)
   2772 
   2773 
   2774 def CheckForNonStandardConstructs(filename, clean_lines, linenum,
   2775                                   nesting_state, error):
   2776   r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2.
   2777 
   2778   Complain about several constructs which gcc-2 accepts, but which are
   2779   not standard C++.  Warning about these in lint is one way to ease the
   2780   transition to new compilers.
   2781   - put storage class first (e.g. "static const" instead of "const static").
   2782   - "%lld" instead of %qd" in printf-type functions.
   2783   - "%1$d" is non-standard in printf-type functions.
   2784   - "\%" is an undefined character escape sequence.
   2785   - text after #endif is not allowed.
   2786   - invalid inner-style forward declaration.
   2787   - >? and <? operators, and their >?= and <?= cousins.
   2788 
   2789   Additionally, check for constructor/destructor style violations and reference
   2790   members, as it is very convenient to do so while checking for
   2791   gcc-2 compliance.
   2792 
   2793   Args:
   2794     filename: The name of the current file.
   2795     clean_lines: A CleansedLines instance containing the file.
   2796     linenum: The number of the line to check.
   2797     nesting_state: A NestingState instance which maintains information about
   2798                    the current stack of nested blocks being parsed.
   2799     error: A callable to which errors are reported, which takes 4 arguments:
   2800            filename, line number, error level, and message
   2801   """
   2802 
   2803   # Remove comments from the line, but leave in strings for now.
   2804   line = clean_lines.lines[linenum]
   2805 
   2806   if Search(r'printf\s*\(.*".*%[-+ ]?\d*q', line):
   2807     error(filename, linenum, 'runtime/printf_format', 3,
   2808           '%q in format strings is deprecated.  Use %ll instead.')
   2809 
   2810   if Search(r'printf\s*\(.*".*%\d+\$', line):
   2811     error(filename, linenum, 'runtime/printf_format', 2,
   2812           '%N$ formats are unconventional.  Try rewriting to avoid them.')
   2813 
   2814   # Remove escaped backslashes before looking for undefined escapes.
   2815   line = line.replace('\\\\', '')
   2816 
   2817   if Search(r'("|\').*\\(%|\[|\(|{)', line):
   2818     error(filename, linenum, 'build/printf_format', 3,
   2819           '%, [, (, and { are undefined character escapes.  Unescape them.')
   2820 
   2821   # For the rest, work with both comments and strings removed.
   2822   line = clean_lines.elided[linenum]
   2823 
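          # For illustration, hypothetical lines the storage-class check below
          # would flag or allow:
          #   const static int kFoo = 1;   <- flagged, storage class after "const"
          #   static const int kFoo = 1;   <- OK, storage class first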
   2824   if Search(r'\b(const|volatile|void|char|short|int|long'
   2825             r'|float|double|signed|unsigned'
   2826             r'|schar|u?int8|u?int16|u?int32|u?int64)'
   2827             r'\s+(register|static|extern|typedef)\b',
   2828             line):
   2829     error(filename, linenum, 'build/storage_class', 5,
   2830           'Storage-class specifier (static, extern, typedef, etc) should be '
   2831           'at the beginning of the declaration.')
   2832 
   2833   if Match(r'\s*#\s*endif\s*[^/\s]+', line):
   2834     error(filename, linenum, 'build/endif_comment', 5,
   2835           'Uncommented text after #endif is non-standard.  Use a comment.')
   2836 
   2837   if Match(r'\s*class\s+(\w+\s*::\s*)+\w+\s*;', line):
   2838     error(filename, linenum, 'build/forward_decl', 5,
   2839           'Inner-style forward declarations are invalid.  Remove this line.')
   2840 
   2841   if Search(r'(\w+|[+-]?\d+(\.\d*)?)\s*(<|>)\?=?\s*(\w+|[+-]?\d+)(\.\d*)?',
   2842             line):
   2843     error(filename, linenum, 'build/deprecated', 3,
   2844           '>? and <? (max and min) operators are non-standard and deprecated.')
   2845 
   2846   if Search(r'^\s*const\s*string\s*&\s*\w+\s*;', line):
   2847     # TODO(unknown): Could it be expanded safely to arbitrary references,
   2848     # without triggering too many false positives? The first
   2849     # attempt triggered 5 warnings for mostly benign code in the regtest, hence
   2850     # the restriction.
   2851     # Here's the original regexp, for reference:
   2852     # type_name = r'\w+((\s*::\s*\w+)|(\s*<\s*\w+?\s*>))?'
   2853     # r'\s*const\s*' + type_name + '\s*&\s*\w+\s*;'
   2854     error(filename, linenum, 'runtime/member_string_references', 2,
   2855           'const string& members are dangerous. It is much better to use '
   2856           'alternatives, such as pointers or simple constants.')
   2857 
   2858   # Everything else in this function operates on class declarations.
   2859   # Return early if the top of the nesting stack is not a class, or if
   2860   # the class head is not completed yet.
   2861   classinfo = nesting_state.InnermostClass()
   2862   if not classinfo or not classinfo.seen_open_brace:
   2863     return
   2864 
   2865   # The class may have been declared with namespace or classname qualifiers.
   2866   # The constructor and destructor will not have those qualifiers.
   2867   base_classname = classinfo.name.split('::')[-1]
   2868 
   2869   # Look for single-argument constructors that aren't marked explicit.
   2870   # Technically a valid construct, but against style.
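          # For illustration, hypothetical constructors of a class Foo and how
          # the check below is meant to treat them:
          #   Foo(int x);                         <- flag, callable with one argument
          #   Foo(int x, int y = 0);              <- flag, callable with one argument
          #   explicit Foo(int x);                <- allow, marked explicit
          #   Foo(const Foo& other);              <- allow, copy constructor
          #   Foo(std::initializer_list<int> v);  <- allow, initializer-list constructor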
   2871   explicit_constructor_match = Match(
   2872       r'\s+(?:(?:inline|constexpr)\s+)*(explicit\s+)?'
   2873       r'(?:(?:inline|constexpr)\s+)*%s\s*'
   2874       r'\(((?:[^()]|\([^()]*\))*)\)'
   2875       % re.escape(base_classname),
   2876       line)
   2877 
   2878   if explicit_constructor_match:
   2879     is_marked_explicit = explicit_constructor_match.group(1)
   2880 
   2881     if not explicit_constructor_match.group(2):
   2882       constructor_args = []
   2883     else:
   2884       constructor_args = explicit_constructor_match.group(2).split(',')
   2885 
   2886     # Collapse arguments so that commas in template parameter lists and
   2887     # nested function parameter lists don't split a single argument in two.
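            # For illustration (hypothetical argument list): splitting
            # "std::map<int, int> m" on ',' first yields ['std::map<int', ' int> m'];
            # the loop below re-joins them into ['std::map<int, int> m'] by
            # balancing '<'/'>' and '('/')'.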
   2888     i = 0
   2889     while i < len(constructor_args):
   2890       constructor_arg = constructor_args[i]
   2891       while (constructor_arg.count('<') > constructor_arg.count('>') or
   2892              constructor_arg.count('(') > constructor_arg.count(')')):
   2893         constructor_arg += ',' + constructor_args[i + 1]
   2894         del constructor_args[i + 1]
   2895       constructor_args[i] = constructor_arg
   2896       i += 1
   2897 
   2898     defaulted_args = [arg for arg in constructor_args if '=' in arg]
   2899     noarg_constructor = (not constructor_args or  # empty arg list
   2900                          # 'void' arg specifier
   2901                          (len(constructor_args) == 1 and
   2902                           constructor_args[0].strip() == 'void'))
   2903     onearg_constructor = ((len(constructor_args) == 1 and  # exactly one arg
   2904                            not noarg_constructor) or
   2905                           # all but at most one arg defaulted
   2906                           (len(constructor_args) >= 1 and
   2907                            not noarg_constructor and
   2908                            len(defaulted_args) >= len(constructor_args) - 1))
   2909     initializer_list_constructor = bool(
   2910         onearg_constructor and
   2911         Search(r'\bstd\s*::\s*initializer_list\b', constructor_args[0]))
   2912     copy_constructor = bool(
   2913         onearg_constructor and
   2914         Match(r'(const\s+)?%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&'
   2915               % re.escape(base_classname), constructor_args[0].strip()))
   2916 
   2917     if (not is_marked_explicit and
   2918         onearg_constructor and
   2919         not initializer_list_constructor and
   2920         not copy_constructor):
   2921       if defaulted_args:
   2922         error(filename, linenum, 'runtime/explicit', 5,
   2923               'Constructors callable with one argument '
   2924               'should be marked explicit.')
   2925       else:
   2926         error(filename, linenum, 'runtime/explicit', 5,
   2927               'Single-parameter constructors should be marked explicit.')
   2928     elif is_marked_explicit and not onearg_constructor:
   2929       if noarg_constructor:
   2930         error(filename, linenum, 'runtime/explicit', 5,
   2931               'Zero-parameter constructors should not be marked explicit.')
   2932 
   2933 
   2934 def CheckSpacingForFunctionCall(filename, clean_lines, linenum, error):
   2935   """Checks for the correctness of various spacing around function calls.
   2936 
   2937   Args:
   2938     filename: The name of the current file.
   2939     clean_lines: A CleansedLines instance containing the file.
   2940     linenum: The number of the line to check.
   2941     error: The function to call with any errors found.
   2942   """
   2943   line = clean_lines.elided[linenum]
   2944 
   2945   # Since function calls often occur inside if/for/while/switch
   2946   # expressions - which have their own, more liberal conventions - we
   2947   # first see if we should be looking inside such an expression for a
   2948   # function call, to which we can apply more strict standards.
   2949   fncall = line    # if there's no control flow construct, look at whole line
   2950   for pattern in (r'\bif\s*\((.*)\)\s*{',
   2951                   r'\bfor\s*\((.*)\)\s*{',
   2952                   r'\bwhile\s*\((.*)\)\s*[{;]',
   2953                   r'\bswitch\s*\((.*)\)\s*{'):
   2954     match = Search(pattern, line)
   2955     if match:
   2956       fncall = match.group(1)    # look inside the parens for function calls
   2957       break
   2958 
   2959   # Except in if/for/while/switch, there should never be space
   2960   # immediately inside parens (e.g. "f( 3, 4 )").  We make an exception
   2961   # for nested parens ( (a+b) + c ).  Likewise, there should never be
   2962   # a space before a ( when it's a function argument.  I assume it's a
   2963   # function argument when the char before the whitespace is legal in
   2964   # a function name (alnum + _) and we're not starting a macro. Also ignore
   2965   # pointers and references to arrays and functions because they're too tricky:
   2966   # we use a very simple way to recognize these:
   2967   # " (something)(maybe-something)" or
   2968   # " (something)(maybe-something," or
   2969   # " (something)[something]"
   2970   # Note that we assume the contents of [] to be short enough that
   2971   # they'll never need to wrap.
   2972   if (  # Ignore control structures.
   2973       not Search(r'\b(if|for|while|switch|return|new|delete|catch|sizeof)\b',
   2974                  fncall) and
   2975       # Ignore pointers/references to functions.
   2976       not Search(r' \([^)]+\)\([^)]*(\)|,$)', fncall) and
   2977       # Ignore pointers/references to arrays.
   2978       not Search(r' \([^)]+\)\[[^\]]+\]', fncall)):
   2979     if Search(r'\w\s*\(\s(?!\s*\\$)', fncall):      # a ( used for a fn call
   2980       error(filename, linenum, 'whitespace/parens', 4,
   2981             'Extra space after ( in function call')
   2982     elif Search(r'\(\s+(?!(\s*\\)|\()', fncall):
   2983       error(filename, linenum, 'whitespace/parens', 2,
   2984             'Extra space after (')
   2985     if (Search(r'\w\s+\(', fncall) and
   2986         not Search(r'_{0,2}asm_{0,2}\s+_{0,2}volatile_{0,2}\s+\(', fncall) and
   2987         not Search(r'#\s*define|typedef|using\s+\w+\s*=', fncall) and
   2988         not Search(r'\w\s+\((\w+::)*\*\w+\)\(', fncall) and
   2989         not Search(r'\bcase\s+\(', fncall)):
   2990       # TODO(unknown): Space after an operator function seems to be a common
   2991       # error; silence those for now by restricting them to the highest verbosity.
   2992       if Search(r'\boperator_*\b', line):
   2993         error(filename, linenum, 'whitespace/parens', 0,
   2994               'Extra space before ( in function call')
   2995       else:
   2996         error(filename, linenum, 'whitespace/parens', 4,
   2997               'Extra space before ( in function call')
   2998     # If the ) is followed only by a newline or a { + newline, assume it's
   2999     # part of a control statement (if/while/etc), and don't complain
   3000     if Search(r'[^)]\s+\)\s*[^{\s]', fncall):
   3001       # If the closing parenthesis is preceded by only whitespaces,
   3002       # try to give a more descriptive error message.
   3003       if Search(r'^\s+\)', fncall):
   3004         error(filename, linenum, 'whitespace/parens', 2,
   3005               'Closing ) should be moved to the previous line')
   3006       else:
   3007         error(filename, linenum, 'whitespace/parens', 2,
   3008               'Extra space before )')
   3009 
   3010 
   3011 def IsBlankLine(line):
   3012   """Returns true if the given line is blank.
   3013 
   3014   We consider a line to be blank if the line is empty or consists of
   3015   only whitespace.
   3016 
   3017   Args:
   3018     line: A line of a string.
   3019 
   3020   Returns:
   3021     True, if the given line is blank.
   3022   """
   3023   return not line or line.isspace()
   3024 
   3025 
   3026 def CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line,
   3027                                  error):
   3028   is_namespace_indent_item = (
   3029       len(nesting_state.stack) > 1 and
   3030       nesting_state.stack[-1].check_namespace_indentation and
   3031       isinstance(nesting_state.previous_stack_top, _NamespaceInfo) and
   3032       nesting_state.previous_stack_top == nesting_state.stack[-2])
   3033 
   3034   if ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item,
   3035                                      clean_lines.elided, line):
   3036     CheckItemIndentationInNamespace(filename, clean_lines.elided,
   3037                                     line, error)
   3038 
   3039 
   3040 def CheckForFunctionLengths(filename, clean_lines, linenum,
   3041                             function_state, error):
   3042   """Reports for long function bodies.
   3043 
   3044   For an overview why this is done, see:
   3045   https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Write_Short_Functions
   3046 
   3047   Uses a simplistic algorithm assuming other style guidelines
   3048   (especially spacing) are followed.
   3049   Only checks unindented functions, so class members are unchecked.
   3050   Trivial bodies are unchecked, so constructors with huge initializer lists
   3051   may be missed.
   3052   Blank/comment lines are not counted so as to avoid encouraging the removal
   3053   of vertical space and comments just to get through a lint check.
   3054   NOLINT *on the last line of a function* disables this check.
   3055 
   3056   Args:
   3057     filename: The name of the current file.
   3058     clean_lines: A CleansedLines instance containing the file.
   3059     linenum: The number of the line to check.
   3060     function_state: Current function name and lines in body so far.
   3061     error: The function to call with any errors found.
   3062   """
   3063   lines = clean_lines.lines
   3064   line = lines[linenum]
   3065   joined_line = ''
   3066 
   3067   starting_func = False
   3068   regexp = r'(\w(\w|::|\*|\&|\s)*)\('  # decls * & space::name( ...
   3069   match_result = Match(regexp, line)
   3070   if match_result:
   3071     # If the name is all caps and underscores, figure it's a macro and
   3072     # ignore it, unless it's TEST or TEST_F.
   3073     function_name = match_result.group(1).split()[-1]
   3074     if function_name == 'TEST' or function_name == 'TEST_F' or (
   3075         not Match(r'[A-Z_]+$', function_name)):
   3076       starting_func = True
   3077 
   3078   if starting_func:
   3079     body_found = False
   3080     for start_linenum in xrange(linenum, clean_lines.NumLines()):
   3081       start_line = lines[start_linenum]
   3082       joined_line += ' ' + start_line.lstrip()
   3083       if Search(r'(;|})', start_line):  # Declarations and trivial functions
   3084         body_found = True
   3085         break                              # ... ignore
   3086       elif Search(r'{', start_line):
   3087         body_found = True
   3088         function = Search(r'((\w|:)*)\(', line).group(1)
   3089         if Match(r'TEST', function):    # Handle TEST... macros
   3090           parameter_regexp = Search(r'(\(.*\))', joined_line)
   3091           if parameter_regexp:             # Ignore bad syntax
   3092             function += parameter_regexp.group(1)
   3093         else:
   3094           function += '()'
   3095         function_state.Begin(function)
   3096         break
   3097     if not body_found:
   3098       # No body for the function (or evidence of a non-function) was found.
   3099       error(filename, linenum, 'readability/fn_size', 5,
   3100             'Lint failed to find start of function body.')
   3101   elif Match(r'^\}\s*$', line):  # function end
   3102     function_state.Check(error, filename, linenum)
   3103     function_state.End()
   3104   elif not Match(r'^\s*$', line):
   3105     function_state.Count()  # Count non-blank/non-comment lines.
   3106 
   3107 
   3108 _RE_PATTERN_TODO = re.compile(r'^//(\s*)TODO(\(.+?\))?:?(\s|$)?')
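        # For illustration (hypothetical comment), matching '// TODO(alice): fix it'
        # captures group(1) == ' ' (whitespace before TODO), group(2) == '(alice)'
        # (the username), and group(3) == ' ' (the whitespace after the colon).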
   3109 
   3110 
   3111 def CheckComment(line, filename, linenum, next_line_start, error):
   3112   """Checks for common mistakes in comments.
   3113 
   3114   Args:
   3115     line: The line in question.
   3116     filename: The name of the current file.
   3117     linenum: The number of the line to check.
   3118     next_line_start: The first non-whitespace column of the next line.
   3119     error: The function to call with any errors found.
   3120   """
   3121   commentpos = line.find('//')
   3122   if commentpos != -1:
   3123     # Check if the // may be in quotes.  If so, ignore it
   3124     if re.sub(r'\\.', '', line[0:commentpos]).count('"') % 2 == 0:
   3125       # Allow one space for new scopes, two spaces otherwise:
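              # For illustration (hypothetical lines):
              #   DoStuff();  // OK, two spaces between code and comment
              #   DoStuff(); // flagged, only one space before the //
              # A '{' that opens a new scope may have a single space before the //
              # when the comment lines up with the code on the next line.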
   3126       if (not (Match(r'^.*{ *//', line) and next_line_start == commentpos) and
   3127           ((commentpos >= 1 and
   3128             line[commentpos-1] not in string.whitespace) or
   3129            (commentpos >= 2 and
   3130             line[commentpos-2] not in string.whitespace))):
   3131         error(filename, linenum, 'whitespace/comments', 2,
   3132               'At least two spaces is best between code and comments')
   3133 
   3134       # Checks for common mistakes in TODO comments.
   3135       comment = line[commentpos:]
   3136       match = _RE_PATTERN_TODO.match(comment)
   3137       if match:
   3138         # One whitespace is correct; zero whitespace is handled elsewhere.
   3139         leading_whitespace = match.group(1)
   3140         if len(leading_whitespace) > 1:
   3141           error(filename, linenum, 'whitespace/todo', 2,
   3142                 'Too many spaces before TODO')
   3143 
   3144         username = match.group(2)
   3145         if not username:
   3146           error(filename, linenum, 'readability/todo', 2,
   3147                 'Missing username in TODO; it should look like '
   3148                 '"// TODO(my_username): Stuff."')
   3149 
   3150         middle_whitespace = match.group(3)
   3151         # Comparisons made explicit for correctness -- pylint: disable=g-explicit-bool-comparison
   3152         if middle_whitespace != ' ' and middle_whitespace != '':
   3153           error(filename, linenum, 'whitespace/todo', 2,
   3154                 'TODO(my_username) should be followed by a space')
   3155 
   3156       # If the comment contains an alphanumeric character, there
   3157       # should be a space somewhere between it and the // unless
   3158       # it's a /// or //! Doxygen comment.
   3159       if (Match(r'//[^ ]*\w', comment) and
   3160           not Match(r'(///|//\!)(\s+|$)', comment)):
   3161         error(filename, linenum, 'whitespace/comments', 4,
   3162               'Should have a space between // and comment')
   3163 
   3164 
   3165 def CheckSpacing(filename, clean_lines, linenum, nesting_state, error):
   3166   """Checks for the correctness of various spacing issues in the code.
   3167 
   3168   Things we check for: spaces around operators, spaces after
   3169   if/for/while/switch, no spaces around parens in function calls, two
   3170   spaces between code and comment, don't start a block with a blank
   3171   line, don't end a function with a blank line, don't add a blank line
   3172   after public/protected/private, don't have too many blank lines in a row.
   3173 
   3174   Args:
   3175     filename: The name of the current file.
   3176     clean_lines: A CleansedLines instance containing the file.
   3177     linenum: The number of the line to check.
   3178     nesting_state: A NestingState instance which maintains information about
   3179                    the current stack of nested blocks being parsed.
   3180     error: The function to call with any errors found.
   3181   """
   3182 
   3183   # Don't use "elided" lines here, otherwise we can't check commented lines.
   3184   # Don't want to use "raw" either, because we don't want to check inside C++11
   3185   # raw strings.
   3186   raw = clean_lines.lines_without_raw_strings
   3187   line = raw[linenum]
   3188 
   3189   # Before nixing comments, check if the line is blank for no good
   3190   # reason.  This includes the first line after a block is opened, and
   3191   # blank lines at the end of a function (i.e., right before a line like '}').
   3192   #
   3193   # Skip all the blank line checks if we are immediately inside a
   3194   # namespace body.  In other words, don't issue blank line warnings
   3195   # for this block:
   3196   #   namespace {
   3197   #
   3198   #   }
   3199   #
   3200   # A warning about missing end of namespace comments will be issued instead.
   3201   #
   3202   # Also skip blank line checks for 'extern "C"' blocks, which are formatted
   3203   # like namespaces.
   3204   if (IsBlankLine(line) and
   3205       not nesting_state.InNamespaceBody() and
   3206       not nesting_state.InExternC()):
   3207     elided = clean_lines.elided
   3208     prev_line = elided[linenum - 1]
   3209     prevbrace = prev_line.rfind('{')
   3210     # TODO(unknown): Don't complain if line before blank line, and line after,
   3211     #                both start with alnums and are indented the same amount.
   3212     #                This ignores whitespace at the start of a namespace block
   3213     #                because those are not usually indented.
   3214     if prevbrace != -1 and prev_line[prevbrace:].find('}') == -1:
   3215       # OK, we have a blank line at the start of a code block.  Before we
   3216       # complain, we check if it is an exception to the rule: The previous
   3217       # non-empty line has the parameters of a function header that are indented
   3218       # 4 spaces (because they did not fit in an 80-column line when placed on
   3219       # the same line as the function name).  We also check for the case where
   3220       # the previous line is indented 6 spaces, which may happen when the
   3221       # initializers of a constructor do not fit into an 80-column line.
   3222       exception = False
   3223       if Match(r' {6}\w', prev_line):  # Initializer list?
   3224         # We are looking for the opening column of the initializer list, which
   3225         # should be indented 4 spaces to cause 6-space indentation afterwards.
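                # For illustration, the hypothetical shape this heuristic recognizes:
                #   Foo::Foo()
                #       : member_one_(1),   <- ':' indented 4 spaces
                #         member_two_(2) {  <- continuation indented 6 spaces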
   3226         search_position = linenum-2
   3227         while (search_position >= 0
   3228                and Match(r' {6}\w', elided[search_position])):
   3229           search_position -= 1
   3230         exception = (search_position >= 0
   3231                      and elided[search_position][:5] == '    :')
   3232       else:
   3233         # Search for the function arguments or an initializer list.  We use a
   3234         # simple heuristic here: if the line is indented 4 spaces and we have a
   3235         # closing paren without the opening paren, followed by an opening brace
   3236         # or colon (for initializer lists), we assume that it is the last line of
   3237         # a function header.  If we have a colon indented 4 spaces, it is an
   3238         # initializer list.
   3239         exception = (Match(r' {4}\w[^\(]*\)\s*(const\s*)?(\{\s*$|:)',
   3240                            prev_line)
   3241                      or Match(r' {4}:', prev_line))
   3242 
   3243       if not exception:
   3244         error(filename, linenum, 'whitespace/blank_line', 2,
   3245               'Redundant blank line at the start of a code block '
   3246               'should be deleted.')
   3247     # Ignore blank lines at the end of a block in a long if-else
   3248     # chain, like this:
   3249     #   if (condition1) {
   3250     #     // Something followed by a blank line
   3251     #
   3252     #   } else if (condition2) {
   3253     #     // Something else
   3254     #   }
   3255     if linenum + 1 < clean_lines.NumLines():
   3256       next_line = raw[linenum + 1]
   3257       if (next_line
   3258           and Match(r'\s*}', next_line)
   3259           and next_line.find('} else ') == -1):
   3260         error(filename, linenum, 'whitespace/blank_line', 3,
   3261               'Redundant blank line at the end of a code block '
   3262               'should be deleted.')
   3263 
   3264     matched = Match(r'\s*(public|protected|private):', prev_line)
   3265     if matched:
   3266       error(filename, linenum, 'whitespace/blank_line', 3,
   3267             'Do not leave a blank line after "%s:"' % matched.group(1))
   3268 
   3269   # Next, check comments
   3270   next_line_start = 0
   3271   if linenum + 1 < clean_lines.NumLines():
   3272     next_line = raw[linenum + 1]
   3273     next_line_start = len(next_line) - len(next_line.lstrip())
   3274   CheckComment(line, filename, linenum, next_line_start, error)
   3275 
   3276   # get rid of comments and strings
   3277   line = clean_lines.elided[linenum]
   3278 
   3279   # You shouldn't have spaces before your brackets, except maybe after
   3280   # 'delete []' or 'return []() {};'
   3281   if Search(r'\w\s+\[', line) and not Search(r'(?:delete|return)\s+\[', line):
   3282     error(filename, linenum, 'whitespace/braces', 5,
   3283           'Extra space before [')
   3284 
   3285   # In range-based for, we want spaces before and after the colon, but
   3286   # not around "::" tokens that might appear.
   3287   if (Search(r'for *\(.*[^:]:[^: ]', line) or
   3288       Search(r'for *\(.*[^: ]:[^:]', line)):
   3289     error(filename, linenum, 'whitespace/forcolon', 2,
   3290           'Missing space around colon in range-based for loop')
   3291 
   3292 
   3293 def CheckOperatorSpacing(filename, clean_lines, linenum, error):
   3294   """Checks for horizontal spacing around operators.
   3295 
   3296   Args:
   3297     filename: The name of the current file.
   3298     clean_lines: A CleansedLines instance containing the file.
   3299     linenum: The number of the line to check.
   3300     error: The function to call with any errors found.
   3301   """
   3302   line = clean_lines.elided[linenum]
   3303 
   3304   # Don't try to do spacing checks for operator methods.  Do this by
   3305   # replacing the troublesome characters with something else,
   3306   # preserving column position for all other characters.
   3307   #
   3308   # The replacement is done repeatedly to avoid false positives from
   3309   # operators that call operators.
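          # For illustration (hypothetical line), 'bool operator==(const Foo& a)'
          # becomes 'bool operator__(const Foo& a)' before the spacing checks run,
          # so the '==' in the operator's name is not reported.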
   3310   while True:
   3311     match = Match(r'^(.*\boperator\b)(\S+)(\s*\(.*)$', line)
   3312     if match:
   3313       line = match.group(1) + ('_' * len(match.group(2))) + match.group(3)
   3314     else:
   3315       break
   3316 
   3317   # We allow no-spaces around = within an if: "if ( (a=Foo()) == 0 )".
   3318   # Otherwise not.  Note we only check for non-spaces on *both* sides;
   3319   # sometimes people put non-spaces on one side when aligning ='s among
   3320   # many lines (not that this is behavior that I approve of...)
   3321   if ((Search(r'[\w.]=', line) or
   3322        Search(r'=[\w.]', line))
   3323       and not Search(r'\b(if|while|for) ', line)
   3324       # Operators taken from [lex.operators] in C++11 standard.
   3325       and not Search(r'(>=|<=|==|!=|&=|\^=|\|=|\+=|\*=|\/=|\%=)', line)
   3326       and not Search(r'operator=', line)):
   3327     error(filename, linenum, 'whitespace/operators', 4,
   3328           'Missing spaces around =')
   3329 
   3330   # It's ok not to have spaces around binary operators like + - * /, but if
   3331   # there's too little whitespace, we get concerned.  It's hard to tell,
   3332   # though, so we punt on this one for now.  TODO.
   3333 
   3334   # You should always have whitespace around binary operators.
   3335   #
   3336   # Check <= and >= first to avoid false positives with < and >, then
   3337   # check non-include lines for spacing around < and >.
   3338   #
   3339   # If the operator is followed by a comma, assume it's being used in a
   3340   # macro context and don't do any checks.  This avoids false
   3341   # positives.
   3342   #
   3343   # Note that && is not included here.  This is because there are too
   3344   # many false positives due to RValue references.
   3345   match = Search(r'[^<>=!\s](==|!=|<=|>=|\|\|)[^<>=!\s,;\)]', line)
   3346   if match:
   3347     error(filename, linenum, 'whitespace/operators', 3,
   3348           'Missing spaces around %s' % match.group(1))
   3349   elif not Match(r'#.*include', line):
   3350     # Look for < that is not surrounded by spaces.  This is only
   3351     # triggered if both sides are missing spaces, even though
   3352     # technically we should flag if at least one side is missing a
   3353     # space.  This is done to avoid some false positives with shifts.
   3354     match = Match(r'^(.*[^\s<])<[^\s=<,]', line)
   3355     if match:
   3356       (_, _, end_pos) = CloseExpression(
   3357           clean_lines, linenum, len(match.group(1)))
   3358       if end_pos <= -1:
   3359         error(filename, linenum, 'whitespace/operators', 3,
   3360               'Missing spaces around <')
   3361 
   3362     # Look for > that is not surrounded by spaces.  Similar to the
   3363     # above, we only trigger if both sides are missing spaces to avoid
   3364     # false positives with shifts.
   3365     match = Match(r'^(.*[^-\s>])>[^\s=>,]', line)
   3366     if match:
   3367       (_, _, start_pos) = ReverseCloseExpression(
   3368           clean_lines, linenum, len(match.group(1)))
   3369       if start_pos <= -1:
   3370         error(filename, linenum, 'whitespace/operators', 3,
   3371               'Missing spaces around >')
   3372 
   3373   # We allow no-spaces around << when used like this: 10<<20, but
   3374   # not otherwise (particularly, not when used as streams)
   3375   #
   3376   # We also allow operators following an opening parenthesis, since
   3377   # those tend to be macros that deal with operators.
   3378   match = Search(r'(operator|[^\s(<])(?:L|UL|LL|ULL|l|ul|ll|ull)?<<([^\s,=<])', line)
   3379   if (match and not (match.group(1).isdigit() and match.group(2).isdigit()) and
   3380       not (match.group(1) == 'operator' and match.group(2) == ';')):
   3381     error(filename, linenum, 'whitespace/operators', 3,
   3382           'Missing spaces around <<')
   3383 
   3384   # We allow no-spaces around >> for almost anything.  This is because
   3385   # C++11 allows ">>" to close nested templates, which accounts for
   3386   # most cases when ">>" is not followed by a space.
   3387   #
   3388   # We still warn on ">>" followed by alpha character, because that is
   3389   # likely due to ">>" being used for right shifts, e.g.:
   3390   #   value >> alpha
   3391   #
   3392   # When ">>" is used to close templates, the alphanumeric letter that
   3393   # follows would be part of an identifier, and there should still be
   3394   # a space separating the template type and the identifier.
   3395   #   type<type<type>> alpha
   3396   match = Search(r'>>[a-zA-Z_]', line)
   3397   if match:
   3398     error(filename, linenum, 'whitespace/operators', 3,
   3399           'Missing spaces around >>')
   3400 
   3401   # There shouldn't be space around unary operators
   3402   match = Search(r'(!\s|~\s|[\s]--[\s;]|[\s]\+\+[\s;])', line)
   3403   if match:
   3404     error(filename, linenum, 'whitespace/operators', 4,
   3405           'Extra space for operator %s' % match.group(1))
   3406 
   3407 
   3408 def CheckParenthesisSpacing(filename, clean_lines, linenum, error):
   3409   """Checks for horizontal spacing around parentheses.
   3410 
   3411   Args:
   3412     filename: The name of the current file.
   3413     clean_lines: A CleansedLines instance containing the file.
   3414     linenum: The number of the line to check.
   3415     error: The function to call with any errors found.
   3416   """
   3417   line = clean_lines.elided[linenum]
   3418 
   3419   # Flag a missing space between if/for/while/switch and the opening paren.
   3420   match = Search(r' (if\(|for\(|while\(|switch\()', line)
   3421   if match:
   3422     error(filename, linenum, 'whitespace/parens', 5,
   3423           'Missing space before ( in %s' % match.group(1))
   3424 
   3425   # For if/for/while/switch, the left and right parens should be
   3426   # consistent about how many spaces are inside the parens, and
   3427   # there should either be zero or one spaces inside the parens.
   3428   # We don't want: "if ( foo)" or "if ( foo   )".
   3429   # Exception: "for ( ; foo; bar)" and "for (foo; bar; )" are allowed.
   3430   match = Search(r'\b(if|for|while|switch)\s*'
   3431                  r'\(([ ]*)(.).*[^ ]+([ ]*)\)\s*{\s*$',
   3432                  line)
   3433   if match:
   3434     if len(match.group(2)) != len(match.group(4)):
   3435       if not (match.group(3) == ';' and
   3436               len(match.group(2)) == 1 + len(match.group(4)) or
   3437               not match.group(2) and Search(r'\bfor\s*\(.*; \)', line)):
   3438         error(filename, linenum, 'whitespace/parens', 5,
   3439               'Mismatching spaces inside () in %s' % match.group(1))
   3440     if len(match.group(2)) not in [0, 1]:
   3441       error(filename, linenum, 'whitespace/parens', 5,
   3442             'Should have zero or one spaces inside ( and ) in %s' %
   3443             match.group(1))
   3444 
   3445 
   3446 def CheckCommaSpacing(filename, clean_lines, linenum, error):
   3447   """Checks for horizontal spacing near commas and semicolons.
   3448 
   3449   Args:
   3450     filename: The name of the current file.
   3451     clean_lines: A CleansedLines instance containing the file.
   3452     linenum: The number of the line to check.
   3453     error: The function to call with any errors found.
   3454   """
   3455   raw = clean_lines.lines_without_raw_strings
   3456   line = clean_lines.elided[linenum]
   3457 
   3458   # You should always have a space after a comma (either as fn arg or operator)
   3459   #
   3460   # This does not apply when the non-space character following the
   3461   # comma is another comma, since the only time when that happens is
   3462   # for empty macro arguments.
   3463   #
   3464   # We run this check in two passes: first pass on elided lines to
   3465   # verify that lines contain missing whitespace, second pass on raw
   3466   # lines to confirm that those missing whitespaces are not due to
   3467   # elided comments.
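          # For illustration (hypothetical lines):
          #   f(a, b);          <- OK
          #   f(a,b);           <- flagged, missing space after ','
          #   FOO(a,, b);       <- not flagged, ',,' marks an empty macro argument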
   3468   if (Search(r',[^,\s]', ReplaceAll(r'\boperator\s*,\s*\(', 'F(', line)) and
   3469       Search(r',[^,\s]', raw[linenum])):
   3470     error(filename, linenum, 'whitespace/comma', 3,
   3471           'Missing space after ,')
   3472 
   3473   # You should always have a space after a semicolon
   3474   # except for a few corner cases.
   3475   # TODO(unknown): clarify whether 'if (1) { return 1;}' requires one more
   3476   # space after the ;
   3477   if Search(r';[^\s};\\)/]', line):
   3478     error(filename, linenum, 'whitespace/semicolon', 3,
   3479           'Missing space after ;')
   3480 
   3481 
   3482 def _IsType(clean_lines, nesting_state, expr):
   3483   """Check if expression looks like a type name, returns true if so.
   3484 
   3485   Args:
   3486     clean_lines: A CleansedLines instance containing the file.
   3487     nesting_state: A NestingState instance which maintains information about
   3488                    the current stack of nested blocks being parsed.
   3489     expr: The expression to check.
   3490   Returns:
   3491     True, if token looks like a type.
   3492   """
   3493   # Keep only the last token in the expression
   3494   last_word = Match(r'^.*(\b\S+)$', expr)
   3495   if last_word:
   3496     token = last_word.group(1)
   3497   else:
   3498     token = expr
   3499 
   3500   # Match native types and stdint types
   3501   if _TYPES.match(token):
   3502     return True
   3503 
   3504   # Try a bit harder to match templated types.  Walk up the nesting
   3505   # stack until we find something that resembles a typename
   3506   # declaration for what we are looking for.
   3507   typename_pattern = (r'\b(?:typename|class|struct)\s+' + re.escape(token) +
   3508                       r'\b')
   3509   block_index = len(nesting_state.stack) - 1
   3510   while block_index >= 0:
   3511     if isinstance(nesting_state.stack[block_index], _NamespaceInfo):
   3512       return False
   3513 
   3514     # Found where the opening brace is.  We want to scan from this
   3515     # line up to the beginning of the function, minus a few lines.
   3516     #   template <typename Type1,  // stop scanning here
   3517     #             ...>
   3518     #   class C
   3519     #     : public ... {  // start scanning here
   3520     last_line = nesting_state.stack[block_index].starting_linenum
   3521 
   3522     next_block_start = 0
   3523     if block_index > 0:
   3524       next_block_start = nesting_state.stack[block_index - 1].starting_linenum
   3525     first_line = last_line
   3526     while first_line >= next_block_start:
   3527       if clean_lines.elided[first_line].find('template') >= 0:
   3528         break
   3529       first_line -= 1
   3530     if first_line < next_block_start:
   3531       # Didn't find any "template" keyword before reaching the next block,
   3532       # so there are probably no template things to check for this block.
   3533       block_index -= 1
   3534       continue
   3535 
   3536     # Look for typename in the specified range
   3537     for i in xrange(first_line, last_line + 1, 1):
   3538       if Search(typename_pattern, clean_lines.elided[i]):
   3539         return True
   3540     block_index -= 1
   3541 
   3542   return False
   3543 
   3544 
   3545 def CheckBracesSpacing(filename, clean_lines, linenum, nesting_state, error):
   3546   """Checks for horizontal spacing near commas.
   3547 
   3548   Args:
   3549     filename: The name of the current file.
   3550     clean_lines: A CleansedLines instance containing the file.
   3551     linenum: The number of the line to check.
   3552     nesting_state: A NestingState instance which maintains information about
   3553                    the current stack of nested blocks being parsed.
   3554     error: The function to call with any errors found.
   3555   """
   3556   line = clean_lines.elided[linenum]
   3557 
   3558   # Except after an opening paren, or after another opening brace (in case of
   3559   # an initializer list, for instance), you should have spaces before your
   3560   # braces when they are delimiting blocks, classes, namespaces etc.
   3561   # And since you should never have braces at the beginning of a line,
   3562   # this is an easy test.  Except that braces used for initialization don't
   3563   # follow the same rule; we often don't want spaces before those.
   3564   match = Match(r'^(.*[^ ({>]){', line)
   3565 
   3566   if match:
   3567     # Try a bit harder to check for brace initialization.  This
   3568     # happens in one of the following forms:
   3569     #   Constructor() : initializer_list_{} { ... }
   3570     #   Constructor{}.MemberFunction()
   3571     #   Type variable{};
   3572     #   FunctionCall(type{}, ...);
   3573     #   LastArgument(..., type{});
   3574     #   LOG(INFO) << type{} << " ...";
   3575     #   map_of_type[{...}] = ...;
   3576     #   ternary = expr ? new type{} : nullptr;
   3577     #   OuterTemplate<InnerTemplateConstructor<Type>{}>
   3578     #
   3579     # We check for the character following the closing brace, and
   3580     # silence the warning if it's one of those listed above, i.e.
   3581     # "{.;,)<>]:".
   3582     #
   3583     # To account for nested initializer list, we allow any number of
   3584     # closing braces up to "{;,)<".  We can't simply silence the
   3585     # warning on first sight of closing brace, because that would
   3586     # cause false negatives for things that are not initializer lists.
   3587     #   Silence this:         But not this:
   3588     #     Outer{                if (...) {
   3589     #       Inner{...}            if (...){  // Missing space before {
   3590     #     };                    }
   3591     #
   3592     # There is a false negative with this approach if people inserted
   3593     # spurious semicolons, e.g. "if (cond){};", but we will catch the
   3594     # spurious semicolon with a separate check.
   3595     leading_text = match.group(1)
   3596     (endline, endlinenum, endpos) = CloseExpression(
   3597         clean_lines, linenum, len(match.group(1)))
   3598     trailing_text = ''
   3599     if endpos > -1:
   3600       trailing_text = endline[endpos:]
   3601     for offset in xrange(endlinenum + 1,
   3602                          min(endlinenum + 3, clean_lines.NumLines() - 1)):
   3603       trailing_text += clean_lines.elided[offset]
   3604     # We also suppress warnings for `uint64_t{expression}` etc., as the style
   3605     # guide recommends brace initialization for integral types to avoid
   3606     # overflow/truncation.
   3607     if (not Match(r'^[\s}]*[{.;,)<>\]:]', trailing_text)
   3608         and not _IsType(clean_lines, nesting_state, leading_text)):
   3609       error(filename, linenum, 'whitespace/braces', 5,
   3610             'Missing space before {')
   3611 
   3612   # Make sure '} else {' has spaces.
   3613   if Search(r'}else', line):
   3614     error(filename, linenum, 'whitespace/braces', 5,
   3615           'Missing space before else')
   3616 
   3617   # You shouldn't have a space before a semicolon at the end of the line.
   3618   # There's a special case for "for" since the style guide allows space before
   3619   # the semicolon there.
   3620   if Search(r':\s*;\s*$', line):
   3621     error(filename, linenum, 'whitespace/semicolon', 5,
   3622           'Semicolon defining empty statement. Use {} instead.')
   3623   elif Search(r'^\s*;\s*$', line):
   3624     error(filename, linenum, 'whitespace/semicolon', 5,
   3625           'Line contains only semicolon. If this should be an empty statement, '
   3626           'use {} instead.')
   3627   elif (Search(r'\s+;\s*$', line) and
   3628         not Search(r'\bfor\b', line)):
   3629     error(filename, linenum, 'whitespace/semicolon', 5,
   3630           'Extra space before last semicolon. If this should be an empty '
   3631           'statement, use {} instead.')
   3632 
   3633 
   3634 def IsDecltype(clean_lines, linenum, column):
   3635   """Check if the token ending on (linenum, column) is decltype().
   3636 
   3637   Args:
   3638     clean_lines: A CleansedLines instance containing the file.
   3639     linenum: the number of the line to check.
   3640     column: end column of the token to check.
   3641   Returns:
   3642     True if this token is a decltype() expression, False otherwise.
   3643   """
   3644   (text, _, start_col) = ReverseCloseExpression(clean_lines, linenum, column)
   3645   if start_col < 0:
   3646     return False
   3647   if Search(r'\bdecltype\s*$', text[0:start_col]):
   3648     return True
   3649   return False
   3650 
   3651 
   3652 def CheckSectionSpacing(filename, clean_lines, class_info, linenum, error):
   3653   """Checks for additional blank line issues related to sections.
   3654 
   3655   Currently the only thing checked here is blank line before protected/private.
   3656 
   3657   Args:
   3658     filename: The name of the current file.
   3659     clean_lines: A CleansedLines instance containing the file.
   3660     class_info: A _ClassInfo object.
   3661     linenum: The number of the line to check.
   3662     error: The function to call with any errors found.
   3663   """
   3664   # Skip checks if the class is small, where small means 25 lines or less.
   3665   # 25 lines seems like a good cutoff since that's the usual height of
   3666   # terminals, and any class that can't fit in one screen can't really
   3667   # be considered "small".
   3668   #
   3669   # Also skip checks if we are on the first line.  This accounts for
   3670   # classes that look like
   3671   #   class Foo { public: ... };
   3672   #
   3673   # If we didn't find the end of the class, last_line would be zero,
   3674   # and the check will be skipped by the first condition.
   3675   if (class_info.last_line - class_info.starting_linenum <= 24 or
   3676       linenum <= class_info.starting_linenum):
   3677     return
   3678 
   3679   matched = Match(r'\s*(public|protected|private):', clean_lines.lines[linenum])
   3680   if matched:
   3681     # Issue warning if the line before public/protected/private was
   3682     # not a blank line, but don't do this if the previous line contains
   3683     # "class" or "struct".  This can happen two ways:
   3684     #  - We are at the beginning of the class.
   3685     #  - We are forward-declaring an inner class that is semantically
   3686     #    private, but needed to be public for implementation reasons.
   3687     # Also ignore cases where the previous line ends with a backslash, as is
   3688     # common when defining classes in C macros.
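            # For illustration, a hypothetical class body (long enough that the
            # size check above does not skip it):
            #   class Foo {
            #    public:
            #     void Bar();
            #    private:     <- flagged, no blank line before "private:"
            #     int baz_;
            #   };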
   3689     prev_line = clean_lines.lines[linenum - 1]
   3690     if (not IsBlankLine(prev_line) and
   3691         not Search(r'\b(class|struct)\b', prev_line) and
   3692         not Search(r'\\$', prev_line)):
   3693       # Try a bit harder to find the beginning of the class.  This is to
   3694       # account for multi-line base-specifier lists, e.g.:
   3695       #   class Derived
   3696       #       : public Base {
   3697       end_class_head = class_info.starting_linenum
   3698       for i in range(class_info.starting_linenum, linenum):
   3699         if Search(r'\{\s*$', clean_lines.lines[i]):
   3700           end_class_head = i
   3701           break
   3702       if end_class_head < linenum - 1:
   3703         error(filename, linenum, 'whitespace/blank_line', 3,
   3704               '"%s:" should be preceded by a blank line' % matched.group(1))
   3705 
   3706 
   3707 def GetPreviousNonBlankLine(clean_lines, linenum):
   3708   """Return the most recent non-blank line and its line number.
   3709 
   3710   Args:
   3711     clean_lines: A CleansedLines instance containing the file contents.
   3712     linenum: The number of the line to check.
   3713 
   3714   Returns:
   3715     A tuple with two elements.  The first element is the contents of the last
   3716     non-blank line before the current line, or the empty string if this is the
   3717     first non-blank line.  The second is the line number of that line, or -1
   3718     if this is the first non-blank line.
   3719   """
   3720 
   3721   prevlinenum = linenum - 1
   3722   while prevlinenum >= 0:
   3723     prevline = clean_lines.elided[prevlinenum]
   3724     if not IsBlankLine(prevline):     # if not a blank line...
   3725       return (prevline, prevlinenum)
   3726     prevlinenum -= 1
   3727   return ('', -1)
   3728 
   3729 
   3730 def CheckBraces(filename, clean_lines, linenum, error):
   3731   """Looks for misplaced braces (e.g. at the end of line).
   3732 
   3733   Args:
   3734     filename: The name of the current file.
   3735     clean_lines: A CleansedLines instance containing the file.
   3736     linenum: The number of the line to check.
   3737     error: The function to call with any errors found.
   3738   """
   3739 
   3740   line = clean_lines.elided[linenum]        # get rid of comments and strings
   3741 
   3742   if Match(r'\s*{\s*$', line):
   3743     # We allow an open brace to start a line in the case where someone is using
   3744     # braces in a block to explicitly create a new scope, which is commonly used
   3745     # to control the lifetime of stack-allocated variables.  Braces are also
   3746     # used for brace initializers inside function calls.  We don't detect this
   3747     # perfectly: we just don't complain if the last non-whitespace character on
   3748     # the previous non-blank line is ',', ';', ':', '(', '{', or '}', or if the
   3749     # previous line starts a preprocessor block. We also allow a brace on the
   3750     # following line if it is part of an array initialization and would not fit
   3751     # within the 80 character limit of the preceding line.
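            # For illustration (hypothetical snippets):
            #   if (condition)
            #   {                        <- flagged, '{' should end the previous line
            # but an explicit scope after a complete statement is allowed:
            #   DoSomething();
            #   {                        <- OK, previous line ends with ';'
            #     ScopedLock l(&mu);
            #   }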
   3752     prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
   3753     if (not Search(r'[,;:}{(]\s*$', prevline) and
   3754         not Match(r'\s*#', prevline) and
   3755         not (GetLineWidth(prevline) > _line_length - 2 and '[]' in prevline)):
   3756       error(filename, linenum, 'whitespace/braces', 4,
   3757             '{ should almost always be at the end of the previous line')
   3758 
   3759   # An else clause should be on the same line as the preceding closing brace.
   3760   if Match(r'\s*else\b\s*(?:if\b|\{|$)', line):
   3761     prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
   3762     if Match(r'\s*}\s*$', prevline):
   3763       error(filename, linenum, 'whitespace/newline', 4,
   3764             'An else should appear on the same line as the preceding }')
   3765 
   3766   # If braces come on one side of an else, they should be on both.
   3767   # However, we have to worry about "else if" that spans multiple lines!
   3768   if Search(r'else if\s*\(', line):       # could be multi-line if
   3769     brace_on_left = bool(Search(r'}\s*else if\s*\(', line))
   3770     # find the ( after the if
   3771     pos = line.find('else if')
   3772     pos = line.find('(', pos)
   3773     if pos > 0:
   3774       (endline, _, endpos) = CloseExpression(clean_lines, linenum, pos)
   3775       brace_on_right = endline[endpos:].find('{') != -1
   3776       if brace_on_left != brace_on_right:    # must be brace after if
   3777         error(filename, linenum, 'readability/braces', 5,
   3778               'If an else has a brace on one side, it should have it on both')
   3779   elif Search(r'}\s*else[^{]*$', line) or Match(r'[^}]*else\s*{', line):
   3780     error(filename, linenum, 'readability/braces', 5,
   3781           'If an else has a brace on one side, it should have it on both')
   3782 
   3783   # Likewise, an else should never have its body on the same line as the else
   3784   if Search(r'\belse [^\s{]', line) and not Search(r'\belse if\b', line):
   3785     error(filename, linenum, 'whitespace/newline', 4,
   3786           'Else clause should never be on same line as else (use 2 lines)')
   3787 
   3788   # In the same way, a do/while should never be on one line
   3789   if Match(r'\s*do [^\s{]', line):
   3790     error(filename, linenum, 'whitespace/newline', 4,
   3791           'do/while clauses should not be on a single line')
   3792 
   3793   # Check single-line if/else bodies. The style guide says 'curly braces are not
   3794   # required for single-line statements'. We additionally allow multi-line,
   3795   # single statements, but we reject anything with more than one semicolon in
   3796   # it. This means that the first semicolon after the if should be at the end of
   3797   # its line, and the line after that should have an indent level equal to or
   3798   # lower than the if. We also check for ambiguous if/else nesting without
   3799   # braces.
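          # For example (illustrative), both of the following are flagged:
          #   if (condition)
          #     DoFirst(); DoSecond();   // more than one statement in the body
          # and
          #   if (a)
          #     if (b)
          #       DoSomething();
          #   else                       // ambiguous: which if owns this else?
          #     DoOther();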
   3800   if_else_match = Search(r'\b(if\s*\(|else\b)', line)
   3801   if if_else_match and not Match(r'\s*#', line):
   3802     if_indent = GetIndentLevel(line)
   3803     endline, endlinenum, endpos = line, linenum, if_else_match.end()
   3804     if_match = Search(r'\bif\s*\(', line)
   3805     if if_match:
   3806       # This could be a multiline if condition, so find the end first.
   3807       pos = if_match.end() - 1
   3808       (endline, endlinenum, endpos) = CloseExpression(clean_lines, linenum, pos)
   3809     # Check for an opening brace, either directly after the if or on the next
   3810     # line. If found, this isn't a single-statement conditional.
   3811     if (not Match(r'\s*{', endline[endpos:])
   3812         and not (Match(r'\s*$', endline[endpos:])
   3813                  and endlinenum < (len(clean_lines.elided) - 1)
   3814                  and Match(r'\s*{', clean_lines.elided[endlinenum + 1]))):
   3815       while (endlinenum < len(clean_lines.elided)
   3816              and ';' not in clean_lines.elided[endlinenum][endpos:]):
   3817         endlinenum += 1
   3818         endpos = 0
   3819       if endlinenum < len(clean_lines.elided):
   3820         endline = clean_lines.elided[endlinenum]
   3821         # We allow a mix of whitespace and closing braces (e.g. for one-liner
   3822         # methods) and a single \ after the semicolon (for macros)
   3823         endpos = endline.find(';')
   3824         if not Match(r';[\s}]*(\\?)$', endline[endpos:]):
   3825           # Semicolon isn't the last character, there's something trailing.
   3826           # Output a warning if the semicolon is not contained inside
   3827           # a lambda expression.
   3828           if not Match(r'^[^{};]*\[[^\[\]]*\][^{}]*\{[^{}]*\}\s*\)*[;,]\s*$',
   3829                        endline):
   3830             error(filename, linenum, 'readability/braces', 4,
   3831                   'If/else bodies with multiple statements require braces')
   3832         elif endlinenum < len(clean_lines.elided) - 1:
   3833           # Make sure the next line is dedented
   3834           next_line = clean_lines.elided[endlinenum + 1]
   3835           next_indent = GetIndentLevel(next_line)
   3836           # With ambiguous nested if statements, this will error out on the
   3837           # if that *doesn't* match the else, regardless of whether it's the
   3838           # inner one or outer one.
   3839           if (if_match and Match(r'\s*else\b', next_line)
   3840               and next_indent != if_indent):
   3841             error(filename, linenum, 'readability/braces', 4,
   3842                   'Else clause should be indented at the same level as if. '
   3843                   'Ambiguous nested if/else chains require braces.')
   3844           elif next_indent > if_indent:
   3845             error(filename, linenum, 'readability/braces', 4,
   3846                   'If/else bodies with multiple statements require braces')
   3847 
   3848 
   3849 def CheckTrailingSemicolon(filename, clean_lines, linenum, error):
   3850   """Looks for redundant trailing semicolon.
   3851 
   3852   Args:
   3853     filename: The name of the current file.
   3854     clean_lines: A CleansedLines instance containing the file.
   3855     linenum: The number of the line to check.
   3856     error: The function to call with any errors found.
   3857   """
   3858 
   3859   line = clean_lines.elided[linenum]
   3860 
   3861   # Block bodies should not be followed by a semicolon.  Due to C++11
   3862   # brace initialization, there are more places where semicolons are
   3863   # required than not, so we use a whitelist approach to check these
   3864   # rather than a blacklist.  These are the places where "};" should
   3865   # be replaced by just "}":
   3866   # 1. Some flavor of block following closing parenthesis:
   3867   #    for (;;) {};
   3868   #    while (...) {};
   3869   #    switch (...) {};
   3870   #    Function(...) {};
   3871   #    if (...) {};
   3872   #    if (...) else if (...) {};
   3873   #
   3874   # 2. else block:
   3875   #    if (...) else {};
   3876   #
   3877   # 3. const member function:
   3878   #    Function(...) const {};
   3879   #
   3880   # 4. Block following some statement:
   3881   #    x = 42;
   3882   #    {};
   3883   #
   3884   # 5. Block at the beginning of a function:
   3885   #    Function(...) {
   3886   #      {};
   3887   #    }
   3888   #
   3889   #    Note that naively checking for the preceding "{" will also match
   3890   #    braces inside multi-dimensional arrays, but this is fine since
   3891   #    that expression will not contain semicolons.
   3892   #
   3893   # 6. Block following another block:
   3894   #    while (true) {}
   3895   #    {};
   3896   #
   3897   # 7. End of namespaces:
   3898   #    namespace {};
   3899   #
   3900   #    These semicolons seems far more common than other kinds of
   3901   #    redundant semicolons, possibly due to people converting classes
   3902   #    to namespaces.  For now we do not warn for this case.
   3903   #
   3904   # Try matching case 1 first.
   3905   match = Match(r'^(.*\)\s*)\{', line)
   3906   if match:
   3907     # Matched closing parenthesis (case 1).  Check the token before the
   3908     # matching opening parenthesis, and don't warn if it looks like a
   3909     # macro.  This avoids these false positives:
   3910     #  - macro that defines a base class
   3911     #  - multi-line macro that defines a base class
   3912     #  - macro that defines the whole class-head
   3913     #
   3914     # But we still issue warnings for macros that we know are safe to warn
   3915     # about, specifically:
   3916     #  - TEST, TEST_F, TEST_P, MATCHER, MATCHER_P
   3917     #  - TYPED_TEST
   3918     #  - INTERFACE_DEF
   3919     #  - EXCLUSIVE_LOCKS_REQUIRED, SHARED_LOCKS_REQUIRED, LOCKS_EXCLUDED:
   3920     #
   3921     # We implement a whitelist of safe macros instead of a blacklist of
   3922     # unsafe macros, even though the latter appears less frequently in
   3923     # google code and would have been easier to implement.  This is because
   3924     # the downside of getting the whitelist wrong is only some extra
   3925     # semicolons, while getting the blacklist wrong would result in
   3926     # compile errors.
   3927     #
   3928     # In addition to macros, we also don't want to warn on
   3929     #  - Compound literals
   3930     #  - Lambdas
   3931     #  - alignas specifier with anonymous structs
   3932     #  - decltype
   3933     closing_brace_pos = match.group(1).rfind(')')
   3934     opening_parenthesis = ReverseCloseExpression(
   3935         clean_lines, linenum, closing_brace_pos)
   3936     if opening_parenthesis[2] > -1:
   3937       line_prefix = opening_parenthesis[0][0:opening_parenthesis[2]]
   3938       macro = Search(r'\b([A-Z_][A-Z0-9_]*)\s*$', line_prefix)
   3939       func = Match(r'^(.*\])\s*$', line_prefix)
   3940       if ((macro and
   3941            macro.group(1) not in (
   3942                'TEST', 'TEST_F', 'MATCHER', 'MATCHER_P', 'TYPED_TEST',
   3943                'EXCLUSIVE_LOCKS_REQUIRED', 'SHARED_LOCKS_REQUIRED',
   3944                'LOCKS_EXCLUDED', 'INTERFACE_DEF')) or
   3945           (func and not Search(r'\boperator\s*\[\s*\]', func.group(1))) or
   3946           Search(r'\b(?:struct|union)\s+alignas\s*$', line_prefix) or
   3947           Search(r'\bdecltype$', line_prefix) or
   3948           Search(r'\s+=\s*$', line_prefix)):
   3949         match = None
   3950     if (match and
   3951         opening_parenthesis[1] > 1 and
   3952         Search(r'\]\s*$', clean_lines.elided[opening_parenthesis[1] - 1])):
   3953       # Multi-line lambda-expression
   3954       match = None
   3955 
   3956   else:
   3957     # Try matching cases 2-3.
   3958     match = Match(r'^(.*(?:else|\)\s*const)\s*)\{', line)
   3959     if not match:
   3960       # Try matching cases 4-6.  These are always matched on separate lines.
   3961       #
   3962       # Note that we can't simply concatenate the previous line to the
   3963       # current line and do a single match, otherwise we may output
   3964       # duplicate warnings for the blank line case:
   3965       #   if (cond) {
   3966       #     // blank line
   3967       #   }
   3968       prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
   3969       if prevline and Search(r'[;{}]\s*$', prevline):
   3970         match = Match(r'^(\s*)\{', line)
   3971 
   3972   # Check matching closing brace
   3973   if match:
   3974     (endline, endlinenum, endpos) = CloseExpression(
   3975         clean_lines, linenum, len(match.group(1)))
   3976     if endpos > -1 and Match(r'^\s*;', endline[endpos:]):
   3977       # Current {} pair is eligible for semicolon check, and we have found
   3978       # the redundant semicolon, output warning here.
   3979       #
   3980       # Note: because we are scanning forward for opening braces, and
   3981       # outputting warnings for the matching closing brace, if there are
   3982       # nested blocks with trailing semicolons, we will get the error
   3983       # messages in reversed order.
   3984 
   3985       # We need to check the line forward for NOLINT
   3986       raw_lines = clean_lines.raw_lines
   3987       ParseNolintSuppressions(filename, raw_lines[endlinenum-1], endlinenum-1,
   3988                               error)
   3989       ParseNolintSuppressions(filename, raw_lines[endlinenum], endlinenum,
   3990                               error)
   3991 
   3992       error(filename, endlinenum, 'readability/braces', 4,
   3993             "You don't need a ; after a }")
   3994 
   3995 
   3996 def CheckEmptyBlockBody(filename, clean_lines, linenum, error):
   3997   """Look for empty loop/conditional body with only a single semicolon.
   3998 
   3999   Args:
   4000     filename: The name of the current file.
   4001     clean_lines: A CleansedLines instance containing the file.
   4002     linenum: The number of the line to check.
   4003     error: The function to call with any errors found.
   4004   """
   4005 
   4006   # Search for loop keywords at the beginning of the line.  Because only
   4007   # whitespaces are allowed before the keywords, this will also ignore most
   4008   # do-while-loops, since those lines should start with closing brace.
   4009   #
   4010   # We also check "if" blocks here, since an empty conditional block
   4011   # is likely an error.
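          # For example (illustrative), 'while (Advance());' and 'if (ok());'
          # are both flagged, since the semicolon right after the condition
          # means the body is empty.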
   4012   line = clean_lines.elided[linenum]
   4013   matched = Match(r'\s*(for|while|if)\s*\(', line)
   4014   if matched:
   4015     # Find the end of the conditional expression.
   4016     (end_line, end_linenum, end_pos) = CloseExpression(
   4017         clean_lines, linenum, line.find('('))
   4018 
   4019     # Output warning if what follows the condition expression is a semicolon.
   4020     # No warning for all other cases, including whitespace or newline, since we
   4021     # have a separate check for semicolons preceded by whitespace.
   4022     if end_pos >= 0 and Match(r';', end_line[end_pos:]):
   4023       if matched.group(1) == 'if':
   4024         error(filename, end_linenum, 'whitespace/empty_conditional_body', 5,
   4025               'Empty conditional bodies should use {}')
   4026       else:
   4027         error(filename, end_linenum, 'whitespace/empty_loop_body', 5,
   4028               'Empty loop bodies should use {} or continue')
   4029 
   4030     # Check for if statements that have completely empty bodies (no comments)
   4031     # and no else clauses.
   4032     if end_pos >= 0 and matched.group(1) == 'if':
   4033       # Find the position of the opening { for the if statement.
   4034       # Return without logging an error if it has no brackets.
   4035       opening_linenum = end_linenum
   4036       opening_line_fragment = end_line[end_pos:]
   4037       # Loop until EOF or find anything that's not whitespace or opening {.
   4038       while not Search(r'^\s*\{', opening_line_fragment):
   4039         if Search(r'^(?!\s*$)', opening_line_fragment):
   4040           # Conditional has no brackets.
   4041           return
   4042         opening_linenum += 1
   4043         if opening_linenum == len(clean_lines.elided):
   4044           # Couldn't find conditional's opening { or any code before EOF.
   4045           return
   4046         opening_line_fragment = clean_lines.elided[opening_linenum]
   4047       # Set opening_line (opening_line_fragment may not be entire opening line).
   4048       opening_line = clean_lines.elided[opening_linenum]
   4049 
   4050       # Find the position of the closing }.
   4051       opening_pos = opening_line_fragment.find('{')
   4052       if opening_linenum == end_linenum:
   4053         # We need to make opening_pos relative to the start of the entire line.
   4054         opening_pos += end_pos
   4055       (closing_line, closing_linenum, closing_pos) = CloseExpression(
   4056           clean_lines, opening_linenum, opening_pos)
   4057       if closing_pos < 0:
   4058         return
   4059 
   4060       # Now construct the body of the conditional. This consists of the portion
   4061       # of the opening line after the {, all lines until the closing line,
   4062       # and the portion of the closing line before the }.
   4063       if (clean_lines.raw_lines[opening_linenum] !=
   4064           CleanseComments(clean_lines.raw_lines[opening_linenum])):
   4065         # Opening line ends with a comment, so conditional isn't empty.
   4066         return
   4067       if closing_linenum > opening_linenum:
   4068         # Opening line after the {. Ignore comments here since we checked above.
   4069         body = list(opening_line[opening_pos+1:])
   4070         # All lines until closing line, excluding closing line, with comments.
   4071         body.extend(clean_lines.raw_lines[opening_linenum+1:closing_linenum])
   4072         # Closing line before the }. Won't (and can't) have comments.
   4073         body.append(clean_lines.elided[closing_linenum][:closing_pos-1])
   4074         body = '\n'.join(body)
   4075       else:
   4076         # If statement has brackets and fits on a single line.
   4077         body = opening_line[opening_pos+1:closing_pos-1]
   4078 
   4079       # Check if the body is empty
   4080       if not _EMPTY_CONDITIONAL_BODY_PATTERN.search(body):
   4081         return
   4082       # The body is empty. Now make sure there's not an else clause.
   4083       current_linenum = closing_linenum
   4084       current_line_fragment = closing_line[closing_pos:]
   4085       # Loop until EOF or find anything that's not whitespace or else clause.
   4086       while Search(r'^\s*$|^(?=\s*else)', current_line_fragment):
   4087         if Search(r'^(?=\s*else)', current_line_fragment):
   4088           # Found an else clause, so don't log an error.
   4089           return
   4090         current_linenum += 1
   4091         if current_linenum == len(clean_lines.elided):
   4092           break
   4093         current_line_fragment = clean_lines.elided[current_linenum]
   4094 
   4095       # The body is empty and there's no else clause until EOF or other code.
   4096       error(filename, end_linenum, 'whitespace/empty_if_body', 4,
   4097             ('If statement had no body and no else clause'))
   4098 
   4099 
   4100 def FindCheckMacro(line):
   4101   """Find a replaceable CHECK-like macro.
   4102 
   4103   Args:
   4104     line: line to search on.
   4105   Returns:
   4106     (macro name, start position), or (None, -1) if no replaceable
   4107     macro is found.
   4108   """
   4109   for macro in _CHECK_MACROS:
   4110     i = line.find(macro)
   4111     if i >= 0:
   4112       # Find opening parenthesis.  Do a regular expression match here
   4113       # to make sure that we are matching the expected CHECK macro, as
   4114       # opposed to some other macro that happens to contain the CHECK
   4115       # substring.
   4116       matched = Match(r'^(.*\b' + macro + r'\s*)\(', line)
   4117       if not matched:
   4118         continue
   4119       return (macro, len(matched.group(1)))
   4120   return (None, -1)
   4121 
   4122 
   4123 def CheckCheck(filename, clean_lines, linenum, error):
   4124   """Checks the use of CHECK and EXPECT macros.
   4125 
   4126   Args:
   4127     filename: The name of the current file.
   4128     clean_lines: A CleansedLines instance containing the file.
   4129     linenum: The number of the line to check.
   4130     error: The function to call with any errors found.
   4131   """
   4132 
   4133   # Decide the set of replacement macros that should be suggested
   4134   lines = clean_lines.elided
   4135   (check_macro, start_pos) = FindCheckMacro(lines[linenum])
   4136   if not check_macro:
   4137     return
   4138 
   4139   # Find end of the boolean expression by matching parentheses
   4140   (last_line, end_line, end_pos) = CloseExpression(
   4141       clean_lines, linenum, start_pos)
   4142   if end_pos < 0:
   4143     return
   4144 
   4145   # If the check macro is followed by something other than a
   4146   # semicolon, assume users will log their own custom error messages
   4147   # and don't suggest any replacements.
   4148   if not Match(r'\s*;', last_line[end_pos:]):
   4149     return
   4150 
   4151   if linenum == end_line:
   4152     expression = lines[linenum][start_pos + 1:end_pos - 1]
   4153   else:
   4154     expression = lines[linenum][start_pos + 1:]
   4155     for i in xrange(linenum + 1, end_line):
   4156       expression += lines[i]
   4157     expression += last_line[0:end_pos - 1]
   4158 
   4159   # Parse expression so that we can take parentheses into account.
   4160   # This avoids false positives for inputs like "CHECK((a < 4) == b)",
   4161   # which is not replaceable by CHECK_LE.
   4162   lhs = ''
   4163   rhs = ''
   4164   operator = None
   4165   while expression:
   4166     matched = Match(r'^\s*(<<|<<=|>>|>>=|->\*|->|&&|\|\||'
   4167                     r'==|!=|>=|>|<=|<|\()(.*)$', expression)
   4168     if matched:
   4169       token = matched.group(1)
   4170       if token == '(':
   4171         # Parenthesized operand
   4172         expression = matched.group(2)
   4173         (end, _) = FindEndOfExpressionInLine(expression, 0, ['('])
   4174         if end < 0:
   4175           return  # Unmatched parenthesis
   4176         lhs += '(' + expression[0:end]
   4177         expression = expression[end:]
   4178       elif token in ('&&', '||'):
   4179         # Logical and/or operators.  This means the expression
   4180         # contains more than one term, for example:
   4181         #   CHECK(42 < a && a < b);
   4182         #
   4183         # These are not replaceable with CHECK_LE, so bail out early.
   4184         return
   4185       elif token in ('<<', '<<=', '>>', '>>=', '->*', '->'):
   4186         # Non-relational operator
   4187         lhs += token
   4188         expression = matched.group(2)
   4189       else:
   4190         # Relational operator
   4191         operator = token
   4192         rhs = matched.group(2)
   4193         break
   4194     else:
   4195       # Unparenthesized operand.  Instead of appending to lhs one character
   4196       # at a time, we do another regular expression match to consume several
   4197       # characters at once if possible.  Trivial benchmark shows that this
   4198       # is more efficient when the operands are longer than a single
   4199       # character, which is generally the case.
   4200       matched = Match(r'^([^-=!<>()&|]+)(.*)$', expression)
   4201       if not matched:
   4202         matched = Match(r'^(\s*\S)(.*)$', expression)
   4203         if not matched:
   4204           break
   4205       lhs += matched.group(1)
   4206       expression = matched.group(2)
   4207 
   4208   # Only apply checks if we got all parts of the boolean expression
   4209   if not (lhs and operator and rhs):
   4210     return
   4211 
   4212   # Check that rhs does not contain logical operators.  We already know
   4213   # that lhs is fine since the loop above parses out && and ||.
   4214   if rhs.find('&&') > -1 or rhs.find('||') > -1:
   4215     return
   4216 
   4217   # At least one of the operands must be a constant literal.  This is
   4218   # to avoid suggesting replacements for unprintable things like
   4219   # CHECK(variable != iterator)
   4220   #
   4221   # The following pattern matches decimal and hex integers, strings, and
   4222   # characters (in that order).
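          # For example (illustrative), operands such as 42, -7, 0x1Full,
          # "some text" and 'c' count as constant literals here, while
          # kMaxValue or size() do not.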
   4223   lhs = lhs.strip()
   4224   rhs = rhs.strip()
   4225   match_constant = r'^([-+]?(\d+|0[xX][0-9a-fA-F]+)[lLuU]{0,3}|".*"|\'.*\')$'
   4226   if Match(match_constant, lhs) or Match(match_constant, rhs):
   4227     # Note: since we know both lhs and rhs, we can provide a more
   4228     # descriptive error message like:
   4229     #   Consider using CHECK_EQ(x, 42) instead of CHECK(x == 42)
   4230     # Instead of:
   4231     #   Consider using CHECK_EQ instead of CHECK(a == b)
   4232     #
   4233     # We are still keeping the less descriptive message because if lhs
   4234     # or rhs gets long, the error message might become unreadable.
   4235     error(filename, linenum, 'readability/check', 2,
   4236           'Consider using %s instead of %s(a %s b)' % (
   4237               _CHECK_REPLACEMENT[check_macro][operator],
   4238               check_macro, operator))
   4239 
   4240 
   4241 def CheckAltTokens(filename, clean_lines, linenum, error):
   4242   """Check alternative keywords being used in boolean expressions.
   4243 
   4244   Args:
   4245     filename: The name of the current file.
   4246     clean_lines: A CleansedLines instance containing the file.
   4247     linenum: The number of the line to check.
   4248     error: The function to call with any errors found.
   4249   """
   4250   line = clean_lines.elided[linenum]
   4251 
   4252   # Avoid preprocessor lines
   4253   if Match(r'^\s*#', line):
   4254     return
   4255 
   4256   # Last ditch effort to avoid multi-line comments.  This will not help
   4257   # if the comment started before the current line or ended after the
   4258   # current line, but it catches most of the false positives.  At least,
   4259   # it provides a way to work around this warning for people who use
   4260   # multi-line comments in preprocessor macros.
   4261   #
   4262   # TODO(unknown): remove this once cpplint has better support for
   4263   # multi-line comments.
   4264   if line.find('/*') >= 0 or line.find('*/') >= 0:
   4265     return
   4266 
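          # For example (illustrative), 'if (a and not b)' is flagged, with
          # suggestions to use '&&' and '!' instead.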
   4267   for match in _ALT_TOKEN_REPLACEMENT_PATTERN.finditer(line):
   4268     error(filename, linenum, 'readability/alt_tokens', 2,
   4269           'Use operator %s instead of %s' % (
   4270               _ALT_TOKEN_REPLACEMENT[match.group(1)], match.group(1)))
   4271 
   4272 
   4273 def GetLineWidth(line):
   4274   """Determines the width of the line in column positions.
   4275 
   4276   Args:
   4277     line: A string, which may be a Unicode string.
   4278 
   4279   Returns:
   4280     The width of the line in column positions, accounting for Unicode
   4281     combining characters and wide characters.
   4282   """
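          # For example (illustrative), GetLineWidth(u'foo') returns 3, while
          # GetLineWidth(u'\u4e2d\u6587') returns 4 because each wide CJK
          # character occupies two columns.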
   4283   if isinstance(line, unicode):
   4284     width = 0
   4285     for uc in unicodedata.normalize('NFC', line):
   4286       if unicodedata.east_asian_width(uc) in ('W', 'F'):
   4287         width += 2
   4288       elif not unicodedata.combining(uc):
   4289         width += 1
   4290     return width
   4291   else:
   4292     return len(line)
   4293 
   4294 
   4295 def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state,
   4296                error):
   4297   """Checks rules from the 'C++ style rules' section of cppguide.html.
   4298 
   4299   Most of these rules are hard to test (naming, comment style), but we
   4300   do what we can.  In particular we check for 2-space indents, line lengths,
   4301   tab usage, spaces inside code, etc.
   4302 
   4303   Args:
   4304     filename: The name of the current file.
   4305     clean_lines: A CleansedLines instance containing the file.
   4306     linenum: The number of the line to check.
   4307     file_extension: The extension (without the dot) of the filename.
   4308     nesting_state: A NestingState instance which maintains information about
   4309                    the current stack of nested blocks being parsed.
   4310     error: The function to call with any errors found.
   4311   """
   4312 
   4313   # Don't use "elided" lines here, otherwise we can't check commented lines.
   4314   # Don't want to use "raw" either, because we don't want to check inside C++11
   4315   # raw strings.
   4316   raw_lines = clean_lines.lines_without_raw_strings
   4317   line = raw_lines[linenum]
   4318   prev = raw_lines[linenum - 1] if linenum > 0 else ''
   4319 
   4320   if line.find('\t') != -1:
   4321     error(filename, linenum, 'whitespace/tab', 1,
   4322           'Tab found; better to use spaces')
   4323 
   4324   # One or three blank spaces at the beginning of the line is weird; it's
   4325   # hard to reconcile that with 2-space indents.
   4326   # NOTE: here are the conditions rob pike used for his tests.  Mine aren't
   4327   # as sophisticated, but it may be worth becoming so:  RLENGTH==initial_spaces
   4328   # if(RLENGTH > 20) complain = 0;
   4329   # if(match($0, " +(error|private|public|protected):")) complain = 0;
   4330   # if(match(prev, "&& *$")) complain = 0;
   4331   # if(match(prev, "\\|\\| *$")) complain = 0;
   4332   # if(match(prev, "[\",=><] *$")) complain = 0;
   4333   # if(match($0, " <<")) complain = 0;
   4334   # if(match(prev, " +for \\(")) complain = 0;
   4335   # if(prevodd && match(prevprev, " +for \\(")) complain = 0;
   4336   scope_or_label_pattern = r'\s*\w+\s*:\s*\\?$'
   4337   classinfo = nesting_state.InnermostClass()
   4338   initial_spaces = 0
   4339   cleansed_line = clean_lines.elided[linenum]
   4340   while initial_spaces < len(line) and line[initial_spaces] == ' ':
   4341     initial_spaces += 1
   4342   # There are certain situations where we allow one space, notably for
   4343   # section labels, and also lines containing multi-line raw strings.
   4344   # We also don't check for lines that look like continuation lines
   4345   # (of lines ending in double quotes, commas, equals, or angle brackets)
   4346   # because the rules for how to indent those are non-trivial.
   4347   if (not Search(r'[",=><] *$', prev) and
   4348       (initial_spaces == 1 or initial_spaces == 3) and
   4349       not Match(scope_or_label_pattern, cleansed_line) and
   4350       not (clean_lines.raw_lines[linenum] != line and
   4351            Match(r'^\s*""', line))):
   4352     error(filename, linenum, 'whitespace/indent', 3,
   4353           'Weird number of spaces at line-start.  '
   4354           'Are you using a 2-space indent?')
   4355 
   4356   if line and line[-1].isspace():
   4357     error(filename, linenum, 'whitespace/end_of_line', 4,
   4358           'Line ends in whitespace.  Consider deleting these extra spaces.')
   4359 
   4360   # Check if the line is a header guard.
   4361   is_header_guard = False
   4362   if IsHeaderExtension(file_extension):
   4363     cppvar = GetHeaderGuardCPPVariable(filename)
   4364     if (line.startswith('#ifndef %s' % cppvar) or
   4365         line.startswith('#define %s' % cppvar) or
   4366         line.startswith('#endif  // %s' % cppvar)):
   4367       is_header_guard = True
   4368   # #include lines and header guards can be long, since there's no clean way to
   4369   # split them.
   4370   #
   4371   # URLs can be long too.  It's possible to split these, but it makes them
   4372   # harder to cut&paste.
   4373   #
   4374   # The "$Id:...$" comment may also get very long without it being the
   4375   # developer's fault.
   4376   if (not line.startswith('#include') and not is_header_guard and
   4377       not Match(r'^\s*//.*http(s?)://\S*$', line) and
   4378       not Match(r'^\s*//\s*[^\s]*$', line) and
   4379       not Match(r'^// \$Id:.*#[0-9]+ \$$', line)):
   4380     line_width = GetLineWidth(line)
   4381     if line_width > _line_length:
   4382       error(filename, linenum, 'whitespace/line_length', 2,
   4383             'Lines should be <= %i characters long' % _line_length)
   4384 
   4385   if (cleansed_line.count(';') > 1 and
   4386       # for loops are allowed two ;'s (and may run over two lines).
   4387       cleansed_line.find('for') == -1 and
   4388       (GetPreviousNonBlankLine(clean_lines, linenum)[0].find('for') == -1 or
   4389        GetPreviousNonBlankLine(clean_lines, linenum)[0].find(';') != -1) and
   4390       # It's ok to have many commands in a switch case that fits in 1 line
   4391       not ((cleansed_line.find('case ') != -1 or
   4392             cleansed_line.find('default:') != -1) and
   4393            cleansed_line.find('break;') != -1)):
   4394     error(filename, linenum, 'whitespace/newline', 0,
   4395           'More than one command on the same line')
   4396 
   4397   # Some more style checks
   4398   CheckBraces(filename, clean_lines, linenum, error)
   4399   CheckTrailingSemicolon(filename, clean_lines, linenum, error)
   4400   CheckEmptyBlockBody(filename, clean_lines, linenum, error)
   4401   CheckSpacing(filename, clean_lines, linenum, nesting_state, error)
   4402   CheckOperatorSpacing(filename, clean_lines, linenum, error)
   4403   CheckParenthesisSpacing(filename, clean_lines, linenum, error)
   4404   CheckCommaSpacing(filename, clean_lines, linenum, error)
   4405   CheckBracesSpacing(filename, clean_lines, linenum, nesting_state, error)
   4406   CheckSpacingForFunctionCall(filename, clean_lines, linenum, error)
   4407   CheckCheck(filename, clean_lines, linenum, error)
   4408   CheckAltTokens(filename, clean_lines, linenum, error)
   4409   classinfo = nesting_state.InnermostClass()
   4410   if classinfo:
   4411     CheckSectionSpacing(filename, clean_lines, classinfo, linenum, error)
   4412 
   4413 
   4414 _RE_PATTERN_INCLUDE = re.compile(r'^\s*#\s*include\s*([<"])([^>"]*)[>"].*$')
   4415 # Matches the first component of a filename delimited by -s and _s. That is:
   4416 #  _RE_FIRST_COMPONENT.match('foo').group(0) == 'foo'
   4417 #  _RE_FIRST_COMPONENT.match('foo.cc').group(0) == 'foo'
   4418 #  _RE_FIRST_COMPONENT.match('foo-bar_baz.cc').group(0) == 'foo'
   4419 #  _RE_FIRST_COMPONENT.match('foo_bar-baz.cc').group(0) == 'foo'
   4420 _RE_FIRST_COMPONENT = re.compile(r'^[^-_.]+')
   4421 
   4422 
   4423 def _DropCommonSuffixes(filename):
   4424   """Drops common suffixes like _test.cc or -inl.h from filename.
   4425 
   4426   For example:
   4427     >>> _DropCommonSuffixes('foo/foo-inl.h')
   4428     'foo/foo'
   4429     >>> _DropCommonSuffixes('foo/bar/foo.cc')
   4430     'foo/bar/foo'
   4431     >>> _DropCommonSuffixes('foo/foo_internal.h')
   4432     'foo/foo'
   4433     >>> _DropCommonSuffixes('foo/foo_unusualinternal.h')
   4434     'foo/foo_unusualinternal'
   4435 
   4436   Args:
   4437     filename: The input filename.
   4438 
   4439   Returns:
   4440     The filename with the common suffix removed.
   4441   """
   4442   for suffix in ('test.cc', 'regtest.cc', 'unittest.cc',
   4443                  'inl.h', 'impl.h', 'internal.h'):
   4444     if (filename.endswith(suffix) and len(filename) > len(suffix) and
   4445         filename[-len(suffix) - 1] in ('-', '_')):
   4446       return filename[:-len(suffix) - 1]
   4447   return os.path.splitext(filename)[0]
   4448 
   4449 
   4450 def _ClassifyInclude(fileinfo, include, is_system):
   4451   """Figures out what kind of header 'include' is.
   4452 
   4453   Args:
   4454     fileinfo: The current file cpplint is running over. A FileInfo instance.
   4455     include: The path to a #included file.
   4456     is_system: True if the #include used <> rather than "".
   4457 
   4458   Returns:
   4459     One of the _XXX_HEADER constants.
   4460 
   4461   For example:
   4462     >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'stdio.h', True)
   4463     _C_SYS_HEADER
   4464     >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'string', True)
   4465     _CPP_SYS_HEADER
   4466     >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', False)
   4467     _LIKELY_MY_HEADER
   4468     >>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cc'),
   4469     ...                  'bar/foo_other_ext.h', False)
   4470     _POSSIBLE_MY_HEADER
   4471     >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/bar.h', False)
   4472     _OTHER_HEADER
   4473   """
   4474   # This is a list of all standard c++ header files, except
   4475   # those already checked for above.
   4476   is_cpp_h = include in _CPP_HEADERS
   4477 
   4478   if is_system:
   4479     if is_cpp_h:
   4480       return _CPP_SYS_HEADER
   4481     else:
   4482       return _C_SYS_HEADER
   4483 
   4484   # If the target file and the include we're checking share a
   4485   # basename when we drop common extensions, and the include
   4486   # lives in the same directory, then it's likely owned by the target file.
   4487   target_dir, target_base = (
   4488       os.path.split(_DropCommonSuffixes(fileinfo.RepositoryName())))
   4489   include_dir, include_base = os.path.split(_DropCommonSuffixes(include))
   4490   if target_base == include_base and (
   4491       include_dir == target_dir or
   4492       include_dir == os.path.normpath(target_dir + '/../public')):
   4493     return _LIKELY_MY_HEADER
   4494 
   4495   # If the target and include share some initial basename
   4496   # component, it's possible the target is implementing the
   4497   # include, so it's allowed to be first, but we'll never
   4498   # complain if it's not there.
   4499   target_first_component = _RE_FIRST_COMPONENT.match(target_base)
   4500   include_first_component = _RE_FIRST_COMPONENT.match(include_base)
   4501   if (target_first_component and include_first_component and
   4502       target_first_component.group(0) ==
   4503       include_first_component.group(0)):
   4504     return _POSSIBLE_MY_HEADER
   4505 
   4506   return _OTHER_HEADER
   4507 
   4508 
   4509 
   4510 def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
   4511   """Check rules that are applicable to #include lines.
   4512 
   4513   Strings on #include lines are NOT removed from the elided line, to make
   4514   certain tasks easier. However, to prevent false positives, checks
   4515   applicable to #include lines in CheckLanguage must be put here.
   4516 
   4517   Args:
   4518     filename: The name of the current file.
   4519     clean_lines: A CleansedLines instance containing the file.
   4520     linenum: The number of the line to check.
   4521     include_state: An _IncludeState instance in which the headers are inserted.
   4522     error: The function to call with any errors found.
   4523   """
   4524   fileinfo = FileInfo(filename)
   4525   line = clean_lines.lines[linenum]
   4526 
   4527   # "include" should use the new style "foo/bar.h" instead of just "bar.h"
   4528   # Only do this check if the included header follows google naming
   4529   # conventions.  If not, assume that it's a 3rd party API that
   4530   # requires special include conventions.
   4531   #
   4532   # We also make an exception for Lua headers, which follow google
   4533   # naming convention but not the include convention.
   4534   match = Match(r'#include\s*"([^/]+\.h)"', line)
   4535   if match and not _THIRD_PARTY_HEADERS_PATTERN.match(match.group(1)):
   4536     error(filename, linenum, 'build/include', 4,
   4537           'Include the directory when naming .h files')
   4538 
   4539   # we shouldn't include a file more than once. actually, there are a
   4540   # handful of instances where doing so is okay, but in general it's
   4541   # not.
   4542   match = _RE_PATTERN_INCLUDE.search(line)
   4543   if match:
   4544     include = match.group(2)
   4545     is_system = (match.group(1) == '<')
   4546     duplicate_line = include_state.FindHeader(include)
   4547     if duplicate_line >= 0:
   4548       error(filename, linenum, 'build/include', 4,
   4549             '"%s" already included at %s:%s' %
   4550             (include, filename, duplicate_line))
   4551     elif (include.endswith('.cc') and
   4552           os.path.dirname(fileinfo.RepositoryName()) != os.path.dirname(include)):
   4553       error(filename, linenum, 'build/include', 4,
   4554             'Do not include .cc files from other packages')
   4555     elif not _THIRD_PARTY_HEADERS_PATTERN.match(include):
   4556       include_state.include_list[-1].append((include, linenum))
   4557 
   4558       # We want to ensure that headers appear in the right order:
   4559       # 1) for foo.cc, foo.h  (preferred location)
   4560       # 2) c system files
   4561       # 3) cpp system files
   4562       # 4) for foo.cc, foo.h  (deprecated location)
   4563       # 5) other google headers
   4564       #
   4565       # We classify each include statement as one of those 5 types
   4566       # using a number of techniques. The include_state object keeps
   4567       # track of the highest type seen, and complains if we see a
   4568       # lower type after that.
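              # For example (illustrative, hypothetical paths), a conforming
              # foo/foo.cc might include, in order:
              #   #include "foo/foo.h"
              #   #include <stdio.h>
              #   #include <string>
              #   #include "base/logging.h"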
   4569       error_message = include_state.CheckNextIncludeOrder(
   4570           _ClassifyInclude(fileinfo, include, is_system))
   4571       if error_message:
   4572         error(filename, linenum, 'build/include_order', 4,
   4573               '%s. Should be: %s.h, c system, c++ system, other.' %
   4574               (error_message, fileinfo.BaseName()))
   4575       canonical_include = include_state.CanonicalizeAlphabeticalOrder(include)
   4576       if not include_state.IsInAlphabeticalOrder(
   4577           clean_lines, linenum, canonical_include):
   4578         error(filename, linenum, 'build/include_alpha', 4,
   4579               'Include "%s" not in alphabetical order' % include)
   4580       include_state.SetLastHeader(canonical_include)
   4581 
   4582 
   4583 
   4584 def _GetTextInside(text, start_pattern):
   4585   r"""Retrieves all the text between matching open and close parentheses.
   4586 
   4587   Given a string of lines and a regular expression string, retrieve all the text
   4588   following the expression and between opening punctuation symbols like
   4589   (, [, or {, and the matching close-punctuation symbol. This properly
   4590   handles nested occurrences of the punctuation symbols, so for text like
   4591     printf(a(), b(c()));
   4592   a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'.
   4593   start_pattern must match a string ending with an opening punctuation symbol.
   4594 
   4595   Args:
   4596     text: The text to extract from. Its comments and strings must be elided.
   4597            It may be a single line or span multiple lines.
   4598     start_pattern: The regexp string indicating where to start extracting
   4599                    the text.
   4600   Returns:
   4601     The extracted text.
   4602     None if either the opening string or ending punctuation could not be found.
   4603   """
   4604   # TODO(unknown): Audit cpplint.py to see what places could be profitably
   4605   # rewritten to use _GetTextInside (and today use inferior regexp matching).
   4606 
   4607   # Map each opening punctuation symbol to its matching closing symbol.
   4608   matching_punctuation = {'(': ')', '{': '}', '[': ']'}
   4609   closing_punctuation = set(matching_punctuation.itervalues())
   4610 
   4611   # Find the position to start extracting text.
   4612   match = re.search(start_pattern, text, re.M)
   4613   if not match:  # start_pattern not found in text.
   4614     return None
   4615   start_position = match.end(0)
   4616 
   4617   assert start_position > 0, (
   4618       'start_pattern must end with an opening punctuation.')
   4619   assert text[start_position - 1] in matching_punctuation, (
   4620       'start_pattern must end with an opening punctuation.')
   4621   # Stack of closing punctuations we expect to have in text after position.
   4622   punctuation_stack = [matching_punctuation[text[start_position - 1]]]
   4623   position = start_position
   4624   while punctuation_stack and position < len(text):
   4625     if text[position] == punctuation_stack[-1]:
   4626       punctuation_stack.pop()
   4627     elif text[position] in closing_punctuation:
   4628       # A closing punctuation without matching opening punctuations.
   4629       return None
   4630     elif text[position] in matching_punctuation:
   4631       punctuation_stack.append(matching_punctuation[text[position]])
   4632     position += 1
   4633   if punctuation_stack:
   4634     # Opening punctuations left without matching close-punctuations.
   4635     return None
   4636   # punctuations match.
   4637   return text[start_position:position - 1]
   4638 
   4639 
   4640 # Patterns for matching call-by-reference parameters.
   4641 #
   4642 # Supports nested templates up to 2 levels deep using this messy pattern:
   4643 #   < (?: < (?: < [^<>]*
   4644 #               >
   4645 #           |   [^<>] )*
   4646 #         >
   4647 #     |   [^<>] )*
   4648 #   >
   4649 _RE_PATTERN_IDENT = r'[_a-zA-Z]\w*'  # =~ [[:alpha:]][[:alnum:]]*
   4650 _RE_PATTERN_TYPE = (
   4651     r'(?:const\s+)?(?:typename\s+|class\s+|struct\s+|union\s+|enum\s+)?'
   4652     r'(?:\w|'
   4653     r'\s*<(?:<(?:<[^<>]*>|[^<>])*>|[^<>])*>|'
   4654     r'::)+')
   4655 # A call-by-reference parameter ends with '& identifier'.
   4656 _RE_PATTERN_REF_PARAM = re.compile(
   4657     r'(' + _RE_PATTERN_TYPE + r'(?:\s*(?:\bconst\b|[*]))*\s*'
   4658     r'&\s*' + _RE_PATTERN_IDENT + r')\s*(?:=[^,()]+)?[,)]')
   4659 # A call-by-const-reference parameter either ends with 'const& identifier'
   4660 # or looks like 'const type& identifier' when 'type' is atomic.
   4661 _RE_PATTERN_CONST_REF_PARAM = (
   4662     r'(?:.*\s*\bconst\s*&\s*' + _RE_PATTERN_IDENT +
   4663     r'|const\s+' + _RE_PATTERN_TYPE + r'\s*&\s*' + _RE_PATTERN_IDENT + r')')
   4664 # Stream types.
   4665 _RE_PATTERN_REF_STREAM_PARAM = (
   4666     r'(?:.*stream\s*&\s*' + _RE_PATTERN_IDENT + r')')
   4667 
   4668 
   4669 def CheckLanguage(filename, clean_lines, linenum, file_extension,
   4670                   include_state, nesting_state, error):
   4671   """Checks rules from the 'C++ language rules' section of cppguide.html.
   4672 
   4673   Some of these rules are hard to test (function overloading, using
   4674   uint32 inappropriately), but we do the best we can.
   4675 
   4676   Args:
   4677     filename: The name of the current file.
   4678     clean_lines: A CleansedLines instance containing the file.
   4679     linenum: The number of the line to check.
   4680     file_extension: The extension (without the dot) of the filename.
   4681     include_state: An _IncludeState instance in which the headers are inserted.
   4682     nesting_state: A NestingState instance which maintains information about
   4683                    the current stack of nested blocks being parsed.
   4684     error: The function to call with any errors found.
   4685   """
   4686   # If the line is empty or consists entirely of a comment, no need to
   4687   # check it.
   4688   line = clean_lines.elided[linenum]
   4689   if not line:
   4690     return
   4691 
   4692   match = _RE_PATTERN_INCLUDE.search(line)
   4693   if match:
   4694     CheckIncludeLine(filename, clean_lines, linenum, include_state, error)
   4695     return
   4696 
   4697   # Reset include state across preprocessor directives.  This is meant
   4698   # to silence warnings for conditional includes.
   4699   match = Match(r'^\s*#\s*(if|ifdef|ifndef|elif|else|endif)\b', line)
   4700   if match:
   4701     include_state.ResetSection(match.group(1))
   4702 
   4703   # Make Windows paths like Unix.
   4704   fullname = os.path.abspath(filename).replace('\\', '/')
   4705 
   4706   # Perform other checks now that we are sure that this is not an include line
   4707   CheckCasts(filename, clean_lines, linenum, error)
   4708   CheckGlobalStatic(filename, clean_lines, linenum, error)
   4709   CheckPrintf(filename, clean_lines, linenum, error)
   4710 
   4711   if IsHeaderExtension(file_extension):
   4712     # TODO(unknown): check that 1-arg constructors are explicit.
   4713     #                How to tell it's a constructor?
   4714     #                (handled in CheckForNonStandardConstructs for now)
   4715     # TODO(unknown): check that classes declare or disable copy/assign
   4716     #                (level 1 error)
   4717     pass
   4718 
   4719   # Check if people are using the verboten C basic types.  The only exception
   4720   # we regularly allow is "unsigned short port" for port.
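          # For example (illustrative), 'long long counter;' is flagged, while
          # 'unsigned short port' and 'long double ratio;' are not.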
   4721   if Search(r'\bshort port\b', line):
   4722     if not Search(r'\bunsigned short port\b', line):
   4723       error(filename, linenum, 'runtime/int', 4,
   4724             'Use "unsigned short" for ports, not "short"')
   4725   else:
   4726     match = Search(r'\b(short|long(?! +double)|long long)\b', line)
   4727     if match:
   4728       error(filename, linenum, 'runtime/int', 4,
   4729             'Use int16/int64/etc, rather than the C type %s' % match.group(1))
   4730 
   4731   # Check if some verboten operator overloading is going on
   4732   # TODO(unknown): catch out-of-line unary operator&:
   4733   #   class X {};
   4734   #   int operator&(const X& x) { return 42; }  // unary operator&
   4735   # The trick is it's hard to tell apart from binary operator&:
   4736   #   class Y { int operator&(const Y& x) { return 23; } }; // binary operator&
   4737   if Search(r'\boperator\s*&\s*\(\s*\)', line):
   4738     error(filename, linenum, 'runtime/operator', 4,
   4739           'Unary operator& is dangerous.  Do not use it.')
   4740 
   4741   # Check for suspicious usage of "if" like
   4742   # } if (a == b) {
   4743   if Search(r'\}\s*if\s*\(', line):
   4744     error(filename, linenum, 'readability/braces', 4,
   4745           'Did you mean "else if"? If not, start a new line for "if".')
   4746 
   4747   # Check for potential format string bugs like printf(foo).
   4748   # We constrain the pattern not to pick things like DocidForPrintf(foo).
   4749   # Not perfect but it can catch printf(foo.c_str()) and printf(foo->c_str())
   4750   # TODO(unknown): Catch the following case. Need to change the calling
   4751   # convention of the whole function to process multiple lines to handle it.
   4752   #   printf(
   4753   #       boy_this_is_a_really_long_variable_that_cannot_fit_on_the_prev_line);
   4754   printf_args = _GetTextInside(line, r'(?i)\b(string)?printf\s*\(')
   4755   if printf_args:
   4756     match = Match(r'([\w.\->()]+)$', printf_args)
   4757     if match and match.group(1) != '__VA_ARGS__':
   4758       function_name = re.search(r'\b((?:string)?printf)\s*\(',
   4759                                 line, re.I).group(1)
   4760       error(filename, linenum, 'runtime/printf', 4,
   4761             'Potential format string bug. Do %s("%%s", %s) instead.'
   4762             % (function_name, match.group(1)))
   4763 
   4764   # Check for potential memset bugs like memset(buf, sizeof(buf), 0).
   4765   match = Search(r'memset\s*\(([^,]*),\s*([^,]*),\s*0\s*\)', line)
   4766   if match and not Match(r"^''|-?[0-9]+|0x[0-9A-Fa-f]$", match.group(2)):
   4767     error(filename, linenum, 'runtime/memset', 4,
   4768           'Did you mean "memset(%s, 0, %s)"?'
   4769           % (match.group(1), match.group(2)))
   4770 
   4771   if Search(r'\busing namespace\b', line):
   4772     error(filename, linenum, 'build/namespaces', 5,
   4773           'Do not use namespace using-directives.  '
   4774           'Use using-declarations instead.')
   4775 
   4776   # Detect variable-length arrays.
   4777   match = Match(r'\s*(.+::)?(\w+) [a-z]\w*\[(.+)];', line)
   4778   if (match and match.group(2) != 'return' and match.group(2) != 'delete' and
   4779       match.group(3).find(']') == -1):
   4780     # Split the size using space and arithmetic operators as delimiters.
   4781     # If any of the resulting tokens are not compile time constants then
   4782     # report the error.
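            # For example (illustrative), 'int buf[len];' is flagged when len is
            # a runtime variable, while 'int buf[kMaxLen];' and
            # 'char buf[sizeof(Header)];' count as compile-time constant sizes.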
   4783     tokens = re.split(r'\s|\+|\-|\*|\/|<<|>>', match.group(3))
   4784     is_const = True
   4785     skip_next = False
   4786     for tok in tokens:
   4787       if skip_next:
   4788         skip_next = False
   4789         continue
   4790 
   4791       if Search(r'sizeof\(.+\)', tok): continue
   4792       if Search(r'arraysize\(\w+\)', tok): continue
   4793 
   4794       tok = tok.lstrip('(')
   4795       tok = tok.rstrip(')')
   4796       if not tok: continue
   4797       if Match(r'\d+', tok): continue
   4798       if Match(r'0[xX][0-9a-fA-F]+', tok): continue
   4799       if Match(r'k[A-Z0-9]\w*', tok): continue
   4800       if Match(r'(.+::)?k[A-Z0-9]\w*', tok): continue
   4801       if Match(r'(.+::)?[A-Z][A-Z0-9_]*', tok): continue
   4802       # A catch all for tricky sizeof cases, including 'sizeof expression',
   4803       # 'sizeof(*type)', 'sizeof(const type)', 'sizeof(struct StructName)'
   4804       # requires skipping the next token because we split on ' ' and '*'.
   4805       if tok.startswith('sizeof'):
   4806         skip_next = True
   4807         continue
   4808       is_const = False
   4809       break
   4810     if not is_const:
   4811       error(filename, linenum, 'runtime/arrays', 1,
   4812             'Do not use variable-length arrays.  Use an appropriately named '
   4813             "('k' followed by CamelCase) compile-time constant for the size.")
   4814 
   4815   # Check for use of unnamed namespaces in header files.  Registration
   4816   # macros are typically OK, so we allow use of "namespace {" on lines
   4817   # that end with backslashes.
   4818   if (IsHeaderExtension(file_extension)
   4819       and Search(r'\bnamespace\s*{', line)
   4820       and line[-1] != '\\'):
   4821     error(filename, linenum, 'build/namespaces', 4,
   4822           'Do not use unnamed namespaces in header files.  See '
   4823           'https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces'
   4824           ' for more information.')
   4825 
   4826 
   4827 def CheckGlobalStatic(filename, clean_lines, linenum, error):
   4828   """Check for unsafe global or static objects.
   4829 
   4830   Args:
   4831     filename: The name of the current file.
   4832     clean_lines: A CleansedLines instance containing the file.
   4833     linenum: The number of the line to check.
   4834     error: The function to call with any errors found.
   4835   """
   4836   line = clean_lines.elided[linenum]
   4837 
   4838   # Match two lines at a time to support multiline declarations
   4839   if linenum + 1 < clean_lines.NumLines() and not Search(r'[;({]', line):
   4840     line += clean_lines.elided[linenum + 1].strip()
   4841 
   4842   # Check for people declaring static/global STL strings at the top level.
   4843   # This is dangerous because the C++ language does not guarantee that
   4844   # globals with constructors are initialized before the first access, and
   4845   # also because globals can be destroyed when some threads are still running.
   4846   # TODO(unknown): Generalize this to also find static unique_ptr instances.
   4847   # TODO(unknown): File bugs for clang-tidy to find these.
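          # For example (illustrative), both of these are flagged:
          #   static string g_name;
          #   const string kPrefix = "foo";
          # with 'const char kPrefix[]' suggested for the constant case.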
   4848   match = Match(
   4849       r'((?:|static +)(?:|const +))(?::*std::)?string( +const)? +'
   4850       r'([a-zA-Z0-9_:]+)\b(.*)',
   4851       line)
   4852 
   4853   # Remove false positives:
   4854   # - String pointers (as opposed to values).
   4855   #    string *pointer
   4856   #    const string *pointer
   4857   #    string const *pointer
   4858   #    string *const pointer
   4859   #
   4860   # - Functions and template specializations.
   4861   #    string Function<Type>(...
   4862   #    string Class<Type>::Method(...
   4863   #
   4864   # - Operators.  These are matched separately because operator names
   4865   #   cross non-word boundaries, and trying to match both operators
   4866   #   and functions at the same time would decrease accuracy of
   4867   #   matching identifiers.
   4868   #    string Class::operator*()
   4869   if (match and
   4870       not Search(r'\bstring\b(\s+const)?\s*[\*\&]\s*(const\s+)?\w', line) and
   4871       not Search(r'\boperator\W', line) and
   4872       not Match(r'\s*(<.*>)?(::[a-zA-Z0-9_]+)*\s*\(([^"]|$)', match.group(4))):
   4873     if Search(r'\bconst\b', line):
   4874       error(filename, linenum, 'runtime/string', 4,
   4875             'For a static/global string constant, use a C style string '
   4876             'instead: "%schar%s %s[]".' %
   4877             (match.group(1), match.group(2) or '', match.group(3)))
   4878     else:
   4879       error(filename, linenum, 'runtime/string', 4,
   4880             'Static/global string variables are not permitted.')
   4881 
   4882   if (Search(r'\b([A-Za-z0-9_]*_)\(\1\)', line) or
   4883       Search(r'\b([A-Za-z0-9_]*_)\(CHECK_NOTNULL\(\1\)\)', line)):
   4884     error(filename, linenum, 'runtime/init', 4,
   4885           'You seem to be initializing a member variable with itself.')
   4886 
   4887 
   4888 def CheckPrintf(filename, clean_lines, linenum, error):
   4889   """Check for printf related issues.
   4890 
   4891   Args:
   4892     filename: The name of the current file.
   4893     clean_lines: A CleansedLines instance containing the file.
   4894     linenum: The number of the line to check.
   4895     error: The function to call with any errors found.
   4896   """
   4897   line = clean_lines.elided[linenum]
   4898 
   4899   # When snprintf is used, the second argument shouldn't be a literal.
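          # For example (illustrative), 'snprintf(buf, 1024, "%s", msg)' is
          # flagged, with sizeof(buf) suggested in place of the literal 1024.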
   4900   match = Search(r'snprintf\s*\(([^,]*),\s*([0-9]*)\s*,', line)
   4901   if match and match.group(2) != '0':
   4902     # If 2nd arg is zero, snprintf is used to calculate size.
   4903     error(filename, linenum, 'runtime/printf', 3,
   4904           'If you can, use sizeof(%s) instead of %s as the 2nd arg '
   4905           'to snprintf.' % (match.group(1), match.group(2)))
   4906 
   4907   # Check if some verboten C functions are being used.
   4908   if Search(r'\bsprintf\s*\(', line):
   4909     error(filename, linenum, 'runtime/printf', 5,
   4910           'Never use sprintf. Use snprintf instead.')
   4911   match = Search(r'\b(strcpy|strcat)\s*\(', line)
   4912   if match:
   4913     error(filename, linenum, 'runtime/printf', 4,
   4914           'Almost always, snprintf is better than %s' % match.group(1))
   4915 
   4916 
   4917 def IsDerivedFunction(clean_lines, linenum):
   4918   """Check if current line contains an inherited function.
   4919 
   4920   Args:
   4921     clean_lines: A CleansedLines instance containing the file.
   4922     linenum: The number of the line to check.
   4923   Returns:
   4924     True if current line contains a function with "override"
   4925     virt-specifier.
   4926   """
   4927   # Scan back a few lines for start of current function
   4928   for i in xrange(linenum, max(-1, linenum - 10), -1):
   4929     match = Match(r'^([^()]*\w+)\(', clean_lines.elided[i])
   4930     if match:
   4931       # Look for "override" after the matching closing parenthesis
   4932       line, _, closing_paren = CloseExpression(
   4933           clean_lines, i, len(match.group(1)))
   4934       return (closing_paren >= 0 and
   4935               Search(r'\boverride\b', line[closing_paren:]))
   4936   return False
   4937 
   4938 
   4939 def IsOutOfLineMethodDefinition(clean_lines, linenum):
   4940   """Check if current line contains an out-of-line method definition.
   4941 
   4942   Args:
   4943     clean_lines: A CleansedLines instance containing the file.
   4944     linenum: The number of the line to check.
   4945   Returns:
   4946     True if current line contains an out-of-line method definition.
   4947   """
   4948   # Scan back a few lines for start of current function
   4949   for i in xrange(linenum, max(-1, linenum - 10), -1):
   4950     if Match(r'^([^()]*\w+)\(', clean_lines.elided[i]):
   4951       return Match(r'^[^()]*\w+::\w+\(', clean_lines.elided[i]) is not None
   4952   return False
   4953 
   4954 
   4955 def IsInitializerList(clean_lines, linenum):
   4956   """Check if current line is inside constructor initializer list.
   4957 
   4958   Args:
   4959     clean_lines: A CleansedLines instance containing the file.
   4960     linenum: The number of the line to check.
   4961   Returns:
   4962     True if current line appears to be inside constructor initializer
   4963     list, False otherwise.
   4964   """
   4965   for i in xrange(linenum, 1, -1):
   4966     line = clean_lines.elided[i]
   4967     if i == linenum:
   4968       remove_function_body = Match(r'^(.*)\{\s*$', line)
   4969       if remove_function_body:
   4970         line = remove_function_body.group(1)
   4971 
   4972     if Search(r'\s:\s*\w+[({]', line):
    4973       # A lone colon tends to indicate the start of a constructor
    4974       # initializer list.  It could also be a ternary operator, which
    4975       # also tends to appear in constructor initializer lists as
    4976       # opposed to parameter lists.
   4977       return True
   4978     if Search(r'\}\s*,\s*$', line):
   4979       # A closing brace followed by a comma is probably the end of a
   4980       # brace-initialized member in constructor initializer list.
   4981       return True
   4982     if Search(r'[{};]\s*$', line):
   4983       # Found one of the following:
   4984       # - A closing brace or semicolon, probably the end of the previous
   4985       #   function.
   4986       # - An opening brace, probably the start of current class or namespace.
   4987       #
   4988       # Current line is probably not inside an initializer list since
   4989       # we saw one of those things without seeing the starting colon.
   4990       return False
   4991 
   4992   # Got to the beginning of the file without seeing the start of
   4993   # constructor initializer list.
   4994   return False
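           # Illustrative (hypothetical) example: when scanning back from a line in
           #   Foo::Foo(int x)
           #       : member_(x) {
           # the ' : member_(' pattern above makes this function return True.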
   4995 
   4996 
   4997 def CheckForNonConstReference(filename, clean_lines, linenum,
   4998                               nesting_state, error):
   4999   """Check for non-const references.
   5000 
   5001   Separate from CheckLanguage since it scans backwards from current
   5002   line, instead of scanning forward.
   5003 
   5004   Args:
   5005     filename: The name of the current file.
   5006     clean_lines: A CleansedLines instance containing the file.
   5007     linenum: The number of the line to check.
   5008     nesting_state: A NestingState instance which maintains information about
   5009                    the current stack of nested blocks being parsed.
   5010     error: The function to call with any errors found.
   5011   """
   5012   # Do nothing if there is no '&' on current line.
   5013   line = clean_lines.elided[linenum]
   5014   if '&' not in line:
   5015     return
   5016 
    5017   # If a function is inherited, the current function doesn't have much of
    5018   # a choice, so any non-const references should not be blamed on the
    5019   # derived function.
   5020   if IsDerivedFunction(clean_lines, linenum):
   5021     return
   5022 
   5023   # Don't warn on out-of-line method definitions, as we would warn on the
   5024   # in-line declaration, if it isn't marked with 'override'.
   5025   if IsOutOfLineMethodDefinition(clean_lines, linenum):
   5026     return
   5027 
   5028   # Long type names may be broken across multiple lines, usually in one
   5029   # of these forms:
   5030   #   LongType
   5031   #       ::LongTypeContinued &identifier
   5032   #   LongType::
   5033   #       LongTypeContinued &identifier
   5034   #   LongType<
   5035   #       ...>::LongTypeContinued &identifier
   5036   #
   5037   # If we detected a type split across two lines, join the previous
   5038   # line to current line so that we can match const references
   5039   # accordingly.
   5040   #
    5041   # Note that this only scans back one line, since scanning back an
    5042   # arbitrary number of lines would be expensive.  If you have a type
    5043   # that spans more than 2 lines, please use a typedef.
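           # Illustrative (hypothetical) example: given
           #   const LongNamespace::
           #       LongTypeContinued &value
           # the previous line is glued onto the current one, so the reference
           # checks below see "const LongNamespace::LongTypeContinued &value".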
   5044   if linenum > 1:
   5045     previous = None
   5046     if Match(r'\s*::(?:[\w<>]|::)+\s*&\s*\S', line):
   5047       # previous_line\n + ::current_line
   5048       previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+[\w<>])\s*$',
   5049                         clean_lines.elided[linenum - 1])
   5050     elif Match(r'\s*[a-zA-Z_]([\w<>]|::)+\s*&\s*\S', line):
   5051       # previous_line::\n + current_line
   5052       previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+::)\s*$',
   5053                         clean_lines.elided[linenum - 1])
   5054     if previous:
   5055       line = previous.group(1) + line.lstrip()
   5056     else:
   5057       # Check for templated parameter that is split across multiple lines
   5058       endpos = line.rfind('>')
   5059       if endpos > -1:
   5060         (_, startline, startpos) = ReverseCloseExpression(
   5061             clean_lines, linenum, endpos)
   5062         if startpos > -1 and startline < linenum:
   5063           # Found the matching < on an earlier line, collect all
   5064           # pieces up to current line.
   5065           line = ''
   5066           for i in xrange(startline, linenum + 1):
   5067             line += clean_lines.elided[i].strip()
   5068 
   5069   # Check for non-const references in function parameters.  A single '&' may
    5070   # be found in the following places:
   5071   #   inside expression: binary & for bitwise AND
   5072   #   inside expression: unary & for taking the address of something
   5073   #   inside declarators: reference parameter
   5074   # We will exclude the first two cases by checking that we are not inside a
   5075   # function body, including one that was just introduced by a trailing '{'.
   5076   # TODO(unknown): Doesn't account for 'catch(Exception& e)' [rare].
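           # Illustrative (hypothetical) examples:
           #   void Update(Foo& foo);       // reference parameter -> checked below
           #   mask = flags & kLowBits;     // bitwise AND in a body -> skipped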
   5077   if (nesting_state.previous_stack_top and
   5078       not (isinstance(nesting_state.previous_stack_top, _ClassInfo) or
   5079            isinstance(nesting_state.previous_stack_top, _NamespaceInfo))):
   5080     # Not at toplevel, not within a class, and not within a namespace
   5081     return
   5082 
   5083   # Avoid initializer lists.  We only need to scan back from the
   5084   # current line for something that starts with ':'.
   5085   #
   5086   # We don't need to check the current line, since the '&' would
   5087   # appear inside the second set of parentheses on the current line as
   5088   # opposed to the first set.
   5089   if linenum > 0:
   5090     for i in xrange(linenum - 1, max(0, linenum - 10), -1):
   5091       previous_line = clean_lines.elided[i]
   5092       if not Search(r'[),]\s*$', previous_line):
   5093         break
   5094       if Match(r'^\s*:\s+\S', previous_line):
   5095         return
   5096 
   5097   # Avoid preprocessors
   5098   if Search(r'\\\s*$', line):
   5099     return
   5100 
   5101   # Avoid constructor initializer lists
   5102   if IsInitializerList(clean_lines, linenum):
   5103     return
   5104 
   5105   # We allow non-const references in a few standard places, like functions
   5106   # called "swap()" or iostream operators like "<<" or ">>".  Do not check
   5107   # those function parameters.
   5108   #
   5109   # We also accept & in static_assert, which looks like a function but
   5110   # it's actually a declaration expression.
   5111   whitelisted_functions = (r'(?:[sS]wap(?:<\w:+>)?|'
   5112                            r'operator\s*[<>][<>]|'
   5113                            r'static_assert|COMPILE_ASSERT'
   5114                            r')\s*\(')
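           # Illustrative (hypothetical) lines that the whitelist above skips:
           #   void swap(Foo& a, Foo& b);
           #   std::ostream& operator<<(std::ostream& os, const Foo& f);
           #   static_assert(kSize > 0, "kSize must be positive");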
   5115   if Search(whitelisted_functions, line):
   5116     return
   5117   elif not Search(r'\S+\([^)]*$', line):
    5118     # We don't see a whitelisted function on this line.  Actually, we
   5119     # didn't see any function name on this line, so this is likely a
   5120     # multi-line parameter list.  Try a bit harder to catch this case.
   5121     for i in xrange(2):
   5122       if (linenum > i and
   5123           Search(whitelisted_functions, clean_lines.elided[linenum - i - 1])):
   5124         return
   5125 
   5126   decls = ReplaceAll(r'{[^}]*}', ' ', line)  # exclude function body
   5127   for parameter in re.findall(_RE_PATTERN_REF_PARAM, decls):
   5128     if (not Match(_RE_PATTERN_CONST_REF_PARAM, parameter) and
   5129         not Match(_RE_PATTERN_REF_STREAM_PARAM, parameter)):
   5130       error(filename, linenum, 'runtime/references', 2,
   5131             'Is this a non-const reference? '
   5132             'If so, make const or use a pointer: ' +
   5133             ReplaceAll(' *<', '<', parameter))
   5134 
   5135 
   5136 def CheckCasts(filename, clean_lines, linenum, error):
   5137   """Various cast related checks.
   5138 
   5139   Args:
   5140     filename: The name of the current file.
   5141     clean_lines: A CleansedLines instance containing the file.
   5142     linenum: The number of the line to check.
   5143     error: The function to call with any errors found.
   5144   """
   5145   line = clean_lines.elided[linenum]
   5146 
    5147   # Check to see if they're using a conversion function cast.
   5148   # I just try to capture the most common basic types, though there are more.
   5149   # Parameterless conversion functions, such as bool(), are allowed as they are
   5150   # probably a member operator declaration or default constructor.
   5151   match = Search(
   5152       r'(\bnew\s+(?:const\s+)?|\S<\s*(?:const\s+)?)?\b'
   5153       r'(int|float|double|bool|char|int32|uint32|int64|uint64)'
   5154       r'(\([^)].*)', line)
   5155   expecting_function = ExpectingFunctionArgs(clean_lines, linenum)
   5156   if match and not expecting_function:
   5157     matched_type = match.group(2)
   5158 
   5159     # matched_new_or_template is used to silence two false positives:
   5160     # - New operators
   5161     # - Template arguments with function types
   5162     #
   5163     # For template arguments, we match on types immediately following
   5164     # an opening bracket without any spaces.  This is a fast way to
   5165     # silence the common case where the function type is the first
   5166     # template argument.  False negative with less-than comparison is
   5167     # avoided because those operators are usually followed by a space.
   5168     #
   5169     #   function<double(double)>   // bracket + no space = false positive
   5170     #   value < double(42)         // bracket + space = true positive
   5171     matched_new_or_template = match.group(1)
   5172 
   5173     # Avoid arrays by looking for brackets that come after the closing
   5174     # parenthesis.
   5175     if Match(r'\([^()]+\)\s*\[', match.group(3)):
   5176       return
   5177 
   5178     # Other things to ignore:
   5179     # - Function pointers
   5180     # - Casts to pointer types
   5181     # - Placement new
   5182     # - Alias declarations
   5183     matched_funcptr = match.group(3)
   5184     if (matched_new_or_template is None and
   5185         not (matched_funcptr and
   5186              (Match(r'\((?:[^() ]+::\s*\*\s*)?[^() ]+\)\s*\(',
   5187                     matched_funcptr) or
   5188               matched_funcptr.startswith('(*)'))) and
   5189         not Match(r'\s*using\s+\S+\s*=\s*' + matched_type, line) and
   5190         not Search(r'new\(\S+\)\s*' + matched_type, line)):
   5191       error(filename, linenum, 'readability/casting', 4,
   5192             'Using deprecated casting style.  '
   5193             'Use static_cast<%s>(...) instead' %
   5194             matched_type)
   5195 
   5196   if not expecting_function:
   5197     CheckCStyleCast(filename, clean_lines, linenum, 'static_cast',
   5198                     r'\((int|float|double|bool|char|u?int(16|32|64))\)', error)
   5199 
   5200   # This doesn't catch all cases. Consider (const char * const)"hello".
   5201   #
   5202   # (char *) "foo" should always be a const_cast (reinterpret_cast won't
   5203   # compile).
   5204   if CheckCStyleCast(filename, clean_lines, linenum, 'const_cast',
   5205                      r'\((char\s?\*+\s?)\)\s*"', error):
   5206     pass
   5207   else:
   5208     # Check pointer casts for other than string constants
   5209     CheckCStyleCast(filename, clean_lines, linenum, 'reinterpret_cast',
   5210                     r'\((\w+\s?\*+\s?)\)', error)
   5211 
   5212   # In addition, we look for people taking the address of a cast.  This
   5213   # is dangerous -- casts can assign to temporaries, so the pointer doesn't
   5214   # point where you think.
   5215   #
   5216   # Some non-identifier character is required before the '&' for the
   5217   # expression to be recognized as a cast.  These are casts:
   5218   #   expression = &static_cast<int*>(temporary());
   5219   #   function(&(int*)(temporary()));
   5220   #
   5221   # This is not a cast:
   5222   #   reference_type&(int* function_param);
   5223   match = Search(
   5224       r'(?:[^\w]&\(([^)*][^)]*)\)[\w(])|'
   5225       r'(?:[^\w]&(static|dynamic|down|reinterpret)_cast\b)', line)
   5226   if match:
   5227     # Try a better error message when the & is bound to something
   5228     # dereferenced by the casted pointer, as opposed to the casted
   5229     # pointer itself.
   5230     parenthesis_error = False
   5231     match = Match(r'^(.*&(?:static|dynamic|down|reinterpret)_cast\b)<', line)
   5232     if match:
   5233       _, y1, x1 = CloseExpression(clean_lines, linenum, len(match.group(1)))
   5234       if x1 >= 0 and clean_lines.elided[y1][x1] == '(':
   5235         _, y2, x2 = CloseExpression(clean_lines, y1, x1)
   5236         if x2 >= 0:
   5237           extended_line = clean_lines.elided[y2][x2:]
   5238           if y2 < clean_lines.NumLines() - 1:
   5239             extended_line += clean_lines.elided[y2 + 1]
   5240           if Match(r'\s*(?:->|\[)', extended_line):
   5241             parenthesis_error = True
   5242 
   5243     if parenthesis_error:
   5244       error(filename, linenum, 'readability/casting', 4,
   5245             ('Are you taking an address of something dereferenced '
   5246              'from a cast?  Wrapping the dereferenced expression in '
   5247              'parentheses will make the binding more obvious'))
   5248     else:
   5249       error(filename, linenum, 'runtime/casting', 4,
   5250             ('Are you taking an address of a cast?  '
   5251              'This is dangerous: could be a temp var.  '
   5252              'Take the address before doing the cast, rather than after'))
   5253 
   5254 
   5255 def CheckCStyleCast(filename, clean_lines, linenum, cast_type, pattern, error):
   5256   """Checks for a C-style cast by looking for the pattern.
   5257 
   5258   Args:
   5259     filename: The name of the current file.
   5260     clean_lines: A CleansedLines instance containing the file.
   5261     linenum: The number of the line to check.
   5262     cast_type: The string for the C++ cast to recommend.  This is either
   5263       reinterpret_cast, static_cast, or const_cast, depending.
   5264     pattern: The regular expression used to find C-style casts.
   5265     error: The function to call with any errors found.
   5266 
   5267   Returns:
   5268     True if an error was emitted.
   5269     False otherwise.
   5270   """
   5271   line = clean_lines.elided[linenum]
   5272   match = Search(pattern, line)
   5273   if not match:
   5274     return False
   5275 
   5276   # Exclude lines with keywords that tend to look like casts
   5277   context = line[0:match.start(1) - 1]
   5278   if Match(r'.*\b(?:sizeof|alignof|alignas|[_A-Z][_A-Z0-9]*)\s*$', context):
   5279     return False
   5280 
    5281   # Try expanding the current context to see if we are one level of
    5282   # parentheses inside a macro.
   5283   if linenum > 0:
   5284     for i in xrange(linenum - 1, max(0, linenum - 5), -1):
   5285       context = clean_lines.elided[i] + context
   5286   if Match(r'.*\b[_A-Z][_A-Z0-9]*\s*\((?:\([^()]*\)|[^()])*$', context):
   5287     return False
   5288 
   5289   # operator++(int) and operator--(int)
   5290   if context.endswith(' operator++') or context.endswith(' operator--'):
   5291     return False
   5292 
    5293   # A single unnamed argument for a function tends to look like an old-style cast.
   5294   # If we see those, don't issue warnings for deprecated casts.
   5295   remainder = line[match.end(0):]
   5296   if Match(r'^\s*(?:;|const\b|throw\b|final\b|override\b|[=>{),]|->)',
   5297            remainder):
   5298     return False
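           # Illustrative (hypothetical) examples for the check above:
           #   void Reset(int);           // unnamed parameter -> not reported
           #   int x = (int)GetValue();   // falls through -> reported below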
   5299 
   5300   # At this point, all that should be left is actual casts.
   5301   error(filename, linenum, 'readability/casting', 4,
   5302         'Using C-style cast.  Use %s<%s>(...) instead' %
   5303         (cast_type, match.group(1)))
   5304 
   5305   return True
   5306 
   5307 
   5308 def ExpectingFunctionArgs(clean_lines, linenum):
   5309   """Checks whether where function type arguments are expected.
   5310 
   5311   Args:
   5312     clean_lines: A CleansedLines instance containing the file.
   5313     linenum: The number of the line to check.
   5314 
   5315   Returns:
   5316     True if the line at 'linenum' is inside something that expects arguments
   5317     of function types.
   5318   """
   5319   line = clean_lines.elided[linenum]
   5320   return (Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line) or
   5321           (linenum >= 2 and
   5322            (Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\((?:\S+,)?\s*$',
   5323                   clean_lines.elided[linenum - 1]) or
   5324             Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\(\s*$',
   5325                   clean_lines.elided[linenum - 2]) or
   5326             Search(r'\bstd::m?function\s*\<\s*$',
   5327                    clean_lines.elided[linenum - 1]))))
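         # Illustrative (hypothetical) example: ExpectingFunctionArgs() returns True
         # for a line such as
         #   MOCK_METHOD1(Process, bool(const Request& request));
         # so the "bool(...)" there is not reported as a deprecated cast.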
   5328 
   5329 
   5330 _HEADERS_CONTAINING_TEMPLATES = (
   5331     ('<deque>', ('deque',)),
   5332     ('<functional>', ('unary_function', 'binary_function',
   5333                       'plus', 'minus', 'multiplies', 'divides', 'modulus',
   5334                       'negate',
   5335                       'equal_to', 'not_equal_to', 'greater', 'less',
   5336                       'greater_equal', 'less_equal',
   5337                       'logical_and', 'logical_or', 'logical_not',
   5338                       'unary_negate', 'not1', 'binary_negate', 'not2',
   5339                       'bind1st', 'bind2nd',
   5340                       'pointer_to_unary_function',
   5341                       'pointer_to_binary_function',
   5342                       'ptr_fun',
   5343                       'mem_fun_t', 'mem_fun', 'mem_fun1_t', 'mem_fun1_ref_t',
   5344                       'mem_fun_ref_t',
   5345                       'const_mem_fun_t', 'const_mem_fun1_t',
   5346                       'const_mem_fun_ref_t', 'const_mem_fun1_ref_t',
   5347                       'mem_fun_ref',
   5348                      )),
   5349     ('<limits>', ('numeric_limits',)),
   5350     ('<list>', ('list',)),
   5351     ('<map>', ('map', 'multimap',)),
   5352     ('<memory>', ('allocator', 'make_shared', 'make_unique', 'shared_ptr',
   5353                   'unique_ptr', 'weak_ptr')),
   5354     ('<queue>', ('queue', 'priority_queue',)),
   5355     ('<set>', ('set', 'multiset',)),
   5356     ('<stack>', ('stack',)),
   5357     ('<string>', ('char_traits', 'basic_string',)),
   5358     ('<tuple>', ('tuple',)),
   5359     ('<unordered_map>', ('unordered_map', 'unordered_multimap')),
   5360     ('<unordered_set>', ('unordered_set', 'unordered_multiset')),
   5361     ('<utility>', ('pair',)),
   5362     ('<vector>', ('vector',)),
   5363 
   5364     # gcc extensions.
   5365     # Note: std::hash is their hash, ::hash is our hash
   5366     ('<hash_map>', ('hash_map', 'hash_multimap',)),
   5367     ('<hash_set>', ('hash_set', 'hash_multiset',)),
   5368     ('<slist>', ('slist',)),
   5369     )
   5370 
   5371 _HEADERS_MAYBE_TEMPLATES = (
   5372     ('<algorithm>', ('copy', 'max', 'min', 'min_element', 'sort',
   5373                      'transform',
   5374                     )),
   5375     ('<utility>', ('forward', 'make_pair', 'move', 'swap')),
   5376     )
   5377 
   5378 _RE_PATTERN_STRING = re.compile(r'\bstring\b')
   5379 
   5380 _re_pattern_headers_maybe_templates = []
   5381 for _header, _templates in _HEADERS_MAYBE_TEMPLATES:
   5382   for _template in _templates:
   5383     # Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max or
   5384     # type::max().
   5385     _re_pattern_headers_maybe_templates.append(
   5386         (re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'),
   5387             _template,
   5388             _header))
   5389 
   5390 # Other scripts may reach in and modify this pattern.
   5391 _re_pattern_templates = []
   5392 for _header, _templates in _HEADERS_CONTAINING_TEMPLATES:
   5393   for _template in _templates:
   5394     _re_pattern_templates.append(
   5395         (re.compile(r'(\<|\b)' + _template + r'\s*\<'),
   5396          _template + '<>',
   5397          _header))
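         # Illustrative (hypothetical) example: the entry generated for 'vector'
         # matches "std::vector<int> v;" as well as "vector<int> v;", and the
         # include-what-you-use check below may then require <vector>.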
   5398 
   5399 
   5400 def FilesBelongToSameModule(filename_cc, filename_h):
   5401   """Check if these two filenames belong to the same module.
   5402 
    5403   The concept of a 'module' here is as follows:
   5404   foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the
   5405   same 'module' if they are in the same directory.
   5406   some/path/public/xyzzy and some/path/internal/xyzzy are also considered
   5407   to belong to the same module here.
   5408 
   5409   If the filename_cc contains a longer path than the filename_h, for example,
   5410   '/absolute/path/to/base/sysinfo.cc', and this file would include
   5411   'base/sysinfo.h', this function also produces the prefix needed to open the
   5412   header. This is used by the caller of this function to more robustly open the
   5413   header file. We don't have access to the real include paths in this context,
   5414   so we need this guesswork here.
   5415 
   5416   Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
   5417   according to this implementation. Because of this, this function gives
   5418   some false positives. This should be sufficiently rare in practice.
   5419 
   5420   Args:
   5421     filename_cc: is the path for the .cc file
    5422     filename_h: is the path for the header file
   5423 
   5424   Returns:
   5425     Tuple with a bool and a string:
   5426     bool: True if filename_cc and filename_h belong to the same module.
   5427     string: the additional prefix needed to open the header file.
   5428   """
   5429 
   5430   fileinfo = FileInfo(filename_cc)
   5431   if not fileinfo.IsSource():
   5432     return (False, '')
   5433   filename_cc = filename_cc[:-len(fileinfo.Extension())]
   5434   matched_test_suffix = Search(_TEST_FILE_SUFFIX, fileinfo.BaseName())
   5435   if matched_test_suffix:
   5436     filename_cc = filename_cc[:-len(matched_test_suffix.group(1))]
   5437   filename_cc = filename_cc.replace('/public/', '/')
   5438   filename_cc = filename_cc.replace('/internal/', '/')
   5439 
   5440   if not filename_h.endswith('.h'):
   5441     return (False, '')
   5442   filename_h = filename_h[:-len('.h')]
   5443   if filename_h.endswith('-inl'):
   5444     filename_h = filename_h[:-len('-inl')]
   5445   filename_h = filename_h.replace('/public/', '/')
   5446   filename_h = filename_h.replace('/internal/', '/')
   5447 
   5448   files_belong_to_same_module = filename_cc.endswith(filename_h)
   5449   common_path = ''
   5450   if files_belong_to_same_module:
   5451     common_path = filename_cc[:-len(filename_h)]
   5452   return files_belong_to_same_module, common_path
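         # Illustrative (hypothetical) example, mirroring the docstring above:
         #   FilesBelongToSameModule('/abs/path/to/base/sysinfo_test.cc',
         #                           'base/sysinfo.h')
         # returns (True, '/abs/path/to/'), the prefix later used to open the header.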
   5453 
   5454 
   5455 def UpdateIncludeState(filename, include_dict, io=codecs):
   5456   """Fill up the include_dict with new includes found from the file.
   5457 
   5458   Args:
   5459     filename: the name of the header to read.
   5460     include_dict: a dictionary in which the headers are inserted.
   5461     io: The io factory to use to read the file. Provided for testability.
   5462 
   5463   Returns:
   5464     True if a header was successfully added. False otherwise.
   5465   """
   5466   headerfile = None
   5467   try:
   5468     headerfile = io.open(filename, 'r', 'utf8', 'replace')
   5469   except IOError:
   5470     return False
   5471   linenum = 0
   5472   for line in headerfile:
   5473     linenum += 1
   5474     clean_line = CleanseComments(line)
   5475     match = _RE_PATTERN_INCLUDE.search(clean_line)
   5476     if match:
   5477       include = match.group(2)
   5478       include_dict.setdefault(include, linenum)
   5479   return True
   5480 
   5481 
   5482 def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
   5483                               io=codecs):
   5484   """Reports for missing stl includes.
   5485 
   5486   This function will output warnings to make sure you are including the headers
    5487   necessary for the STL containers and functions that you use. We only give
    5488   one reason to include a header. For example, if you use both equal_to<> and
    5489   less<> in a .h file, only one of them (whichever appears later in the file)
    5490   will be reported as a reason to include <functional>.
   5491 
   5492   Args:
   5493     filename: The name of the current file.
   5494     clean_lines: A CleansedLines instance containing the file.
   5495     include_state: An _IncludeState instance.
   5496     error: The function to call with any errors found.
   5497     io: The IO factory to use to read the header file. Provided for unittest
   5498         injection.
   5499   """
   5500   required = {}  # A map of header name to linenumber and the template entity.
   5501                  # Example of required: { '<functional>': (1219, 'less<>') }
   5502 
   5503   for linenum in xrange(clean_lines.NumLines()):
   5504     line = clean_lines.elided[linenum]
   5505     if not line or line[0] == '#':
   5506       continue
   5507 
   5508     # String is special -- it is a non-templatized type in STL.
   5509     matched = _RE_PATTERN_STRING.search(line)
   5510     if matched:
   5511       # Don't warn about strings in non-STL namespaces:
   5512       # (We check only the first match per line; good enough.)
   5513       prefix = line[:matched.start()]
   5514       if prefix.endswith('std::') or not prefix.endswith('::'):
   5515         required['<string>'] = (linenum, 'string')
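             # Illustrative (hypothetical) examples for the check above:
             #   "std::string name;"    -> <string> becomes a required include
             #   "mylib::string name;"  -> skipped (non-STL namespace)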
   5516 
   5517     for pattern, template, header in _re_pattern_headers_maybe_templates:
   5518       if pattern.search(line):
   5519         required[header] = (linenum, template)
   5520 
    5521     # The following check is just a speedup; no semantics are changed.
    5522     if '<' not in line:  # Reduces CPU time by skipping lines without templates.
   5523       continue
   5524 
   5525     for pattern, template, header in _re_pattern_templates:
   5526       matched = pattern.search(line)
   5527       if matched:
   5528         # Don't warn about IWYU in non-STL namespaces:
   5529         # (We check only the first match per line; good enough.)
   5530         prefix = line[:matched.start()]
   5531         if prefix.endswith('std::') or not prefix.endswith('::'):
   5532           required[header] = (linenum, template)
   5533 
   5534   # The policy is that if you #include something in foo.h you don't need to
   5535   # include it again in foo.cc. Here, we will look at possible includes.
   5536   # Let's flatten the include_state include_list and copy it into a dictionary.
   5537   include_dict = dict([item for sublist in include_state.include_list
   5538                        for item in sublist])
   5539 
   5540   # Did we find the header for this file (if any) and successfully load it?
   5541   header_found = False
   5542 
   5543   # Use the absolute path so that matching works properly.
   5544   abs_filename = FileInfo(filename).FullName()
   5545 
   5546   # For Emacs's flymake.
   5547   # If cpplint is invoked from Emacs's flymake, a temporary file is generated
   5548   # by flymake and that file name might end with '_flymake.cc'. In that case,
    5549   # restore the original file name here so that the corresponding header file
    5550   # can be found.
   5551   # e.g. If the file name is 'foo_flymake.cc', we should search for 'foo.h'
   5552   # instead of 'foo_flymake.h'
   5553   abs_filename = re.sub(r'_flymake\.cc$', '.cc', abs_filename)
   5554 
   5555   # include_dict is modified during iteration, so we iterate over a copy of
   5556   # the keys.
   5557   header_keys = include_dict.keys()
   5558   for header in header_keys:
   5559     (same_module, common_path) = FilesBelongToSameModule(abs_filename, header)
   5560     fullpath = common_path + header
   5561     if same_module and UpdateIncludeState(fullpath, include_dict, io):
   5562       header_found = True
   5563 
   5564   # If we can't find the header file for a .cc, assume it's because we don't
   5565   # know where to look. In that case we'll give up as we're not sure they
   5566   # didn't include it in the .h file.
   5567   # TODO(unknown): Do a better job of finding .h files so we are confident that
   5568   # not having the .h file means there isn't one.
   5569   if filename.endswith('.cc') and not header_found:
   5570     return
   5571 
   5572   # All the lines have been processed, report the errors found.
   5573   for required_header_unstripped in required:
   5574     template = required[required_header_unstripped][1]
   5575     if required_header_unstripped.strip('<>"') not in include_dict:
   5576       error(filename, required[required_header_unstripped][0],
   5577             'build/include_what_you_use', 4,
   5578             'Add #include ' + required_header_unstripped + ' for ' + template)
   5579 
   5580 
   5581 _RE_PATTERN_EXPLICIT_MAKEPAIR = re.compile(r'\bmake_pair\s*<')
   5582 
   5583 
   5584 def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error):
   5585   """Check that make_pair's template arguments are deduced.
   5586 
   5587   G++ 4.6 in C++11 mode fails badly if make_pair's template arguments are
   5588   specified explicitly, and such use isn't intended in any case.
   5589 
   5590   Args:
   5591     filename: The name of the current file.
   5592     clean_lines: A CleansedLines instance containing the file.
   5593     linenum: The number of the line to check.
   5594     error: The function to call with any errors found.
   5595   """
   5596   line = clean_lines.elided[linenum]
   5597   match = _RE_PATTERN_EXPLICIT_MAKEPAIR.search(line)
   5598   if match:
   5599     error(filename, linenum, 'build/explicit_make_pair',
   5600           4,  # 4 = high confidence
   5601           'For C++11-compatibility, omit template arguments from make_pair'
   5602           ' OR use pair directly OR if appropriate, construct a pair directly')
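           # Illustrative (hypothetical) examples:
           #   m.insert(make_pair<int, string>(1, "one"));   -> flagged
           #   m.insert(make_pair(1, string("one")));        -> not flagged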
   5603 
   5604 
   5605 def CheckRedundantVirtual(filename, clean_lines, linenum, error):
   5606   """Check if line contains a redundant "virtual" function-specifier.
   5607 
   5608   Args:
   5609     filename: The name of the current file.
   5610     clean_lines: A CleansedLines instance containing the file.
   5611     linenum: The number of the line to check.
   5612     error: The function to call with any errors found.
   5613   """
   5614   # Look for "virtual" on current line.
   5615   line = clean_lines.elided[linenum]
   5616   virtual = Match(r'^(.*)(\bvirtual\b)(.*)$', line)
   5617   if not virtual: return
   5618 
   5619   # Ignore "virtual" keywords that are near access-specifiers.  These
   5620   # are only used in class base-specifier and do not apply to member
   5621   # functions.
   5622   if (Search(r'\b(public|protected|private)\s+$', virtual.group(1)) or
   5623       Match(r'^\s+(public|protected|private)\b', virtual.group(3))):
   5624     return
   5625 
   5626   # Ignore the "virtual" keyword from virtual base classes.  Usually
    5627   # there is a colon on the same line in these cases (virtual base
   5628   # classes are rare in google3 because multiple inheritance is rare).
   5629   if Match(r'^.*[^:]:[^:].*$', line): return
   5630 
   5631   # Look for the next opening parenthesis.  This is the start of the
   5632   # parameter list (possibly on the next line shortly after virtual).
   5633   # TODO(unknown): doesn't work if there are virtual functions with
   5634   # decltype() or other things that use parentheses, but csearch suggests
   5635   # that this is rare.
   5636   end_col = -1
   5637   end_line = -1
   5638   start_col = len(virtual.group(2))
   5639   for start_line in xrange(linenum, min(linenum + 3, clean_lines.NumLines())):
   5640     line = clean_lines.elided[start_line][start_col:]
   5641     parameter_list = Match(r'^([^(]*)\(', line)
   5642     if parameter_list:
   5643       # Match parentheses to find the end of the parameter list
   5644       (_, end_line, end_col) = CloseExpression(
   5645           clean_lines, start_line, start_col + len(parameter_list.group(1)))
   5646       break
   5647     start_col = 0
   5648 
   5649   if end_col < 0:
   5650     return  # Couldn't find end of parameter list, give up
   5651 
   5652   # Look for "override" or "final" after the parameter list
   5653   # (possibly on the next few lines).
   5654   for i in xrange(end_line, min(end_line + 3, clean_lines.NumLines())):
   5655     line = clean_lines.elided[i][end_col:]
   5656     match = Search(r'\b(override|final)\b', line)
   5657     if match:
   5658       error(filename, linenum, 'readability/inheritance', 4,
   5659             ('"virtual" is redundant since function is '
   5660              'already declared as "%s"' % match.group(1)))
   5661 
   5662     # Set end_col to check whole lines after we are done with the
   5663     # first line.
   5664     end_col = 0
   5665     if Search(r'[^\w]\s*$', line):
   5666       break
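           # Illustrative (hypothetical) examples:
           #   virtual void Draw() override;   -> "virtual" reported as redundant
           #   virtual void Draw();            -> not reported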
   5667 
   5668 
   5669 def CheckRedundantOverrideOrFinal(filename, clean_lines, linenum, error):
   5670   """Check if line contains a redundant "override" or "final" virt-specifier.
   5671 
   5672   Args:
   5673     filename: The name of the current file.
   5674     clean_lines: A CleansedLines instance containing the file.
   5675     linenum: The number of the line to check.
   5676     error: The function to call with any errors found.
   5677   """
   5678   # Look for closing parenthesis nearby.  We need one to confirm where
   5679   # the declarator ends and where the virt-specifier starts to avoid
   5680   # false positives.
   5681   line = clean_lines.elided[linenum]
   5682   declarator_end = line.rfind(')')
   5683   if declarator_end >= 0:
   5684     fragment = line[declarator_end:]
   5685   else:
   5686     if linenum > 1 and clean_lines.elided[linenum - 1].rfind(')') >= 0:
   5687       fragment = line
   5688     else:
   5689       return
   5690 
   5691   # Check that at most one of "override" or "final" is present, not both
   5692   if Search(r'\boverride\b', fragment) and Search(r'\bfinal\b', fragment):
   5693     error(filename, linenum, 'readability/inheritance', 4,
   5694           ('"override" is redundant since function is '
   5695            'already declared as "final"'))
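           # Illustrative (hypothetical) example:
           #   void Draw() final override;   -> "override" reported as redundant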
   5696 
   5697 
   5698 
   5699 
   5700 # Returns true if we are at a new block, and it is directly
   5701 # inside of a namespace.
   5702 def IsBlockInNameSpace(nesting_state, is_forward_declaration):
   5703   """Checks that the new block is directly in a namespace.
   5704 
   5705   Args:
   5706     nesting_state: The _NestingState object that contains info about our state.
   5707     is_forward_declaration: If the class is a forward declared class.
   5708   Returns:
   5709     Whether or not the new block is directly in a namespace.
   5710   """
   5711   if is_forward_declaration:
   5712     if len(nesting_state.stack) >= 1 and (
   5713         isinstance(nesting_state.stack[-1], _NamespaceInfo)):
   5714       return True
   5715     else:
   5716       return False
   5717 
   5718   return (len(nesting_state.stack) > 1 and
   5719           nesting_state.stack[-1].check_namespace_indentation and
   5720           isinstance(nesting_state.stack[-2], _NamespaceInfo))
   5721 
   5722 
   5723 def ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item,
   5724                                     raw_lines_no_comments, linenum):
   5725   """This method determines if we should apply our namespace indentation check.
   5726 
   5727   Args:
   5728     nesting_state: The current nesting state.
   5729     is_namespace_indent_item: If we just put a new class on the stack, True.
   5730       If the top of the stack is not a class, or we did not recently
   5731       add the class, False.
   5732     raw_lines_no_comments: The lines without the comments.
   5733     linenum: The current line number we are processing.
   5734 
   5735   Returns:
   5736     True if we should apply our namespace indentation check. Currently, it
   5737     only works for classes and namespaces inside of a namespace.
   5738   """
   5739 
   5740   is_forward_declaration = IsForwardClassDeclaration(raw_lines_no_comments,
   5741                                                      linenum)
   5742 
   5743   if not (is_namespace_indent_item or is_forward_declaration):
   5744     return False
   5745 
   5746   # If we are in a macro, we do not want to check the namespace indentation.
   5747   if IsMacroDefinition(raw_lines_no_comments, linenum):
   5748     return False
   5749 
   5750   return IsBlockInNameSpace(nesting_state, is_forward_declaration)
   5751 
   5752 
   5753 # Call this method if the line is directly inside of a namespace.
   5754 # If the line above is blank (excluding comments) or the start of
   5755 # an inner namespace, it cannot be indented.
   5756 def CheckItemIndentationInNamespace(filename, raw_lines_no_comments, linenum,
   5757                                     error):
   5758   line = raw_lines_no_comments[linenum]
   5759   if Match(r'^\s+', line):
   5760     error(filename, linenum, 'runtime/indentation_namespace', 4,
   5761           'Do not indent within a namespace')
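           # Illustrative (hypothetical) example: directly inside "namespace foo {",
           #   "  class Bar;"   (indented) is reported, while
           #   "class Bar;"     (flush left) is not.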
   5762 
   5763 
   5764 def ProcessLine(filename, file_extension, clean_lines, line,
   5765                 include_state, function_state, nesting_state, error,
   5766                 extra_check_functions=[]):
   5767   """Processes a single line in the file.
   5768 
   5769   Args:
   5770     filename: Filename of the file that is being processed.
   5771     file_extension: The extension (dot not included) of the file.
   5772     clean_lines: An array of strings, each representing a line of the file,
   5773                  with comments stripped.
   5774     line: Number of line being processed.
   5775     include_state: An _IncludeState instance in which the headers are inserted.
   5776     function_state: A _FunctionState instance which counts function lines, etc.
   5777     nesting_state: A NestingState instance which maintains information about
   5778                    the current stack of nested blocks being parsed.
   5779     error: A callable to which errors are reported, which takes 4 arguments:
   5780            filename, line number, error level, and message
   5781     extra_check_functions: An array of additional check functions that will be
   5782                            run on each source line. Each function takes 4
   5783                            arguments: filename, clean_lines, line, error
   5784   """
   5785   raw_lines = clean_lines.raw_lines
   5786   ParseNolintSuppressions(filename, raw_lines[line], line, error)
   5787   nesting_state.Update(filename, clean_lines, line, error)
   5788   CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line,
   5789                                error)
   5790   if nesting_state.InAsmBlock(): return
   5791   CheckForFunctionLengths(filename, clean_lines, line, function_state, error)
   5792   CheckForMultilineCommentsAndStrings(filename, clean_lines, line, error)
   5793   CheckStyle(filename, clean_lines, line, file_extension, nesting_state, error)
   5794   CheckLanguage(filename, clean_lines, line, file_extension, include_state,
   5795                 nesting_state, error)
   5796   CheckForNonConstReference(filename, clean_lines, line, nesting_state, error)
   5797   CheckForNonStandardConstructs(filename, clean_lines, line,
   5798                                 nesting_state, error)
   5799   CheckVlogArguments(filename, clean_lines, line, error)
   5800   CheckPosixThreading(filename, clean_lines, line, error)
   5801   CheckInvalidIncrement(filename, clean_lines, line, error)
   5802   CheckMakePairUsesDeduction(filename, clean_lines, line, error)
   5803   CheckRedundantVirtual(filename, clean_lines, line, error)
   5804   CheckRedundantOverrideOrFinal(filename, clean_lines, line, error)
   5805   for check_fn in extra_check_functions:
   5806     check_fn(filename, clean_lines, line, error)
   5807 
   5808 def FlagCxx11Features(filename, clean_lines, linenum, error):
   5809   """Flag those c++11 features that we only allow in certain places.
   5810 
   5811   Args:
   5812     filename: The name of the current file.
   5813     clean_lines: A CleansedLines instance containing the file.
   5814     linenum: The number of the line to check.
   5815     error: The function to call with any errors found.
   5816   """
   5817   line = clean_lines.elided[linenum]
   5818 
   5819   include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line)
   5820 
   5821   # Flag unapproved C++ TR1 headers.
   5822   if include and include.group(1).startswith('tr1/'):
   5823     error(filename, linenum, 'build/c++tr1', 5,
   5824           ('C++ TR1 headers such as <%s> are unapproved.') % include.group(1))
   5825 
   5826   # Flag unapproved C++11 headers.
   5827   if include and include.group(1) in ('cfenv',
   5828                                       'condition_variable',
   5829                                       'fenv.h',
   5830                                       'future',
   5831                                       'mutex',
   5832                                       'thread',
   5833                                       'chrono',
   5834                                       'ratio',
   5835                                       'regex',
   5836                                       'system_error',
   5837                                      ):
   5838     error(filename, linenum, 'build/c++11', 5,
   5839           ('<%s> is an unapproved C++11 header.') % include.group(1))
   5840 
   5841   # The only place where we need to worry about C++11 keywords and library
   5842   # features in preprocessor directives is in macro definitions.
   5843   if Match(r'\s*#', line) and not Match(r'\s*#\s*define\b', line): return
   5844 
   5845   # These are classes and free functions.  The classes are always
   5846   # mentioned as std::*, but we only catch the free functions if
   5847   # they're not found by ADL.  They're alphabetical by header.
   5848   for top_name in (
   5849       # type_traits
   5850       'alignment_of',
   5851       'aligned_union',
   5852       ):
   5853     if Search(r'\bstd::%s\b' % top_name, line):
   5854       error(filename, linenum, 'build/c++11', 5,
    5855             ('std::%s is an unapproved C++11 class or function.  Send the '
    5856              'c-style team an example of where it would make your code more '
    5857              'readable, and they may let you use it.') % top_name)
   5858 
   5859 
   5860 def FlagCxx14Features(filename, clean_lines, linenum, error):
   5861   """Flag those C++14 features that we restrict.
   5862 
   5863   Args:
   5864     filename: The name of the current file.
   5865     clean_lines: A CleansedLines instance containing the file.
   5866     linenum: The number of the line to check.
   5867     error: The function to call with any errors found.
   5868   """
   5869   line = clean_lines.elided[linenum]
   5870 
   5871   include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line)
   5872 
   5873   # Flag unapproved C++14 headers.
   5874   if include and include.group(1) in ('scoped_allocator', 'shared_mutex'):
   5875     error(filename, linenum, 'build/c++14', 5,
   5876           ('<%s> is an unapproved C++14 header.') % include.group(1))
   5877 
   5878 
   5879 def ProcessFileData(filename, file_extension, lines, error,
   5880                     extra_check_functions=[]):
   5881   """Performs lint checks and reports any errors to the given error function.
   5882 
   5883   Args:
   5884     filename: Filename of the file that is being processed.
   5885     file_extension: The extension (dot not included) of the file.
   5886     lines: An array of strings, each representing a line of the file, with the
   5887            last element being empty if the file is terminated with a newline.
   5888     error: A callable to which errors are reported, which takes 4 arguments:
   5889            filename, line number, error level, and message
   5890     extra_check_functions: An array of additional check functions that will be
   5891                            run on each source line. Each function takes 4
   5892                            arguments: filename, clean_lines, line, error
   5893   """
   5894   lines = (['// marker so line numbers and indices both start at 1'] + lines +
   5895            ['// marker so line numbers end in a known way'])
   5896 
   5897   include_state = _IncludeState()
   5898   function_state = _FunctionState()
   5899   nesting_state = NestingState()
   5900 
   5901   ResetNolintSuppressions()
   5902 
   5903   CheckForCopyright(filename, lines, error)
   5904   ProcessGlobalSuppresions(lines)
   5905   RemoveMultiLineComments(filename, lines, error)
   5906   clean_lines = CleansedLines(lines)
   5907 
   5908   if IsHeaderExtension(file_extension):
   5909     CheckForHeaderGuard(filename, clean_lines, error)
   5910 
   5911   for line in xrange(clean_lines.NumLines()):
   5912     ProcessLine(filename, file_extension, clean_lines, line,
   5913                 include_state, function_state, nesting_state, error,
   5914                 extra_check_functions)
   5915     FlagCxx11Features(filename, clean_lines, line, error)
   5916   nesting_state.CheckCompletedBlocks(filename, error)
   5917 
   5918   CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error)
   5919 
   5920   # Check that the .cc file has included its header if it exists.
   5921   if _IsSourceExtension(file_extension):
   5922     CheckHeaderFileIncluded(filename, include_state, error)
   5923 
   5924   # We check here rather than inside ProcessLine so that we see raw
   5925   # lines rather than "cleaned" lines.
   5926   CheckForBadCharacters(filename, lines, error)
   5927 
   5928   CheckForNewlineAtEOF(filename, lines, error)
   5929 
   5930 def ProcessConfigOverrides(filename):
   5931   """ Loads the configuration files and processes the config overrides.
   5932 
   5933   Args:
   5934     filename: The name of the file being processed by the linter.
   5935 
   5936   Returns:
   5937     False if the current |filename| should not be processed further.
   5938   """
   5939 
   5940   abs_filename = os.path.abspath(filename)
   5941   cfg_filters = []
   5942   keep_looking = True
   5943   while keep_looking:
   5944     abs_path, base_name = os.path.split(abs_filename)
   5945     if not base_name:
   5946       break  # Reached the root directory.
   5947 
   5948     cfg_file = os.path.join(abs_path, "CPPLINT.cfg")
   5949     abs_filename = abs_path
   5950     if not os.path.isfile(cfg_file):
   5951       continue
   5952 
   5953     try:
   5954       with open(cfg_file) as file_handle:
   5955         for line in file_handle:
   5956           line, _, _ = line.partition('#')  # Remove comments.
   5957           if not line.strip():
   5958             continue
   5959 
   5960           name, _, val = line.partition('=')
   5961           name = name.strip()
   5962           val = val.strip()
   5963           if name == 'set noparent':
   5964             keep_looking = False
   5965           elif name == 'filter':
   5966             cfg_filters.append(val)
   5967           elif name == 'exclude_files':
   5968             # When matching exclude_files pattern, use the base_name of
   5969             # the current file name or the directory name we are processing.
   5970             # For example, if we are checking for lint errors in /foo/bar/baz.cc
   5971             # and we found the .cfg file at /foo/CPPLINT.cfg, then the config
   5972             # file's "exclude_files" filter is meant to be checked against "bar"
   5973             # and not "baz" nor "bar/baz.cc".
   5974             if base_name:
   5975               pattern = re.compile(val)
   5976               if pattern.match(base_name):
   5977                 if _cpplint_state.quiet:
   5978                   # Suppress "Ignoring file" warning when using --quiet.
   5979                   return False
   5980                 sys.stderr.write('Ignoring "%s": file excluded by "%s". '
   5981                                  'File path component "%s" matches '
   5982                                  'pattern "%s"\n' %
   5983                                  (filename, cfg_file, base_name, val))
   5984                 return False
   5985           elif name == 'linelength':
   5986             global _line_length
    5987             try:
    5988               _line_length = int(val)
    5989             except ValueError:
    5990               sys.stderr.write('Line length must be numeric.\n')
   5991           elif name == 'root':
   5992             global _root
   5993             # root directories are specified relative to CPPLINT.cfg dir.
   5994             _root = os.path.join(os.path.dirname(cfg_file), val)
   5995           elif name == 'headers':
   5996             ProcessHppHeadersOption(val)
   5997           else:
   5998             sys.stderr.write(
   5999                 'Invalid configuration option (%s) in file %s\n' %
   6000                 (name, cfg_file))
   6001 
   6002     except IOError:
   6003       sys.stderr.write(
   6004           "Skipping config file '%s': Can't open for reading\n" % cfg_file)
   6005       keep_looking = False
   6006 
   6007   # Apply all the accumulated filters in reverse order (top-level directory
   6008   # config options having the least priority).
    6009   for cfg_filter in reversed(cfg_filters):
    6010     _AddFilters(cfg_filter)
   6011 
   6012   return True
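         # Illustrative (hypothetical) CPPLINT.cfg showing the options parsed above:
         #   set noparent
         #   filter=-build/include_order,+whitespace/braces
         #   exclude_files=.*\.pb\.(h|cc)$
         #   linelength=100
         #   root=src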
   6013 
   6014 
   6015 def ProcessFile(filename, vlevel, extra_check_functions=[]):
   6016   """Does google-lint on a single file.
   6017 
   6018   Args:
   6019     filename: The name of the file to parse.
   6020 
   6021     vlevel: The level of errors to report.  Every error of confidence
   6022     >= verbose_level will be reported.  0 is a good default.
   6023 
   6024     extra_check_functions: An array of additional check functions that will be
   6025                            run on each source line. Each function takes 4
   6026                            arguments: filename, clean_lines, line, error
   6027   """
   6028 
   6029   _SetVerboseLevel(vlevel)
   6030   _BackupFilters()
   6031   old_errors = _cpplint_state.error_count
   6032 
   6033   if not ProcessConfigOverrides(filename):
   6034     _RestoreFilters()
   6035     return
   6036 
   6037   lf_lines = []
   6038   crlf_lines = []
   6039   try:
   6040     # Support the UNIX convention of using "-" for stdin.  Note that
   6041     # we are not opening the file with universal newline support
   6042     # (which codecs doesn't support anyway), so the resulting lines do
   6043     # contain trailing '\r' characters if we are reading a file that
   6044     # has CRLF endings.
   6045     # If after the split a trailing '\r' is present, it is removed
   6046     # below.
   6047     if filename == '-':
   6048       lines = codecs.StreamReaderWriter(sys.stdin,
   6049                                         codecs.getreader('utf8'),
   6050                                         codecs.getwriter('utf8'),
   6051                                         'replace').read().split('\n')
   6052     else:
   6053       lines = codecs.open(filename, 'r', 'utf8', 'replace').read().split('\n')
   6054 
   6055     # Remove trailing '\r'.
   6056     # The -1 accounts for the extra trailing blank line we get from split()
   6057     for linenum in range(len(lines) - 1):
   6058       if lines[linenum].endswith('\r'):
   6059         lines[linenum] = lines[linenum].rstrip('\r')
   6060         crlf_lines.append(linenum + 1)
   6061       else:
   6062         lf_lines.append(linenum + 1)
   6063 
   6064   except IOError:
   6065     sys.stderr.write(
   6066         "Skipping input '%s': Can't open for reading\n" % filename)
   6067     _RestoreFilters()
   6068     return
   6069 
   6070   # Note, if no dot is found, this will give the entire filename as the ext.
   6071   file_extension = filename[filename.rfind('.') + 1:]
   6072 
   6073   # When reading from stdin, the extension is unknown, so no cpplint tests
   6074   # should rely on the extension.
   6075   if filename != '-' and file_extension not in _valid_extensions:
   6076     sys.stderr.write('Ignoring %s; not a valid file name '
   6077                      '(%s)\n' % (filename, ', '.join(_valid_extensions)))
   6078   else:
   6079     ProcessFileData(filename, file_extension, lines, Error,
   6080                     extra_check_functions)
   6081 
   6082     # If end-of-line sequences are a mix of LF and CR-LF, issue
   6083     # warnings on the lines with CR.
   6084     #
   6085     # Don't issue any warnings if all lines are uniformly LF or CR-LF,
   6086     # since critique can handle these just fine, and the style guide
   6087     # doesn't dictate a particular end of line sequence.
   6088     #
   6089     # We can't depend on os.linesep to determine what the desired
   6090     # end-of-line sequence should be, since that will return the
   6091     # server-side end-of-line sequence.
   6092     if lf_lines and crlf_lines:
   6093       # Warn on every line with CR.  An alternative approach might be to
   6094       # check whether the file is mostly CRLF or just LF, and warn on the
    6095       # minority; we bias toward LF here since most tools prefer LF.
   6096       for linenum in crlf_lines:
   6097         Error(filename, linenum, 'whitespace/newline', 1,
   6098               'Unexpected \\r (^M) found; better to use only \\n')
   6099 
   6100   # Suppress printing anything if --quiet was passed unless the error
   6101   # count has increased after processing this file.
   6102   if not _cpplint_state.quiet or old_errors != _cpplint_state.error_count:
   6103     sys.stdout.write('Done processing %s\n' % filename)
   6104   _RestoreFilters()
   6105 
   6106 
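        # A minimal standalone sketch (not used by cpplint) of the
        # mixed-newline detection in ProcessFile() above: lines are split on
        # '\n', a trailing '\r' marks a CRLF line, and warnings are emitted
        # only when both styles appear in the same file.  The helper name
        # below is illustrative, not part of this module.
        #
        #   def _classify_newlines(text):
        #     lf, crlf = [], []
        #     for num, line in enumerate(text.split('\n')[:-1], start=1):
        #       (crlf if line.endswith('\r') else lf).append(num)
        #     return lf, crlf
        #
        #   _classify_newlines('a\r\nb\nc\n')  # ([2, 3], [1]): mixed, so the
        #                                      # CRLF line 1 would be flagged.
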
   6107 def PrintUsage(message):
   6108   """Prints a brief usage string and exits, optionally with an error message.
   6109 
   6110   Args:
   6111     message: The optional error message.
   6112   """
   6113   sys.stderr.write(_USAGE)
   6114   if message:
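            # Passing a string to sys.exit() writes it to stderr and exits
            # with status 1, so both the usage text and the fatal message
            # reach stderr.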
   6115     sys.exit('\nFATAL ERROR: ' + message)
   6116   else:
   6117     sys.exit(1)
   6118 
   6119 
   6120 def PrintCategories():
   6121   """Prints a list of all the error categories used by error messages.
   6122 
   6123   These are the categories used to filter messages via --filter.
   6124   """
   6125   sys.stderr.write(''.join('  %s\n' % cat for cat in _ERROR_CATEGORIES))
   6126   sys.exit(0)
   6127 
   6128 
   6129 def ParseArguments(args):
   6130   """Parses the command line arguments.
   6131 
   6132   This may set the output format, verbosity level, filters, counting
          style, and quiet mode as side effects.
   6133 
   6134   Args:
   6135     args: The command line arguments.
   6136 
   6137   Returns:
   6138     The list of filenames to lint.
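
          Example (illustrative; 'foo.cc' is a hypothetical file name):
            ParseArguments(['--verbose=3', 'foo.cc'])  # returns ['foo.cc']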
   6139   """
   6140   try:
   6141     (opts, filenames) = getopt.getopt(args, '', ['help', 'output=', 'verbose=',
   6142                                                  'counting=',
   6143                                                  'filter=',
   6144                                                  'root=',
   6145                                                  'linelength=',
   6146                                                  'extensions=',
   6147                                                  'headers=',
   6148                                                  'quiet'])
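            # Long options listed with a trailing '=' take a required value;
            # 'help' and 'quiet' are plain flags.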
   6149   except getopt.GetoptError:
   6150     PrintUsage('Invalid arguments.')
   6151 
   6152   verbosity = _VerboseLevel()
   6153   output_format = _OutputFormat()
   6154   filters = ''
   6155   quiet = _Quiet()
   6156   counting_style = ''
   6157 
   6158   for (opt, val) in opts:
   6159     if opt == '--help':
   6160       PrintUsage(None)
   6161     elif opt == '--output':
   6162       if val not in ('emacs', 'vs7', 'eclipse'):
   6163         PrintUsage('The only allowed output formats are emacs, vs7 and eclipse.')
   6164       output_format = val
   6165     elif opt == '--quiet':
   6166       quiet = True
   6167     elif opt == '--verbose':
   6168       verbosity = int(val)
   6169     elif opt == '--filter':
   6170       filters = val
   6171       if not filters:
   6172         PrintCategories()
   6173     elif opt == '--counting':
   6174       if val not in ('total', 'toplevel', 'detailed'):
   6175         PrintUsage('Valid counting options are total, toplevel, and detailed.')
   6176       counting_style = val
   6177     elif opt == '--root':
   6178       global _root
   6179       _root = val
   6180     elif opt == '--linelength':
   6181       global _line_length
   6182       try:
   6183         _line_length = int(val)
   6184       except ValueError:
   6185         PrintUsage('Line length must be digits.')
   6186     elif opt == '--extensions':
   6187       global _valid_extensions
   6188       try:
   6189         _valid_extensions = set(val.split(','))
   6190       except ValueError:
   6191         PrintUsage('Extensions must be a comma-separated list.')
   6192     elif opt == '--headers':
   6193       ProcessHppHeadersOption(val)
   6194 
   6195   if not filenames:
   6196     PrintUsage('No files were specified.')
   6197 
   6198   _SetOutputFormat(output_format)
   6199   _SetQuiet(quiet)
   6200   _SetVerboseLevel(verbosity)
   6201   _SetFilters(filters)
   6202   _SetCountingStyle(counting_style)
   6203 
   6204   return filenames
   6205 
   6206 
   6207 def main():
   6208   filenames = ParseArguments(sys.argv[1:])
   6209 
   6210   # Change stderr to write with replacement characters so we don't die
   6211   # if we try to print something containing non-ASCII characters.
   6212   sys.stderr = codecs.StreamReaderWriter(sys.stderr,
   6213                                          codecs.getreader('utf8'),
   6214                                          codecs.getwriter('utf8'),
   6215                                          'replace')
   6216 
   6217   _cpplint_state.ResetErrorCounts()
   6218   for filename in filenames:
   6219     ProcessFile(filename, _cpplint_state.verbose_level)
   6220   # If --quiet is passed, suppress printing error count unless there are errors.
   6221   if not _cpplint_state.quiet or _cpplint_state.error_count > 0:
   6222     _cpplint_state.PrintErrorCounts()
   6223 
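          # The boolean converts to an exit status: 1 if any errors were
          # reported, 0 otherwise.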
   6224   sys.exit(_cpplint_state.error_count > 0)
   6225 
   6226 
   6227 if __name__ == '__main__':
   6228   main()
   6229