SABER
saber_cpplint.py
1 #!/usr/bin/env python
2 #
3 # Copyright (c) 2009 Google Inc. All rights reserved.
4 #
5 # Redistribution and use in source and binary forms, with or without
6 # modification, are permitted provided that the following conditions are
7 # met:
8 #
9 # * Redistributions of source code must retain the above copyright
10 # notice, this list of conditions and the following disclaimer.
11 # * Redistributions in binary form must reproduce the above
12 # copyright notice, this list of conditions and the following disclaimer
13 # in the documentation and/or other materials provided with the
14 # distribution.
15 # * Neither the name of Google Inc. nor the names of its
16 # contributors may be used to endorse or promote products derived from
17 # this software without specific prior written permission.
18 #
19 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 
31 """Does google-lint on c++ files.
32 
33 The goal of this script is to identify places in the code that *may*
34 be in non-compliance with google style. It does not attempt to fix
35 up these problems -- the point is to educate. It also does not
36 attempt to find all problems, or to ensure that everything it does
37 find is legitimately a problem.
38 
39 In particular, we can get very confused by /* and // inside strings!
40 We do a small hack, which is to ignore //'s with "'s after them on the
41 same line, but it is far from perfect (in either direction).
42 """
43 
44 import codecs
45 import copy
46 import getopt
47 import glob
48 import itertools
49 import math # for log
50 import os
51 import re
52 import sre_compile
53 import string
54 import sys
55 import unicodedata
56 import xml.etree.ElementTree
57 
58 # if empty, use defaults
59 _header_extensions = set([])
60 
61 # if empty, use defaults
62 _valid_extensions = set([])
63 
64 
65 # Files with any of these extensions are considered to be
66 # header files (and will undergo different style checks).
67 # This set can be extended by using the --headers
68 # option (also supported in CPPLINT.cfg)
69 def GetHeaderExtensions():
70  if not _header_extensions:
71  return set(['h', 'hpp', 'hxx', 'h++', 'cuh'])
72  return _header_extensions
73 
74 # The allowed extensions for file names
75 # This is set by --extensions flag
76 def GetAllExtensions():
77  if not _valid_extensions:
78  return GetHeaderExtensions().union(set(['c', 'cc', 'cpp', 'cxx', 'c++', 'cu']))
79  return _valid_extensions
80 
81 def GetNonHeaderExtensions():
82  return GetAllExtensions().difference(GetHeaderExtensions())
83 
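# Example (illustrative): with no --headers or --extensions flags, the
# defaults above give
#   GetHeaderExtensions()    -> {'h', 'hpp', 'hxx', 'h++', 'cuh'}
#   GetNonHeaderExtensions() -> {'c', 'cc', 'cpp', 'cxx', 'c++', 'cu'}
# so a file named "util.cuh" is checked as a header and "util.cu" as a source.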
84 
85 _USAGE = """
86 Syntax: cpplint.py [--verbose=#] [--output=emacs|eclipse|vs7|junit]
87  [--filter=-x,+y,...]
88  [--counting=total|toplevel|detailed] [--repository=path]
89  [--root=subdir] [--linelength=digits] [--recursive]
90  [--exclude=path]
91  [--headers=ext1,ext2]
92  [--extensions=hpp,cpp,...]
93  <file> [file] ...
94 
95  The style guidelines this tries to follow are those in
96  https://google.github.io/styleguide/cppguide.html
97 
98  Every problem is given a confidence score from 1-5, with 5 meaning we are
99  certain of the problem, and 1 meaning it could be a legitimate construct.
100  This will miss some errors, and is not a substitute for a code review.
101 
102  To suppress false-positive errors of a certain category, add a
103  'NOLINT(category)' comment to the line. NOLINT or NOLINT(*)
104  suppresses errors of all categories on that line.
105 
106  The files passed in will be linted; at least one file must be provided.
107  Default linted extensions are %s.
108  Other file types will be ignored.
109  Change the extensions with the --extensions flag.
110 
111  Flags:
112 
113  output=emacs|eclipse|vs7|junit
114  By default, the output is formatted to ease emacs parsing. Output
115  compatible with eclipse (eclipse), Visual Studio (vs7), and JUnit XML
116  parsers (junit), such as those used in Jenkins and Bamboo, may also be
117  produced. Other formats are unsupported.
118 
119  verbose=#
120  Specify a number 0-5 to restrict errors to certain verbosity levels.
121  Errors with lower verbosity levels have lower confidence and are more
122  likely to be false positives.
123 
124  quiet
125  Suppress output other than linting errors, such as information about
126  which files have been processed and excluded.
127 
128  filter=-x,+y,...
129  Specify a comma-separated list of category-filters to apply: only
130  error messages whose category names pass the filters will be printed.
131  (Category names are printed with the message and look like
132  "[whitespace/indent]".) Filters are evaluated left to right.
133  "-FOO" and "FOO" mean "do not print categories that start with FOO".
134  "+FOO" means "do print categories that start with FOO".
135 
136  Examples: --filter=-whitespace,+whitespace/braces
137  --filter=whitespace,runtime/printf,+runtime/printf_format
138  --filter=-,+build/include_what_you_use
139 
140  To see a list of all the categories used in cpplint, pass no arg:
141  --filter=
142 
143  counting=total|toplevel|detailed
144  The total number of errors found is always printed. If
145  'toplevel' is provided, then the count of errors in each of
146  the top-level categories like 'build' and 'whitespace' will
147  also be printed. If 'detailed' is provided, then a count
148  is provided for each category like 'build/class'.
149 
150  repository=path
151  The top level directory of the repository, used to derive the header
152  guard CPP variable. By default, this is determined by searching for a
153  path that contains .git, .hg, or .svn. When this flag is specified, the
154  given path is used instead. This option allows the header guard CPP
155  variable to remain consistent even if members of a team have different
156  repository root directories (such as when checking out a subdirectory
157  with SVN). In addition, users of non-mainstream version control systems
158  can use this flag to ensure readable header guard CPP variables.
159 
160  Examples:
161  Assuming that Alice checks out ProjectName and Bob checks out
162  ProjectName/trunk and trunk contains src/chrome/ui/browser.h, then
163  with no --repository flag, the header guard CPP variable will be:
164 
165  Alice => TRUNK_SRC_CHROME_BROWSER_UI_BROWSER_H_
166  Bob => SRC_CHROME_BROWSER_UI_BROWSER_H_
167 
168  If Alice uses the --repository=trunk flag and Bob omits the flag or
169  uses --repository=. then the header guard CPP variable will be:
170 
171  Alice => SRC_CHROME_BROWSER_UI_BROWSER_H_
172  Bob => SRC_CHROME_BROWSER_UI_BROWSER_H_
173 
174  root=subdir
175  The root directory used for deriving header guard CPP variables. This
176  directory is relative to the top level directory of the repository which
177  by default is determined by searching for a directory that contains .git,
178  .hg, or .svn but can also be controlled with the --repository flag. If
179  the specified directory does not exist, this flag is ignored.
180 
181  Examples:
182  Assuming that src is the top level directory of the repository, the
183  header guard CPP variables for src/chrome/browser/ui/browser.h are:
184 
185  No flag => CHROME_BROWSER_UI_BROWSER_H_
186  --root=chrome => BROWSER_UI_BROWSER_H_
187  --root=chrome/browser => UI_BROWSER_H_
188 
189  linelength=digits
190  This is the allowed line length for the project. The default value is
191  80 characters.
192 
193  Examples:
194  --linelength=120
195 
196  recursive
197  Search for files to lint recursively. Each directory given in the list
198  of files to be linted is replaced by all files that descend from that
199  directory. Files with extensions not in the valid extensions list are
200  excluded.
201 
202  exclude=path
203  Exclude the given path from the list of files to be linted. Relative
204  paths are evaluated relative to the current directory and shell globbing
205  is performed. This flag can be provided multiple times to exclude
206  multiple files.
207 
208  Examples:
209  --exclude=one.cc
210  --exclude=src/*.cc
211  --exclude=src/*.cc --exclude=test/*.cc
212 
213  extensions=extension,extension,...
214  The allowed file extensions that cpplint will check
215 
216  Examples:
217  --extensions=%s
218 
219  headers=extension,extension,...
220  The allowed header extensions that cpplint will consider to be header files
221  (by default, only files with extensions %s
222  will be assumed to be headers)
223 
224  Examples:
225  --headers=%s
226 
227  cpplint.py supports per-directory configurations specified in CPPLINT.cfg
228  files. CPPLINT.cfg file can contain a number of key=value pairs.
229  Currently the following options are supported:
230 
231  set noparent
232  filter=+filter1,-filter2,...
233  exclude_files=regex
234  linelength=80
235  root=subdir
236 
237  The "set noparent" option prevents cpplint from traversing the directory
238  tree upwards looking for more .cfg files in parent directories. This option
239  is usually placed in the top-level project directory.
240 
241  The "filter" option is similar in function to the --filter flag. It specifies
242  message filters in addition to the |_DEFAULT_FILTERS| and those specified
243  through the --filter command-line flag.
244 
245  "exclude_files" allows you to specify a regular expression to be matched
246  against a file name. If the expression matches, the file is skipped and not
247  run through the linter.
248 
249  "linelength" specifies the allowed line length for the project.
250 
251  The "root" option is similar in function to the --root flag (see example
252  above).
253 
254  CPPLINT.cfg has an effect on files in the same directory and all
255  subdirectories, unless overridden by a nested configuration file.
256 
257  Example file:
258  filter=-build/include_order,+build/include_alpha
259  exclude_files=.*\\.cc
260 
261  The above example disables the build/include_order warning, enables
262  build/include_alpha, and excludes all .cc files from being processed by
263  the linter, in the current directory (where the .cfg file is located)
264  and all subdirectories.
265 """ % (list(GetAllExtensions()),
266  ','.join(list(GetAllExtensions())),
267  GetHeaderExtensions(),
268  ','.join(GetHeaderExtensions()))
269 
270 # We categorize each error message we print. Here are the categories.
271 # We want an explicit list so we can list them all in cpplint --filter=.
272 # If you add a new error message with a new category, add it to the list
273 # here! cpplint_unittest.py should tell you if you forget to do this.
274 _ERROR_CATEGORIES = [
275  'build/class',
276  'build/c++11',
277  'build/c++14',
278  'build/c++tr1',
279  'build/deprecated',
280  'build/endif_comment',
281  'build/explicit_make_pair',
282  'build/forward_decl',
283  'build/header_guard',
284  'build/include',
285  'build/include_subdir',
286  'build/include_alpha',
287  'build/include_order',
288  'build/include_what_you_use',
289  'build/namespaces_literals',
290  'build/namespaces',
291  'build/printf_format',
292  'build/storage_class',
293  'legal/copyright',
294  'readability/alt_tokens',
295  'readability/braces',
296  'readability/casting',
297  'readability/check',
298  'readability/constructors',
299  'readability/fn_size',
300  'readability/inheritance',
301  'readability/multiline_comment',
302  'readability/multiline_string',
303  'readability/namespace',
304  'readability/nolint',
305  'readability/nul',
306  'readability/strings',
307  'readability/todo',
308  'readability/utf8',
309  'runtime/arrays',
310  'runtime/casting',
311  'runtime/explicit',
312  'runtime/int',
313  'runtime/init',
314  'runtime/invalid_increment',
315  'runtime/member_string_references',
316  'runtime/memset',
317  'runtime/indentation_namespace',
318  'runtime/operator',
319  'runtime/printf',
320  'runtime/printf_format',
321  'runtime/references',
322  'runtime/string',
323  'runtime/threadsafe_fn',
324  'runtime/vlog',
325  'whitespace/blank_line',
326  'whitespace/braces',
327  'whitespace/comma',
328  'whitespace/comments',
329  'whitespace/empty_conditional_body',
330  'whitespace/empty_if_body',
331  'whitespace/empty_loop_body',
332  'whitespace/end_of_line',
333  'whitespace/ending_newline',
334  'whitespace/forcolon',
335  'whitespace/indent',
336  'whitespace/line_length',
337  'whitespace/newline',
338  'whitespace/operators',
339  'whitespace/parens',
340  'whitespace/semicolon',
341  'whitespace/tab',
342  'whitespace/todo',
343  ]
344 
345 # These error categories are no longer enforced by cpplint, but for backwards-
346 # compatibility they may still appear in NOLINT comments.
347 _LEGACY_ERROR_CATEGORIES = [
348  'readability/streams',
349  'readability/function',
350  ]
351 
352 # The default state of the category filter. This is overridden by the --filter=
353 # flag. By default all errors are on, so only add here categories that should be
354 # off by default (i.e., categories that must be enabled by the --filter= flags).
355 # All entries here should start with a '-' or '+', as in the --filter= flag.
356 _DEFAULT_FILTERS = ['-build/include_alpha']
357 
358 # The default list of categories suppressed for C (not C++) files.
359 _DEFAULT_C_SUPPRESSED_CATEGORIES = [
360  'readability/casting',
361  ]
362 
363 # The default list of categories suppressed for Linux Kernel files.
364 _DEFAULT_KERNEL_SUPPRESSED_CATEGORIES = [
365  'whitespace/tab',
366  ]
367 
368 # We used to check for high-bit characters, but after much discussion we
369 # decided those were OK, as long as they were in UTF-8 and didn't represent
370 # hard-coded international strings, which belong in a separate i18n file.
371 
372 # C++ headers
373 _CPP_HEADERS = frozenset([
374  # Legacy
375  'algobase.h',
376  'algo.h',
377  'alloc.h',
378  'builtinbuf.h',
379  'bvector.h',
380  'complex.h',
381  'defalloc.h',
382  'deque.h',
383  'editbuf.h',
384  'fstream.h',
385  'function.h',
386  'hash_map',
387  'hash_map.h',
388  'hash_set',
389  'hash_set.h',
390  'hashtable.h',
391  'heap.h',
392  'indstream.h',
393  'iomanip.h',
394  'iostream.h',
395  'istream.h',
396  'iterator.h',
397  'list.h',
398  'map.h',
399  'multimap.h',
400  'multiset.h',
401  'ostream.h',
402  'pair.h',
403  'parsestream.h',
404  'pfstream.h',
405  'procbuf.h',
406  'pthread_alloc',
407  'pthread_alloc.h',
408  'rope',
409  'rope.h',
410  'ropeimpl.h',
411  'set.h',
412  'slist',
413  'slist.h',
414  'stack.h',
415  'stdiostream.h',
416  'stl_alloc.h',
417  'stl_relops.h',
418  'streambuf.h',
419  'stream.h',
420  'strfile.h',
421  'strstream.h',
422  'tempbuf.h',
423  'tree.h',
424  'type_traits.h',
425  'vector.h',
426  # 17.6.1.2 C++ library headers
427  'algorithm',
428  'array',
429  'atomic',
430  'bitset',
431  'chrono',
432  'codecvt',
433  'complex',
434  'condition_variable',
435  'deque',
436  'exception',
437  'forward_list',
438  'fstream',
439  'functional',
440  'future',
441  'initializer_list',
442  'iomanip',
443  'ios',
444  'iosfwd',
445  'iostream',
446  'istream',
447  'iterator',
448  'limits',
449  'list',
450  'locale',
451  'map',
452  'memory',
453  'mutex',
454  'new',
455  'numeric',
456  'ostream',
457  'queue',
458  'random',
459  'ratio',
460  'regex',
461  'scoped_allocator',
462  'set',
463  'sstream',
464  'stack',
465  'stdexcept',
466  'streambuf',
467  'string',
468  'strstream',
469  'system_error',
470  'thread',
471  'tuple',
472  'typeindex',
473  'typeinfo',
474  'type_traits',
475  'unordered_map',
476  'unordered_set',
477  'utility',
478  'valarray',
479  'vector',
480  # 17.6.1.2 C++ headers for C library facilities
481  'cassert',
482  'ccomplex',
483  'cctype',
484  'cerrno',
485  'cfenv',
486  'cfloat',
487  'cinttypes',
488  'ciso646',
489  'climits',
490  'clocale',
491  'cmath',
492  'csetjmp',
493  'csignal',
494  'cstdalign',
495  'cstdarg',
496  'cstdbool',
497  'cstddef',
498  'cstdint',
499  'cstdio',
500  'cstdlib',
501  'cstring',
502  'ctgmath',
503  'ctime',
504  'cuchar',
505  'cwchar',
506  'cwctype',
507  ])
508 
509 # Type names
510 _TYPES = re.compile(
511  r'^(?:'
512  # [dcl.type.simple]
513  r'(char(16_t|32_t)?)|wchar_t|'
514  r'bool|short|int|long|signed|unsigned|float|double|'
515  # [support.types]
516  r'(ptrdiff_t|size_t|max_align_t|nullptr_t)|'
517  # [cstdint.syn]
518  r'(u?int(_fast|_least)?(8|16|32|64)_t)|'
519  r'(u?int(max|ptr)_t)|'
520  r')$')
521 
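# Example (illustrative): _TYPES.match('uint64_t') and _TYPES.match('size_t')
# succeed, while _TYPES.match('std::string') returns None, so this pattern
# only recognizes builtin and <cstdint>-style type names.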
522 
523 # These headers are excluded from [build/include] and [build/include_order]
524 # checks:
525 # - Anything not following google file name conventions (containing an
526 # uppercase character, such as Python.h or nsStringAPI.h, for example).
527 # - Lua headers.
528 _THIRD_PARTY_HEADERS_PATTERN = re.compile(
529  r'^(?:[^/]*[A-Z][^/]*\.h|lua\.h|lauxlib\.h|lualib\.h)$')
530 
531 # Pattern for matching FileInfo.BaseName() against test file name
532 _test_suffixes = ['_test', '_regtest', '_unittest']
533 _TEST_FILE_SUFFIX = '(' + '|'.join(_test_suffixes) + r')$'
534 
535 # Pattern that matches only complete whitespace, possibly across multiple lines.
536 _EMPTY_CONDITIONAL_BODY_PATTERN = re.compile(r'^\s*$', re.DOTALL)
537 
538 # Assertion macros. These are defined in base/logging.h and
539 # testing/base/public/gunit.h.
540 _CHECK_MACROS = [
541  'DCHECK', 'CHECK',
542  'EXPECT_TRUE', 'ASSERT_TRUE',
543  'EXPECT_FALSE', 'ASSERT_FALSE',
544  ]
545 
546 # Replacement macros for CHECK/DCHECK/EXPECT_TRUE/EXPECT_FALSE
547 _CHECK_REPLACEMENT = dict([(macro_var, {}) for macro_var in _CHECK_MACROS])
548 
549 for op, replacement in [('==', 'EQ'), ('!=', 'NE'),
550  ('>=', 'GE'), ('>', 'GT'),
551  ('<=', 'LE'), ('<', 'LT')]:
552  _CHECK_REPLACEMENT['DCHECK'][op] = 'DCHECK_%s' % replacement
553  _CHECK_REPLACEMENT['CHECK'][op] = 'CHECK_%s' % replacement
554  _CHECK_REPLACEMENT['EXPECT_TRUE'][op] = 'EXPECT_%s' % replacement
555  _CHECK_REPLACEMENT['ASSERT_TRUE'][op] = 'ASSERT_%s' % replacement
556 
557 for op, inv_replacement in [('==', 'NE'), ('!=', 'EQ'),
558  ('>=', 'LT'), ('>', 'LE'),
559  ('<=', 'GT'), ('<', 'GE')]:
560  _CHECK_REPLACEMENT['EXPECT_FALSE'][op] = 'EXPECT_%s' % inv_replacement
561  _CHECK_REPLACEMENT['ASSERT_FALSE'][op] = 'ASSERT_%s' % inv_replacement
562 
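# Example (illustrative): the tables built above give
#   _CHECK_REPLACEMENT['CHECK']['==']        -> 'CHECK_EQ'
#   _CHECK_REPLACEMENT['EXPECT_FALSE']['=='] -> 'EXPECT_NE'
# so a later check can suggest CHECK_EQ(a, b) instead of CHECK(a == b), and
# EXPECT_NE(a, b) instead of EXPECT_FALSE(a == b).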
563 # Alternative tokens and their replacements. For full list, see section 2.5
564 # Alternative tokens [lex.digraph] in the C++ standard.
565 #
566 # Digraphs (such as '%:') are not included here since it's a mess to
567 # match those on a word boundary.
568 _ALT_TOKEN_REPLACEMENT = {
569  'and': '&&',
570  'bitor': '|',
571  'or': '||',
572  'xor': '^',
573  'compl': '~',
574  'bitand': '&',
575  'and_eq': '&=',
576  'or_eq': '|=',
577  'xor_eq': '^=',
578  'not': '!',
579  'not_eq': '!='
580  }
581 
582 # Compile regular expression that matches all the above keywords. The "[ =()]"
583 # bit is meant to avoid matching these keywords outside of boolean expressions.
584 #
585 # False positives include C-style multi-line comments and multi-line strings
586 # but those have always been troublesome for cpplint.
587 _ALT_TOKEN_REPLACEMENT_PATTERN = re.compile(
588  r'[ =()](' + ('|'.join(_ALT_TOKEN_REPLACEMENT.keys())) + r')(?=[ (]|$)')
589 
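# Example (illustrative): in "if (a and b)" the pattern above matches "and"
# (preceded by a space, followed by a space), and _ALT_TOKEN_REPLACEMENT['and']
# is '&&', so the alternative-token check can suggest "if (a && b)" instead.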
590 
591 # These constants define types of headers for use with
592 # _IncludeState.CheckNextIncludeOrder().
593 _C_SYS_HEADER = 1
594 _CPP_SYS_HEADER = 2
595 _LIKELY_MY_HEADER = 3
596 _POSSIBLE_MY_HEADER = 4
597 _OTHER_HEADER = 5
598 
599 # These constants define the current inline assembly state
600 _NO_ASM = 0 # Outside of inline assembly block
601 _INSIDE_ASM = 1 # Inside inline assembly block
602 _END_ASM = 2 # Last line of inline assembly block
603 _BLOCK_ASM = 3 # The whole block is an inline assembly block
604 
605 # Match start of assembly blocks
606 _MATCH_ASM = re.compile(r'^\s*(?:asm|_asm|__asm|__asm__)'
607  r'(?:\s+(volatile|__volatile__))?'
608  r'\s*[{(]')
609 
610 # Match strings that indicate we're working on a C (not C++) file.
611 _SEARCH_C_FILE = re.compile(r'\b(?:LINT_C_FILE|'
612  r'vim?:\s*.*(\s*|:)filetype=c(\s*|:|$))')
613 
614 # Match string that indicates we're working on a Linux Kernel file.
615 _SEARCH_KERNEL_FILE = re.compile(r'\b(?:LINT_KERNEL_FILE)')
616 
617 _regexp_compile_cache = {}
618 
619 # {str, set(int)}: a map from error categories to sets of linenumbers
620 # on which those errors are expected and should be suppressed.
621 _error_suppressions = {}
622 
623 # The root directory used for deriving header guard CPP variable.
624 # This is set by --root flag.
625 _root = None
626 
627 # The top level repository directory. If set, _root is calculated relative to
628 # this directory instead of the directory containing version control artifacts.
629 # This is set by the --repository flag.
630 _repository = None
631 
632 # Files to exclude from linting. This is set by the --exclude flag.
633 _excludes = None
634 
636 # Whether to suppress PrintInfo messages
636 _quiet = False
637 
638 # The allowed line length of files.
639 # This is set by --linelength flag.
640 _line_length = 80
641 
642 try:
643  xrange(1, 0)
644 except NameError:
645  # -- pylint: disable=redefined-builtin
646  xrange = range
647 
648 try:
649  unicode
650 except NameError:
651  # -- pylint: disable=redefined-builtin
652  basestring = unicode = str
653 
654 try:
655  long(2)
656 except NameError:
657  # -- pylint: disable=redefined-builtin
658  long = int
659 
660 if sys.version_info < (3,):
661  # -- pylint: disable=no-member
662  # BINARY_TYPE = str
663  itervalues = dict.itervalues
664  iteritems = dict.iteritems
665 else:
666  # BINARY_TYPE = bytes
667  itervalues = dict.values
668  iteritems = dict.items
669 
670 def unicode_escape_decode(x):
671  if sys.version_info < (3,):
672  return codecs.unicode_escape_decode(x)[0]
673  else:
674  return x
675 
676 # {str, bool}: a map from error categories to booleans which indicate if the
677 # category should be suppressed for every line.
678 _global_error_suppressions = {}
679 
680 
681 
682 
683 def ParseNolintSuppressions(filename, raw_line, linenum, error):
684  """Updates the global list of line error-suppressions.
685 
686  Parses any NOLINT comments on the current line, updating the global
687  error_suppressions store. Reports an error if the NOLINT comment
688  was malformed.
689 
690  Args:
691  filename: str, the name of the input file.
692  raw_line: str, the line of input text, with comments.
693  linenum: int, the number of the current line.
694  error: function, an error handler.
695  """
696  matched = Search(r'\bNOLINT(NEXTLINE)?\b(\([^)]+\))?', raw_line)
697  if matched:
698  if matched.group(1):
699  suppressed_line = linenum + 1
700  else:
701  suppressed_line = linenum
702  category = matched.group(2)
703  if category in (None, '(*)'): # => "suppress all"
704  _error_suppressions.setdefault(None, set()).add(suppressed_line)
705  else:
706  if category.startswith('(') and category.endswith(')'):
707  category = category[1:-1]
708  if category in _ERROR_CATEGORIES:
709  _error_suppressions.setdefault(category, set()).add(suppressed_line)
710  elif category not in _LEGACY_ERROR_CATEGORIES:
711  error(filename, linenum, 'readability/nolint', 5,
712  'Unknown NOLINT error category: %s' % category)
713 
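# Example (illustrative): for a source line such as
#   int x = (int)y;  // NOLINT(readability/casting)
# this adds the line number to _error_suppressions['readability/casting'].
# A bare "// NOLINT" or "// NOLINT(*)" stores the line under the None key,
# suppressing every category, and "// NOLINTNEXTLINE(...)" suppresses the
# following line instead of the current one.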
714 
715 def ProcessGlobalSuppresions(lines):
716  """Updates the list of global error suppressions.
717 
718  Parses any lint directives in the file that have global effect.
719 
720  Args:
721  lines: An array of strings, each representing a line of the file, with the
722  last element being empty if the file is terminated with a newline.
723  """
724  for line in lines:
725  if _SEARCH_C_FILE.search(line):
726  for category in _DEFAULT_C_SUPPRESSED_CATEGORIES:
727  _global_error_suppressions[category] = True
728  if _SEARCH_KERNEL_FILE.search(line):
729  for category in _DEFAULT_KERNEL_SUPPRESSED_CATEGORIES:
730  _global_error_suppressions[category] = True
731 
732 
733 def ResetNolintSuppressions():
734  """Resets the set of NOLINT suppressions to empty."""
735  _error_suppressions.clear()
736  _global_error_suppressions.clear()
737 
738 
739 def IsErrorSuppressedByNolint(category, linenum):
740  """Returns true if the specified error category is suppressed on this line.
741 
742  Consults the global error_suppressions map populated by
743  ParseNolintSuppressions/ProcessGlobalSuppresions/ResetNolintSuppressions.
744 
745  Args:
746  category: str, the category of the error.
747  linenum: int, the current line number.
748  Returns:
749  bool, True iff the error should be suppressed due to a NOLINT comment or
750  global suppression.
751  """
752  return (_global_error_suppressions.get(category, False) or
753  linenum in _error_suppressions.get(category, set()) or
754  linenum in _error_suppressions.get(None, set()))
755 
756 
757 def Match(pattern, s):
758  """Matches the string with the pattern, caching the compiled regexp."""
759  # The regexp compilation caching is inlined in both Match and Search for
760  # performance reasons; factoring it out into a separate function turns out
761  # to be noticeably expensive.
762  if pattern not in _regexp_compile_cache:
763  _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
764  return _regexp_compile_cache[pattern].match(s)
765 
766 
767 def ReplaceAll(pattern, rep, s):
768  """Replaces instances of pattern in a string with a replacement.
769 
770  The compiled regex is kept in a cache shared by Match and Search.
771 
772  Args:
773  pattern: regex pattern
774  rep: replacement text
775  s: search string
776 
777  Returns:
778  string with replacements made (or original string if no replacements)
779  """
780  if pattern not in _regexp_compile_cache:
781  _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
782  return _regexp_compile_cache[pattern].sub(rep, s)
783 
784 
785 def Search(pattern, s):
786  """Searches the string for the pattern, caching the compiled regexp."""
787  if pattern not in _regexp_compile_cache:
788  _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
789  return _regexp_compile_cache[pattern].search(s)
790 
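# Example (illustrative): Match anchors at the start of the string while
# Search scans the whole string, so
#   Match(r'\d+', 'abc123')  -> None
#   Search(r'\d+', 'abc123') -> match object for '123'
# All three helpers share _regexp_compile_cache, so each pattern is compiled
# at most once per run.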
791 
792 def _IsSourceExtension(s):
793  """File extension (excluding dot) matches a source file extension."""
794  return s in GetNonHeaderExtensions()
795 
796 
797 class _IncludeState(object):
798  """Tracks line numbers for includes, and the order in which includes appear.
799 
800  include_list contains list of lists of (header, line number) pairs.
801  It's a lists of lists rather than just one flat list to make it
802  easier to update across preprocessor boundaries.
803 
804  Call CheckNextIncludeOrder() once for each header in the file, passing
805  in the type constants defined above. Calls in an illegal order will
806  raise an _IncludeError with an appropriate error message.
807 
808  """
809  # self._section will move monotonically through this set. If it ever
810  # needs to move backwards, CheckNextIncludeOrder will raise an error.
811  _INITIAL_SECTION = 0
812  _MY_H_SECTION = 1
813  _C_SECTION = 2
814  _CPP_SECTION = 3
815  _OTHER_H_SECTION = 4
816 
817  _TYPE_NAMES = {
818  _C_SYS_HEADER: 'C system header',
819  _CPP_SYS_HEADER: 'C++ system header',
820  _LIKELY_MY_HEADER: 'header this file implements',
821  _POSSIBLE_MY_HEADER: 'header this file may implement',
822  _OTHER_HEADER: 'other header',
823  }
824  _SECTION_NAMES = {
825  _INITIAL_SECTION: "... nothing. (This can't be an error.)",
826  _MY_H_SECTION: 'a header this file implements',
827  _C_SECTION: 'C system header',
828  _CPP_SECTION: 'C++ system header',
829  _OTHER_H_SECTION: 'other header',
830  }
831 
832  def __init__(self):
833  self.include_list = [[]]
834  self._section = None
835  self._last_header = None
836  self.ResetSection('')
837 
838  def FindHeader(self, header):
839  """Check if a header has already been included.
840 
841  Args:
842  header: header to check.
843  Returns:
844  Line number of previous occurrence, or -1 if the header has not
845  been seen before.
846  """
847  for section_list in self.include_list:
848  for f in section_list:
849  if f[0] == header:
850  return f[1]
851  return -1
852 
853  def ResetSection(self, directive):
854  """Reset section checking for preprocessor directive.
855 
856  Args:
857  directive: preprocessor directive (e.g. "if", "else").
858  """
859  # The name of the current section.
860  self._section = self._INITIAL_SECTION
861  # The path of last found header.
862  self._last_header = ''
863 
864  # Update list of includes. Note that we never pop from the
865  # include list.
866  if directive in ('if', 'ifdef', 'ifndef'):
867  self.include_list.append([])
868  elif directive in ('else', 'elif'):
869  self.include_list[-1] = []
870 
871  def SetLastHeader(self, header_path):
872  self._last_header = header_path
873 
874  def CanonicalizeAlphabeticalOrder(self, header_path):
875  """Returns a path canonicalized for alphabetical comparison.
876 
877  - replaces "-" with "_" so they both cmp the same.
878  - removes '-inl' since we don't require them to be after the main header.
879  - lowercase everything, just in case.
880 
881  Args:
882  header_path: Path to be canonicalized.
883 
884  Returns:
885  Canonicalized path.
886  """
887  return header_path.replace('-inl.h', '.h').replace('-', '_').lower()
888 
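# Example (illustrative): this canonicalization maps 'Foo-Bar-inl.h' to
# 'foo_bar.h', the same result as for 'foo_bar.h' itself, so the two spellings
# compare as equal when include order is checked alphabetically.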
889  def IsInAlphabeticalOrder(self, clean_lines, linenum, header_path):
890  """Check if a header is in alphabetical order with the previous header.
891 
892  Args:
893  clean_lines: A CleansedLines instance containing the file.
894  linenum: The number of the line to check.
895  header_path: Canonicalized header to be checked.
896 
897  Returns:
898  Returns true if the header is in alphabetical order.
899  """
900  # If previous section is different from current section, _last_header will
901  # be reset to empty string, so it's always less than current header.
902  #
903  # If previous line was a blank line, assume that the headers are
904  # intentionally sorted the way they are.
905  if (self._last_header > header_path and
906  Match(r'^\s*#\s*include\b', clean_lines.elided[linenum - 1])):
907  return False
908  return True
909 
910  def CheckNextIncludeOrder(self, header_type):
911  """Returns a non-empty error message if the next header is out of order.
912 
913  This function also updates the internal state to be ready to check
914  the next include.
915 
916  Args:
917  header_type: One of the _XXX_HEADER constants defined above.
918 
919  Returns:
920  The empty string if the header is in the right order, or an
921  error message describing what's wrong.
922 
923  """
924  error_message = ('Found %s after %s' %
925  (self._TYPE_NAMES[header_type],
926  self._SECTION_NAMES[self._section]))
927 
928  last_section = self._section
929 
930  if header_type == _C_SYS_HEADER:
931  if self._section <= self._C_SECTION:
932  self._section = self._C_SECTION
933  else:
934  self._last_header = ''
935  return error_message
936  elif header_type == _CPP_SYS_HEADER:
937  if self._section <= self._CPP_SECTION:
938  self._section = self._CPP_SECTION
939  else:
940  self._last_header = ''
941  return error_message
942  elif header_type == _LIKELY_MY_HEADER:
943  if self._section <= self._MY_H_SECTION:
944  self._section = self._MY_H_SECTION
945  else:
946  self._section = self._OTHER_H_SECTION
947  elif header_type == _POSSIBLE_MY_HEADER:
948  if self._section <= self._MY_H_SECTION:
949  self._section = self._MY_H_SECTION
950  else:
951  # This will always be the fallback because we're not sure
952  # enough that the header is associated with this file.
953  self._section = self._OTHER_H_SECTION
954  else:
955  assert header_type == _OTHER_HEADER
956  self._section = self._OTHER_H_SECTION
957 
958  if last_section != self._section:
959  self._last_header = ''
960 
961  return ''
962 
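# Example (illustrative): if a C++ system header such as <vector> has already
# advanced the state to _CPP_SECTION and the next include is a C system header
# such as <stdio.h>, CheckNextIncludeOrder returns
#   'Found C system header after C++ system header'
# assembled from _TYPE_NAMES and _SECTION_NAMES above.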
963 
964 class _CppLintState(object):
965  """Maintains module-wide state."""
966 
967  def __init__(self):
968  self.verbose_level = 1 # global setting.
969  self.error_count = 0 # global count of reported errors
970  # filters to apply when emitting error messages
971  self.filters = _DEFAULT_FILTERS[:]
972  # backup of filter list. Used to restore the state after each file.
973  self._filters_backup = self.filters[:]
974  self.counting = 'total' # In what way are we counting errors?
975  self.errors_by_category = {} # string to int dict storing error counts
976 
977  # output format:
978  # "emacs" - format that emacs can parse (default)
979  # "eclipse" - format that eclipse can parse
980  # "vs7" - format that Microsoft Visual Studio 7 can parse
981  # "junit" - format that Jenkins, Bamboo, etc can parse
982  self.output_format = 'emacs'
983 
984  # For JUnit output, save errors and failures until the end so that they
985  # can be written into the XML
986  self._junit_errors = []
987  self._junit_failures = []
988 
989  def SetOutputFormat(self, output_format):
990  """Sets the output format for errors."""
991  self.output_format = output_format
992 
993  def SetVerboseLevel(self, level):
994  """Sets the module's verbosity, and returns the previous setting."""
995  last_verbose_level = self.verbose_level
996  self.verbose_level = level
997  return last_verbose_level
998 
999  def SetCountingStyle(self, counting_style):
1000  """Sets the module's counting options."""
1001  self.counting = counting_style
1002 
1003  def SetFilters(self, filters):
1004  """Sets the error-message filters.
1005 
1006  These filters are applied when deciding whether to emit a given
1007  error message.
1008 
1009  Args:
1010  filters: A string of comma-separated filters (eg "+whitespace/indent").
1011  Each filter should start with + or -; else we die.
1012 
1013  Raises:
1014  ValueError: The comma-separated filters did not all start with '+' or '-'.
1015  E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter"
1016  """
1017  # Default filters always have less priority than the flag ones.
1018  self.filters = _DEFAULT_FILTERS[:]
1019  self.AddFilters(filters)
1020 
1021  def AddFilters(self, filters):
1022  """ Adds more filters to the existing list of error-message filters. """
1023  for filt in filters.split(','):
1024  clean_filt = filt.strip()
1025  if clean_filt:
1026  self.filters.append(clean_filt)
1027  for filt in self.filters:
1028  if not (filt.startswith('+') or filt.startswith('-')):
1029  raise ValueError('Every filter in --filters must start with + or -'
1030  ' (%s does not)' % filt)
1031 
1032  def BackupFilters(self):
1033  """ Saves the current filter list to backup storage."""
1034  self._filters_backup = self.filters[:]
1035 
1036  def RestoreFilters(self):
1037  """ Restores filters previously backed up."""
1038  self.filters = self._filters_backup[:]
1039 
1040  def ResetErrorCounts(self):
1041  """Sets the module's error statistic back to zero."""
1042  self.error_count = 0
1043  self.errors_by_category = {}
1044 
1045  def IncrementErrorCount(self, category):
1046  """Bumps the module's error statistic."""
1047  self.error_count += 1
1048  if self.counting in ('toplevel', 'detailed'):
1049  if self.counting != 'detailed':
1050  category = category.split('/')[0]
1051  if category not in self.errors_by_category:
1052  self.errors_by_category[category] = 0
1053  self.errors_by_category[category] += 1
1054 
1055  def PrintErrorCounts(self):
1056  """Print a summary of errors by category, and the total."""
1057  for category, count in sorted(iteritems(self.errors_by_category)):
1058  self.PrintInfo('Category \'%s\' errors found: %d\n' %
1059  (category, count))
1060  if self.error_count > 0:
1061  self.PrintInfo('Total errors found: %d\n' % self.error_count)
1062 
1063  def PrintInfo(self, message):
1064  if not _quiet and self.output_format != 'junit':
1065  sys.stderr.write(message)
1066 
1067  def PrintError(self, message):
1068  if self.output_format == 'junit':
1069  self._junit_errors.append(message)
1070  else:
1071  sys.stderr.write(message)
1072 
1073  def AddJUnitFailure(self, filename, linenum, message, category, confidence):
1074  self._junit_failures.append((filename, linenum, message, category,
1075  confidence))
1076 
1077  def FormatJUnitXML(self):
1078  num_errors = len(self._junit_errors)
1079  num_failures = len(self._junit_failures)
1080 
1081  testsuite = xml.etree.ElementTree.Element('testsuite')
1082  testsuite.attrib['name'] = 'cpplint'
1083  testsuite.attrib['errors'] = str(num_errors)
1084  testsuite.attrib['failures'] = str(num_failures)
1085 
1086  if num_errors == 0 and num_failures == 0:
1087  testsuite.attrib['tests'] = str(1)
1088  xml.etree.ElementTree.SubElement(testsuite, 'testcase', name='passed')
1089 
1090  else:
1091  testsuite.attrib['tests'] = str(num_errors + num_failures)
1092  if num_errors > 0:
1093  testcase = xml.etree.ElementTree.SubElement(testsuite, 'testcase')
1094  testcase.attrib['name'] = 'errors'
1095  error = xml.etree.ElementTree.SubElement(testcase, 'error')
1096  error.text = '\n'.join(self._junit_errors)
1097  if num_failures > 0:
1098  # Group failures by file
1099  failed_file_order = []
1100  failures_by_file = {}
1101  for failure in self._junit_failures:
1102  failed_file = failure[0]
1103  if failed_file not in failed_file_order:
1104  failed_file_order.append(failed_file)
1105  failures_by_file[failed_file] = []
1106  failures_by_file[failed_file].append(failure)
1107  # Create a testcase for each file
1108  for failed_file in failed_file_order:
1109  failures = failures_by_file[failed_file]
1110  testcase = xml.etree.ElementTree.SubElement(testsuite, 'testcase')
1111  testcase.attrib['name'] = failed_file
1112  failure = xml.etree.ElementTree.SubElement(testcase, 'failure')
1113  template = '{0}: {1} [{2}] [{3}]'
1114  texts = [template.format(f[1], f[2], f[3], f[4]) for f in failures]
1115  failure.text = '\n'.join(texts)
1116 
1117  xml_decl = '<?xml version="1.0" encoding="UTF-8" ?>\n'
1118  return xml_decl + xml.etree.ElementTree.tostring(testsuite, 'utf-8').decode('utf-8')
1119 
1120 
1121 _cpplint_state = _CppLintState()
1122 
1123 
1124 def _OutputFormat():
1125  """Gets the module's output format."""
1126  return _cpplint_state.output_format
1127 
1128 
1129 def _SetOutputFormat(output_format):
1130  """Sets the module's output format."""
1131  _cpplint_state.SetOutputFormat(output_format)
1132 
1133 
1134 def _VerboseLevel():
1135  """Returns the module's verbosity setting."""
1136  return _cpplint_state.verbose_level
1137 
1138 
1139 def _SetVerboseLevel(level):
1140  """Sets the module's verbosity, and returns the previous setting."""
1141  return _cpplint_state.SetVerboseLevel(level)
1142 
1143 
1144 def _SetCountingStyle(level):
1145  """Sets the module's counting options."""
1146  _cpplint_state.SetCountingStyle(level)
1147 
1148 
1149 def _Filters():
1150  """Returns the module's list of output filters, as a list."""
1151  return _cpplint_state.filters
1152 
1153 
1154 def _SetFilters(filters):
1155  """Sets the module's error-message filters.
1156 
1157  These filters are applied when deciding whether to emit a given
1158  error message.
1159 
1160  Args:
1161  filters: A string of comma-separated filters (eg "whitespace/indent").
1162  Each filter should start with + or -; else we die.
1163  """
1164  _cpplint_state.SetFilters(filters)
1165 
1166 def _AddFilters(filters):
1167  """Adds more filter overrides.
1168 
1169  Unlike _SetFilters, this function does not reset the current list of filters
1170  available.
1171 
1172  Args:
1173  filters: A string of comma-separated filters (eg "whitespace/indent").
1174  Each filter should start with + or -; else we die.
1175  """
1176  _cpplint_state.AddFilters(filters)
1177 
1178 def _BackupFilters():
1179  """ Saves the current filter list to backup storage."""
1180  _cpplint_state.BackupFilters()
1181 
1182 def _RestoreFilters():
1183  """ Restores filters previously backed up."""
1184  _cpplint_state.RestoreFilters()
1185 
1186 class _FunctionState(object):
1187  """Tracks current function name and the number of lines in its body."""
1188 
1189  _NORMAL_TRIGGER = 250 # for --v=0, 500 for --v=1, etc.
1190  _TEST_TRIGGER = 400 # about 60% more than _NORMAL_TRIGGER.
1191 
1192  def __init__(self):
1193  self.in_a_function = False
1194  self.lines_in_function = 0
1195  self.current_function = ''
1196 
1197  def Begin(self, function_name):
1198  """Start analyzing function body.
1199 
1200  Args:
1201  function_name: The name of the function being tracked.
1202  """
1203  self.in_a_function = True
1204  self.lines_in_function = 0
1205  self.current_function = function_name
1206 
1207  def Count(self):
1208  """Count line in current function body."""
1209  if self.in_a_function:
1210  self.lines_in_function += 1
1211 
1212  def Check(self, error, filename, linenum):
1213  """Report if too many lines in function body.
1214 
1215  Args:
1216  error: The function to call with any errors found.
1217  filename: The name of the current file.
1218  linenum: The number of the line to check.
1219  """
1220  if not self.in_a_function:
1221  return
1222 
1223  if Match(r'T(EST|est)', self.current_function):
1224  base_trigger = self._TEST_TRIGGER
1225  else:
1226  base_trigger = self._NORMAL_TRIGGER
1227  trigger = base_trigger * 2**_VerboseLevel()
1228 
1229  if self.lines_in_function > trigger:
1230  error_level = int(math.log(self.lines_in_function / base_trigger, 2))
1231  # For the default base trigger of 250: 250 => 0, 500 => 1, 1000 => 2, 2000 => 3, 4000 => 4, 8000 => 5, ...
1232  if error_level > 5:
1233  error_level = 5
1234  error(filename, linenum, 'readability/fn_size', error_level,
1235  'Small and focused functions are preferred:'
1236  ' %s has %d non-comment lines'
1237  ' (error triggered by exceeding %d lines).' % (
1238  self.current_function, self.lines_in_function, trigger))
1239 
1240  def End(self):
1241  """Stop analyzing function body."""
1242  self.in_a_function = False
1243 
1244 
1245 class _IncludeError(Exception):
1246  """Indicates a problem with the include order in a file."""
1247  pass
1248 
1249 
1250 class FileInfo(object):
1251  """Provides utility functions for filenames.
1252 
1253  FileInfo provides easy access to the components of a file's path
1254  relative to the project root.
1255  """
1256 
1257  def __init__(self, filename):
1258  self._filename = filename
1259 
1260  def FullName(self):
1261  """Make Windows paths like Unix."""
1262  return os.path.abspath(self._filename).replace('\\', '/')
1263 
1264  def RepositoryName(self):
1265  r"""FullName after removing the local path to the repository.
1266 
1267  If we have a real absolute path name here we can try to do something smart:
1268  detecting the root of the checkout and truncating /path/to/checkout from
1269  the name so that we get header guards that don't include things like
1270  "C:\Documents and Settings\..." or "/home/username/..." in them and thus
1271  people on different computers who have checked the source out to different
1272  locations won't see bogus errors.
1273  """
1274  fullname = self.FullName()
1275 
1276  if os.path.exists(fullname):
1277  project_dir = os.path.dirname(fullname)
1278 
1279  # If the user specified a repository path, it exists, and the file is
1280  # contained in it, use the specified repository path
1281  if _repository:
1282  repo = FileInfo(_repository).FullName()
1283  root_dir = project_dir
1284  while os.path.exists(root_dir):
1285  # allow case insensitive compare on Windows
1286  if os.path.normcase(root_dir) == os.path.normcase(repo):
1287  return os.path.relpath(fullname, root_dir).replace('\\', '/')
1288  one_up_dir = os.path.dirname(root_dir)
1289  if one_up_dir == root_dir:
1290  break
1291  root_dir = one_up_dir
1292 
1293  if os.path.exists(os.path.join(project_dir, ".svn")):
1294  # If there's a .svn file in the current directory, we recursively look
1295  # up the directory tree for the top of the SVN checkout
1296  root_dir = project_dir
1297  one_up_dir = os.path.dirname(root_dir)
1298  while os.path.exists(os.path.join(one_up_dir, ".svn")):
1299  root_dir = os.path.dirname(root_dir)
1300  one_up_dir = os.path.dirname(one_up_dir)
1301 
1302  prefix = os.path.commonprefix([root_dir, project_dir])
1303  return fullname[len(prefix) + 1:]
1304 
1305  # Not SVN <= 1.6? Try to find a git, hg, or svn top level directory by
1306  # searching up from the current path.
1307  root_dir = current_dir = os.path.dirname(fullname)
1308  while current_dir != os.path.dirname(current_dir):
1309  if (os.path.exists(os.path.join(current_dir, ".git")) or
1310  os.path.exists(os.path.join(current_dir, ".hg")) or
1311  os.path.exists(os.path.join(current_dir, ".svn"))):
1312  root_dir = current_dir
1313  break
1314  current_dir = os.path.dirname(current_dir)
1315 
1316  if (os.path.exists(os.path.join(root_dir, ".git")) or
1317  os.path.exists(os.path.join(root_dir, ".hg")) or
1318  os.path.exists(os.path.join(root_dir, ".svn"))):
1319  prefix = os.path.commonprefix([root_dir, project_dir])
1320  return fullname[len(prefix) + 1:]
1321 
1322  # Don't know what to do; header guard warnings may be wrong...
1323  return fullname
1324 
1325  def Split(self):
1326  """Splits the file into the directory, basename, and extension.
1327 
1328  For 'chrome/browser/browser.cc', Split() would
1329  return ('chrome/browser', 'browser', '.cc')
1330 
1331  Returns:
1332  A tuple of (directory, basename, extension).
1333  """
1334 
1335  googlename = self.RepositoryName()
1336  project, rest = os.path.split(googlename)
1337  return (project,) + os.path.splitext(rest)
1338 
1339  def BaseName(self):
1340  """File base name - text after the final slash, before the final period."""
1341  return self.Split()[1]
1342 
1343  def Extension(self):
1344  """File extension - text following the final period, includes that period."""
1345  return self.Split()[2]
1346 
1347  def NoExtension(self):
1348  """File path with the source file extension removed."""
1349  return '/'.join(self.Split()[0:2])
1350 
1351  def IsSource(self):
1352  """File has a source file extension."""
1353  return _IsSourceExtension(self.Extension()[1:])
1354 
1355 
1356 def _ShouldPrintError(category, confidence, linenum):
1357  """If confidence >= verbose, category passes filter and is not suppressed."""
1358 
1359  # There are three ways we might decide not to print an error message:
1360  # a "NOLINT(category)" comment appears in the source,
1361  # the verbosity level isn't high enough, or the filters filter it out.
1362  if IsErrorSuppressedByNolint(category, linenum):
1363  return False
1364 
1365  if confidence < _cpplint_state.verbose_level:
1366  return False
1367 
1368  is_filtered = False
1369  for one_filter in _Filters():
1370  if one_filter.startswith('-'):
1371  if category.startswith(one_filter[1:]):
1372  is_filtered = True
1373  elif one_filter.startswith('+'):
1374  if category.startswith(one_filter[1:]):
1375  is_filtered = False
1376  else:
1377  assert False # should have been checked for in SetFilter.
1378  if is_filtered:
1379  return False
1380 
1381  return True
1382 
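# Example (illustrative): filters are applied left to right, so with
# --filter=-whitespace,+whitespace/braces the category 'whitespace/tab' is
# filtered out, while 'whitespace/braces' is first filtered by '-whitespace'
# and then re-enabled by '+whitespace/braces', so it is still printed.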
1383 
1384 def Error(filename, linenum, category, confidence, message):
1385  """Logs the fact we've found a lint error.
1386 
1387  We log where the error was found, and also our confidence in the error,
1388  that is, how certain we are this is a legitimate style regression, and
1389  not a misidentification or a use that's sometimes justified.
1390 
1391  False positives can be suppressed by the use of
1392  "cpplint(category)" comments on the offending line. These are
1393  parsed into _error_suppressions.
1394 
1395  Args:
1396  filename: The name of the file containing the error.
1397  linenum: The number of the line containing the error.
1398  category: A string used to describe the "category" this bug
1399  falls under: "whitespace", say, or "runtime". Categories
1400  may have a hierarchy separated by slashes: "whitespace/indent".
1401  confidence: A number from 1-5 representing a confidence score for
1402  the error, with 5 meaning that we are certain of the problem,
1403  and 1 meaning that it could be a legitimate construct.
1404  message: The error message.
1405  """
1406  if _ShouldPrintError(category, confidence, linenum):
1407  _cpplint_state.IncrementErrorCount(category)
1408  if _cpplint_state.output_format == 'vs7':
1409  _cpplint_state.PrintError('%s(%s): warning: %s [%s] [%d]\n' % (
1410  filename, linenum, message, category, confidence))
1411  elif _cpplint_state.output_format == 'eclipse':
1412  sys.stderr.write('%s:%s: warning: %s [%s] [%d]\n' % (
1413  filename, linenum, message, category, confidence))
1414  elif _cpplint_state.output_format == 'junit':
1415  _cpplint_state.AddJUnitFailure(filename, linenum, message, category,
1416  confidence)
1417  else:
1418  final_message = '%s:%s: %s [%s] [%d]\n' % (
1419  filename, linenum, message, category, confidence)
1420  sys.stderr.write(final_message)
1421 
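# Example (illustrative): with the default 'emacs' output format, an error is
# written as '%s:%s: %s [%s] [%d]', e.g. something like
#   some/file.cc:42:  Missing space before {  [whitespace/braces] [5]
# while 'vs7' uses the '%s(%s): warning: ...' form and 'junit' collects the
# failure for the XML report instead of printing it immediately.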
1422 # Matches standard C++ escape sequences per 2.13.2.3 of the C++ standard.
1423 _RE_PATTERN_CLEANSE_LINE_ESCAPES = re.compile(
1424  r'\\([abfnrtv?"\\\']|\d+|x[0-9a-fA-F]+)')
1425 # Match a single C style comment on the same line.
1426 _RE_PATTERN_C_COMMENTS = r'/\*(?:[^*]|\*(?!/))*\*/'
1427 # Matches multi-line C style comments.
1428 # This RE is a little bit more complicated than one might expect, because we
1429 # have to take care of spacing around removed comments so we can handle
1430 # comments inside statements better.
1431 # The current rule is: We only clear spaces from both sides when we're at the
1432 # end of the line. Otherwise, we try to remove spaces from the right side;
1433 # if this doesn't work, we try the left side, but only if there's a non-word
1434 # character on the right.
1435 _RE_PATTERN_CLEANSE_LINE_C_COMMENTS = re.compile(
1436  r'(\s*' + _RE_PATTERN_C_COMMENTS + r'\s*$|' +
1437  _RE_PATTERN_C_COMMENTS + r'\s+|' +
1438  r'\s+' + _RE_PATTERN_C_COMMENTS + r'(?=\W)|' +
1439  _RE_PATTERN_C_COMMENTS + r')')
1440 
1441 
1442 def IsCppString(line):
1443  """Does the line terminate such that the next symbol is inside a string constant?
1444 
1445  This function does not consider single-line nor multi-line comments.
1446 
1447  Args:
1448  line: is a partial line of code starting from the 0..n.
1449 
1450  Returns:
1451  True, if next character appended to 'line' is inside a
1452  string constant.
1453  """
1454 
1455  line = line.replace(r'\\', 'XX') # after this, \\" does not match to \"
1456  return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1
1457 
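# Example (illustrative):
#   IsCppString('int a = "foo')   -> True   (an unterminated string literal)
#   IsCppString('int a = "foo"')  -> False
# i.e. an odd number of unescaped double quotes means the next character
# belongs to a string constant.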
1458 
1459 def CleanseRawStrings(raw_lines):
1460  """Removes C++11 raw strings from lines.
1461 
1462  Before:
1463  static const char kData[] = R"(
1464  multi-line string
1465  )";
1466 
1467  After:
1468  static const char kData[] = ""
1469  (replaced by blank line)
1470  "";
1471 
1472  Args:
1473  raw_lines: list of raw lines.
1474 
1475  Returns:
1476  list of lines with C++11 raw strings replaced by empty strings.
1477  """
1478 
1479  delimiter = None
1480  lines_without_raw_strings = []
1481  for line in raw_lines:
1482  if delimiter:
1483  # Inside a raw string, look for the end
1484  end = line.find(delimiter)
1485  if end >= 0:
1486  # Found the end of the string, match leading space for this
1487  # line and resume copying the original lines, and also insert
1488  # a "" on the last line.
1489  leading_space = Match(r'^(\s*)\S', line)
1490  line = leading_space.group(1) + '""' + line[end + len(delimiter):]
1491  delimiter = None
1492  else:
1493  # Haven't found the end yet, append a blank line.
1494  line = '""'
1495 
1496  # Look for beginning of a raw string, and replace them with
1497  # empty strings. This is done in a loop to handle multiple raw
1498  # strings on the same line.
1499  while delimiter is None:
1500  # Look for beginning of a raw string.
1501  # See 2.14.15 [lex.string] for syntax.
1502  #
1503  # Once we have matched a raw string, we check the prefix of the
1504  # line to make sure that the line is not part of a single line
1505  # comment. It's done this way because we remove raw strings
1506  # before removing comments as opposed to removing comments
1507  # before removing raw strings. This is because there are some
1508  # cpplint checks that requires the comments to be preserved, but
1509  # we don't want to check comments that are inside raw strings.
1510  matched = Match(r'^(.*?)\b(?:R|u8R|uR|UR|LR)"([^\s\\()]*)\((.*)$', line)
1511  if (matched and
1512  not Match(r'^([^\'"]|\'(\\.|[^\'])*\'|"(\\.|[^"])*")*//',
1513  matched.group(1))):
1514  delimiter = ')' + matched.group(2) + '"'
1515 
1516  end = matched.group(3).find(delimiter)
1517  if end >= 0:
1518  # Raw string ended on same line
1519  line = (matched.group(1) + '""' +
1520  matched.group(3)[end + len(delimiter):])
1521  delimiter = None
1522  else:
1523  # Start of a multi-line raw string
1524  line = matched.group(1) + '""'
1525  else:
1526  break
1527 
1528  lines_without_raw_strings.append(line)
1529 
1530  # TODO(unknown): if delimiter is not None here, we might want to
1531  # emit a warning for unterminated string.
1532  return lines_without_raw_strings
1533 
1534 
1535 def FindNextMultiLineCommentStart(lines, lineix):
1536  """Find the beginning marker for a multiline comment."""
1537  while lineix < len(lines):
1538  if lines[lineix].strip().startswith('/*'):
1539  # Only return this marker if the comment goes beyond this line
1540  if lines[lineix].strip().find('*/', 2) < 0:
1541  return lineix
1542  lineix += 1
1543  return len(lines)
1544 
1545 
1546 def FindNextMultiLineCommentEnd(lines, lineix):
1547  """We are inside a comment, find the end marker."""
1548  while lineix < len(lines):
1549  if lines[lineix].strip().endswith('*/'):
1550  return lineix
1551  lineix += 1
1552  return len(lines)
1553 
1554 
1555 def RemoveMultiLineCommentsFromRange(lines, begin, end):
1556  """Clears a range of lines for multi-line comments."""
1557  # Having /**/ dummy comments makes the lines non-empty, so we will not get
1558  # unnecessary blank line warnings later in the code.
1559  for i in range(begin, end):
1560  lines[i] = '/**/'
1561 
1562 
1563 def RemoveMultiLineComments(filename, lines, error):
1564  """Removes multiline (c-style) comments from lines."""
1565  lineix = 0
1566  while lineix < len(lines):
1567  lineix_begin = FindNextMultiLineCommentStart(lines, lineix)
1568  if lineix_begin >= len(lines):
1569  return
1570  lineix_end = FindNextMultiLineCommentEnd(lines, lineix_begin)
1571  if lineix_end >= len(lines):
1572  error(filename, lineix_begin + 1, 'readability/multiline_comment', 5,
1573  'Could not find end of multi-line comment')
1574  return
1575  RemoveMultiLineCommentsFromRange(lines, lineix_begin, lineix_end + 1)
1576  lineix = lineix_end + 1
1577 
1578 
1579 def CleanseComments(line):
1580  """Removes //-comments and single-line C-style /* */ comments.
1581 
1582  Args:
1583  line: A line of C++ source.
1584 
1585  Returns:
1586  The line with single-line comments removed.
1587  """
1588  commentpos = line.find('//')
1589  if commentpos != -1 and not IsCppString(line[:commentpos]):
1590  line = line[:commentpos].rstrip()
1591  # get rid of /* ... */
1592  return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line)
1593 
1594 
1595 class CleansedLines(object):
1596  """Holds 4 copies of all lines with different preprocessing applied to them.
1597 
1598  1) elided member contains lines without strings and comments.
1599  2) lines member contains lines without comments.
1600  3) raw_lines member contains all the lines without processing.
1601  4) lines_without_raw_strings member is same as raw_lines, but with C++11 raw
1602  strings removed.
1603  All these members are of <type 'list'>, and of the same length.
1604  """
1605 
1606  def __init__(self, lines):
1607  self.elided = []
1608  self.lines = []
1609  self.raw_lines = lines
1610  self.num_lines = len(lines)
1611  self.lines_without_raw_strings = CleanseRawStrings(lines)
1612  for linenum in range(len(self.lines_without_raw_strings)):
1613  self.lines.append(CleanseComments(
1614  self.lines_without_raw_strings[linenum]))
1615  elided = self._CollapseStrings(self.lines_without_raw_strings[linenum])
1616  self.elided.append(CleanseComments(elided))
1617 
1618  def NumLines(self):
1619  """Returns the number of lines represented."""
1620  return self.num_lines
1621 
1622  @staticmethod
1623  def _CollapseStrings(elided):
1624  """Collapses strings and chars on a line to simple "" or '' blocks.
1625 
1626  We nix strings first so we're not fooled by text like '"http://"'
1627 
1628  Args:
1629  elided: The line being processed.
1630 
1631  Returns:
1632  The line with collapsed strings.
1633  """
1634  if _RE_PATTERN_INCLUDE.match(elided):
1635  return elided
1636 
1637  # Remove escaped characters first to make quote/single quote collapsing
1638  # basic. Things that look like escaped characters shouldn't occur
1639  # outside of strings and chars.
1640  elided = _RE_PATTERN_CLEANSE_LINE_ESCAPES.sub('', elided)
1641 
1642  # Replace quoted strings and digit separators. Both single quotes
1643  # and double quotes are processed in the same loop, otherwise
1644  # nested quotes wouldn't work.
1645  collapsed = ''
1646  while True:
1647  # Find the first quote character
1648  match = Match(r'^([^\'"]*)([\'"])(.*)$', elided)
1649  if not match:
1650  collapsed += elided
1651  break
1652  head, quote, tail = match.groups()
1653 
1654  if quote == '"':
1655  # Collapse double quoted strings
1656  second_quote = tail.find('"')
1657  if second_quote >= 0:
1658  collapsed += head + '""'
1659  elided = tail[second_quote + 1:]
1660  else:
1661  # Unmatched double quote, don't bother processing the rest
1662  # of the line since this is probably a multiline string.
1663  collapsed += elided
1664  break
1665  else:
1666  # Found single quote, check nearby text to eliminate digit separators.
1667  #
1668  # There is no special handling for floating point here, because
1669  # the integer/fractional/exponent parts would all be parsed
1670  # correctly as long as there are digits on both sides of the
1671  # separator. So we are fine as long as we don't see something
1672  # like "0.'3" (gcc 4.9.0 will not allow this literal).
1673  if Search(r'\b(?:0[bBxX]?|[1-9])[0-9a-fA-F]*$', head):
1674  match_literal = Match(r'^((?:\'?[0-9a-zA-Z_])*)(.*)$', "'" + tail)
1675  collapsed += head + match_literal.group(1).replace("'", '')
1676  elided = match_literal.group(2)
1677  else:
1678  second_quote = tail.find('\'')
1679  if second_quote >= 0:
1680  collapsed += head + "''"
1681  elided = tail[second_quote + 1:]
1682  else:
1683  # Unmatched single quote
1684  collapsed += elided
1685  break
1686 
1687  return collapsed
1688 
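# A minimal illustration (sketch only, hypothetical helper): _CollapseStrings
# empties string and character literals and strips digit separators, so later
# regex-based checks are not fooled by quoted text.
def _demo_collapse_strings():
  collapse = CleansedLines._CollapseStrings
  a = collapse('printf("hello %s", name);')  # -> 'printf("", name);'
  b = collapse("if (c == 'a') return;")      # -> "if (c == '') return;"
  c = collapse("x = 1'000'000;")             # -> 'x = 1000000;'
  return a, b, c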
1689 
1690 def FindEndOfExpressionInLine(line, startpos, stack):
1691  """Find the position just after the end of current parenthesized expression.
1692 
1693  Args:
1694  line: a CleansedLines line.
1695  startpos: start searching at this position.
1696  stack: nesting stack at startpos.
1697 
1698  Returns:
1699  On finding matching end: (index just after matching end, None)
1700  On finding an unclosed expression: (-1, None)
1701  Otherwise: (-1, new stack at end of this line)
1702  """
1703  for i in xrange(startpos, len(line)):
1704  char = line[i]
1705  if char in '([{':
1706  # Found start of parenthesized expression, push to expression stack
1707  stack.append(char)
1708  elif char == '<':
1709  # Found potential start of template argument list
1710  if i > 0 and line[i - 1] == '<':
1711  # Left shift operator
1712  if stack and stack[-1] == '<':
1713  stack.pop()
1714  if not stack:
1715  return (-1, None)
1716  elif i > 0 and Search(r'\boperator\s*$', line[0:i]):
1717  # operator<, don't add to stack
1718  continue
1719  else:
1720  # Tentative start of template argument list
1721  stack.append('<')
1722  elif char in ')]}':
1723  # Found end of parenthesized expression.
1724  #
1725  # If we are currently expecting a matching '>', the pending '<'
1726  # must have been an operator. Remove them from expression stack.
1727  while stack and stack[-1] == '<':
1728  stack.pop()
1729  if not stack:
1730  return (-1, None)
1731  if ((stack[-1] == '(' and char == ')') or
1732  (stack[-1] == '[' and char == ']') or
1733  (stack[-1] == '{' and char == '}')):
1734  stack.pop()
1735  if not stack:
1736  return (i + 1, None)
1737  else:
1738  # Mismatched parentheses
1739  return (-1, None)
1740  elif char == '>':
1741  # Found potential end of template argument list.
1742 
1743  # Ignore "->" and operator functions
1744  if (i > 0 and
1745  (line[i - 1] == '-' or Search(r'\boperator\s*$', line[0:i - 1]))):
1746  continue
1747 
1748  # Pop the stack if there is a matching '<'. Otherwise, ignore
1749  # this '>' since it must be an operator.
1750  if stack:
1751  if stack[-1] == '<':
1752  stack.pop()
1753  if not stack:
1754  return (i + 1, None)
1755  elif char == ';':
1756  # Found something that looks like the end of a statement. If we are
1757  # currently expecting a '>', the matching '<' must have been an operator,
1758  # since a template argument list should not contain statements.
1759  while stack and stack[-1] == '<':
1760  stack.pop()
1761  if not stack:
1762  return (-1, None)
1763 
1764  # Did not find end of expression or unbalanced parentheses on this line
1765  return (-1, stack)
1766 
1767 
1768 def CloseExpression(clean_lines, linenum, pos):
1769  """If input points to ( or { or [ or <, finds the position that closes it.
1770 
1771  If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the
1772  linenum/pos that correspond to the closing of the expression.
1773 
1774  TODO(unknown): cpplint spends a fair bit of time matching parentheses.
1775  Ideally we would want to index all opening and closing parentheses once
1776  and have CloseExpression be just a simple lookup, but due to preprocessor
1777  tricks, this is not so easy.
1778 
1779  Args:
1780  clean_lines: A CleansedLines instance containing the file.
1781  linenum: The number of the line to check.
1782  pos: A position on the line.
1783 
1784  Returns:
1785  A tuple (line, linenum, pos) pointer *past* the closing brace, or
1786  (line, len(lines), -1) if we never find a close. Note we ignore
1787  strings and comments when matching; and the line we return is the
1788  'cleansed' line at linenum.
1789  """
1790 
1791  line = clean_lines.elided[linenum]
1792  if (line[pos] not in '({[<') or Match(r'<[<=]', line[pos:]):
1793  return (line, clean_lines.NumLines(), -1)
1794 
1795  # Check first line
1796  (end_pos, stack) = FindEndOfExpressionInLine(line, pos, [])
1797  if end_pos > -1:
1798  return (line, linenum, end_pos)
1799 
1800  # Continue scanning forward
1801  while stack and linenum < clean_lines.NumLines() - 1:
1802  linenum += 1
1803  line = clean_lines.elided[linenum]
1804  (end_pos, stack) = FindEndOfExpressionInLine(line, 0, stack)
1805  if end_pos > -1:
1806  return (line, linenum, end_pos)
1807 
1808  # Did not find end of expression before end of file, give up
1809  return (line, clean_lines.NumLines(), -1)
1810 
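# A minimal illustration (sketch only, hypothetical helper): on a single line,
# FindEndOfExpressionInLine returns the index just past the token that balances
# the one at startpos; CloseExpression extends the same search across lines.
def _demo_find_end_of_expression():
  end_pos, stack = FindEndOfExpressionInLine('foo(bar(1, 2), baz);', 3, [])
  return end_pos, stack  # -> (19, None): index just past the ')' closing 'foo('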
1811 
1812 def FindStartOfExpressionInLine(line, endpos, stack):
1813  """Find position at the matching start of current expression.
1814 
1815  This is almost the reverse of FindEndOfExpressionInLine, but note
1816  that the input position and returned position differ by 1.
1817 
1818  Args:
1819  line: a CleansedLines line.
1820  endpos: start searching at this position.
1821  stack: nesting stack at endpos.
1822 
1823  Returns:
1824  On finding matching start: (index at matching start, None)
1825  On finding an unclosed expression: (-1, None)
1826  Otherwise: (-1, new stack at beginning of this line)
1827  """
1828  i = endpos
1829  while i >= 0:
1830  char = line[i]
1831  if char in ')]}':
1832  # Found end of expression, push to expression stack
1833  stack.append(char)
1834  elif char == '>':
1835  # Found potential end of template argument list.
1836  #
1837  # Ignore it if it's a "->" or ">=" or "operator>"
1838  if (i > 0 and
1839  (line[i - 1] == '-' or
1840  Match(r'\s>=\s', line[i - 1:]) or
1841  Search(r'\boperator\s*$', line[0:i]))):
1842  i -= 1
1843  else:
1844  stack.append('>')
1845  elif char == '<':
1846  # Found potential start of template argument list
1847  if i > 0 and line[i - 1] == '<':
1848  # Left shift operator
1849  i -= 1
1850  else:
1851  # If there is a matching '>', we can pop the expression stack.
1852  # Otherwise, ignore this '<' since it must be an operator.
1853  if stack and stack[-1] == '>':
1854  stack.pop()
1855  if not stack:
1856  return (i, None)
1857  elif char in '([{':
1858  # Found start of expression.
1859  #
1860  # If there are any unmatched '>' on the stack, they must be
1861  # operators. Remove those.
1862  while stack and stack[-1] == '>':
1863  stack.pop()
1864  if not stack:
1865  return (-1, None)
1866  if ((char == '(' and stack[-1] == ')') or
1867  (char == '[' and stack[-1] == ']') or
1868  (char == '{' and stack[-1] == '}')):
1869  stack.pop()
1870  if not stack:
1871  return (i, None)
1872  else:
1873  # Mismatched parentheses
1874  return (-1, None)
1875  elif char == ';':
1876  # Found something that looks like the end of a statement. If we are
1877  # currently expecting a '<', the matching '>' must have been an operator,
1878  # since a template argument list should not contain statements.
1879  while stack and stack[-1] == '>':
1880  stack.pop()
1881  if not stack:
1882  return (-1, None)
1883 
1884  i -= 1
1885 
1886  return (-1, stack)
1887 
1888 
1889 def ReverseCloseExpression(clean_lines, linenum, pos):
1890  """If input points to ) or } or ] or >, finds the position that opens it.
1891 
1892  If lines[linenum][pos] points to a ')' or '}' or ']' or '>', finds the
1893  linenum/pos that correspond to the opening of the expression.
1894 
1895  Args:
1896  clean_lines: A CleansedLines instance containing the file.
1897  linenum: The number of the line to check.
1898  pos: A position on the line.
1899 
1900  Returns:
1901  A tuple (line, linenum, pos) pointer *at* the opening brace, or
1902  (line, 0, -1) if we never find the matching opening brace. Note
1903  we ignore strings and comments when matching; and the line we
1904  return is the 'cleansed' line at linenum.
1905  """
1906  line = clean_lines.elided[linenum]
1907  if line[pos] not in ')}]>':
1908  return (line, 0, -1)
1909 
1910  # Check last line
1911  (start_pos, stack) = FindStartOfExpressionInLine(line, pos, [])
1912  if start_pos > -1:
1913  return (line, linenum, start_pos)
1914 
1915  # Continue scanning backward
1916  while stack and linenum > 0:
1917  linenum -= 1
1918  line = clean_lines.elided[linenum]
1919  (start_pos, stack) = FindStartOfExpressionInLine(line, len(line) - 1, stack)
1920  if start_pos > -1:
1921  return (line, linenum, start_pos)
1922 
1923  # Did not find start of expression before beginning of file, give up
1924  return (line, 0, -1)
1925 
1926 
1927 def CheckForCopyright(filename, lines, error):
1928  """Logs an error if no Copyright message appears at the top of the file."""
1929 
1930  # We'll say it should occur by line 10. Don't forget there's a
1931  # dummy line at the front.
1932  for line in range(1, min(len(lines), 11)):
1933  if re.search(r'Copyright', lines[line], re.I): break
1934  else: # means no copyright line was found
1935  error(filename, 0, 'legal/copyright', 5,
1936  'No copyright message found. '
1937  'You should have a line: "Copyright [year] <Copyright Owner>"')
1938 
1939 
1940 def GetIndentLevel(line):
1941  """Return the number of leading spaces in line.
1942 
1943  Args:
1944  line: A string to check.
1945 
1946  Returns:
1947  An integer count of leading spaces, possibly zero.
1948  """
1949  indent = Match(r'^( *)\S', line)
1950  if indent:
1951  return len(indent.group(1))
1952  else:
1953  return 0
1954 
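# A minimal illustration (sketch only, hypothetical helper): only leading
# spaces are counted, and a blank line reports an indent level of zero.
def _demo_get_indent_level():
  return GetIndentLevel('    int x;'), GetIndentLevel('')  # -> (4, 0)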
1955 
1956 def GetHeaderGuardCPPVariable(filename):
1957  """Returns the CPP variable that should be used as a header guard.
1958 
1959  Args:
1960  filename: The name of a C++ header file.
1961 
1962  Returns:
1963  The CPP variable that should be used as a header guard in the
1964  named file.
1965 
1966  """
1967 
1968  # Restore the original filename in case cpplint is invoked from Emacs's
1969  # flymake.
1970  filename = re.sub(r'_flymake\.h$', '.h', filename)
1971  filename = re.sub(r'/\.flymake/([^/]*)$', r'/\1', filename)
1972  # Replace 'c++' with 'cpp'.
1973  filename = filename.replace('C++', 'cpp').replace('c++', 'cpp')
1974 
1975  fileinfo = FileInfo(filename)
1976  file_path_from_root = fileinfo.RepositoryName()
1977  if _root:
1978  suffix = os.sep
1979  # On Windows, using the directory separator will leave us with a
1980  # "bogus escape" error unless we properly escape it in the regex.
1981  if suffix == '\\':
1982  suffix += '\\'
1983  file_path_from_root = re.sub('^' + _root + suffix, '', file_path_from_root)
1984  return re.sub(r'[^a-zA-Z0-9]', '_', file_path_from_root).upper() + '_'
1985 
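# A minimal illustration (sketch only): for a header whose repository-relative
# path is src/foo/bar-baz.h and no --root option, every non-alphanumeric
# character is replaced by '_', the result is upper-cased, and a trailing '_'
# is appended:
#
#   GetHeaderGuardCPPVariable('src/foo/bar-baz.h')  # -> 'SRC_FOO_BAR_BAZ_H_'
#
# The exact prefix depends on what FileInfo.RepositoryName() detects as the
# repository root for the local checkout, so treat the value above as a sketch.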
1986 
1987 def CheckForHeaderGuard(filename, clean_lines, error):
1988  """Checks that the file contains a header guard.
1989 
1990  Logs an error if no #ifndef header guard is present. For other
1991  headers, checks that the full pathname is used.
1992 
1993  Args:
1994  filename: The name of the C++ header file.
1995  clean_lines: A CleansedLines instance containing the file.
1996  error: The function to call with any errors found.
1997  """
1998 
1999  # Don't check for header guards if there are error suppression
2000  # comments somewhere in this file.
2001  #
2002  # Because this is silencing a warning for a nonexistent line, we
2003  # only support the very specific NOLINT(build/header_guard) syntax,
2004  # and not the general NOLINT or NOLINT(*) syntax.
2005  raw_lines = clean_lines.lines_without_raw_strings
2006  for i in raw_lines:
2007  if Search(r'//\s*NOLINT\(build/header_guard\)', i):
2008  return
2009 
2010  # Allow pragma once instead of header guards
2011  for i in raw_lines:
2012  if Search(r'^\s*#pragma\s+once', i):
2013  return
2014 
2015  cppvar = GetHeaderGuardCPPVariable(filename)
2016 
2017  ifndef = ''
2018  ifndef_linenum = 0
2019  define = ''
2020  endif = ''
2021  endif_linenum = 0
2022  for linenum, line in enumerate(raw_lines):
2023  linesplit = line.split()
2024  if len(linesplit) >= 2:
2025  # find the first occurrence of #ifndef and #define, save arg
2026  if not ifndef and linesplit[0] == '#ifndef':
2027  # set ifndef to the header guard presented on the #ifndef line.
2028  ifndef = linesplit[1]
2029  ifndef_linenum = linenum
2030  if not define and linesplit[0] == '#define':
2031  define = linesplit[1]
2032  # find the last occurrence of #endif, save entire line
2033  if line.startswith('#endif'):
2034  endif = line
2035  endif_linenum = linenum
2036 
2037  if not ifndef or not define or ifndef != define:
2038  error(filename, 0, 'build/header_guard', 5,
2039  'No #ifndef header guard found, suggested CPP variable is: %s' %
2040  cppvar)
2041  return
2042 
2043  # The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__
2044  # for backward compatibility.
2045  if ifndef != cppvar:
2046  error_level = 0
2047  if ifndef != cppvar + '_':
2048  error_level = 5
2049 
2050  ParseNolintSuppressions(filename, raw_lines[ifndef_linenum], ifndef_linenum,
2051  error)
2052  error(filename, ifndef_linenum, 'build/header_guard', error_level,
2053  '#ifndef header guard has wrong style, please use: %s' % cppvar)
2054 
2055  # Check for "//" comments on endif line.
2056  ParseNolintSuppressions(filename, raw_lines[endif_linenum], endif_linenum,
2057  error)
2058  match = Match(r'#endif\s*//\s*' + cppvar + r'(_)?\b', endif)
2059  if match:
2060  if match.group(1) == '_':
2061  # Issue low severity warning for deprecated double trailing underscore
2062  error(filename, endif_linenum, 'build/header_guard', 0,
2063  '#endif line should be "#endif // %s"' % cppvar)
2064  return
2065 
2066  # Didn't find the corresponding "//" comment. If this file does not
2067  # contain any "//" comments at all, it could be that the compiler
2068  # only wants "/**/" comments, so look for those instead.
2069  no_single_line_comments = True
2070  for i in xrange(1, len(raw_lines) - 1):
2071  line = raw_lines[i]
2072  if Match(r'^(?:(?:\'(?:\.|[^\'])*\')|(?:"(?:\.|[^"])*")|[^\'"])*//', line):
2073  no_single_line_comments = False
2074  break
2075 
2076  if no_single_line_comments:
2077  match = Match(r'#endif\s*/\*\s*' + cppvar + r'(_)?\s*\*/', endif)
2078  if match:
2079  if match.group(1) == '_':
2080  # Low severity warning for double trailing underscore
2081  error(filename, endif_linenum, 'build/header_guard', 0,
2082  '#endif line should be "#endif /* %s */"' % cppvar)
2083  return
2084 
2085  # Didn't find anything
2086  error(filename, endif_linenum, 'build/header_guard', 5,
2087  '#endif line should be "#endif // %s"' % cppvar)
2088 
2089 
2090 def CheckHeaderFileIncluded(filename, include_state, error):
2091  """Logs an error if a source file does not include its header."""
2092 
2093  # Do not check test files
2094  fileinfo = FileInfo(filename)
2095  if Search(_TEST_FILE_SUFFIX, fileinfo.BaseName()):
2096  return
2097 
2098  for ext in GetHeaderExtensions():
2099  basefilename = filename[0:len(filename) - len(fileinfo.Extension())]
2100  headerfile = basefilename + '.' + ext
2101  if not os.path.exists(headerfile):
2102  continue
2103  headername = FileInfo(headerfile).RepositoryName()
2104  first_include = None
2105  for section_list in include_state.include_list:
2106  for f in section_list:
2107  if headername in f[0] or f[0] in headername:
2108  return
2109  if not first_include:
2110  first_include = f[1]
2111 
2112  error(filename, first_include, 'build/include', 5,
2113  '%s should include its header file %s' % (fileinfo.RepositoryName(),
2114  headername))
2115 
2116 
2117 def CheckForBadCharacters(filename, lines, error):
2118  """Logs an error for each line containing bad characters.
2119 
2120  Two kinds of bad characters:
2121 
2122  1. Unicode replacement characters: These indicate that either the file
2123  contained invalid UTF-8 (likely) or Unicode replacement characters (which
2124  it shouldn't). Note that it's possible for this to throw off line
2125  numbering if the invalid UTF-8 occurred adjacent to a newline.
2126 
2127  2. NUL bytes. These are problematic for some tools.
2128 
2129  Args:
2130  filename: The name of the current file.
2131  lines: An array of strings, each representing a line of the file.
2132  error: The function to call with any errors found.
2133  """
2134  for linenum, line in enumerate(lines):
2135  if unicode_escape_decode('\ufffd') in line:
2136  error(filename, linenum, 'readability/utf8', 5,
2137  'Line contains invalid UTF-8 (or Unicode replacement character).')
2138  if '\0' in line:
2139  error(filename, linenum, 'readability/nul', 5, 'Line contains NUL byte.')
2140 
2141 
2142 def CheckForNewlineAtEOF(filename, lines, error):
2143  """Logs an error if there is no newline char at the end of the file.
2144 
2145  Args:
2146  filename: The name of the current file.
2147  lines: An array of strings, each representing a line of the file.
2148  error: The function to call with any errors found.
2149  """
2150 
2151  # The array lines() was created by adding two newlines to the
2152  # original file (go figure), then splitting on \n.
2153  # To verify that the file ends in \n, we just have to make sure the
2154  # last-but-two element of lines() exists and is empty.
2155  if len(lines) < 3 or lines[-2]:
2156  error(filename, len(lines) - 2, 'whitespace/ending_newline', 5,
2157  'Could not find a newline character at the end of the file.')
2158 
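# A minimal illustration (sketch only) of why lines[-2] is the element to test:
# splitting file data that ends in '\n' leaves a trailing empty string, and the
# usual cpplint processing appends one more marker line after it, so a properly
# terminated file always has an empty entry just before the end.
#
#   'int x;\n'.split('\n')   # -> ['int x;', '']   (empty piece => final newline)
#   'int x;'.split('\n')     # -> ['int x;']       (no empty piece => missing newline)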
2159 
2160 def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error):
2161  """Logs an error if we see /* ... */ or "..." that extend past one line.
2162 
2163  /* ... */ comments are legit inside macros, for one line.
2164  Otherwise, we prefer // comments, so it's ok to warn about the
2165  other. Likewise, it's ok for strings to extend across multiple
2166  lines, as long as a line continuation character (backslash)
2167  terminates each line. Although not currently prohibited by the C++
2168  style guide, it's ugly and unnecessary. We don't do well with either
2169  in this lint program, so we warn about both.
2170 
2171  Args:
2172  filename: The name of the current file.
2173  clean_lines: A CleansedLines instance containing the file.
2174  linenum: The number of the line to check.
2175  error: The function to call with any errors found.
2176  """
2177  line = clean_lines.elided[linenum]
2178 
2179  # Remove all \\ (escaped backslashes) from the line. They are OK, and the
2180  # second (escaped) slash may trigger later \" detection erroneously.
2181  line = line.replace('\\\\', '')
2182 
2183  if line.count('/*') > line.count('*/'):
2184  error(filename, linenum, 'readability/multiline_comment', 5,
2185  'Complex multi-line /*...*/-style comment found. '
2186  'Lint may give bogus warnings. '
2187  'Consider replacing these with //-style comments, '
2188  'with #if 0...#endif, '
2189  'or with more clearly structured multi-line comments.')
2190 
2191  if (line.count('"') - line.count('\\"')) % 2:
2192  error(filename, linenum, 'readability/multiline_string', 5,
2193  'Multi-line string ("...") found. This lint script doesn\'t '
2194  'do well with such strings, and may give bogus warnings. '
2195  'Use C++11 raw strings or concatenation instead.')
2196 
2197 
2198 # (non-threadsafe name, thread-safe alternative, validation pattern)
2199 #
2200 # The validation pattern is used to eliminate false positives such as:
2201 # _rand(); // false positive due to substring match.
2202 # ->rand(); // some member function rand().
2203 # ACMRandom rand(seed); // some variable named rand.
2204 # ISAACRandom rand(); // another variable named rand.
2205 #
2206 # Basically we require the return value of these functions to be used
2207 # in some expression context on the same line by matching on some
2208 # operator before the function name. This eliminates constructors and
2209 # member function calls.
2210 _UNSAFE_FUNC_PREFIX = r'(?:[-+*/=%^&|(<]\s*|>\s+)'
2211 _THREADING_LIST = (
2212  ('asctime(', 'asctime_r(', _UNSAFE_FUNC_PREFIX + r'asctime\([^)]+\)'),
2213  ('ctime(', 'ctime_r(', _UNSAFE_FUNC_PREFIX + r'ctime\([^)]+\)'),
2214  ('getgrgid(', 'getgrgid_r(', _UNSAFE_FUNC_PREFIX + r'getgrgid\([^)]+\)'),
2215  ('getgrnam(', 'getgrnam_r(', _UNSAFE_FUNC_PREFIX + r'getgrnam\([^)]+\)'),
2216  ('getlogin(', 'getlogin_r(', _UNSAFE_FUNC_PREFIX + r'getlogin\(\)'),
2217  ('getpwnam(', 'getpwnam_r(', _UNSAFE_FUNC_PREFIX + r'getpwnam\([^)]+\)'),
2218  ('getpwuid(', 'getpwuid_r(', _UNSAFE_FUNC_PREFIX + r'getpwuid\([^)]+\)'),
2219  ('gmtime(', 'gmtime_r(', _UNSAFE_FUNC_PREFIX + r'gmtime\([^)]+\)'),
2220  ('localtime(', 'localtime_r(', _UNSAFE_FUNC_PREFIX + r'localtime\([^)]+\)'),
2221  ('rand(', 'rand_r(', _UNSAFE_FUNC_PREFIX + r'rand\(\)'),
2222  ('strtok(', 'strtok_r(',
2223  _UNSAFE_FUNC_PREFIX + r'strtok\([^)]+\)'),
2224  ('ttyname(', 'ttyname_r(', _UNSAFE_FUNC_PREFIX + r'ttyname\([^)]+\)'),
2225  )
2226 
2227 
2228 def CheckPosixThreading(filename, clean_lines, linenum, error):
2229  """Checks for calls to thread-unsafe functions.
2230 
2231  Much code was originally written without consideration of
2232  multi-threading. Also, many engineers learned POSIX before the threading
2233  extensions were added and still rely on that experience. These
2234  checks guide engineers toward thread-safe functions (when using
2235  POSIX directly).
2236 
2237  Args:
2238  filename: The name of the current file.
2239  clean_lines: A CleansedLines instance containing the file.
2240  linenum: The number of the line to check.
2241  error: The function to call with any errors found.
2242  """
2243  line = clean_lines.elided[linenum]
2244  for single_thread_func, multithread_safe_func, pattern in _THREADING_LIST:
2245  # Additional pattern matching check to confirm that this is the
2246  # function we are looking for
2247  if Search(pattern, line):
2248  error(filename, linenum, 'runtime/threadsafe_fn', 2,
2249  'Consider using ' + multithread_safe_func +
2250  '...) instead of ' + single_thread_func +
2251  '...) for improved thread safety.')
2252 
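# A minimal illustration (sketch only, hypothetical helper): the validation
# pattern only fires when the unsafe function's return value is used in an
# expression, which is how member calls and declarations are skipped.
def _demo_threadsafe_pattern():
  pattern = _UNSAFE_FUNC_PREFIX + r'rand\(\)'
  flagged = bool(Search(pattern, 'seed = rand();'))  # True  -> warning issued
  skipped = bool(Search(pattern, 'rng->rand();'))    # False -> member call, ignored
  return flagged, skipped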
2253 
2254 def CheckVlogArguments(filename, clean_lines, linenum, error):
2255  """Checks that VLOG() is only used for defining a logging level.
2256 
2257  For example, VLOG(2) is correct. VLOG(INFO), VLOG(WARNING), VLOG(ERROR), and
2258  VLOG(FATAL) are not.
2259 
2260  Args:
2261  filename: The name of the current file.
2262  clean_lines: A CleansedLines instance containing the file.
2263  linenum: The number of the line to check.
2264  error: The function to call with any errors found.
2265  """
2266  line = clean_lines.elided[linenum]
2267  if Search(r'\bVLOG\((INFO|ERROR|WARNING|DFATAL|FATAL)\)', line):
2268  error(filename, linenum, 'runtime/vlog', 5,
2269  'VLOG() should be used with numeric verbosity level. '
2270  'Use LOG() if you want symbolic severity levels.')
2271 
2272 # Matches invalid increment: *count++, which moves pointer instead of
2273 # incrementing a value.
2274 _RE_PATTERN_INVALID_INCREMENT = re.compile(
2275  r'^\s*\*\w+(\+\+|--);')
2276 
2277 
2278 def CheckInvalidIncrement(filename, clean_lines, linenum, error):
2279  """Checks for invalid increment *count++.
2280 
2281  For example, the following function:
2282  void increment_counter(int* count) {
2283  *count++;
2284  }
2285  is invalid because it effectively does count++, moving the pointer, and should
2286  be replaced with ++*count, (*count)++, or *count += 1.
2287 
2288  Args:
2289  filename: The name of the current file.
2290  clean_lines: A CleansedLines instance containing the file.
2291  linenum: The number of the line to check.
2292  error: The function to call with any errors found.
2293  """
2294  line = clean_lines.elided[linenum]
2295  if _RE_PATTERN_INVALID_INCREMENT.match(line):
2296  error(filename, linenum, 'runtime/invalid_increment', 5,
2297  'Changing pointer instead of value (or unused value of operator*).')
2298 
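# A minimal illustration (sketch only, hypothetical helper): the pattern only
# matches a dereferenced identifier incremented or decremented as a bare
# statement.
def _demo_invalid_increment():
  bad = bool(_RE_PATTERN_INVALID_INCREMENT.match('  *count++;'))   # True  -> flagged
  ok = bool(_RE_PATTERN_INVALID_INCREMENT.match('  (*count)++;'))  # False -> fine
  return bad, ok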
2299 
2300 def IsMacroDefinition(clean_lines, linenum):
2301  if Search(r'^#define', clean_lines[linenum]):
2302  return True
2303 
2304  if linenum > 0 and Search(r'\\$', clean_lines[linenum - 1]):
2305  return True
2306 
2307  return False
2308 
2309 
2310 def IsForwardClassDeclaration(clean_lines, linenum):
2311  return Match(r'^\s*(\btemplate\b)*.*class\s+\w+;\s*$', clean_lines[linenum])
2312 
2313 
2314 class _BlockInfo(object):
2315  """Stores information about a generic block of code."""
2316 
2317  def __init__(self, linenum, seen_open_brace):
2318  self.starting_linenum = linenum
2319  self.seen_open_brace = seen_open_brace
2320  self.open_parentheses = 0
2321  self.inline_asm = _NO_ASM
2322  self.check_namespace_indentation = False
2323 
2324  def CheckBegin(self, filename, clean_lines, linenum, error):
2325  """Run checks that apply to text up to the opening brace.
2326 
2327  This is mostly for checking the text after the class identifier
2328  and the "{", usually where the base class is specified. For other
2329  blocks, there isn't much to check, so we always pass.
2330 
2331  Args:
2332  filename: The name of the current file.
2333  clean_lines: A CleansedLines instance containing the file.
2334  linenum: The number of the line to check.
2335  error: The function to call with any errors found.
2336  """
2337  pass
2338 
2339  def CheckEnd(self, filename, clean_lines, linenum, error):
2340  """Run checks that apply to text after the closing brace.
2341 
2342  This is mostly used for checking end of namespace comments.
2343 
2344  Args:
2345  filename: The name of the current file.
2346  clean_lines: A CleansedLines instance containing the file.
2347  linenum: The number of the line to check.
2348  error: The function to call with any errors found.
2349  """
2350  pass
2351 
2352  def IsBlockInfo(self):
2353  """Returns true if this block is a _BlockInfo.
2354 
2355  This is convenient for verifying that an object is an instance of
2356  a _BlockInfo, but not an instance of any of the derived classes.
2357 
2358  Returns:
2359  True for this class, False for derived classes.
2360  """
2361  return self.__class__ == _BlockInfo
2362 
2363 
2364 class _ExternCInfo(_BlockInfo):
2365  """Stores information about an 'extern "C"' block."""
2366 
2367  def __init__(self, linenum):
2368  _BlockInfo.__init__(self, linenum, True)
2369 
2370 
2371 class _ClassInfo(_BlockInfo):
2372  """Stores information about a class."""
2373 
2374  def __init__(self, name, class_or_struct, clean_lines, linenum):
2375  _BlockInfo.__init__(self, linenum, False)
2376  self.name = name
2377  self.is_derived = False
2378  self.check_namespace_indentation = True
2379  if class_or_struct == 'struct':
2380  self.access = 'public'
2381  self.is_struct = True
2382  else:
2383  self.access = 'private'
2384  self.is_struct = False
2385 
2386  # Remember initial indentation level for this class. Using raw_lines here
2387  # instead of elided to account for leading comments.
2388  self.class_indent = GetIndentLevel(clean_lines.raw_lines[linenum])
2389 
2390  # Try to find the end of the class. This will be confused by things like:
2391  # class A {
2392  # } *x = { ...
2393  #
2394  # But it's still good enough for CheckSectionSpacing.
2395  self.last_line = 0
2396  depth = 0
2397  for i in range(linenum, clean_lines.NumLines()):
2398  line = clean_lines.elided[i]
2399  depth += line.count('{') - line.count('}')
2400  if not depth:
2401  self.last_line = i
2402  break
2403 
2404  def CheckBegin(self, filename, clean_lines, linenum, error):
2405  # Look for a bare ':'
2406  if Search('(^|[^:]):($|[^:])', clean_lines.elided[linenum]):
2407  self.is_derived = True
2408 
2409  def CheckEnd(self, filename, clean_lines, linenum, error):
2410  # If there is a DISALLOW macro, it should appear near the end of
2411  # the class.
2412  seen_last_thing_in_class = False
2413  for i in xrange(linenum - 1, self.starting_linenum, -1):
2414  match = Search(
2415  r'\b(DISALLOW_COPY_AND_ASSIGN|DISALLOW_IMPLICIT_CONSTRUCTORS)\(' +
2416  self.name + r'\)',
2417  clean_lines.elided[i])
2418  if match:
2419  if seen_last_thing_in_class:
2420  error(filename, i, 'readability/constructors', 3,
2421  match.group(1) + ' should be the last thing in the class')
2422  break
2423 
2424  if not Match(r'^\s*$', clean_lines.elided[i]):
2425  seen_last_thing_in_class = True
2426 
2427  # Check that closing brace is aligned with beginning of the class.
2428  # Only do this if the closing brace is indented by only whitespaces.
2429  # This means we will not check single-line class definitions.
2430  indent = Match(r'^( *)\}', clean_lines.elided[linenum])
2431  if indent and len(indent.group(1)) != self.class_indent:
2432  if self.is_struct:
2433  parent = 'struct ' + self.name
2434  else:
2435  parent = 'class ' + self.name
2436  error(filename, linenum, 'whitespace/indent', 3,
2437  'Closing brace should be aligned with beginning of %s' % parent)
2438 
2439 
2440 class _NamespaceInfo(_BlockInfo):
2441  """Stores information about a namespace."""
2442 
2443  def __init__(self, name, linenum):
2444  _BlockInfo.__init__(self, linenum, False)
2445  self.name = name or ''
2446  self.check_namespace_indentation = True
2447 
2448  def CheckEnd(self, filename, clean_lines, linenum, error):
2449  """Check end of namespace comments."""
2450  line = clean_lines.raw_lines[linenum]
2451 
2452  # Check how many lines are enclosed in this namespace. Don't issue a
2453  # warning for missing namespace comments if there aren't enough
2454  # lines. However, do apply checks if there is already an end of
2455  # namespace comment and it's incorrect.
2456  #
2457  # TODO(unknown): We always want to check end of namespace comments
2458  # if a namespace is large, but sometimes we also want to apply the
2459  # check if a short namespace contained nontrivial things (something
2460  # other than forward declarations). There is currently no logic on
2461  # deciding what these nontrivial things are, so this check is
2462  # triggered by namespace size only, which works most of the time.
2463  if (linenum - self.starting_linenum < 10
2464  and not Match(r'^\s*};*\s*(//|/\*).*\bnamespace\b', line)):
2465  return
2466 
2467  # Look for matching comment at end of namespace.
2468  #
2469  # Note that we accept C style "/* */" comments for terminating
2470  # namespaces, so that code that terminates namespaces inside
2471  # preprocessor macros can be cpplint clean.
2472  #
2473  # We also accept stuff like "// end of namespace <name>." with the
2474  # period at the end.
2475  #
2476  # Besides these, we don't accept anything else; otherwise we might
2477  # get false negatives when the existing comment is a substring of the
2478  # expected namespace.
2479  if self.name:
2480  # Named namespace
2481  if not Match((r'^\s*};*\s*(//|/\*).*\bnamespace\s+' +
2482  re.escape(self.name) + r'[\*/\.\\\s]*$'),
2483  line):
2484  error(filename, linenum, 'readability/namespace', 5,
2485  'Namespace should be terminated with "// namespace %s"' %
2486  self.name)
2487  else:
2488  # Anonymous namespace
2489  if not Match(r'^\s*};*\s*(//|/\*).*\bnamespace[\*/\.\\\s]*$', line):
2490  # If "// namespace anonymous" or "// anonymous namespace (more text)",
2491  # mention "// anonymous namespace" as an acceptable form
2492  if Match(r'^\s*}.*\b(namespace anonymous|anonymous namespace)\b', line):
2493  error(filename, linenum, 'readability/namespace', 5,
2494  'Anonymous namespace should be terminated with "// namespace"'
2495  ' or "// anonymous namespace"')
2496  else:
2497  error(filename, linenum, 'readability/namespace', 5,
2498  'Anonymous namespace should be terminated with "// namespace"')
2499 
2500 
2501 class _PreprocessorInfo(object):
2502  """Stores checkpoints of nesting stacks when #if/#else is seen."""
2503 
2504  def __init__(self, stack_before_if):
2505  # The entire nesting stack before #if
2506  self.stack_before_if = stack_before_if
2507 
2508  # The entire nesting stack up to #else
2509  self.stack_before_else = []
2510 
2511  # Whether we have already seen #else or #elif
2512  self.seen_else = False
2513 
2514 
2515 class NestingState(object):
2516  """Holds states related to parsing braces."""
2517 
2518  def __init__(self):
2519  # Stack for tracking all braces. An object is pushed whenever we
2520  # see a "{", and popped when we see a "}". Only 3 types of
2521  # objects are possible:
2522  # - _ClassInfo: a class or struct.
2523  # - _NamespaceInfo: a namespace.
2524  # - _BlockInfo: some other type of block.
2525  self.stack = []
2526 
2527  # Top of the previous stack before each Update().
2528  #
2529  # Because the nesting_stack is updated at the end of each line, we
2530  # had to do some convoluted checks to find out what the current
2531  # scope is at the beginning of the line. This check is simplified by
2532  # saving the previous top of nesting stack.
2533  #
2534  # We could save the full stack, but we only need the top. Copying
2535  # the full nesting stack would slow down cpplint by ~10%.
2536  self.previous_stack_top = None
2537 
2538  # Stack of _PreprocessorInfo objects.
2539  self.pp_stack = []
2540 
2541  def SeenOpenBrace(self):
2542  """Check if we have seen the opening brace for the innermost block.
2543 
2544  Returns:
2545  True if we have seen the opening brace, False if the innermost
2546  block is still expecting an opening brace.
2547  """
2548  return (not self.stack) or self.stack[-1].seen_open_brace
2549 
2550  def InNamespaceBody(self):
2551  """Check if we are currently one level inside a namespace body.
2552 
2553  Returns:
2554  True if top of the stack is a namespace block, False otherwise.
2555  """
2556  return self.stack and isinstance(self.stack[-1], _NamespaceInfo)
2557 
2558  def InExternC(self):
2559  """Check if we are currently one level inside an 'extern "C"' block.
2560 
2561  Returns:
2562  True if top of the stack is an extern block, False otherwise.
2563  """
2564  return self.stack and isinstance(self.stack[-1], _ExternCInfo)
2565 
2566  def InClassDeclaration(self):
2567  """Check if we are currently one level inside a class or struct declaration.
2568 
2569  Returns:
2570  True if top of the stack is a class/struct, False otherwise.
2571  """
2572  return self.stack and isinstance(self.stack[-1], _ClassInfo)
2573 
2574  def InAsmBlock(self):
2575  """Check if we are currently one level inside an inline ASM block.
2576 
2577  Returns:
2578  True if the top of the stack is a block containing inline ASM.
2579  """
2580  return self.stack and self.stack[-1].inline_asm != _NO_ASM
2581 
2582  def InTemplateArgumentList(self, clean_lines, linenum, pos):
2583  """Check if current position is inside template argument list.
2584 
2585  Args:
2586  clean_lines: A CleansedLines instance containing the file.
2587  linenum: The number of the line to check.
2588  pos: position just after the suspected template argument.
2589  Returns:
2590  True if (linenum, pos) is inside template arguments.
2591  """
2592  while linenum < clean_lines.NumLines():
2593  # Find the earliest character that might indicate a template argument
2594  line = clean_lines.elided[linenum]
2595  match = Match(r'^[^{};=\[\]\.<>]*(.)', line[pos:])
2596  if not match:
2597  linenum += 1
2598  pos = 0
2599  continue
2600  token = match.group(1)
2601  pos += len(match.group(0))
2602 
2603  # These things do not look like template argument list:
2604  # class Suspect {
2605  # class Suspect x; }
2606  if token in ('{', '}', ';'): return False
2607 
2608  # These things look like template argument list:
2609  # template <class Suspect>
2610  # template <class Suspect = default_value>
2611  # template <class Suspect[]>
2612  # template <class Suspect...>
2613  if token in ('>', '=', '[', ']', '.'): return True
2614 
2615  # Check if token is an unmatched '<'.
2616  # If not, move on to the next character.
2617  if token != '<':
2618  pos += 1
2619  if pos >= len(line):
2620  linenum += 1
2621  pos = 0
2622  continue
2623 
2624  # We can't be sure if we just find a single '<', and need to
2625  # find the matching '>'.
2626  (_, end_line, end_pos) = CloseExpression(clean_lines, linenum, pos - 1)
2627  if end_pos < 0:
2628  # Not sure if template argument list or syntax error in file
2629  return False
2630  linenum = end_line
2631  pos = end_pos
2632  return False
2633 
2634  def UpdatePreprocessor(self, line):
2635  """Update preprocessor stack.
2636 
2637  We need to handle preprocessors due to classes like this:
2638  #ifdef SWIG
2639  struct ResultDetailsPageElementExtensionPoint {
2640  #else
2641  struct ResultDetailsPageElementExtensionPoint : public Extension {
2642  #endif
2643 
2644  We make the following assumptions (good enough for most files):
2645  - Preprocessor condition evaluates to true from #if up to first
2646  #else/#elif/#endif.
2647 
2648  - Preprocessor condition evaluates to false from #else/#elif up
2649  to #endif. We still perform lint checks on these lines, but
2650  these do not affect nesting stack.
2651 
2652  Args:
2653  line: current line to check.
2654  """
2655  if Match(r'^\s*#\s*(if|ifdef|ifndef)\b', line):
2656  # Beginning of #if block, save the nesting stack here. The saved
2657  # stack will allow us to restore the parsing state in the #else case.
2658  self.pp_stack.append(_PreprocessorInfo(copy.deepcopy(self.stack)))
2659  elif Match(r'^\s*#\s*(else|elif)\b', line):
2660  # Beginning of #else block
2661  if self.pp_stack:
2662  if not self.pp_stack[-1].seen_else:
2663  # This is the first #else or #elif block. Remember the
2664  # whole nesting stack up to this point. This is what we
2665  # keep after the #endif.
2666  self.pp_stack[-1].seen_else = True
2667  self.pp_stack[-1].stack_before_else = copy.deepcopy(self.stack)
2668 
2669  # Restore the stack to how it was before the #if
2670  self.stack = copy.deepcopy(self.pp_stack[-1].stack_before_if)
2671  else:
2672  # TODO(unknown): unexpected #else, issue warning?
2673  pass
2674  elif Match(r'^\s*#\s*endif\b', line):
2675  # End of #if or #else blocks.
2676  if self.pp_stack:
2677  # If we saw an #else, we will need to restore the nesting
2678  # stack to its former state before the #else, otherwise we
2679  # will just continue from where we left off.
2680  if self.pp_stack[-1].seen_else:
2681  # Here we can just use a shallow copy since we are the last
2682  # reference to it.
2683  self.stack = self.pp_stack[-1].stack_before_else
2684  # Drop the corresponding #if
2685  self.pp_stack.pop()
2686  else:
2687  # TODO(unknown): unexpected #endif, issue warning?
2688  pass
2689 
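# A minimal illustration (sketch only) of the save/restore logic above, using a
# conditional class head like the one in the docstring (Point and Base are
# hypothetical names):
#
#   #ifdef SWIG
#   struct Point {
#   #else
#   struct Point : public Base {
#   #endif
#
# The stack snapshot taken at #ifdef is restored when #else is seen, and the
# snapshot taken at that first #else is restored at #endif, so only the #ifdef
# branch's parse of the class head survives past the #endif.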
2690  # TODO(unknown): Update() is too long, but we will refactor later.
2691  def Update(self, filename, clean_lines, linenum, error):
2692  """Update nesting state with current line.
2693 
2694  Args:
2695  filename: The name of the current file.
2696  clean_lines: A CleansedLines instance containing the file.
2697  linenum: The number of the line to check.
2698  error: The function to call with any errors found.
2699  """
2700  line = clean_lines.elided[linenum]
2701 
2702  # Remember top of the previous nesting stack.
2703  #
2704  # The stack is always pushed/popped and not modified in place, so
2705  # we can just do a shallow copy instead of copy.deepcopy. Using
2706  # deepcopy would slow down cpplint by ~28%.
2707  if self.stack:
2708  self.previous_stack_top = self.stack[-1]
2709  else:
2710  self.previous_stack_top = None
2711 
2712  # Update pp_stack
2713  self.UpdatePreprocessor(line)
2714 
2715  # Count parentheses. This is to avoid adding struct arguments to
2716  # the nesting stack.
2717  if self.stack:
2718  inner_block = self.stack[-1]
2719  depth_change = line.count('(') - line.count(')')
2720  inner_block.open_parentheses += depth_change
2721 
2722  # Also check if we are starting or ending an inline assembly block.
2723  if inner_block.inline_asm in (_NO_ASM, _END_ASM):
2724  if (depth_change != 0 and
2725  inner_block.open_parentheses == 1 and
2726  _MATCH_ASM.match(line)):
2727  # Enter assembly block
2728  inner_block.inline_asm = _INSIDE_ASM
2729  else:
2730  # Not entering assembly block. If previous line was _END_ASM,
2731  # we will now shift to _NO_ASM state.
2732  inner_block.inline_asm = _NO_ASM
2733  elif (inner_block.inline_asm == _INSIDE_ASM and
2734  inner_block.open_parentheses == 0):
2735  # Exit assembly block
2736  inner_block.inline_asm = _END_ASM
2737 
2738  # Consume namespace declaration at the beginning of the line. Do
2739  # this in a loop so that we catch same line declarations like this:
2740  # namespace proto2 { namespace bridge { class MessageSet; } }
2741  while True:
2742  # Match start of namespace. The "\b\s*" below catches namespace
2743  # declarations even if they aren't followed by whitespace; this
2744  # is so that we don't confuse our namespace checker. The
2745  # missing spaces will be flagged by CheckSpacing.
2746  namespace_decl_match = Match(r'^\s*namespace\b\s*([:\w]+)?(.*)$', line)
2747  if not namespace_decl_match:
2748  break
2749 
2750  new_namespace = _NamespaceInfo(namespace_decl_match.group(1), linenum)
2751  self.stack.append(new_namespace)
2752 
2753  line = namespace_decl_match.group(2)
2754  if line.find('{') != -1:
2755  new_namespace.seen_open_brace = True
2756  line = line[line.find('{') + 1:]
2757 
2758  # Look for a class declaration in whatever is left of the line
2759  # after parsing namespaces. The regexp accounts for decorated classes
2760  # such as in:
2761  # class LOCKABLE API Object {
2762  # };
2763  class_decl_match = Match(
2764  r'^(\s*(?:template\s*<[\w\s<>,:=]*>\s*)?'
2765  r'(class|struct)\s+(?:[A-Z_]+\s+)*(\w+(?:::\w+)*))'
2766  r'(.*)$', line)
2767  if (class_decl_match and
2768  (not self.stack or self.stack[-1].open_parentheses == 0)):
2769  # We do not want to accept classes that are actually template arguments:
2770  # template <class Ignore1,
2771  # class Ignore2 = Default<Args>,
2772  # template <Args> class Ignore3>
2773  # void Function() {};
2774  #
2775  # To avoid template argument cases, we scan forward and look for
2776  # an unmatched '>'. If we see one, assume we are inside a
2777  # template argument list.
2778  end_declaration = len(class_decl_match.group(1))
2779  if not self.InTemplateArgumentList(clean_lines, linenum, end_declaration):
2780  self.stack.append(_ClassInfo(
2781  class_decl_match.group(3), class_decl_match.group(2),
2782  clean_lines, linenum))
2783  line = class_decl_match.group(4)
2784 
2785  # If we have not yet seen the opening brace for the innermost block,
2786  # run checks here.
2787  if not self.SeenOpenBrace():
2788  self.stack[-1].CheckBegin(filename, clean_lines, linenum, error)
2789 
2790  # Update access control if we are inside a class/struct
2791  if self.stack and isinstance(self.stack[-1], _ClassInfo):
2792  classinfo = self.stack[-1]
2793  access_match = Match(
2794  r'^(.*)\b(public|private|protected|signals)(\s+(?:slots\s*)?)?'
2795  r':(?:[^:]|$)',
2796  line)
2797  if access_match:
2798  classinfo.access = access_match.group(2)
2799 
2800  # Check that access keywords are indented +1 space. Skip this
2801  # check if the keywords are not preceded by whitespaces.
2802  indent = access_match.group(1)
2803  if (len(indent) != classinfo.class_indent + 1 and
2804  Match(r'^\s*$', indent)):
2805  if classinfo.is_struct:
2806  parent = 'struct ' + classinfo.name
2807  else:
2808  parent = 'class ' + classinfo.name
2809  slots = ''
2810  if access_match.group(3):
2811  slots = access_match.group(3)
2812  error(filename, linenum, 'whitespace/indent', 3,
2813  '%s%s: should be indented +1 space inside %s' % (
2814  access_match.group(2), slots, parent))
2815 
2816  # Consume braces or semicolons from what's left of the line
2817  while True:
2818  # Match first brace, semicolon, or closed parenthesis.
2819  matched = Match(r'^[^{;)}]*([{;)}])(.*)$', line)
2820  if not matched:
2821  break
2822 
2823  token = matched.group(1)
2824  if token == '{':
2825  # If the namespace or class hasn't seen an opening brace yet, mark
2826  # namespace/class head as complete. Push a new block onto the
2827  # stack otherwise.
2828  if not self.SeenOpenBrace():
2829  self.stack[-1].seen_open_brace = True
2830  elif Match(r'^extern\s*"[^"]*"\s*\{', line):
2831  self.stack.append(_ExternCInfo(linenum))
2832  else:
2833  self.stack.append(_BlockInfo(linenum, True))
2834  if _MATCH_ASM.match(line):
2835  self.stack[-1].inline_asm = _BLOCK_ASM
2836 
2837  elif token == ';' or token == ')':
2838  # If we haven't seen an opening brace yet, but we already saw
2839  # a semicolon, this is probably a forward declaration. Pop
2840  # the stack for these.
2841  #
2842  # Similarly, if we haven't seen an opening brace yet, but we
2843  # already saw a closing parenthesis, then these are probably
2844  # function arguments with extra "class" or "struct" keywords.
2845  # Also pop the stack for these.
2846  if not self.SeenOpenBrace():
2847  self.stack.pop()
2848  else: # token == '}'
2849  # Perform end of block checks and pop the stack.
2850  if self.stack:
2851  self.stack[-1].CheckEnd(filename, clean_lines, linenum, error)
2852  self.stack.pop()
2853  line = matched.group(2)
2854 
2855  def InnermostClass(self):
2856  """Get class info on the top of the stack.
2857 
2858  Returns:
2859  A _ClassInfo object if we are inside a class, or None otherwise.
2860  """
2861  for i in range(len(self.stack), 0, -1):
2862  classinfo = self.stack[i - 1]
2863  if isinstance(classinfo, _ClassInfo):
2864  return classinfo
2865  return None
2866 
2867  def CheckCompletedBlocks(self, filename, error):
2868  """Checks that all classes and namespaces have been completely parsed.
2869 
2870  Call this when all lines in a file have been processed.
2871  Args:
2872  filename: The name of the current file.
2873  error: The function to call with any errors found.
2874  """
2875  # Note: This test can result in false positives if #ifdef constructs
2876  # get in the way of brace matching. See the testBuildClass test in
2877  # cpplint_unittest.py for an example of this.
2878  for obj in self.stack:
2879  if isinstance(obj, _ClassInfo):
2880  error(filename, obj.starting_linenum, 'build/class', 5,
2881  'Failed to find complete declaration of class %s' %
2882  obj.name)
2883  elif isinstance(obj, _NamespaceInfo):
2884  error(filename, obj.starting_linenum, 'build/namespaces', 5,
2885  'Failed to find complete declaration of namespace %s' %
2886  obj.name)
2887 
2888 
2889 def CheckForNonStandardConstructs(filename, clean_lines, linenum,
2890  nesting_state, error):
2891  r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2.
2892 
2893  Complain about several constructs which gcc-2 accepts, but which are
2894  not standard C++. Warning about these in lint is one way to ease the
2895  transition to new compilers.
2896  - put storage class first (e.g. "static const" instead of "const static").
2897  - "%lld" instead of "%qd" in printf-type functions.
2898  - "%1$d" is non-standard in printf-type functions.
2899  - "\%" is an undefined character escape sequence.
2900  - text after #endif is not allowed.
2901  - invalid inner-style forward declaration.
2902  - >? and <? operators, and their >?= and <?= cousins.
2903 
2904  Additionally, check for constructor/destructor style violations and reference
2905  members, as it is very convenient to do so while checking for
2906  gcc-2 compliance.
2907 
2908  Args:
2909  filename: The name of the current file.
2910  clean_lines: A CleansedLines instance containing the file.
2911  linenum: The number of the line to check.
2912  nesting_state: A NestingState instance which maintains information about
2913  the current stack of nested blocks being parsed.
2914  error: A callable to which errors are reported, which takes 4 arguments:
2915  filename, line number, error level, and message
2916  """
2917 
2918  # Remove comments from the line, but leave in strings for now.
2919  line = clean_lines.lines[linenum]
2920 
2921  if Search(r'printf\s*\(.*".*%[-+ ]?\d*q', line):
2922  error(filename, linenum, 'runtime/printf_format', 3,
2923  '%q in format strings is deprecated. Use %ll instead.')
2924 
2925  if Search(r'printf\s*\(.*".*%\d+\$', line):
2926  error(filename, linenum, 'runtime/printf_format', 2,
2927  '%N$ formats are unconventional. Try rewriting to avoid them.')
2928 
2929  # Remove escaped backslashes before looking for undefined escapes.
2930  line = line.replace('\\\\', '')
2931 
2932  if Search(r'("|\').*\\(%|\[|\(|{)', line):
2933  error(filename, linenum, 'build/printf_format', 3,
2934  '%, [, (, and { are undefined character escapes. Unescape them.')
2935 
2936  # For the rest, work with both comments and strings removed.
2937  line = clean_lines.elided[linenum]
2938 
2939  if Search(r'\b(const|volatile|void|char|short|int|long'
2940  r'|float|double|signed|unsigned'
2941  r'|schar|u?int8|u?int16|u?int32|u?int64)'
2942  r'\s+(register|static|extern|typedef)\b',
2943  line):
2944  error(filename, linenum, 'build/storage_class', 5,
2945  'Storage-class specifier (static, extern, typedef, etc) should be '
2946  'at the beginning of the declaration.')
2947 
2948  if Match(r'\s*#\s*endif\s*[^/\s]+', line):
2949  error(filename, linenum, 'build/endif_comment', 5,
2950  'Uncommented text after #endif is non-standard. Use a comment.')
2951 
2952  if Match(r'\s*class\s+(\w+\s*::\s*)+\w+\s*;', line):
2953  error(filename, linenum, 'build/forward_decl', 5,
2954  'Inner-style forward declarations are invalid. Remove this line.')
2955 
2956  if Search(r'(\w+|[+-]?\d+(\.\d*)?)\s*(<|>)\?=?\s*(\w+|[+-]?\d+)(\.\d*)?',
2957  line):
2958  error(filename, linenum, 'build/deprecated', 3,
2959  '>? and <? (max and min) operators are non-standard and deprecated.')
2960 
2961  if Search(r'^\s*const\s*string\s*&\s*\w+\s*;', line):
2962  # TODO(unknown): Could it be expanded safely to arbitrary references,
2963  # without triggering too many false positives? The first
2964  # attempt triggered 5 warnings for mostly benign code in the regtest, hence
2965  # the restriction.
2966  # Here's the original regexp, for the reference:
2967  # type_name = r'\w+((\s*::\s*\w+)|(\s*<\s*\w+?\s*>))?'
2968  # r'\s*const\s*' + type_name + '\s*&\s*\w+\s*;'
2969  error(filename, linenum, 'runtime/member_string_references', 2,
2970  'const string& members are dangerous. It is much better to use '
2971  'alternatives, such as pointers or simple constants.')
2972 
2973  # Everything else in this function operates on class declarations.
2974  # Return early if the top of the nesting stack is not a class, or if
2975  # the class head is not completed yet.
2976  classinfo = nesting_state.InnermostClass()
2977  if not classinfo or not classinfo.seen_open_brace:
2978  return
2979 
2980  # The class may have been declared with namespace or classname qualifiers.
2981  # The constructor and destructor will not have those qualifiers.
2982  base_classname = classinfo.name.split('::')[-1]
2983 
2984  # Look for single-argument constructors that aren't marked explicit.
2985  # Technically a valid construct, but against style.
2986  explicit_constructor_match = Match(
2987  r'\s+(?:inline\s+)?(explicit\s+)?(?:inline\s+)?%s\s*'
2988  r'\(((?:[^()]|\([^()]*\))*)\)'
2989  % re.escape(base_classname),
2990  line)
2991 
2992  if explicit_constructor_match:
2993  is_marked_explicit = explicit_constructor_match.group(1)
2994 
2995  if not explicit_constructor_match.group(2):
2996  constructor_args = []
2997  else:
2998  constructor_args = explicit_constructor_match.group(2).split(',')
2999 
3000  # collapse arguments so that commas in template parameter lists and function
3001  # argument parameter lists don't split arguments in two
3002  i = 0
3003  while i < len(constructor_args):
3004  constructor_arg = constructor_args[i]
3005  while (constructor_arg.count('<') > constructor_arg.count('>') or
3006  constructor_arg.count('(') > constructor_arg.count(')')):
3007  constructor_arg += ',' + constructor_args[i + 1]
3008  del constructor_args[i + 1]
3009  constructor_args[i] = constructor_arg
3010  i += 1
3011 
3012  variadic_args = [arg for arg in constructor_args if '&&...' in arg]
3013  defaulted_args = [arg for arg in constructor_args if '=' in arg]
3014  noarg_constructor = (not constructor_args or # empty arg list
3015  # 'void' arg specifier
3016  (len(constructor_args) == 1 and
3017  constructor_args[0].strip() == 'void'))
3018  onearg_constructor = ((len(constructor_args) == 1 and # exactly one arg
3019  not noarg_constructor) or
3020  # all but at most one arg defaulted
3021  (len(constructor_args) >= 1 and
3022  not noarg_constructor and
3023  len(defaulted_args) >= len(constructor_args) - 1) or
3024  # variadic arguments with zero or one argument
3025  (len(constructor_args) <= 2 and
3026  len(variadic_args) >= 1))
3027  initializer_list_constructor = bool(
3028  onearg_constructor and
3029  Search(r'\bstd\s*::\s*initializer_list\b', constructor_args[0]))
3030  copy_constructor = bool(
3031  onearg_constructor and
3032  Match(r'(const\s+)?%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&'
3033  % re.escape(base_classname), constructor_args[0].strip()))
3034 
3035  if (not is_marked_explicit and
3036  onearg_constructor and
3037  not initializer_list_constructor and
3038  not copy_constructor):
3039  if defaulted_args or variadic_args:
3040  error(filename, linenum, 'runtime/explicit', 5,
3041  'Constructors callable with one argument '
3042  'should be marked explicit.')
3043  else:
3044  error(filename, linenum, 'runtime/explicit', 5,
3045  'Single-parameter constructors should be marked explicit.')
3046  elif is_marked_explicit and not onearg_constructor:
3047  if noarg_constructor:
3048  error(filename, linenum, 'runtime/explicit', 5,
3049  'Zero-parameter constructors should not be marked explicit.')
3050 
3051 
3052 def CheckSpacingForFunctionCall(filename, clean_lines, linenum, error):
3053  """Checks for the correctness of various spacing around function calls.
3054 
3055  Args:
3056  filename: The name of the current file.
3057  clean_lines: A CleansedLines instance containing the file.
3058  linenum: The number of the line to check.
3059  error: The function to call with any errors found.
3060  """
3061  line = clean_lines.elided[linenum]
3062 
3063  # Since function calls often occur inside if/for/while/switch
3064  # expressions - which have their own, more liberal conventions - we
3065  # first see if we should be looking inside such an expression for a
3066  # function call, to which we can apply more strict standards.
3067  fncall = line # if there's no control flow construct, look at whole line
3068  for pattern in (r'\bif\s*\((.*)\)\s*{',
3069  r'\bfor\s*\((.*)\)\s*{',
3070  r'\bwhile\s*\((.*)\)\s*[{;]',
3071  r'\bswitch\s*\((.*)\)\s*{'):
3072  match = Search(pattern, line)
3073  if match:
3074  fncall = match.group(1) # look inside the parens for function calls
3075  break
3076 
3077  # Except in if/for/while/switch, there should never be space
3078  # immediately inside parens (eg "f( 3, 4 )"). We make an exception
3079  # for nested parens ( (a+b) + c ). Likewise, there should never be
3080  # a space before a ( when it's a function argument. I assume it's a
3081  # function argument when the char before the whitespace is legal in
3082  # a function name (alnum + _) and we're not starting a macro. Also ignore
3083  # pointers and references to arrays and functions because they're too tricky:
3084  # we use a very simple way to recognize these:
3085  # " (something)(maybe-something)" or
3086  # " (something)(maybe-something," or
3087  # " (something)[something]"
3088  # Note that we assume the contents of [] to be short enough that
3089  # they'll never need to wrap.
3090  if ( # Ignore control structures.
3091  not Search(r'\b(if|for|while|switch|return|new|delete|catch|sizeof)\b',
3092  fncall) and
3093  # Ignore pointers/references to functions.
3094  not Search(r' \‍([^)]+\‍)\‍([^)]*(\‍)|,$)', fncall) and
3095  # Ignore pointers/references to arrays.
3096  not Search(r' \‍([^)]+\‍)\[[^\]]+\]', fncall)):
3097  if Search(r'\w\s*\‍(\s(?!\s*\\$)', fncall): # a ( used for a fn call
3098  error(filename, linenum, 'whitespace/parens', 4,
3099  'Extra space after ( in function call')
3100  elif Search(r'\‍(\s+(?!(\s*\\)|\‍()', fncall):
3101  error(filename, linenum, 'whitespace/parens', 2,
3102  'Extra space after (')
3103  if (Search(r'\w\s+\‍(', fncall) and
3104  not Search(r'_{0,2}asm_{0,2}\s+_{0,2}volatile_{0,2}\s+\‍(', fncall) and
3105  not Search(r'#\s*define|typedef|using\s+\w+\s*=', fncall) and
3106  not Search(r'\w\s+\‍((\w+::)*\*\w+\‍)\‍(', fncall) and
3107  not Search(r'\bcase\s+\‍(', fncall)):
3108  # TODO(unknown): Space after an operator function seems to be a common
3109  # error; silence those for now by restricting them to the highest verbosity.
3110  if Search(r'\boperator_*\b', line):
3111  error(filename, linenum, 'whitespace/parens', 0,
3112  'Extra space before ( in function call')
3113  else:
3114  error(filename, linenum, 'whitespace/parens', 4,
3115  'Extra space before ( in function call')
3116  # If the ) is followed only by a newline or a { + newline, assume it's
3117  # part of a control statement (if/while/etc), and don't complain
3118  if Search(r'[^)]\s+\‍)\s*[^{\s]', fncall):
3119  # If the closing parenthesis is preceded by only whitespaces,
3120  # try to give a more descriptive error message.
3121  if Search(r'^\s+\‍)', fncall):
3122  error(filename, linenum, 'whitespace/parens', 2,
3123  'Closing ) should be moved to the previous line')
3124  else:
3125  error(filename, linenum, 'whitespace/parens', 2,
3126  'Extra space before )')
3127 
3128 
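# --- Illustrative example (not part of the original linter) ---
# The two core patterns above separate "Foo( 3, 4)" (extra space *after* the
# opening parenthesis) from "Foo (3, 4)" (extra space *before* it).  The
# hypothetical helper below shows those two patterns with plain re.search,
# leaving out the asm/#define/case/operator exclusions that the real check
# applies:
def _ExampleFunctionCallSpacingProblems(fncall):
  """Return a list of spacing problems found in a function-call string."""
  problems = []
  if re.search(r'\w\s*\(\s(?!\s*\\$)', fncall):   # e.g. "Foo( 3, 4)"
    problems.append('Extra space after ( in function call')
  if re.search(r'\w\s+\(', fncall):               # e.g. "Foo (3, 4)"
    problems.append('Extra space before ( in function call')
  return problems

#   _ExampleFunctionCallSpacingProblems('Foo(3, 4)')   -> []
#   _ExampleFunctionCallSpacingProblems('Foo( 3, 4)')  -> ['Extra space after (...']
#   _ExampleFunctionCallSpacingProblems('Foo (3, 4)')  -> ['Extra space before (...']
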
3129 def IsBlankLine(line):
3130  """Returns true if the given line is blank.
3131 
3132  We consider a line to be blank if the line is empty or consists of
3133  only white spaces.
3134 
3135  Args:
3136  line: A single line of text, as a string.
3137 
3138  Returns:
3139  True, if the given line is blank.
3140  """
3141  return not line or line.isspace()
3142 
3143 
3144 def CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line,
3145  error):
3146  is_namespace_indent_item = (
3147  len(nesting_state.stack) > 1 and
3148  nesting_state.stack[-1].check_namespace_indentation and
3149  isinstance(nesting_state.previous_stack_top, _NamespaceInfo) and
3150  nesting_state.previous_stack_top == nesting_state.stack[-2])
3151 
3152  if ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item,
3153  clean_lines.elided, line):
3154  CheckItemIndentationInNamespace(filename, clean_lines.elided,
3155  line, error)
3156 
3157 
3158 def CheckForFunctionLengths(filename, clean_lines, linenum,
3159  function_state, error):
3160  """Reports for long function bodies.
3161 
3162  For an overview why this is done, see:
3163  https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Write_Short_Functions
3164 
3165  Uses a simplistic algorithm assuming other style guidelines
3166  (especially spacing) are followed.
3167  Only checks unindented functions, so class members are unchecked.
3168  Trivial bodies are unchecked, so constructors with huge initializer lists
3169  may be missed.
3170  Blank/comment lines are not counted so as to avoid encouraging the removal
3171  of vertical space and comments just to get through a lint check.
3172  NOLINT *on the last line of a function* disables this check.
3173 
3174  Args:
3175  filename: The name of the current file.
3176  clean_lines: A CleansedLines instance containing the file.
3177  linenum: The number of the line to check.
3178  function_state: Current function name and lines in body so far.
3179  error: The function to call with any errors found.
3180  """
3181  lines = clean_lines.lines
3182  line = lines[linenum]
3183  joined_line = ''
3184 
3185  starting_func = False
3186  regexp = r'(\w(\w|::|\*|\&|\s)*)\‍(' # decls * & space::name( ...
3187  match_result = Match(regexp, line)
3188  if match_result:
3189  # If the name is all caps and underscores, figure it's a macro and
3190  # ignore it, unless it's TEST or TEST_F.
3191  function_name = match_result.group(1).split()[-1]
3192  if function_name == 'TEST' or function_name == 'TEST_F' or (
3193  not Match(r'[A-Z_]+$', function_name)):
3194  starting_func = True
3195 
3196  if starting_func:
3197  body_found = False
3198  for start_linenum in range(linenum, clean_lines.NumLines()):
3199  start_line = lines[start_linenum]
3200  joined_line += ' ' + start_line.lstrip()
3201  if Search(r'(;|})', start_line): # Declarations and trivial functions
3202  body_found = True
3203  break # ... ignore
3204  elif Search(r'{', start_line):
3205  body_found = True
3206  function = Search(r'((\w|:)*)\‍(', line).group(1)
3207  if Match(r'TEST', function): # Handle TEST... macros
3208  parameter_regexp = Search(r'(\‍(.*\‍))', joined_line)
3209  if parameter_regexp: # Ignore bad syntax
3210  function += parameter_regexp.group(1)
3211  else:
3212  function += '()'
3213  function_state.Begin(function)
3214  break
3215  if not body_found:
3216  # No body for the function (or evidence of a non-function) was found.
3217  error(filename, linenum, 'readability/fn_size', 5,
3218  'Lint failed to find start of function body.')
3219  elif Match(r'^\}\s*$', line): # function end
3220  function_state.Check(error, filename, linenum)
3221  function_state.End()
3222  elif not Match(r'^\s*$', line):
3223  function_state.Count() # Count non-blank/non-comment lines.
3224 
3225 
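# --- Illustrative example (not part of the original linter) ---
# CheckForFunctionLengths() treats a line as the start of a function when it
# matches the declaration pattern above and the final identifier is not an
# ALL_CAPS macro name, with TEST and TEST_F as explicit exceptions.  The
# hypothetical helper below restates just that filtering step:
def _ExampleLooksLikeFunctionStart(line):
  """Return True if a line would be treated as the start of a function."""
  match = re.match(r'(\w(\w|::|\*|\&|\s)*)\(', line)
  if not match:
    return False
  name = match.group(1).split()[-1]
  return name in ('TEST', 'TEST_F') or not re.match(r'[A-Z_]+$', name)

#   _ExampleLooksLikeFunctionStart('void Foo::Bar(int x) {')   -> True
#   _ExampleLooksLikeFunctionStart('TEST_F(Fixture, Case) {')  -> True
#   _ExampleLooksLikeFunctionStart('MY_MACRO(arg);')           -> False
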
3226 _RE_PATTERN_TODO = re.compile(r'^//(\s*)TODO(\‍(.+?\‍))?:?(\s|$)?')
3227 
3228 
3229 def CheckComment(line, filename, linenum, next_line_start, error):
3230  """Checks for common mistakes in comments.
3231 
3232  Args:
3233  line: The line in question.
3234  filename: The name of the current file.
3235  linenum: The number of the line to check.
3236  next_line_start: The first non-whitespace column of the next line.
3237  error: The function to call with any errors found.
3238  """
3239  commentpos = line.find('//')
3240  if commentpos != -1:
3241  # Check if the // may be in quotes. If so, ignore it
3242  if re.sub(r'\\.', '', line[0:commentpos]).count('"') % 2 == 0:
3243  # Allow one space for new scopes, two spaces otherwise:
3244  if (not (Match(r'^.*{ *//', line) and next_line_start == commentpos) and
3245  ((commentpos >= 1 and
3246  line[commentpos-1] not in string.whitespace) or
3247  (commentpos >= 2 and
3248  line[commentpos-2] not in string.whitespace))):
3249  error(filename, linenum, 'whitespace/comments', 2,
3250  'At least two spaces is best between code and comments')
3251 
3252  # Checks for common mistakes in TODO comments.
3253  comment = line[commentpos:]
3254  match = _RE_PATTERN_TODO.match(comment)
3255  if match:
3256  # One whitespace is correct; zero whitespace is handled elsewhere.
3257  leading_whitespace = match.group(1)
3258  if len(leading_whitespace) > 1:
3259  error(filename, linenum, 'whitespace/todo', 2,
3260  'Too many spaces before TODO')
3261 
3262  username = match.group(2)
3263  if not username:
3264  error(filename, linenum, 'readability/todo', 2,
3265  'Missing username in TODO; it should look like '
3266  '"// TODO(my_username): Stuff."')
3267 
3268  middle_whitespace = match.group(3)
3269  # Comparisons made explicit for correctness -- pylint: disable=g-explicit-bool-comparison
3270  if middle_whitespace != ' ' and middle_whitespace != '':
3271  error(filename, linenum, 'whitespace/todo', 2,
3272  'TODO(my_username) should be followed by a space')
3273 
3274  # If the comment contains an alphanumeric character, there
3275  # should be a space somewhere between it and the // unless
3276  # it's a /// or //! Doxygen comment.
3277  if (Match(r'//[^ ]*\w', comment) and
3278  not Match(r'(///|//\!)(\s+|$)', comment)):
3279  error(filename, linenum, 'whitespace/comments', 4,
3280  'Should have a space between // and comment')
3281 
3282 
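# --- Illustrative example (not part of the original linter) ---
# _RE_PATTERN_TODO splits a TODO comment into the whitespace before "TODO"
# (group 1), the "(username)" part (group 2) and the character following the
# optional colon (group 3).  CheckComment() then expects at most one space
# before TODO, a username in parentheses, and a single space (or nothing)
# after it.  A quick, hypothetical demonstration of the capture groups:
def _ExampleTodoGroups(comment):
  """Return the (leading_ws, username, trailing_char) groups, or None."""
  match = _RE_PATTERN_TODO.match(comment)
  return match.groups() if match else None

#   _ExampleTodoGroups('// TODO(alice): fix')  -> (' ', '(alice)', ' ')
#   _ExampleTodoGroups('//   TODO: fix')       -> ('   ', None, ' ')
#   _ExampleTodoGroups('// not a todo')        -> None
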
3283 def CheckAccess(filename, clean_lines, linenum, nesting_state, error):
3284  """Checks for improper use of DISALLOW* macros.
3285 
3286  Args:
3287  filename: The name of the current file.
3288  clean_lines: A CleansedLines instance containing the file.
3289  linenum: The number of the line to check.
3290  nesting_state: A NestingState instance which maintains information about
3291  the current stack of nested blocks being parsed.
3292  error: The function to call with any errors found.
3293  """
3294  line = clean_lines.elided[linenum] # get rid of comments and strings
3295 
3296  matched = Match((r'\s*(DISALLOW_COPY_AND_ASSIGN|'
3297  r'DISALLOW_IMPLICIT_CONSTRUCTORS)'), line)
3298  if not matched:
3299  return
3300  if nesting_state.stack and isinstance(nesting_state.stack[-1], _ClassInfo):
3301  if nesting_state.stack[-1].access != 'private':
3302  error(filename, linenum, 'readability/constructors', 3,
3303  '%s must be in the private: section' % matched.group(1))
3304 
3305  else:
3306  # Found DISALLOW* macro outside a class declaration, or perhaps it
3307  # was used inside a function when it should have been part of the
3308  # class declaration. We could issue a warning here, but it
3309  # probably resulted in a compiler error already.
3310  pass
3311 
3312 
3313 def CheckSpacing(filename, clean_lines, linenum, nesting_state, error):
3314  """Checks for the correctness of various spacing issues in the code.
3315 
3316  Things we check for: spaces around operators, spaces after
3317  if/for/while/switch, no spaces around parens in function calls, two
3318  spaces between code and comment, don't start a block with a blank
3319  line, don't end a function with a blank line, don't add a blank line
3320  after public/protected/private, don't have too many blank lines in a row.
3321 
3322  Args:
3323  filename: The name of the current file.
3324  clean_lines: A CleansedLines instance containing the file.
3325  linenum: The number of the line to check.
3326  nesting_state: A NestingState instance which maintains information about
3327  the current stack of nested blocks being parsed.
3328  error: The function to call with any errors found.
3329  """
3330 
3331  # Don't use "elided" lines here, otherwise we can't check commented lines.
3332  # Don't want to use "raw" either, because we don't want to check inside C++11
3333  # raw strings.
3334  raw = clean_lines.lines_without_raw_strings
3335  line = raw[linenum]
3336 
3337  # Before nixing comments, check if the line is blank for no good
3338  # reason. This includes the first line after a block is opened, and
3339  # blank lines at the end of a function (i.e., right before a line like '}').
3340  #
3341  # Skip all the blank line checks if we are immediately inside a
3342  # namespace body. In other words, don't issue blank line warnings
3343  # for this block:
3344  # namespace {
3345  #
3346  # }
3347  #
3348  # A warning about missing end of namespace comments will be issued instead.
3349  #
3350  # Also skip blank line checks for 'extern "C"' blocks, which are formatted
3351  # like namespaces.
3352  if (IsBlankLine(line) and
3353  not nesting_state.InNamespaceBody() and
3354  not nesting_state.InExternC()):
3355  elided = clean_lines.elided
3356  prev_line = elided[linenum - 1]
3357  prevbrace = prev_line.rfind('{')
3358  # TODO(unknown): Don't complain if line before blank line, and line after,
3359  # both start with alnums and are indented the same amount.
3360  # This ignores whitespace at the start of a namespace block
3361  # because those are not usually indented.
3362  if prevbrace != -1 and prev_line[prevbrace:].find('}') == -1:
3363  # OK, we have a blank line at the start of a code block. Before we
3364  # complain, we check if it is an exception to the rule: The previous
3365  # non-empty line has the parameters of a function header that are indented
3366  # 4 spaces (because they did not fit in an 80-column line when placed on
3367  # the same line as the function name). We also check for the case where
3368  # the previous line is indented 6 spaces, which may happen when the
3369  # initializers of a constructor do not fit into an 80-column line.
3370  exception = False
3371  if Match(r' {6}\w', prev_line): # Initializer list?
3372  # We are looking for the opening column of initializer list, which
3373  # should be indented 4 spaces to cause 6 space indentation afterwards.
3374  search_position = linenum-2
3375  while (search_position >= 0
3376  and Match(r' {6}\w', elided[search_position])):
3377  search_position -= 1
3378  exception = (search_position >= 0
3379  and elided[search_position][:5] == ' :')
3380  else:
3381  # Search for the function arguments or an initializer list. We use a
3382  # simple heuristic here: If the line is indented 4 spaces; and we have a
3383  # closing paren, without the opening paren, followed by an opening brace
3384  # or colon (for initializer lists) we assume that it is the last line of
3385  # a function header. If we have a colon indented 4 spaces, it is an
3386  # initializer list.
3387  exception = (Match(r' {4}\w[^\‍(]*\‍)\s*(const\s*)?(\{\s*$|:)',
3388  prev_line)
3389  or Match(r' {4}:', prev_line))
3390 
3391  if not exception:
3392  error(filename, linenum, 'whitespace/blank_line', 2,
3393  'Redundant blank line at the start of a code block '
3394  'should be deleted.')
3395  # Ignore blank lines at the end of a block in a long if-else
3396  # chain, like this:
3397  # if (condition1) {
3398  # // Something followed by a blank line
3399  #
3400  # } else if (condition2) {
3401  # // Something else
3402  # }
3403  if linenum + 1 < clean_lines.NumLines():
3404  next_line = raw[linenum + 1]
3405  if (next_line
3406  and Match(r'\s*}', next_line)
3407  and next_line.find('} else ') == -1):
3408  error(filename, linenum, 'whitespace/blank_line', 3,
3409  'Redundant blank line at the end of a code block '
3410  'should be deleted.')
3411 
3412  matched = Match(r'\s*(public|protected|private):', prev_line)
3413  if matched:
3414  error(filename, linenum, 'whitespace/blank_line', 3,
3415  'Do not leave a blank line after "%s:"' % matched.group(1))
3416 
3417  # Next, check comments
3418  next_line_start = 0
3419  if linenum + 1 < clean_lines.NumLines():
3420  next_line = raw[linenum + 1]
3421  next_line_start = len(next_line) - len(next_line.lstrip())
3422  CheckComment(line, filename, linenum, next_line_start, error)
3423 
3424  # get rid of comments and strings
3425  line = clean_lines.elided[linenum]
3426 
3427  # You shouldn't have spaces before your brackets, except maybe after
3428  # 'delete []' or 'return []() {};'
3429  if Search(r'\w\s+\[', line) and not Search(r'(?:delete|return)\s+\[', line):
3430  error(filename, linenum, 'whitespace/braces', 5,
3431  'Extra space before [')
3432 
3433  # In range-based for, we wanted spaces before and after the colon, but
3434  # not around "::" tokens that might appear.
3435  if (Search(r'for *\‍(.*[^:]:[^: ]', line) or
3436  Search(r'for *\‍(.*[^: ]:[^:]', line)):
3437  error(filename, linenum, 'whitespace/forcolon', 2,
3438  'Missing space around colon in range-based for loop')
3439 
3440 
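# --- Illustrative example (not part of the original linter) ---
# The two range-based-for patterns above fire when the ':' separating the loop
# variable from the range lacks a space on one side, while '::' scope
# operators are deliberately excluded.  A hypothetical restatement:
def _ExampleMissingRangeForColonSpace(line):
  """Return True if a range-based for lacks a space around its colon."""
  return bool(re.search(r'for *\(.*[^:]:[^: ]', line) or
              re.search(r'for *\(.*[^: ]:[^:]', line))

#   _ExampleMissingRangeForColonSpace('for (auto x : values) {')    -> False
#   _ExampleMissingRangeForColonSpace('for (auto x: values) {')     -> True
#   _ExampleMissingRangeForColonSpace('for (auto x :values) {')     -> True
#   _ExampleMissingRangeForColonSpace('for (std::string s : v) {')  -> False
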
3441 def CheckOperatorSpacing(filename, clean_lines, linenum, error):
3442  """Checks for horizontal spacing around operators.
3443 
3444  Args:
3445  filename: The name of the current file.
3446  clean_lines: A CleansedLines instance containing the file.
3447  linenum: The number of the line to check.
3448  error: The function to call with any errors found.
3449  """
3450  line = clean_lines.elided[linenum]
3451 
3452  # Don't try to do spacing checks for operator methods. Do this by
3453  # replacing the troublesome characters with something else,
3454  # preserving column position for all other characters.
3455  #
3456  # The replacement is done repeatedly to avoid false positives from
3457  # operators that call operators.
3458  while True:
3459  match = Match(r'^(.*\boperator\b)(\S+)(\s*\‍(.*)$', line)
3460  if match:
3461  line = match.group(1) + ('_' * len(match.group(2))) + match.group(3)
3462  else:
3463  break
3464 
3465  # We allow no-spaces around = within an if: "if ( (a=Foo()) == 0 )".
3466  # Otherwise not. Note we only check for non-spaces on *both* sides;
3467  # sometimes people put non-spaces on one side when aligning ='s among
3468  # many lines (not that this is behavior that I approve of...)
3469  if ((Search(r'[\w.]=', line) or
3470  Search(r'=[\w.]', line))
3471  and not Search(r'\b(if|while|for) ', line)
3472  # Operators taken from [lex.operators] in C++11 standard.
3473  and not Search(r'(>=|<=|==|!=|&=|\^=|\|=|\+=|\*=|\/=|\%=)', line)
3474  and not Search(r'operator=', line)):
3475  error(filename, linenum, 'whitespace/operators', 4,
3476  'Missing spaces around =')
3477 
3478  # It's ok not to have spaces around binary operators like + - * /, but if
3479  # there's too little whitespace, we get concerned. It's hard to tell,
3480  # though, so we punt on this one for now. TODO.
3481 
3482  # You should always have whitespace around binary operators.
3483  #
3484  # Check <= and >= first to avoid false positives with < and >, then
3485  # check non-include lines for spacing around < and >.
3486  #
3487  # If the operator is followed by a comma, assume it's being used in a
3488  # macro context and don't do any checks. This avoids false
3489  # positives.
3490  #
3491  # Note that && is not included here. This is because there are too
3492  # many false positives due to RValue references.
3493  match = Search(r'[^<>=!\s](==|!=|<=|>=|\|\|)[^<>=!\s,;\‍)]', line)
3494  if match:
3495  error(filename, linenum, 'whitespace/operators', 3,
3496  'Missing spaces around %s' % match.group(1))
3497  elif not Match(r'#.*include', line):
3498  # Look for < that is not surrounded by spaces. This is only
3499  # triggered if both sides are missing spaces, even though
3500  # technically we should flag if at least one side is missing a
3501  # space. This is done to avoid some false positives with shifts.
3502  match = Match(r'^(.*[^\s<])<[^\s=<,]', line)
3503  if match:
3504  (_, _, end_pos) = CloseExpression(
3505  clean_lines, linenum, len(match.group(1)))
3506  if end_pos <= -1:
3507  error(filename, linenum, 'whitespace/operators', 3,
3508  'Missing spaces around <')
3509 
3510  # Look for > that is not surrounded by spaces. Similar to the
3511  # above, we only trigger if both sides are missing spaces to avoid
3512  # false positives with shifts.
3513  match = Match(r'^(.*[^-\s>])>[^\s=>,]', line)
3514  if match:
3515  (_, _, start_pos) = ReverseCloseExpression(
3516  clean_lines, linenum, len(match.group(1)))
3517  if start_pos <= -1:
3518  error(filename, linenum, 'whitespace/operators', 3,
3519  'Missing spaces around >')
3520 
3521  # We allow no-spaces around << when used like this: 10<<20, but
3522  # not otherwise (particularly, not when used as streams)
3523  #
3524  # We also allow operators following an opening parenthesis, since
3525  # those tend to be macros that deal with operators.
3526  match = Search(r'(operator|[^\s(<])(?:L|UL|LL|ULL|l|ul|ll|ull)?<<([^\s,=<])', line)
3527  if (match and not (match.group(1).isdigit() and match.group(2).isdigit()) and
3528  not (match.group(1) == 'operator' and match.group(2) == ';')):
3529  error(filename, linenum, 'whitespace/operators', 3,
3530  'Missing spaces around <<')
3531 
3532  # We allow no-spaces around >> for almost anything. This is because
3533  # C++11 allows ">>" to close nested templates, which accounts for
3534  # most cases when ">>" is not followed by a space.
3535  #
3536  # We still warn on ">>" followed by alpha character, because that is
3537  # likely due to ">>" being used for right shifts, e.g.:
3538  # value >> alpha
3539  #
3540  # When ">>" is used to close templates, the alphanumeric letter that
3541  # follows would be part of an identifier, and there should still be
3542  # a space separating the template type and the identifier.
3543  # type<type<type>> alpha
3544  match = Search(r'>>[a-zA-Z_]', line)
3545  if match:
3546  error(filename, linenum, 'whitespace/operators', 3,
3547  'Missing spaces around >>')
3548 
3549  # There shouldn't be space around unary operators
3550  match = Search(r'(!\s|~\s|[\s]--[\s;]|[\s]\+\+[\s;])', line)
3551  if match:
3552  error(filename, linenum, 'whitespace/operators', 4,
3553  'Extra space for operator %s' % match.group(1))
3554 
3555 
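# --- Illustrative example (not part of the original linter) ---
# Before any spacing checks run, the loop at the top of CheckOperatorSpacing()
# blanks out the token after the keyword `operator`, so that declarations such
# as `operator<<` are not mistaken for unspaced binary operators.  A
# hypothetical restatement of that single transformation:
def _ExampleBlankOperatorTokens(line):
  """Replace the symbol after each `operator` keyword with underscores."""
  while True:
    match = re.match(r'^(.*\boperator\b)(\S+)(\s*\(.*)$', line)
    if not match:
      return line
    line = match.group(1) + ('_' * len(match.group(2))) + match.group(3)

#   _ExampleBlankOperatorTokens('ostream& operator<<(ostream& os, int x);')
#     -> 'ostream& operator__(ostream& os, int x);'
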
3556 def CheckParenthesisSpacing(filename, clean_lines, linenum, error):
3557  """Checks for horizontal spacing around parentheses.
3558 
3559  Args:
3560  filename: The name of the current file.
3561  clean_lines: A CleansedLines instance containing the file.
3562  linenum: The number of the line to check.
3563  error: The function to call with any errors found.
3564  """
3565  line = clean_lines.elided[linenum]
3566 
3567  # No spaces after an if, while, switch, or for
3568  match = Search(r' (if\‍(|for\‍(|while\‍(|switch\‍()', line)
3569  if match:
3570  error(filename, linenum, 'whitespace/parens', 5,
3571  'Missing space before ( in %s' % match.group(1))
3572 
3573  # For if/for/while/switch, the left and right parens should be
3574  # consistent about how many spaces are inside the parens, and
3575  # there should either be zero or one spaces inside the parens.
3576  # We don't want: "if ( foo)" or "if ( foo )".
3577  # Exception: "for ( ; foo; bar)" and "for (foo; bar; )" are allowed.
3578  match = Search(r'\b(if|for|while|switch)\s*'
3579  r'\‍(([ ]*)(.).*[^ ]+([ ]*)\‍)\s*{\s*$',
3580  line)
3581  if match:
3582  if len(match.group(2)) != len(match.group(4)):
3583  if not (match.group(3) == ';' and
3584  len(match.group(2)) == 1 + len(match.group(4)) or
3585  not match.group(2) and Search(r'\bfor\s*\‍(.*; \‍)', line)):
3586  error(filename, linenum, 'whitespace/parens', 5,
3587  'Mismatching spaces inside () in %s' % match.group(1))
3588  if len(match.group(2)) not in [0, 1]:
3589  error(filename, linenum, 'whitespace/parens', 5,
3590  'Should have zero or one spaces inside ( and ) in %s' %
3591  match.group(1))
3592 
3593 
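# --- Illustrative example (not part of the original linter) ---
# The pattern above captures the spaces just inside the '(' (group 2) and just
# inside the ')' (group 4) of an if/for/while/switch header so the two can be
# compared.  A hypothetical demonstration of those capture groups:
def _ExampleInnerParenSpaces(line):
  """Return (spaces_after_open, spaces_before_close) for a control header."""
  match = re.search(r'\b(if|for|while|switch)\s*'
                    r'\(([ ]*)(.).*[^ ]+([ ]*)\)\s*{\s*$', line)
  if not match:
    return None
  return (len(match.group(2)), len(match.group(4)))

#   _ExampleInnerParenSpaces('if (foo) {')    -> (0, 0)  consistent
#   _ExampleInnerParenSpaces('if ( foo ) {')  -> (1, 1)  consistent
#   _ExampleInnerParenSpaces('if ( foo) {')   -> (1, 0)  flagged as mismatched
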
3594 def CheckCommaSpacing(filename, clean_lines, linenum, error):
3595  """Checks for horizontal spacing near commas and semicolons.
3596 
3597  Args:
3598  filename: The name of the current file.
3599  clean_lines: A CleansedLines instance containing the file.
3600  linenum: The number of the line to check.
3601  error: The function to call with any errors found.
3602  """
3603  raw = clean_lines.lines_without_raw_strings
3604  line = clean_lines.elided[linenum]
3605 
3606  # You should always have a space after a comma (either as fn arg or operator)
3607  #
3608  # This does not apply when the non-space character following the
3609  # comma is another comma, since the only time when that happens is
3610  # for empty macro arguments.
3611  #
3612  # We run this check in two passes: first pass on elided lines to
3613  # verify that lines are missing whitespace, and a second pass on raw
3614  # lines to confirm that the missing whitespace is not due to
3615  # elided comments.
3616  if (Search(r',[^,\s]', ReplaceAll(r'\boperator\s*,\s*\‍(', 'F(', line)) and
3617  Search(r',[^,\s]', raw[linenum])):
3618  error(filename, linenum, 'whitespace/comma', 3,
3619  'Missing space after ,')
3620 
3621  # You should always have a space after a semicolon
3622  # except for few corner cases
3623  # TODO(unknown): clarify whether 'if (1) { return 1;}' requires one more
3624  # space after ;
3625  if Search(r';[^\s};\\)/]', line):
3626  error(filename, linenum, 'whitespace/semicolon', 3,
3627  'Missing space after ;')
3628 
3629 
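# --- Illustrative example (not part of the original linter) ---
# The comma rule boils down to r',[^,\s]': a comma followed by something that
# is neither another comma (an empty macro argument) nor whitespace.  A
# hypothetical one-pattern demonstration, without the operator-overload and
# elided-comment handling of the real check:
def _ExampleMissingSpaceAfterComma(line):
  """Return True if some comma in the line is not followed by a space."""
  return bool(re.search(r',[^,\s]', line))

#   _ExampleMissingSpaceAfterComma('f(a, b, c);')  -> False
#   _ExampleMissingSpaceAfterComma('f(a,b, c);')   -> True
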
3630 def _IsType(clean_lines, nesting_state, expr):
3631  """Check if expression looks like a type name, returns true if so.
3632 
3633  Args:
3634  clean_lines: A CleansedLines instance containing the file.
3635  nesting_state: A NestingState instance which maintains information about
3636  the current stack of nested blocks being parsed.
3637  expr: The expression to check.
3638  Returns:
3639  True, if token looks like a type.
3640  """
3641  # Keep only the last token in the expression
3642  last_word = Match(r'^.*(\b\S+)$', expr)
3643  if last_word:
3644  token = last_word.group(1)
3645  else:
3646  token = expr
3647 
3648  # Match native types and stdint types
3649  if _TYPES.match(token):
3650  return True
3651 
3652  # Try a bit harder to match templated types. Walk up the nesting
3653  # stack until we find something that resembles a typename
3654  # declaration for what we are looking for.
3655  typename_pattern = (r'\b(?:typename|class|struct)\s+' + re.escape(token) +
3656  r'\b')
3657  block_index = len(nesting_state.stack) - 1
3658  while block_index >= 0:
3659  if isinstance(nesting_state.stack[block_index], _NamespaceInfo):
3660  return False
3661 
3662  # Found where the opening brace is. We want to scan from this
3663  # line up to the beginning of the function, minus a few lines.
3664  # template <typename Type1, // stop scanning here
3665  # ...>
3666  # class C
3667  # : public ... { // start scanning here
3668  last_line = nesting_state.stack[block_index].starting_linenum
3669 
3670  next_block_start = 0
3671  if block_index > 0:
3672  next_block_start = nesting_state.stack[block_index - 1].starting_linenum
3673  first_line = last_line
3674  while first_line >= next_block_start:
3675  if clean_lines.elided[first_line].find('template') >= 0:
3676  break
3677  first_line -= 1
3678  if first_line < next_block_start:
3679  # Didn't find any "template" keyword before reaching the next block,
3680  # there are probably no template things to check for this block
3681  block_index -= 1
3682  continue
3683 
3684  # Look for typename in the specified range
3685  for i in xrange(first_line, last_line + 1, 1):
3686  if Search(typename_pattern, clean_lines.elided[i]):
3687  return True
3688  block_index -= 1
3689 
3690  return False
3691 
3692 
3693 def CheckBracesSpacing(filename, clean_lines, linenum, nesting_state, error):
3694  """Checks for horizontal spacing near commas.
3695 
3696  Args:
3697  filename: The name of the current file.
3698  clean_lines: A CleansedLines instance containing the file.
3699  linenum: The number of the line to check.
3700  nesting_state: A NestingState instance which maintains information about
3701  the current stack of nested blocks being parsed.
3702  error: The function to call with any errors found.
3703  """
3704  line = clean_lines.elided[linenum]
3705 
3706  # Except after an opening paren, or after another opening brace (in case of
3707  # an initializer list, for instance), you should have spaces before your
3708  # braces when they are delimiting blocks, classes, namespaces etc.
3709  # And since you should never have braces at the beginning of a line,
3710  # this is an easy test. Except that braces used for initialization don't
3711  # follow the same rule; we often don't want spaces before those.
3712  match = Match(r'^(.*[^ ({>]){', line)
3713 
3714  if match:
3715  # Try a bit harder to check for brace initialization. This
3716  # happens in one of the following forms:
3717  # Constructor() : initializer_list_{} { ... }
3718  # Constructor{}.MemberFunction()
3719  # Type variable{};
3720  # FunctionCall(type{}, ...);
3721  # LastArgument(..., type{});
3722  # LOG(INFO) << type{} << " ...";
3723  # map_of_type[{...}] = ...;
3724  # ternary = expr ? new type{} : nullptr;
3725  # OuterTemplate<InnerTemplateConstructor<Type>{}>
3726  #
3727  # We check for the character following the closing brace, and
3728  # silence the warning if it's one of those listed above, i.e.
3729  # "{.;,)<>]:".
3730  #
3731  # To account for nested initializer lists, we allow any number of
3732  # closing braces up to "{;,)<". We can't simply silence the
3733  # warning on first sight of closing brace, because that would
3734  # cause false negatives for things that are not initializer lists.
3735  # Silence this: But not this:
3736  # Outer{ if (...) {
3737  # Inner{...} if (...){ // Missing space before {
3738  # }; }
3739  #
3740  # There is a false negative with this approach if people inserted
3741  # spurious semicolons, e.g. "if (cond){};", but we will catch the
3742  # spurious semicolon with a separate check.
3743  leading_text = match.group(1)
3744  (endline, endlinenum, endpos) = CloseExpression(
3745  clean_lines, linenum, len(match.group(1)))
3746  trailing_text = ''
3747  if endpos > -1:
3748  trailing_text = endline[endpos:]
3749  for offset in xrange(endlinenum + 1,
3750  min(endlinenum + 3, clean_lines.NumLines() - 1)):
3751  trailing_text += clean_lines.elided[offset]
3752  # We also suppress warnings for `uint64_t{expression}` etc., as the style
3753  # guide recommends brace initialization for integral types to avoid
3754  # overflow/truncation.
3755  if (not Match(r'^[\s}]*[{.;,)<>\]:]', trailing_text)
3756  and not _IsType(clean_lines, nesting_state, leading_text)):
3757  error(filename, linenum, 'whitespace/braces', 5,
3758  'Missing space before {')
3759 
3760  # Make sure '} else {' has spaces.
3761  if Search(r'}else', line):
3762  error(filename, linenum, 'whitespace/braces', 5,
3763  'Missing space before else')
3764 
3765  # You shouldn't have a space before a semicolon at the end of the line.
3766  # There's a special case for "for" since the style guide allows space before
3767  # the semicolon there.
3768  if Search(r':\s*;\s*$', line):
3769  error(filename, linenum, 'whitespace/semicolon', 5,
3770  'Semicolon defining empty statement. Use {} instead.')
3771  elif Search(r'^\s*;\s*$', line):
3772  error(filename, linenum, 'whitespace/semicolon', 5,
3773  'Line contains only semicolon. If this should be an empty statement, '
3774  'use {} instead.')
3775  elif (Search(r'\s+;\s*$', line) and
3776  not Search(r'\bfor\b', line)):
3777  error(filename, linenum, 'whitespace/semicolon', 5,
3778  'Extra space before last semicolon. If this should be an empty '
3779  'statement, use {} instead.')
3780 
3781 
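# --- Illustrative example (not part of the original linter) ---
# The trigger pattern in CheckBracesSpacing(), r'^(.*[^ ({>]){', only fires
# when an opening brace is glued to something other than a space, '(', '{' or
# '>'.  Whether that is then reported depends on the brace-initialization and
# _IsType() exemptions that follow it.  A hypothetical demonstration of the
# trigger alone:
def _ExampleBraceNeedsSpaceCandidate(line):
  """Return True if a '{' on the line is a candidate for 'missing space'."""
  return bool(re.match(r'^(.*[^ ({>]){', line))

#   _ExampleBraceNeedsSpaceCandidate('if (x) {')            -> False
#   _ExampleBraceNeedsSpaceCandidate('if (x){')             -> True (reported)
#   _ExampleBraceNeedsSpaceCandidate('std::vector<int>{}')  -> False
#   _ExampleBraceNeedsSpaceCandidate('int x{0};')           -> True, but later
#                                       suppressed as brace initialization
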
3782 def IsDecltype(clean_lines, linenum, column):
3783  """Check if the token ending on (linenum, column) is decltype().
3784 
3785  Args:
3786  clean_lines: A CleansedLines instance containing the file.
3787  linenum: the number of the line to check.
3788  column: end column of the token to check.
3789  Returns:
3790  True if this token is decltype() expression, False otherwise.
3791  """
3792  (text, _, start_col) = ReverseCloseExpression(clean_lines, linenum, column)
3793  if start_col < 0:
3794  return False
3795  if Search(r'\bdecltype\s*$', text[0:start_col]):
3796  return True
3797  return False
3798 
3799 def CheckSectionSpacing(filename, clean_lines, class_info, linenum, error):
3800  """Checks for additional blank line issues related to sections.
3801 
3802  Currently the only thing checked here is a blank line before protected/private.
3803 
3804  Args:
3805  filename: The name of the current file.
3806  clean_lines: A CleansedLines instance containing the file.
3807  class_info: A _ClassInfo objects.
3808  linenum: The number of the line to check.
3809  error: The function to call with any errors found.
3810  """
3811  # Skip checks if the class is small, where small means 25 lines or less.
3812  # 25 lines seems like a good cutoff since that's the usual height of
3813  # terminals, and any class that can't fit in one screen can't really
3814  # be considered "small".
3815  #
3816  # Also skip checks if we are on the first line. This accounts for
3817  # classes that look like
3818  # class Foo { public: ... };
3819  #
3820  # If we didn't find the end of the class, last_line would be zero,
3821  # and the check will be skipped by the first condition.
3822  if (class_info.last_line - class_info.starting_linenum <= 24 or
3823  linenum <= class_info.starting_linenum):
3824  return
3825 
3826  matched = Match(r'\s*(public|protected|private):', clean_lines.lines[linenum])
3827  if matched:
3828  # Issue warning if the line before public/protected/private was
3829  # not a blank line, but don't do this if the previous line contains
3830  # "class" or "struct". This can happen two ways:
3831  # - We are at the beginning of the class.
3832  # - We are forward-declaring an inner class that is semantically
3833  # private, but needed to be public for implementation reasons.
3834  # Also ignores cases where the previous line ends with a backslash as can be
3835  # common when defining classes in C macros.
3836  prev_line = clean_lines.lines[linenum - 1]
3837  if (not IsBlankLine(prev_line) and
3838  not Search(r'\b(class|struct)\b', prev_line) and
3839  not Search(r'\\$', prev_line)):
3840  # Try a bit harder to find the beginning of the class. This is to
3841  # account for multi-line base-specifier lists, e.g.:
3842  # class Derived
3843  # : public Base {
3844  end_class_head = class_info.starting_linenum
3845  for i in range(class_info.starting_linenum, linenum):
3846  if Search(r'\{\s*$', clean_lines.lines[i]):
3847  end_class_head = i
3848  break
3849  if end_class_head < linenum - 1:
3850  error(filename, linenum, 'whitespace/blank_line', 3,
3851  '"%s:" should be preceded by a blank line' % matched.group(1))
3852 
3853 
3854 def GetPreviousNonBlankLine(clean_lines, linenum):
3855  """Return the most recent non-blank line and its line number.
3856 
3857  Args:
3858  clean_lines: A CleansedLines instance containing the file contents.
3859  linenum: The number of the line to check.
3860 
3861  Returns:
3862  A tuple with two elements. The first element is the contents of the last
3863  non-blank line before the current line, or the empty string if this is the
3864  first non-blank line. The second is the line number of that line, or -1
3865  if this is the first non-blank line.
3866  """
3867 
3868  prevlinenum = linenum - 1
3869  while prevlinenum >= 0:
3870  prevline = clean_lines.elided[prevlinenum]
3871  if not IsBlankLine(prevline): # if not a blank line...
3872  return (prevline, prevlinenum)
3873  prevlinenum -= 1
3874  return ('', -1)
3875 
3876 
3877 def CheckBraces(filename, clean_lines, linenum, error):
3878  """Looks for misplaced braces (e.g. at the end of line).
3879 
3880  Args:
3881  filename: The name of the current file.
3882  clean_lines: A CleansedLines instance containing the file.
3883  linenum: The number of the line to check.
3884  error: The function to call with any errors found.
3885  """
3886 
3887  line = clean_lines.elided[linenum] # get rid of comments and strings
3888 
3889 # if Match(r'\s*{\s*$', line):
3890 # # We allow an open brace to start a line in the case where someone is using
3891 # # braces in a block to explicitly create a new scope, which is commonly used
3892 # # to control the lifetime of stack-allocated variables. Braces are also
3893 # # used for brace initializers inside function calls. We don't detect this
3894 # # perfectly: we just don't complain if the last non-whitespace character on
3895 # # the previous non-blank line is ',', ';', ':', '(', '{', or '}', or if the
3896 # # previous line starts a preprocessor block. We also allow a brace on the
3897 # # following line if it is part of an array initialization and would not fit
3898 # # within the 80 character limit of the preceding line.
3899 # prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
3900 # if (not Search(r'[,;:}{(]\s*$', prevline) and
3901 # not Match(r'\s*#', prevline) and
3902 # not (GetLineWidth(prevline) > _line_length - 2 and '[]' in prevline)):
3903 # error(filename, linenum, 'whitespace/braces', 4,
3904 # '{ should almost always be at the end of the previous line')
3905 
3906  # An else clause should be on the same line as the preceding closing brace.
3907  if Match(r'\s*else\b\s*(?:if\b|\{|$)', line):
3908  prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
3909  if Match(r'\s*}\s*$', prevline):
3910  error(filename, linenum, 'whitespace/newline', 4,
3911  'An else should appear on the same line as the preceding }')
3912 
3913  # If braces come on one side of an else, they should be on both.
3914  # However, we have to worry about "else if" that spans multiple lines!
3915  if Search(r'else if\s*\‍(', line): # could be multi-line if
3916  brace_on_left = bool(Search(r'}\s*else if\s*\‍(', line))
3917  # find the ( after the if
3918  pos = line.find('else if')
3919  pos = line.find('(', pos)
3920  if pos > 0:
3921  (endline, _, endpos) = CloseExpression(clean_lines, linenum, pos)
3922  brace_on_right = endline[endpos:].find('{') != -1
3923  if brace_on_left != brace_on_right: # must be brace after if
3924  error(filename, linenum, 'readability/braces', 5,
3925  'If an else has a brace on one side, it should have it on both')
3926  elif Search(r'}\s*else[^{]*$', line) or Match(r'[^}]*else\s*{', line):
3927  error(filename, linenum, 'readability/braces', 5,
3928  'If an else has a brace on one side, it should have it on both')
3929 
3930  # Likewise, the body of an else should never be on the same line as the else
3931  if Search(r'\belse [^\s{]', line) and not Search(r'\belse if\b', line):
3932  error(filename, linenum, 'whitespace/newline', 4,
3933  'Else clause should never be on same line as else (use 2 lines)')
3934 
3935  # In the same way, a do/while should never be on one line
3936  if Match(r'\s*do [^\s{]', line):
3937  error(filename, linenum, 'whitespace/newline', 4,
3938  'do/while clauses should not be on a single line')
3939 
3940  # Check single-line if/else bodies. The style guide says 'curly braces are not
3941  # required for single-line statements'. We additionally allow multi-line,
3942  # single statements, but we reject anything with more than one semicolon in
3943  # it. This means that the first semicolon after the if should be at the end of
3944  # its line, and the line after that should have an indent level equal to or
3945  # lower than the if. We also check for ambiguous if/else nesting without
3946  # braces.
3947  if_else_match = Search(r'\b(if\s*\‍(|else\b)', line)
3948  if if_else_match and not Match(r'\s*#', line):
3949  if_indent = GetIndentLevel(line)
3950  endline, endlinenum, endpos = line, linenum, if_else_match.end()
3951  if_match = Search(r'\bif\s*\‍(', line)
3952  if if_match:
3953  # This could be a multiline if condition, so find the end first.
3954  pos = if_match.end() - 1
3955  (endline, endlinenum, endpos) = CloseExpression(clean_lines, linenum, pos)
3956  # Check for an opening brace, either directly after the if or on the next
3957  # line. If found, this isn't a single-statement conditional.
3958  if (not Match(r'\s*{', endline[endpos:])
3959  and not (Match(r'\s*$', endline[endpos:])
3960  and endlinenum < (len(clean_lines.elided) - 1)
3961  and Match(r'\s*{', clean_lines.elided[endlinenum + 1]))):
3962  while (endlinenum < len(clean_lines.elided)
3963  and ';' not in clean_lines.elided[endlinenum][endpos:]):
3964  endlinenum += 1
3965  endpos = 0
3966  if endlinenum < len(clean_lines.elided):
3967  endline = clean_lines.elided[endlinenum]
3968  # We allow a mix of whitespace and closing braces (e.g. for one-liner
3969  # methods) and a single \ after the semicolon (for macros)
3970  endpos = endline.find(';')
3971  if not Match(r';[\s}]*(\\?)$', endline[endpos:]):
3972  # Semicolon isn't the last character, there's something trailing.
3973  # Output a warning if the semicolon is not contained inside
3974  # a lambda expression.
3975  if not Match(r'^[^{};]*\[[^\[\]]*\][^{}]*\{[^{}]*\}\s*\‍)*[;,]\s*$',
3976  endline):
3977  error(filename, linenum, 'readability/braces', 4,
3978  'If/else bodies with multiple statements require braces')
3979  elif endlinenum < len(clean_lines.elided) - 1:
3980  # Make sure the next line is dedented
3981  next_line = clean_lines.elided[endlinenum + 1]
3982  next_indent = GetIndentLevel(next_line)
3983  # With ambiguous nested if statements, this will error out on the
3984  # if that *doesn't* match the else, regardless of whether it's the
3985  # inner one or outer one.
3986  if (if_match and Match(r'\s*else\b', next_line)
3987  and next_indent != if_indent):
3988  error(filename, linenum, 'readability/braces', 4,
3989  'Else clause should be indented at the same level as if. '
3990  'Ambiguous nested if/else chains require braces.')
3991  elif next_indent > if_indent:
3992  error(filename, linenum, 'readability/braces', 4,
3993  'If/else bodies with multiple statements require braces')
3994 
3995 
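# --- Illustrative example (not part of the original linter) ---
# The first two checks in CheckBraces() encode the "cuddled else" rules: an
# else must share a line with the preceding '}', and braces on one side of an
# else require braces on the other.  A hypothetical two-line demonstration of
# the first rule:
def _ExampleElseNotCuddled(prev_line, line):
  """Return True if `else` starts a line whose previous line is only '}'."""
  return bool(re.match(r'\s*else\b\s*(?:if\b|\{|$)', line) and
              re.match(r'\s*}\s*$', prev_line))

#   _ExampleElseNotCuddled('}',        'else {')    -> True (flagged)
#   _ExampleElseNotCuddled('} else {', '  foo();')  -> False
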
3996 def CheckTrailingSemicolon(filename, clean_lines, linenum, error):
3997  """Looks for redundant trailing semicolon.
3998 
3999  Args:
4000  filename: The name of the current file.
4001  clean_lines: A CleansedLines instance containing the file.
4002  linenum: The number of the line to check.
4003  error: The function to call with any errors found.
4004  """
4005 
4006  line = clean_lines.elided[linenum]
4007 
4008  # Block bodies should not be followed by a semicolon. Due to C++11
4009  # brace initialization, there are more places where semicolons are
4010  # required than not, so we use a whitelist approach to check these
4011  # rather than a blacklist. These are the places where "};" should
4012  # be replaced by just "}":
4013  # 1. Some flavor of block following closing parenthesis:
4014  # for (;;) {};
4015  # while (...) {};
4016  # switch (...) {};
4017  # Function(...) {};
4018  # if (...) {};
4019  # if (...) else if (...) {};
4020  #
4021  # 2. else block:
4022  # if (...) else {};
4023  #
4024  # 3. const member function:
4025  # Function(...) const {};
4026  #
4027  # 4. Block following some statement:
4028  # x = 42;
4029  # {};
4030  #
4031  # 5. Block at the beginning of a function:
4032  # Function(...) {
4033  # {};
4034  # }
4035  #
4036  # Note that naively checking for the preceding "{" will also match
4037  # braces inside multi-dimensional arrays, but this is fine since
4038  # that expression will not contain semicolons.
4039  #
4040  # 6. Block following another block:
4041  # while (true) {}
4042  # {};
4043  #
4044  # 7. End of namespaces:
4045  # namespace {};
4046  #
4047  # These semicolons seem far more common than other kinds of
4048  # redundant semicolons, possibly due to people converting classes
4049  # to namespaces. For now we do not warn for this case.
4050  #
4051  # Try matching case 1 first.
4052  match = Match(r'^(.*\‍)\s*)\{', line)
4053  if match:
4054  # Matched closing parenthesis (case 1). Check the token before the
4055  # matching opening parenthesis, and don't warn if it looks like a
4056  # macro. This avoids these false positives:
4057  # - macro that defines a base class
4058  # - multi-line macro that defines a base class
4059  # - macro that defines the whole class-head
4060  #
4061  # But we still issue warnings for macros that we know are safe to
4062  # warn, specifically:
4063  # - TEST, TEST_F, TEST_P, MATCHER, MATCHER_P
4064  # - TYPED_TEST
4065  # - INTERFACE_DEF
4066  # - EXCLUSIVE_LOCKS_REQUIRED, SHARED_LOCKS_REQUIRED, LOCKS_EXCLUDED:
4067  #
4068  # We implement a whitelist of safe macros instead of a blacklist of
4069  # unsafe macros, even though the latter appears less frequently in
4070  # google code and would have been easier to implement. This is because
4071  # the downside of getting the whitelist wrong is only some extra
4072  # semicolons, while the downside of getting the blacklist wrong
4073  # would be compile errors.
4074  #
4075  # In addition to macros, we also don't want to warn on
4076  # - Compound literals
4077  # - Lambdas
4078  # - alignas specifier with anonymous structs
4079  # - decltype
4080  closing_brace_pos = match.group(1).rfind(')')
4081  opening_parenthesis = ReverseCloseExpression(
4082  clean_lines, linenum, closing_brace_pos)
4083  if opening_parenthesis[2] > -1:
4084  line_prefix = opening_parenthesis[0][0:opening_parenthesis[2]]
4085  macro = Search(r'\b([A-Z_][A-Z0-9_]*)\s*$', line_prefix)
4086  func = Match(r'^(.*\])\s*$', line_prefix)
4087  if ((macro and
4088  macro.group(1) not in (
4089  'TEST', 'TEST_F', 'MATCHER', 'MATCHER_P', 'TYPED_TEST',
4090  'EXCLUSIVE_LOCKS_REQUIRED', 'SHARED_LOCKS_REQUIRED',
4091  'LOCKS_EXCLUDED', 'INTERFACE_DEF')) or
4092  (func and not Search(r'\boperator\s*\[\s*\]', func.group(1))) or
4093  Search(r'\b(?:struct|union)\s+alignas\s*$', line_prefix) or
4094  Search(r'\bdecltype$', line_prefix) or
4095  Search(r'\s+=\s*$', line_prefix)):
4096  match = None
4097  if (match and
4098  opening_parenthesis[1] > 1 and
4099  Search(r'\]\s*$', clean_lines.elided[opening_parenthesis[1] - 1])):
4100  # Multi-line lambda-expression
4101  match = None
4102 
4103  else:
4104  # Try matching cases 2-3.
4105  match = Match(r'^(.*(?:else|\‍)\s*const)\s*)\{', line)
4106  if not match:
4107  # Try matching cases 4-6. These are always matched on separate lines.
4108  #
4109  # Note that we can't simply concatenate the previous line to the
4110  # current line and do a single match, otherwise we may output
4111  # duplicate warnings for the blank line case:
4112  # if (cond) {
4113  # // blank line
4114  # }
4115  prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
4116  if prevline and Search(r'[;{}]\s*$', prevline):
4117  match = Match(r'^(\s*)\{', line)
4118 
4119  # Check matching closing brace
4120  if match:
4121  (endline, endlinenum, endpos) = CloseExpression(
4122  clean_lines, linenum, len(match.group(1)))
4123  if endpos > -1 and Match(r'^\s*;', endline[endpos:]):
4124  # Current {} pair is eligible for semicolon check, and we have found
4125  # the redundant semicolon, output warning here.
4126  #
4127  # Note: because we are scanning forward for opening braces, and
4128  # outputting warnings for the matching closing brace, if there are
4129  # nested blocks with trailing semicolons, we will get the error
4130  # messages in reversed order.
4131 
4132  # We need to check the line forward for NOLINT
4133  raw_lines = clean_lines.raw_lines
4134  ParseNolintSuppressions(filename, raw_lines[endlinenum-1], endlinenum-1,
4135  error)
4136  ParseNolintSuppressions(filename, raw_lines[endlinenum], endlinenum,
4137  error)
4138 
4139  error(filename, endlinenum, 'readability/braces', 4,
4140  "You don't need a ; after a }")
4141 
4142 
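# --- Illustrative example (not part of the original linter) ---
# Case 1 above is recognised by r'^(.*\)\s*)\{': an opening brace that directly
# follows a closing parenthesis.  The CloseExpression()/macro logic that comes
# after then decides whether the matching '};' is really redundant.  A
# hypothetical demonstration of the trigger alone:
def _ExampleBlockAfterParenthesis(line):
  """Return True if the line opens a block right after a ')' (case 1)."""
  return bool(re.match(r'^(.*\)\s*)\{', line))

#   _ExampleBlockAfterParenthesis('while (true) {')  -> True  (a trailing '};'
#                                                              would be flagged)
#   _ExampleBlockAfterParenthesis('struct Foo {')    -> False (the ';' here is
#                                                              required)
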
4143 def CheckEmptyBlockBody(filename, clean_lines, linenum, error):
4144  """Look for empty loop/conditional body with only a single semicolon.
4145 
4146  Args:
4147  filename: The name of the current file.
4148  clean_lines: A CleansedLines instance containing the file.
4149  linenum: The number of the line to check.
4150  error: The function to call with any errors found.
4151  """
4152 
4153  # Search for loop keywords at the beginning of the line. Because only
4154  # whitespaces are allowed before the keywords, this will also ignore most
4155  # do-while-loops, since those lines should start with closing brace.
4156  #
4157  # We also check "if" blocks here, since an empty conditional block
4158  # is likely an error.
4159  line = clean_lines.elided[linenum]
4160  matched = Match(r'\s*(for|while|if)\s*\‍(', line)
4161  if matched:
4162  # Find the end of the conditional expression.
4163  (end_line, end_linenum, end_pos) = CloseExpression(
4164  clean_lines, linenum, line.find('('))
4165 
4166  # Output warning if what follows the condition expression is a semicolon.
4167  # No warning for all other cases, including whitespace or newline, since we
4168  # have a separate check for semicolons preceded by whitespace.
4169  if end_pos >= 0 and Match(r';', end_line[end_pos:]):
4170  if matched.group(1) == 'if':
4171  error(filename, end_linenum, 'whitespace/empty_conditional_body', 5,
4172  'Empty conditional bodies should use {}')
4173  else:
4174  error(filename, end_linenum, 'whitespace/empty_loop_body', 5,
4175  'Empty loop bodies should use {} or continue')
4176 
4177  # Check for if statements that have completely empty bodies (no comments)
4178  # and no else clauses.
4179  if end_pos >= 0 and matched.group(1) == 'if':
4180  # Find the position of the opening { for the if statement.
4181  # Return without logging an error if it has no brackets.
4182  opening_linenum = end_linenum
4183  opening_line_fragment = end_line[end_pos:]
4184  # Loop until EOF or find anything that's not whitespace or opening {.
4185  while not Search(r'^\s*\{', opening_line_fragment):
4186  if Search(r'^(?!\s*$)', opening_line_fragment):
4187  # Conditional has no brackets.
4188  return
4189  opening_linenum += 1
4190  if opening_linenum == len(clean_lines.elided):
4191  # Couldn't find conditional's opening { or any code before EOF.
4192  return
4193  opening_line_fragment = clean_lines.elided[opening_linenum]
4194  # Set opening_line (opening_line_fragment may not be entire opening line).
4195  opening_line = clean_lines.elided[opening_linenum]
4196 
4197  # Find the position of the closing }.
4198  opening_pos = opening_line_fragment.find('{')
4199  if opening_linenum == end_linenum:
4200  # We need to make opening_pos relative to the start of the entire line.
4201  opening_pos += end_pos
4202  (closing_line, closing_linenum, closing_pos) = CloseExpression(
4203  clean_lines, opening_linenum, opening_pos)
4204  if closing_pos < 0:
4205  return
4206 
4207  # Now construct the body of the conditional. This consists of the portion
4208  # of the opening line after the {, all lines until the closing line,
4209  # and the portion of the closing line before the }.
4210  if (clean_lines.raw_lines[opening_linenum] !=
4211  CleanseComments(clean_lines.raw_lines[opening_linenum])):
4212  # Opening line ends with a comment, so conditional isn't empty.
4213  return
4214  if closing_linenum > opening_linenum:
4215  # Opening line after the {. Ignore comments here since we checked above.
4216  bodylist = list(opening_line[opening_pos+1:])
4217  # All lines until closing line, excluding closing line, with comments.
4218  bodylist.extend(clean_lines.raw_lines[opening_linenum+1:closing_linenum])
4219  # Closing line before the }. Won't (and can't) have comments.
4220  bodylist.append(clean_lines.elided[closing_linenum][:closing_pos-1])
4221  body = '\n'.join(bodylist)
4222  else:
4223  # If statement has brackets and fits on a single line.
4224  body = opening_line[opening_pos+1:closing_pos-1]
4225 
4226  # Check if the body is empty
4227  if not _EMPTY_CONDITIONAL_BODY_PATTERN.search(body):
4228  return
4229  # The body is empty. Now make sure there's not an else clause.
4230  current_linenum = closing_linenum
4231  current_line_fragment = closing_line[closing_pos:]
4232  # Loop until EOF or find anything that's not whitespace or else clause.
4233  while Search(r'^\s*$|^(?=\s*else)', current_line_fragment):
4234  if Search(r'^(?=\s*else)', current_line_fragment):
4235  # Found an else clause, so don't log an error.
4236  return
4237  current_linenum += 1
4238  if current_linenum == len(clean_lines.elided):
4239  break
4240  current_line_fragment = clean_lines.elided[current_linenum]
4241 
4242  # The body is empty and there's no else clause until EOF or other code.
4243  error(filename, end_linenum, 'whitespace/empty_if_body', 4,
4244  ('If statement had no body and no else clause'))
4245 
4246 
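# --- Illustrative example (not part of the original linter) ---
# The first half of CheckEmptyBlockBody() fires when the character immediately
# after the closing ')' of a for/while/if condition is a ';', i.e. the body is
# a bare semicolon.  A hypothetical single-line approximation (the real code
# uses CloseExpression() so multi-line conditions are handled as well):
def _ExampleEmptySingleLineBody(line):
  """Return True for a one-line for/while/if whose body is just ';'."""
  return bool(re.search(r'\b(for|while|if)\s*\([^()]*\);', line))

#   _ExampleEmptySingleLineBody('while (x < 10);')         -> True
#   _ExampleEmptySingleLineBody('if (ok) DoSomething();')  -> False
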
4247 def FindCheckMacro(line):
4248  """Find a replaceable CHECK-like macro.
4249 
4250  Args:
4251  line: line to search on.
4252  Returns:
4253  (macro name, start position), or (None, -1) if no replaceable
4254  macro is found.
4255  """
4256  for macro in _CHECK_MACROS:
4257  i = line.find(macro)
4258  if i >= 0:
4259  # Find opening parenthesis. Do a regular expression match here
4260  # to make sure that we are matching the expected CHECK macro, as
4261  # opposed to some other macro that happens to contain the CHECK
4262  # substring.
4263  matched = Match(r'^(.*\b' + macro + r'\s*)\‍(', line)
4264  if not matched:
4265  continue
4266  return (macro, len(matched.group(1)))
4267  return (None, -1)
4268 
4269 
4270 def CheckCheck(filename, clean_lines, linenum, error):
4271  """Checks the use of CHECK and EXPECT macros.
4272 
4273  Args:
4274  filename: The name of the current file.
4275  clean_lines: A CleansedLines instance containing the file.
4276  linenum: The number of the line to check.
4277  error: The function to call with any errors found.
4278  """
4279 
4280  # Decide the set of replacement macros that should be suggested
4281  lines = clean_lines.elided
4282  (check_macro, start_pos) = FindCheckMacro(lines[linenum])
4283  if not check_macro:
4284  return
4285 
4286  # Find end of the boolean expression by matching parentheses
4287  (last_line, end_line, end_pos) = CloseExpression(
4288  clean_lines, linenum, start_pos)
4289  if end_pos < 0:
4290  return
4291 
4292  # If the check macro is followed by something other than a
4293  # semicolon, assume users will log their own custom error messages
4294  # and don't suggest any replacements.
4295  if not Match(r'\s*;', last_line[end_pos:]):
4296  return
4297 
4298  if linenum == end_line:
4299  expression = lines[linenum][start_pos + 1:end_pos - 1]
4300  else:
4301  expression = lines[linenum][start_pos + 1:]
4302  for i in xrange(linenum + 1, end_line):
4303  expression += lines[i]
4304  expression += last_line[0:end_pos - 1]
4305 
4306  # Parse expression so that we can take parentheses into account.
4307  # This avoids false positives for inputs like "CHECK((a < 4) == b)",
4308  # which is not replaceable by CHECK_LE.
4309  lhs = ''
4310  rhs = ''
4311  operator = None
4312  while expression:
4313  matched = Match(r'^\s*(<<|<<=|>>|>>=|->\*|->|&&|\|\||'
4314  r'==|!=|>=|>|<=|<|\‍()(.*)$', expression)
4315  if matched:
4316  token = matched.group(1)
4317  if token == '(':
4318  # Parenthesized operand
4319  expression = matched.group(2)
4320  (end, _) = FindEndOfExpressionInLine(expression, 0, ['('])
4321  if end < 0:
4322  return # Unmatched parenthesis
4323  lhs += '(' + expression[0:end]
4324  expression = expression[end:]
4325  elif token in ('&&', '||'):
4326  # Logical and/or operators. This means the expression
4327  # contains more than one term, for example:
4328  # CHECK(42 < a && a < b);
4329  #
4330  # These are not replaceable with CHECK_LE, so bail out early.
4331  return
4332  elif token in ('<<', '<<=', '>>', '>>=', '->*', '->'):
4333  # Non-relational operator
4334  lhs += token
4335  expression = matched.group(2)
4336  else:
4337  # Relational operator
4338  operator = token
4339  rhs = matched.group(2)
4340  break
4341  else:
4342  # Unparenthesized operand. Instead of appending to lhs one character
4343  # at a time, we do another regular expression match to consume several
4344  # characters at once if possible. Trivial benchmark shows that this
4345  # is more efficient when the operands are longer than a single
4346  # character, which is generally the case.
4347  matched = Match(r'^([^-=!<>()&|]+)(.*)$', expression)
4348  if not matched:
4349  matched = Match(r'^(\s*\S)(.*)$', expression)
4350  if not matched:
4351  break
4352  lhs += matched.group(1)
4353  expression = matched.group(2)
4354 
4355  # Only apply checks if we got all parts of the boolean expression
4356  if not (lhs and operator and rhs):
4357  return
4358 
4359  # Check that rhs does not contain logical operators. We already know
4360  # that lhs is fine since the loop above parses out && and ||.
4361  if rhs.find('&&') > -1 or rhs.find('||') > -1:
4362  return
4363 
4364  # At least one of the operands must be a constant literal. This is
4365  # to avoid suggesting replacements for unprintable things like
4366  # CHECK(variable != iterator)
4367  #
4368  # The following pattern matches decimal, hex integers, strings, and
4369  # characters (in that order).
4370  lhs = lhs.strip()
4371  rhs = rhs.strip()
4372  match_constant = r'^([-+]?(\d+|0[xX][0-9a-fA-F]+)[lLuU]{0,3}|".*"|\'.*\')$'
4373  if Match(match_constant, lhs) or Match(match_constant, rhs):
4374  # Note: since we know both lhs and rhs, we can provide a more
4375  # descriptive error message like:
4376  # Consider using CHECK_EQ(x, 42) instead of CHECK(x == 42)
4377  # Instead of:
4378  # Consider using CHECK_EQ instead of CHECK(a == b)
4379  #
4380  # We are still keeping the less descriptive message because if lhs
4381  # or rhs gets long, the error message might become unreadable.
4382  error(filename, linenum, 'readability/check', 2,
4383  'Consider using %s instead of %s(a %s b)' % (
4384  _CHECK_REPLACEMENT[check_macro][operator],
4385  check_macro, operator))
4386 
4387 
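# --- Illustrative example (not part of the original linter) ---
# A replacement such as CHECK_EQ is only suggested when one side of the parsed
# comparison is a literal, as recognised by `match_constant` above.  A
# hypothetical demonstration of that classification:
def _ExampleIsConstantLiteral(operand):
  """Return True if an operand string looks like a numeric/string literal."""
  match_constant = r'^([-+]?(\d+|0[xX][0-9a-fA-F]+)[lLuU]{0,3}|".*"|\'.*\')$'
  return bool(re.match(match_constant, operand))

#   _ExampleIsConstantLiteral('42')        -> True
#   _ExampleIsConstantLiteral('0xFFul')    -> True
#   _ExampleIsConstantLiteral('"done"')    -> True
#   _ExampleIsConstantLiteral('iterator')  -> False
# So CHECK(x == 42) earns a CHECK_EQ(x, 42) suggestion, while
# CHECK(variable != iterator) is left alone.
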
4388 def CheckAltTokens(filename, clean_lines, linenum, error):
4389  """Check alternative keywords being used in boolean expressions.
4390 
4391  Args:
4392  filename: The name of the current file.
4393  clean_lines: A CleansedLines instance containing the file.
4394  linenum: The number of the line to check.
4395  error: The function to call with any errors found.
4396  """
4397  line = clean_lines.elided[linenum]
4398 
4399  # Avoid preprocessor lines
4400  if Match(r'^\s*#', line):
4401  return
4402 
4403  # Last ditch effort to avoid multi-line comments. This will not help
4404  # if the comment started before the current line or ended after the
4405  # current line, but it catches most of the false positives. At least,
4406  # it provides a way to workaround this warning for people who use
4407  # multi-line comments in preprocessor macros.
4408  #
4409  # TODO(unknown): remove this once cpplint has better support for
4410  # multi-line comments.
4411  if line.find('/*') >= 0 or line.find('*/') >= 0:
4412  return
4413 
4414  for match in _ALT_TOKEN_REPLACEMENT_PATTERN.finditer(line):
4415  error(filename, linenum, 'readability/alt_tokens', 2,
4416  'Use operator %s instead of %s' % (
4417  _ALT_TOKEN_REPLACEMENT[match.group(1)], match.group(1)))
4418 
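# Illustrative example for CheckAltTokens (not part of cpplint): assuming the
# usual alternative-token table where _ALT_TOKEN_REPLACEMENT maps 'and' to '&&'
# and 'or' to '||', a line such as
#   if (foo and bar) {
# is reported as "Use operator && instead of and". Preprocessor lines and
# lines containing /* or */ are skipped, as implemented above.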
4419 
4420 def GetLineWidth(line):
4421  """Determines the width of the line in column positions.
4422 
4423  Args:
4424  line: A string, which may be a Unicode string.
4425 
4426  Returns:
4427  The width of the line in column positions, accounting for Unicode
4428  combining characters and wide characters.
4429  """
4430  if isinstance(line, unicode):
4431  width = 0
4432  for uc in unicodedata.normalize('NFC', line):
4433  if unicodedata.east_asian_width(uc) in ('W', 'F'):
4434  width += 2
4435  elif not unicodedata.combining(uc):
4436  width += 1
4437  return width
4438  else:
4439  return len(line)
4440 
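# Rough usage sketch for GetLineWidth (illustrative only, not part of cpplint):
# wide CJK characters count as two columns and combining marks as zero, so
#   GetLineWidth(u'abc')          == 3
#   GetLineWidth(u'\u65e5\u672c') == 4   # two fullwidth characters
# Non-Unicode strings simply fall back to len(line).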
4441 
4442 def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state,
4443  error):
4444  """Checks rules from the 'C++ style rules' section of cppguide.html.
4445 
4446  Most of these rules are hard to test (naming, comment style), but we
4447  do what we can. In particular we check for 2-space indents, line lengths,
4448  tab usage, spaces inside code, etc.
4449 
4450  Args:
4451  filename: The name of the current file.
4452  clean_lines: A CleansedLines instance containing the file.
4453  linenum: The number of the line to check.
4454  file_extension: The extension (without the dot) of the filename.
4455  nesting_state: A NestingState instance which maintains information about
4456  the current stack of nested blocks being parsed.
4457  error: The function to call with any errors found.
4458  """
4459 
4460  # Don't use "elided" lines here, otherwise we can't check commented lines.
4461  # Don't want to use "raw" either, because we don't want to check inside C++11
4462  # raw strings.
4463  raw_lines = clean_lines.lines_without_raw_strings
4464  line = raw_lines[linenum]
4465  prev = raw_lines[linenum - 1] if linenum > 0 else ''
4466 
4467  if line.find('\t') != -1:
4468  error(filename, linenum, 'whitespace/tab', 1,
4469  'Tab found; better to use spaces')
4470 
4471  # One or three blank spaces at the beginning of the line is weird; it's
4472  # hard to reconcile that with 2-space indents.
4473  # NOTE: here are the conditions rob pike used for his tests. Mine aren't
4474  # as sophisticated, but it may be worth becoming so: RLENGTH==initial_spaces
4475  # if(RLENGTH > 20) complain = 0;
4476  # if(match($0, " +(error|private|public|protected):")) complain = 0;
4477  # if(match(prev, "&& *$")) complain = 0;
4478  # if(match(prev, "\\|\\| *$")) complain = 0;
4479  # if(match(prev, "[\",=><] *$")) complain = 0;
4480  # if(match($0, " <<")) complain = 0;
4481  # if(match(prev, " +for \\(")) complain = 0;
4482  # if(prevodd && match(prevprev, " +for \\(")) complain = 0;
4483  scope_or_label_pattern = r'\s*\w+\s*:\s*\\?$'
4484  classinfo = nesting_state.InnermostClass()
4485  initial_spaces = 0
4486  cleansed_line = clean_lines.elided[linenum]
4487  while initial_spaces < len(line) and line[initial_spaces] == ' ':
4488  initial_spaces += 1
4489  # There are certain situations we allow one space, notably for
4490  # section labels, and also lines containing multi-line raw strings.
4491  # We also don't check for lines that look like continuation lines
4492  # (of lines ending in double quotes, commas, equals, or angle brackets)
4493  # because the rules for how to indent those are non-trivial.
4494  if (not Search(r'[",=><] *$', prev) and
4495  (initial_spaces == 1 or initial_spaces == 3) and
4496  not Match(scope_or_label_pattern, cleansed_line) and
4497  not (clean_lines.raw_lines[linenum] != line and
4498  Match(r'^\s*""', line))):
4499  error(filename, linenum, 'whitespace/indent', 3,
4500  'Weird number of spaces at line-start. '
4501  'Are you using a 2-space indent?')
4502 
4503  if line and line[-1].isspace():
4504  error(filename, linenum, 'whitespace/end_of_line', 4,
4505  'Line ends in whitespace. Consider deleting these extra spaces.')
4506 
4507  # Check if the line is a header guard.
4508  is_header_guard = False
4509  if file_extension in GetHeaderExtensions():
4510  cppvar = GetHeaderGuardCPPVariable(filename)
4511  if (line.startswith('#ifndef %s' % cppvar) or
4512  line.startswith('#define %s' % cppvar) or
4513  line.startswith('#endif // %s' % cppvar)):
4514  is_header_guard = True
4515  # #include lines and header guards can be long, since there's no clean way to
4516  # split them.
4517  #
4518  # URLs can be long too. It's possible to split these, but it makes them
4519  # harder to cut&paste.
4520  #
4521  # The "$Id:...$" comment may also get very long without it being the
4522  # developer's fault.
4523  #
4524  # Doxygen documentation copying can get pretty long when using an overloaded
4525  # function declaration
4526  if (not line.startswith('#include') and not is_header_guard and
4527  not Match(r'^\s*//.*http(s?)://\S*$', line) and
4528  not Match(r'^\s*//\s*[^\s]*$', line) and
4529  not Match(r'^// \$Id:.*#[0-9]+ \$$', line) and
4530  not Match(r'^\s*/// [@\\](copydoc|copydetails|copybrief) .*$', line)):
4531  line_width = GetLineWidth(line)
4532  if line_width > _line_length:
4533  error(filename, linenum, 'whitespace/line_length', 2,
4534  'Lines should be <= %i characters long' % _line_length)
4535 
4536  if (cleansed_line.count(';') > 1 and
4537  # allow simple single line lambdas
4538  not Match(r'^[^{};]*\[[^\[\]]*\][^{}]*\{[^{}\n\r]*\}',
4539  line) and
4540  # for loops are allowed two ;'s (and may run over two lines).
4541  cleansed_line.find('for') == -1 and
4542  (GetPreviousNonBlankLine(clean_lines, linenum)[0].find('for') == -1 or
4543  GetPreviousNonBlankLine(clean_lines, linenum)[0].find(';') != -1) and
4544  # It's ok to have many commands in a switch case that fits in 1 line
4545  not ((cleansed_line.find('case ') != -1 or
4546  cleansed_line.find('default:') != -1) and
4547  cleansed_line.find('break;') != -1)):
4548  error(filename, linenum, 'whitespace/newline', 0,
4549  'More than one command on the same line')
4550 
4551  # Some more style checks
4552  CheckBraces(filename, clean_lines, linenum, error)
4553  CheckTrailingSemicolon(filename, clean_lines, linenum, error)
4554  CheckEmptyBlockBody(filename, clean_lines, linenum, error)
4555  CheckAccess(filename, clean_lines, linenum, nesting_state, error)
4556  CheckSpacing(filename, clean_lines, linenum, nesting_state, error)
4557  CheckOperatorSpacing(filename, clean_lines, linenum, error)
4558  CheckParenthesisSpacing(filename, clean_lines, linenum, error)
4559  CheckCommaSpacing(filename, clean_lines, linenum, error)
4560  CheckBracesSpacing(filename, clean_lines, linenum, nesting_state, error)
4561  CheckSpacingForFunctionCall(filename, clean_lines, linenum, error)
4562  CheckCheck(filename, clean_lines, linenum, error)
4563  CheckAltTokens(filename, clean_lines, linenum, error)
4564  classinfo = nesting_state.InnermostClass()
4565  if classinfo:
4566  CheckSectionSpacing(filename, clean_lines, classinfo, linenum, error)
4567 
4568 
4569 _RE_PATTERN_INCLUDE = re.compile(r'^\s*#\s*include\s*([<"])([^>"]*)[>"].*$')
4570 # Matches the first component of a filename delimited by -s and _s. That is:
4571 # _RE_FIRST_COMPONENT.match('foo').group(0) == 'foo'
4572 # _RE_FIRST_COMPONENT.match('foo.cc').group(0) == 'foo'
4573 # _RE_FIRST_COMPONENT.match('foo-bar_baz.cc').group(0) == 'foo'
4574 # _RE_FIRST_COMPONENT.match('foo_bar-baz.cc').group(0) == 'foo'
4575 _RE_FIRST_COMPONENT = re.compile(r'^[^-_.]+')
4576 
4577 
4578 def _DropCommonSuffixes(filename):
4579  """Drops common suffixes like _test.cc or -inl.h from filename.
4580 
4581  For example:
4582  >>> _DropCommonSuffixes('foo/foo-inl.h')
4583  'foo/foo'
4584  >>> _DropCommonSuffixes('foo/bar/foo.cc')
4585  'foo/bar/foo'
4586  >>> _DropCommonSuffixes('foo/foo_internal.h')
4587  'foo/foo'
4588  >>> _DropCommonSuffixes('foo/foo_unusualinternal.h')
4589  'foo/foo_unusualinternal'
4590 
4591  Args:
4592  filename: The input filename.
4593 
4594  Returns:
4595  The filename with the common suffix removed.
4596  """
4597  for suffix in itertools.chain(
4598  ('%s.%s' % (test_suffix.lstrip('_'), ext)
4599  for test_suffix, ext in itertools.product(_test_suffixes, GetNonHeaderExtensions())),
4600  ('%s.%s' % (suffix, ext)
4601  for suffix, ext in itertools.product(['inl', 'imp', 'internal'], GetHeaderExtensions()))):
4602  if (filename.endswith(suffix) and len(filename) > len(suffix) and
4603  filename[-len(suffix) - 1] in ('-', '_')):
4604  return filename[:-len(suffix) - 1]
4605  return os.path.splitext(filename)[0]
4606 
4607 
4608 def _ClassifyInclude(fileinfo, include, is_system):
4609  """Figures out what kind of header 'include' is.
4610 
4611  Args:
4612  fileinfo: The current file cpplint is running over. A FileInfo instance.
4613  include: The path to a #included file.
4614  is_system: True if the #include used <> rather than "".
4615 
4616  Returns:
4617  One of the _XXX_HEADER constants.
4618 
4619  For example:
4620  >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'stdio.h', True)
4621  _C_SYS_HEADER
4622  >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'string', True)
4623  _CPP_SYS_HEADER
4624  >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', False)
4625  _LIKELY_MY_HEADER
4626  >>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cc'),
4627  ... 'bar/foo_other_ext.h', False)
4628  _POSSIBLE_MY_HEADER
4629  >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/bar.h', False)
4630  _OTHER_HEADER
4631  """
4632  # This is a list of all standard c++ header files, except
4633  # those already checked for above.
4634  is_cpp_h = include in _CPP_HEADERS
4635 
4636  # Headers with C++ extensions shouldn't be considered C system headers
4637  if is_system and os.path.splitext(include)[1] in ['.hpp', '.hxx', '.h++']:
4638  is_system = False
4639 
4640  if is_system:
4641  if is_cpp_h:
4642  return _CPP_SYS_HEADER
4643  else:
4644  return _C_SYS_HEADER
4645 
4646  # If the target file and the include we're checking share a
4647  # basename when we drop common extensions, and the include
4648  # lives in the same directory, then it's likely to be owned by the target file.
4649  target_dir, target_base = (
4650  os.path.split(_DropCommonSuffixes(fileinfo.RepositoryName())))
4651  include_dir, include_base = os.path.split(_DropCommonSuffixes(include))
4652  target_dir_pub = os.path.normpath(target_dir + '/../public')
4653  target_dir_pub = target_dir_pub.replace('\\', '/')
4654  if target_base == include_base and (
4655  include_dir == target_dir or
4656  include_dir == target_dir_pub):
4657  return _LIKELY_MY_HEADER
4658 
4659  # If the target and include share some initial basename
4660  # component, it's possible the target is implementing the
4661  # include, so it's allowed to be first, but we'll never
4662  # complain if it's not there.
4663  target_first_component = _RE_FIRST_COMPONENT.match(target_base)
4664  include_first_component = _RE_FIRST_COMPONENT.match(include_base)
4665  if (target_first_component and include_first_component and
4666  target_first_component.group(0) ==
4667  include_first_component.group(0)):
4668  return _POSSIBLE_MY_HEADER
4669 
4670  return _OTHER_HEADER
4671 
4672 
4673 
4674 def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
4675  """Check rules that are applicable to #include lines.
4676 
4677  Strings on #include lines are NOT removed from the elided line, to make
4678  certain tasks easier. However, to prevent false positives, checks
4679  applicable to #include lines in CheckLanguage must be put here.
4680 
4681  Args:
4682  filename: The name of the current file.
4683  clean_lines: A CleansedLines instance containing the file.
4684  linenum: The number of the line to check.
4685  include_state: An _IncludeState instance in which the headers are inserted.
4686  error: The function to call with any errors found.
4687  """
4688  fileinfo = FileInfo(filename)
4689  line = clean_lines.lines[linenum]
4690 
4691  # "include" should use the new style "foo/bar.h" instead of just "bar.h"
4692  # Only do this check if the included header follows google naming
4693  # conventions. If not, assume that it's a 3rd party API that
4694  # requires special include conventions.
4695  #
4696  # We also make an exception for Lua headers, which follow google
4697  # naming convention but not the include convention.
4698  match = Match(r'#include\s*"([^/]+\.h)"', line)
4699  if match and not _THIRD_PARTY_HEADERS_PATTERN.match(match.group(1)):
4700  error(filename, linenum, 'build/include_subdir', 4,
4701  'Include the directory when naming .h files')
4702 
4703  # we shouldn't include a file more than once. actually, there are a
4704  # handful of instances where doing so is okay, but in general it's
4705  # not.
4706  match = _RE_PATTERN_INCLUDE.search(line)
4707  if match:
4708  include = match.group(2)
4709  is_system = (match.group(1) == '<')
4710  duplicate_line = include_state.FindHeader(include)
4711  if duplicate_line >= 0:
4712  error(filename, linenum, 'build/include', 4,
4713  '"%s" already included at %s:%s' %
4714  (include, filename, duplicate_line))
4715  return
4716 
4717  for extension in GetNonHeaderExtensions():
4718  if (include.endswith('.' + extension) and
4719  os.path.dirname(fileinfo.RepositoryName()) != os.path.dirname(include)):
4720  error(filename, linenum, 'build/include', 4,
4721  'Do not include .' + extension + ' files from other packages')
4722  return
4723 
4724  if not _THIRD_PARTY_HEADERS_PATTERN.match(include):
4725  include_state.include_list[-1].append((include, linenum))
4726 
4727  # We want to ensure that headers appear in the right order:
4728  # 1) for foo.cc, foo.h (preferred location)
4729  # 2) c system files
4730  # 3) cpp system files
4731  # 4) for foo.cc, foo.h (deprecated location)
4732  # 5) other google headers
4733  #
4734  # We classify each include statement as one of those 5 types
4735  # using a number of techniques. The include_state object keeps
4736  # track of the highest type seen, and complains if we see a
4737  # lower type after that.
4738  error_message = include_state.CheckNextIncludeOrder(
4739  _ClassifyInclude(fileinfo, include, is_system))
4740  if error_message:
4741  error(filename, linenum, 'build/include_order', 4,
4742  '%s. Should be: %s.h, c system, c++ system, other.' %
4743  (error_message, fileinfo.BaseName()))
4744  canonical_include = include_state.CanonicalizeAlphabeticalOrder(include)
4745  if not include_state.IsInAlphabeticalOrder(
4746  clean_lines, linenum, canonical_include):
4747  error(filename, linenum, 'build/include_alpha', 4,
4748  'Include "%s" not in alphabetical order' % include)
4749  include_state.SetLastHeader(canonical_include)
4750 
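# Illustrative example of the include ordering CheckIncludeLine enforces for
# foo.cc (sketch only, not part of cpplint):
#   #include "foo/foo.h"     // 1) own header first
#   #include <stdio.h>       // 2) C system header
#   #include <vector>        // 3) C++ system header
#   #include "bar/other.h"   // 5) other project headers, alphabetical
# Duplicate includes, .cc includes from other packages, and out-of-order or
# non-alphabetical includes are reported under the build/include* categories.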
4751 
4752 
4753 def _GetTextInside(text, start_pattern):
4754  r"""Retrieves all the text between matching open and close parentheses.
4755 
4756  Given a string of lines and a regular expression string, retrieve all the text
4757  following the expression and between opening punctuation symbols like
4758  (, [, or {, and the matching close-punctuation symbol. This properly handles
4759  nested occurrences of the punctuation, so for text like
4760  printf(a(), b(c()));
4761  a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'.
4762  start_pattern must match a string that has an opening punctuation symbol at the end.
4763 
4764  Args:
4765  text: The text to extract from. Its comments and strings must be elided.
4766  It can be a single line or span multiple lines.
4767  start_pattern: The regexp string indicating where to start extracting
4768  the text.
4769  Returns:
4770  The extracted text.
4771  None if either the opening string or ending punctuation could not be found.
4772  """
4773  # TODO(unknown): Audit cpplint.py to see what places could be profitably
4774  # rewritten to use _GetTextInside (i.e. places that currently use inferior regexp matching).
4775 
4776  # Maps opening punctuation to the matching closing punctuation.
4777  matching_punctuation = {'(': ')', '{': '}', '[': ']'}
4778  closing_punctuation = set(itervalues(matching_punctuation))
4779 
4780  # Find the position to start extracting text.
4781  match = re.search(start_pattern, text, re.M)
4782  if not match: # start_pattern not found in text.
4783  return None
4784  start_position = match.end(0)
4785 
4786  assert start_position > 0, (
4787  'start_pattern must end with an opening punctuation.')
4788  assert text[start_position - 1] in matching_punctuation, (
4789  'start_pattern must end with an opening punctuation.')
4790  # Stack of closing punctuations we expect to have in text after position.
4791  punctuation_stack = [matching_punctuation[text[start_position - 1]]]
4792  position = start_position
4793  while punctuation_stack and position < len(text):
4794  if text[position] == punctuation_stack[-1]:
4795  punctuation_stack.pop()
4796  elif text[position] in closing_punctuation:
4797  # A closing punctuation without matching opening punctuations.
4798  return None
4799  elif text[position] in matching_punctuation:
4800  punctuation_stack.append(matching_punctuation[text[position]])
4801  position += 1
4802  if punctuation_stack:
4803  # Opening punctuations left without matching close-punctuations.
4804  return None
4805  # punctuations match.
4806  return text[start_position:position - 1]
4807 
4808 
4809 # Patterns for matching call-by-reference parameters.
4810 #
4811 # Supports nested templates up to 2 levels deep using this messy pattern:
4812 #   < (?: < (?: < [^<>]*
4813 #               >
4814 #           | [^<>] )*
4815 #         >
4816 #       | [^<>] )*
4817 #   >
4818 _RE_PATTERN_IDENT = r'[_a-zA-Z]\w*' # =~ [[:alpha:]][[:alnum:]]*
4819 _RE_PATTERN_TYPE = (
4820  r'(?:const\s+)?(?:typename\s+|class\s+|struct\s+|union\s+|enum\s+)?'
4821  r'(?:\w|'
4822  r'\s*<(?:<(?:<[^<>]*>|[^<>])*>|[^<>])*>|'
4823  r'::)+')
4824 # A call-by-reference parameter ends with '& identifier'.
4825 _RE_PATTERN_REF_PARAM = re.compile(
4826  r'(' + _RE_PATTERN_TYPE + r'(?:\s*(?:\bconst\b|[*]))*\s*'
4827  r'&\s*' + _RE_PATTERN_IDENT + r')\s*(?:=[^,()]+)?[,)]')
4828 # A call-by-const-reference parameter either ends with 'const& identifier'
4829 # or looks like 'const type& identifier' when 'type' is atomic.
4830 _RE_PATTERN_CONST_REF_PARAM = (
4831  r'(?:.*\s*\bconst\s*&\s*' + _RE_PATTERN_IDENT +
4832  r'|const\s+' + _RE_PATTERN_TYPE + r'\s*&\s*' + _RE_PATTERN_IDENT + r')')
4833 # Stream types.
4834 _RE_PATTERN_REF_STREAM_PARAM = (
4835  r'(?:.*stream\s*&\s*' + _RE_PATTERN_IDENT + r')')
4836 
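# Rough examples of parameter text these patterns are intended to match
# (illustrative only, not part of cpplint):
#   _RE_PATTERN_REF_PARAM         e.g. 'string& out,'  or  'vector<int>& v)'
#   _RE_PATTERN_CONST_REF_PARAM   e.g. 'const string& name'
#   _RE_PATTERN_REF_STREAM_PARAM  e.g. 'std::ostream& os'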
4837 
4838 def CheckLanguage(filename, clean_lines, linenum, file_extension,
4839  include_state, nesting_state, error):
4840  """Checks rules from the 'C++ language rules' section of cppguide.html.
4841 
4842  Some of these rules are hard to test (function overloading, using
4843  uint32 inappropriately), but we do the best we can.
4844 
4845  Args:
4846  filename: The name of the current file.
4847  clean_lines: A CleansedLines instance containing the file.
4848  linenum: The number of the line to check.
4849  file_extension: The extension (without the dot) of the filename.
4850  include_state: An _IncludeState instance in which the headers are inserted.
4851  nesting_state: A NestingState instance which maintains information about
4852  the current stack of nested blocks being parsed.
4853  error: The function to call with any errors found.
4854  """
4855  # If the line is empty or consists of entirely a comment, no need to
4856  # check it.
4857  line = clean_lines.elided[linenum]
4858  if not line:
4859  return
4860 
4861  match = _RE_PATTERN_INCLUDE.search(line)
4862  if match:
4863  CheckIncludeLine(filename, clean_lines, linenum, include_state, error)
4864  return
4865 
4866  # Reset include state across preprocessor directives. This is meant
4867  # to silence warnings for conditional includes.
4868  match = Match(r'^\s*#\s*(if|ifdef|ifndef|elif|else|endif)\b', line)
4869  if match:
4870  include_state.ResetSection(match.group(1))
4871 
4872 
4873  # Perform other checks now that we are sure that this is not an include line
4874  CheckCasts(filename, clean_lines, linenum, error)
4875  CheckGlobalStatic(filename, clean_lines, linenum, error)
4876  CheckPrintf(filename, clean_lines, linenum, error)
4877 
4878  if file_extension in GetHeaderExtensions():
4879  # TODO(unknown): check that 1-arg constructors are explicit.
4880  # How to tell it's a constructor?
4881  # (handled in CheckForNonStandardConstructs for now)
4882  # TODO(unknown): check that classes declare or disable copy/assign
4883  # (level 1 error)
4884  pass
4885 
4886  # Check if people are using the verboten C basic types. The only exception
4887  # we regularly allow is "unsigned short port" for port.
4888  if Search(r'\bshort port\b', line):
4889  if not Search(r'\bunsigned short port\b', line):
4890  error(filename, linenum, 'runtime/int', 4,
4891  'Use "unsigned short" for ports, not "short"')
4892  else:
4893  match = Search(r'\b(short|long(?! +double)|long long)\b', line)
4894  if match:
4895  error(filename, linenum, 'runtime/int', 4,
4896  'Use int16/int64/etc, rather than the C type %s' % match.group(1))
4897 
4898  # Check if some verboten operator overloading is going on
4899  # TODO(unknown): catch out-of-line unary operator&:
4900  # class X {};
4901  # int operator&(const X& x) { return 42; } // unary operator&
4902  # The trick is it's hard to tell apart from binary operator&:
4903  # class Y { int operator&(const Y& x) { return 23; } }; // binary operator&
4904  if Search(r'\boperator\s*&\s*\(\s*\)', line):
4905  error(filename, linenum, 'runtime/operator', 4,
4906  'Unary operator& is dangerous. Do not use it.')
4907 
4908  # Check for suspicious usage of "if" like
4909  # } if (a == b) {
4910  if Search(r'\}\s*if\s*\(', line):
4911  error(filename, linenum, 'readability/braces', 4,
4912  'Did you mean "else if"? If not, start a new line for "if".')
4913 
4914  # Check for potential format string bugs like printf(foo).
4915  # We constrain the pattern not to pick things like DocidForPrintf(foo).
4916  # Not perfect but it can catch printf(foo.c_str()) and printf(foo->c_str())
4917  # TODO(unknown): Catch the following case. Need to change the calling
4918  # convention of the whole function to process multiple line to handle it.
4919  # printf(
4920  # boy_this_is_a_really_long_variable_that_cannot_fit_on_the_prev_line);
4921  printf_args = _GetTextInside(line, r'(?i)\b(string)?printf\s*\(')
4922  if printf_args:
4923  match = Match(r'([\w.\->()]+)$', printf_args)
4924  if match and match.group(1) != '__VA_ARGS__':
4925  function_name = re.search(r'\b((?:string)?printf)\s*\(',
4926  line, re.I).group(1)
4927  error(filename, linenum, 'runtime/printf', 4,
4928  'Potential format string bug. Do %s("%%s", %s) instead.'
4929  % (function_name, match.group(1)))
4930 
4931  # Check for potential memset bugs like memset(buf, sizeof(buf), 0).
4932  match = Search(r'memset\s*\(([^,]*),\s*([^,]*),\s*0\s*\)', line)
4933  if match and not Match(r"^''|-?[0-9]+|0x[0-9A-Fa-f]$", match.group(2)):
4934  error(filename, linenum, 'runtime/memset', 4,
4935  'Did you mean "memset(%s, 0, %s)"?'
4936  % (match.group(1), match.group(2)))
4937 
4938  if Search(r'\busing namespace\b', line):
4939  if Search(r'\bliterals\b', line):
4940  error(filename, linenum, 'build/namespaces_literals', 5,
4941  'Do not use namespace using-directives. '
4942  'Use using-declarations instead.')
4943  else:
4944  error(filename, linenum, 'build/namespaces', 5,
4945  'Do not use namespace using-directives. '
4946  'Use using-declarations instead.')
4947 
4948  # Detect variable-length arrays.
4949  match = Match(r'\s*(.+::)?(\w+) [a-z]\w*\[(.+)];', line)
4950  if (match and match.group(2) != 'return' and match.group(2) != 'delete' and
4951  match.group(3).find(']') == -1):
4952  # Split the size using space and arithmetic operators as delimiters.
4953  # If any of the resulting tokens are not compile time constants then
4954  # report the error.
4955  tokens = re.split(r'\s|\+|\-|\*|\/|<<|>>', match.group(3))
4956  is_const = True
4957  skip_next = False
4958  for tok in tokens:
4959  if skip_next:
4960  skip_next = False
4961  continue
4962 
4963  if Search(r'sizeof\(.+\)', tok): continue
4964  if Search(r'arraysize\(\w+\)', tok): continue
4965 
4966  tok = tok.lstrip('(')
4967  tok = tok.rstrip(')')
4968  if not tok: continue
4969  if Match(r'\d+', tok): continue
4970  if Match(r'0[xX][0-9a-fA-F]+', tok): continue
4971  if Match(r'k[A-Z0-9]\w*', tok): continue
4972  if Match(r'(.+::)?k[A-Z0-9]\w*', tok): continue
4973  if Match(r'(.+::)?[A-Z][A-Z0-9_]*', tok): continue
4974  # A catch all for tricky sizeof cases, including 'sizeof expression',
4975  # 'sizeof(*type)', 'sizeof(const type)', 'sizeof(struct StructName)'
4976  # requires skipping the next token because we split on ' ' and '*'.
4977  if tok.startswith('sizeof'):
4978  skip_next = True
4979  continue
4980  is_const = False
4981  break
4982  if not is_const:
4983  error(filename, linenum, 'runtime/arrays', 1,
4984  'Do not use variable-length arrays. Use an appropriately named '
4985  "('k' followed by CamelCase) compile-time constant for the size.")
4986 
4987  # Check for use of unnamed namespaces in header files. Registration
4988  # macros are typically OK, so we allow use of "namespace {" on lines
4989  # that end with backslashes.
4990  if (file_extension in GetHeaderExtensions()
4991  and Search(r'\bnamespace\s*{', line)
4992  and line[-1] != '\\'):
4993  error(filename, linenum, 'build/namespaces', 4,
4994  'Do not use unnamed namespaces in header files. See '
4995  'https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces'
4996  ' for more information.')
4997 
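# A few illustrative lines and the categories CheckLanguage flags them under
# (sketch only, not part of cpplint):
#   long long counter;     -> runtime/int    (use int16/int64/etc.)
#   short port;            -> runtime/int    (use "unsigned short" for ports)
#   char buf[len + 1];     -> runtime/arrays (variable-length array)
#   using namespace std;   -> build/namespaces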
4998 
4999 def CheckGlobalStatic(filename, clean_lines, linenum, error):
5000  """Check for unsafe global or static objects.
5001 
5002  Args:
5003  filename: The name of the current file.
5004  clean_lines: A CleansedLines instance containing the file.
5005  linenum: The number of the line to check.
5006  error: The function to call with any errors found.
5007  """
5008  line = clean_lines.elided[linenum]
5009 
5010  # Match two lines at a time to support multiline declarations
5011  if linenum + 1 < clean_lines.NumLines() and not Search(r'[;({]', line):
5012  line += clean_lines.elided[linenum + 1].strip()
5013 
5014  # Check for people declaring static/global STL strings at the top level.
5015  # This is dangerous because the C++ language does not guarantee that
5016  # globals with constructors are initialized before the first access, and
5017  # also because globals can be destroyed when some threads are still running.
5018  # TODO(unknown): Generalize this to also find static unique_ptr instances.
5019  # TODO(unknown): File bugs for clang-tidy to find these.
5020  match = Match(
5021  r'((?:|static +)(?:|const +))(?::*std::)?string( +const)? +'
5022  r'([a-zA-Z0-9_:]+)\b(.*)',
5023  line)
5024 
5025  # Remove false positives:
5026  # - String pointers (as opposed to values).
5027  # string *pointer
5028  # const string *pointer
5029  # string const *pointer
5030  # string *const pointer
5031  #
5032  # - Functions and template specializations.
5033  # string Function<Type>(...
5034  # string Class<Type>::Method(...
5035  #
5036  # - Operators. These are matched separately because operator names
5037  # cross non-word boundaries, and trying to match both operators
5038  # and functions at the same time would decrease accuracy of
5039  # matching identifiers.
5040  # string Class::operator*()
5041  if (match and
5042  not Search(r'\bstring\b(\s+const)?\s*[\*\&]\s*(const\s+)?\w', line) and
5043  not Search(r'\boperator\W', line) and
5044  not Match(r'\s*(<.*>)?(::[a-zA-Z0-9_]+)*\s*\(([^"]|$)', match.group(4))):
5045  if Search(r'\bconst\b', line):
5046  error(filename, linenum, 'runtime/string', 4,
5047  'For a static/global string constant, use a C style string '
5048  'instead: "%schar%s %s[]".' %
5049  (match.group(1), match.group(2) or '', match.group(3)))
5050  else:
5051  error(filename, linenum, 'runtime/string', 4,
5052  'Static/global string variables are not permitted.')
5053 
5054  if (Search(r'\b([A-Za-z0-9_]*_)\(\1\)', line) or
5055  Search(r'\b([A-Za-z0-9_]*_)\(CHECK_NOTNULL\(\1\)\)', line)):
5056  error(filename, linenum, 'runtime/init', 4,
5057  'You seem to be initializing a member variable with itself.')
5058 
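# Illustrative sketch for CheckGlobalStatic (not part of cpplint):
#   const string kName = "abc";   -> runtime/string, suggests 'const char kName[]'
#   static string g_path;         -> runtime/string ("not permitted")
#   static string* g_cache;       -> not flagged (pointer, not a string value)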
5059 
5060 def CheckPrintf(filename, clean_lines, linenum, error):
5061  """Check for printf related issues.
5062 
5063  Args:
5064  filename: The name of the current file.
5065  clean_lines: A CleansedLines instance containing the file.
5066  linenum: The number of the line to check.
5067  error: The function to call with any errors found.
5068  """
5069  line = clean_lines.elided[linenum]
5070 
5071  # When snprintf is used, the second argument shouldn't be a literal.
5072  match = Search(r'snprintf\s*\(([^,]*),\s*([0-9]*)\s*,', line)
5073  if match and match.group(2) != '0':
5074  # If 2nd arg is zero, snprintf is used to calculate size.
5075  error(filename, linenum, 'runtime/printf', 3,
5076  'If you can, use sizeof(%s) instead of %s as the 2nd arg '
5077  'to snprintf.' % (match.group(1), match.group(2)))
5078 
5079  # Check if some verboten C functions are being used.
5080  if Search(r'\bsprintf\s*\(', line):
5081  error(filename, linenum, 'runtime/printf', 5,
5082  'Never use sprintf. Use snprintf instead.')
5083  match = Search(r'\b(strcpy|strcat)\s*\(', line)
5084  if match:
5085  error(filename, linenum, 'runtime/printf', 4,
5086  'Almost always, snprintf is better than %s' % match.group(1))
5087 
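# Illustrative examples for CheckPrintf (not part of cpplint):
#   snprintf(buf, 10, "%s", s);  -> suggest sizeof(buf) instead of 10
#   snprintf(buf, 0, "%s", s);   -> not flagged (size 0 is used to compute length)
#   sprintf(buf, "%d", i);       -> "Never use sprintf. Use snprintf instead."
#   strcpy(dst, src);            -> "Almost always, snprintf is better than strcpy"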
5088 
5089 def IsDerivedFunction(clean_lines, linenum):
5090  """Check if current line contains an inherited function.
5091 
5092  Args:
5093  clean_lines: A CleansedLines instance containing the file.
5094  linenum: The number of the line to check.
5095  Returns:
5096  True if current line contains a function with "override"
5097  virt-specifier.
5098  """
5099  # Scan back a few lines for start of current function
5100  for i in xrange(linenum, max(-1, linenum - 10), -1):
5101  match = Match(r'^([^()]*\w+)\(', clean_lines.elided[i])
5102  if match:
5103  # Look for "override" after the matching closing parenthesis
5104  line, _, closing_paren = CloseExpression(
5105  clean_lines, i, len(match.group(1)))
5106  return (closing_paren >= 0 and
5107  Search(r'\boverride\b', line[closing_paren:]))
5108  return False
5109 
5110 
5111 def IsOutOfLineMethodDefinition(clean_lines, linenum):
5112  """Check if current line contains an out-of-line method definition.
5113 
5114  Args:
5115  clean_lines: A CleansedLines instance containing the file.
5116  linenum: The number of the line to check.
5117  Returns:
5118  True if current line contains an out-of-line method definition.
5119  """
5120  # Scan back a few lines for start of current function
5121  for i in xrange(linenum, max(-1, linenum - 10), -1):
5122  if Match(r'^([^()]*\w+)\(', clean_lines.elided[i]):
5123  return Match(r'^[^()]*\w+::\w+\(', clean_lines.elided[i]) is not None
5124  return False
5125 
5126 
5127 def IsInitializerList(clean_lines, linenum):
5128  """Check if current line is inside constructor initializer list.
5129 
5130  Args:
5131  clean_lines: A CleansedLines instance containing the file.
5132  linenum: The number of the line to check.
5133  Returns:
5134  True if current line appears to be inside constructor initializer
5135  list, False otherwise.
5136  """
5137  for i in xrange(linenum, 1, -1):
5138  line = clean_lines.elided[i]
5139  if i == linenum:
5140  remove_function_body = Match(r'^(.*)\{\s*$', line)
5141  if remove_function_body:
5142  line = remove_function_body.group(1)
5143 
5144  if Search(r'\s:\s*\w+[({]', line):
5145  # A lone colon tends to indicate the start of a constructor
5146  # initializer list. It could also be a ternary operator, which
5147  # also tends to appear in constructor initializer lists as
5148  # opposed to parameter lists.
5149  return True
5150  if Search(r'\}\s*,\s*$', line):
5151  # A closing brace followed by a comma is probably the end of a
5152  # brace-initialized member in constructor initializer list.
5153  return True
5154  if Search(r'[{};]\s*$', line):
5155  # Found one of the following:
5156  # - A closing brace or semicolon, probably the end of the previous
5157  # function.
5158  # - An opening brace, probably the start of current class or namespace.
5159  #
5160  # Current line is probably not inside an initializer list since
5161  # we saw one of those things without seeing the starting colon.
5162  return False
5163 
5164  # Got to the beginning of the file without seeing the start of
5165  # constructor initializer list.
5166  return False
5167 
5168 
5169 def CheckForNonConstReference(filename, clean_lines, linenum,
5170  nesting_state, error):
5171  """Check for non-const references.
5172 
5173  Separate from CheckLanguage since it scans backwards from current
5174  line, instead of scanning forward.
5175 
5176  Args:
5177  filename: The name of the current file.
5178  clean_lines: A CleansedLines instance containing the file.
5179  linenum: The number of the line to check.
5180  nesting_state: A NestingState instance which maintains information about
5181  the current stack of nested blocks being parsed.
5182  error: The function to call with any errors found.
5183  """
5184  # Do nothing if there is no '&' on current line.
5185  line = clean_lines.elided[linenum]
5186  if '&' not in line:
5187  return
5188 
5189  # If a function is inherited, current function doesn't have much of
5190  # a choice, so any non-const references should not be blamed on
5191  # derived function.
5192  if IsDerivedFunction(clean_lines, linenum):
5193  return
5194 
5195  # Don't warn on out-of-line method definitions, as we would warn on the
5196  # in-line declaration, if it isn't marked with 'override'.
5197  if IsOutOfLineMethodDefinition(clean_lines, linenum):
5198  return
5199 
5200  # Long type names may be broken across multiple lines, usually in one
5201  # of these forms:
5202  #   LongType
5203  #       ::LongTypeContinued &identifier
5204  #   LongType::
5205  #       LongTypeContinued &identifier
5206  #   LongType<
5207  #       ...>::LongTypeContinued &identifier
5208  #
5209  # If we detected a type split across two lines, join the previous
5210  # line to current line so that we can match const references
5211  # accordingly.
5212  #
5213  # Note that this only scans back one line, since scanning back
5214  # arbitrary number of lines would be expensive. If you have a type
5215  # that spans more than 2 lines, please use a typedef.
5216  if linenum > 1:
5217  previous = None
5218  if Match(r'\s*::(?:[\w<>]|::)+\s*&\s*\S', line):
5219  # previous_line\n + ::current_line
5220  previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+[\w<>])\s*$',
5221  clean_lines.elided[linenum - 1])
5222  elif Match(r'\s*[a-zA-Z_]([\w<>]|::)+\s*&\s*\S', line):
5223  # previous_line::\n + current_line
5224  previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+::)\s*$',
5225  clean_lines.elided[linenum - 1])
5226  if previous:
5227  line = previous.group(1) + line.lstrip()
5228  else:
5229  # Check for templated parameter that is split across multiple lines
5230  endpos = line.rfind('>')
5231  if endpos > -1:
5232  (_, startline, startpos) = ReverseCloseExpression(
5233  clean_lines, linenum, endpos)
5234  if startpos > -1 and startline < linenum:
5235  # Found the matching < on an earlier line, collect all
5236  # pieces up to current line.
5237  line = ''
5238  for i in xrange(startline, linenum + 1):
5239  line += clean_lines.elided[i].strip()
5240 
5241  # Check for non-const references in function parameters. A single '&' may
5242  # be found in the following places:
5243  # inside expression: binary & for bitwise AND
5244  # inside expression: unary & for taking the address of something
5245  # inside declarators: reference parameter
5246  # We will exclude the first two cases by checking that we are not inside a
5247  # function body, including one that was just introduced by a trailing '{'.
5248  # TODO(unknown): Doesn't account for 'catch(Exception& e)' [rare].
5249  if (nesting_state.previous_stack_top and
5250  not (isinstance(nesting_state.previous_stack_top, _ClassInfo) or
5251  isinstance(nesting_state.previous_stack_top, _NamespaceInfo))):
5252  # Not at toplevel, not within a class, and not within a namespace
5253  return
5254 
5255  # Avoid initializer lists. We only need to scan back from the
5256  # current line for something that starts with ':'.
5257  #
5258  # We don't need to check the current line, since the '&' would
5259  # appear inside the second set of parentheses on the current line as
5260  # opposed to the first set.
5261  if linenum > 0:
5262  for i in xrange(linenum - 1, max(0, linenum - 10), -1):
5263  previous_line = clean_lines.elided[i]
5264  if not Search(r'[),]\s*$', previous_line):
5265  break
5266  if Match(r'^\s*:\s+\S', previous_line):
5267  return
5268 
5269  # Avoid preprocessors
5270  if Search(r'\\\s*$', line):
5271  return
5272 
5273  # Avoid constructor initializer lists
5274  if IsInitializerList(clean_lines, linenum):
5275  return
5276 
5277  # We allow non-const references in a few standard places, like functions
5278  # called "swap()" or iostream operators like "<<" or ">>". Do not check
5279  # those function parameters.
5280  #
5281  # We also accept & in static_assert, which looks like a function but
5282  # it's actually a declaration expression.
5283  whitelisted_functions = (r'(?:[sS]wap(?:<\w:+>)?|'
5284  r'operator\s*[<>][<>]|'
5285  r'static_assert|COMPILE_ASSERT'
5286  r')\s*\(')
5287  if Search(whitelisted_functions, line):
5288  return
5289  elif not Search(r'\S+\([^)]*$', line):
5290  # Don't see a whitelisted function on this line. Actually we
5291  # didn't see any function name on this line, so this is likely a
5292  # multi-line parameter list. Try a bit harder to catch this case.
5293  for i in xrange(2):
5294  if (linenum > i and
5295  Search(whitelisted_functions, clean_lines.elided[linenum - i - 1])):
5296  return
5297 
5298  decls = ReplaceAll(r'{[^}]*}', ' ', line) # exclude function body
5299  for parameter in re.findall(_RE_PATTERN_REF_PARAM, decls):
5300  if (not Match(_RE_PATTERN_CONST_REF_PARAM, parameter) and
5301  not Match(_RE_PATTERN_REF_STREAM_PARAM, parameter)):
5302  error(filename, linenum, 'runtime/references', 2,
5303  'Is this a non-const reference? '
5304  'If so, make const or use a pointer: ' +
5305  ReplaceAll(' *<', '<', parameter))
5306 
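# Illustrative parameter declarations and how CheckForNonConstReference treats
# them (sketch only, not part of cpplint):
#   void Scale(Matrix& m);        -> runtime/references (make const or use a pointer)
#   void Scale(const Matrix& m);  -> OK (const reference)
#   void Scale(Matrix* m);        -> OK (pointer)
#   void swap(Foo& a, Foo& b);    -> OK (swap is explicitly allowed above)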
5307 
5308 def CheckCasts(filename, clean_lines, linenum, error):
5309  """Various cast related checks.
5310 
5311  Args:
5312  filename: The name of the current file.
5313  clean_lines: A CleansedLines instance containing the file.
5314  linenum: The number of the line to check.
5315  error: The function to call with any errors found.
5316  """
5317  line = clean_lines.elided[linenum]
5318 
5319  # Check to see if they're using a conversion function cast.
5320  # I just try to capture the most common basic types, though there are more.
5321  # Parameterless conversion functions, such as bool(), are allowed as they are
5322  # probably a member operator declaration or default constructor.
5323  match = Search(
5324  r'(\bnew\s+(?:const\s+)?|\S<\s*(?:const\s+)?)?\b'
5325  r'(int|float|double|bool|char|int32|uint32|int64|uint64)'
5326  r'(\([^)].*)', line)
5327  expecting_function = ExpectingFunctionArgs(clean_lines, linenum)
5328  if match and not expecting_function:
5329  matched_type = match.group(2)
5330 
5331  # matched_new_or_template is used to silence two false positives:
5332  # - New operators
5333  # - Template arguments with function types
5334  #
5335  # For template arguments, we match on types immediately following
5336  # an opening bracket without any spaces. This is a fast way to
5337  # silence the common case where the function type is the first
5338  # template argument. False negative with less-than comparison is
5339  # avoided because those operators are usually followed by a space.
5340  #
5341  # function<double(double)> // bracket + no space = false positive
5342  # value < double(42) // bracket + space = true positive
5343  matched_new_or_template = match.group(1)
5344 
5345  # Avoid arrays by looking for brackets that come after the closing
5346  # parenthesis.
5347  if Match(r'\([^()]+\)\s*\[', match.group(3)):
5348  return
5349 
5350  # Other things to ignore:
5351  # - Function pointers
5352  # - Casts to pointer types
5353  # - Placement new
5354  # - Alias declarations
5355  matched_funcptr = match.group(3)
5356  if (matched_new_or_template is None and
5357  not (matched_funcptr and
5358  (Match(r'\((?:[^() ]+::\s*\*\s*)?[^() ]+\)\s*\(',
5359  matched_funcptr) or
5360  matched_funcptr.startswith('(*)'))) and
5361  not Match(r'\s*using\s+\S+\s*=\s*' + matched_type, line) and
5362  not Search(r'new\(\S+\)\s*' + matched_type, line)):
5363  error(filename, linenum, 'readability/casting', 4,
5364  'Using deprecated casting style. '
5365  'Use static_cast<%s>(...) instead' %
5366  matched_type)
5367 
5368  if not expecting_function:
5369  CheckCStyleCast(filename, clean_lines, linenum, 'static_cast',
5370  r'\((int|float|double|bool|char|u?int(16|32|64))\)', error)
5371 
5372  # This doesn't catch all cases. Consider (const char * const)"hello".
5373  #
5374  # (char *) "foo" should always be a const_cast (reinterpret_cast won't
5375  # compile).
5376  if CheckCStyleCast(filename, clean_lines, linenum, 'const_cast',
5377  r'\((char\s?\*+\s?)\)\s*"', error):
5378  pass
5379  else:
5380  # Check pointer casts for other than string constants
5381  CheckCStyleCast(filename, clean_lines, linenum, 'reinterpret_cast',
5382  r'\((\w+\s?\*+\s?)\)', error)
5383 
5384  # In addition, we look for people taking the address of a cast. This
5385  # is dangerous -- casts can assign to temporaries, so the pointer doesn't
5386  # point where you think.
5387  #
5388  # Some non-identifier character is required before the '&' for the
5389  # expression to be recognized as a cast. These are casts:
5390  # expression = &static_cast<int*>(temporary());
5391  # function(&(int*)(temporary()));
5392  #
5393  # This is not a cast:
5394  # reference_type&(int* function_param);
5395  match = Search(
5396  r'(?:[^\w]&\(([^)*][^)]*)\)[\w(])|'
5397  r'(?:[^\w]&(static|dynamic|down|reinterpret)_cast\b)', line)
5398  if match:
5399  # Try a better error message when the & is bound to something
5400  # dereferenced by the casted pointer, as opposed to the casted
5401  # pointer itself.
5402  parenthesis_error = False
5403  match = Match(r'^(.*&(?:static|dynamic|down|reinterpret)_cast\b)<', line)
5404  if match:
5405  _, y1, x1 = CloseExpression(clean_lines, linenum, len(match.group(1)))
5406  if x1 >= 0 and clean_lines.elided[y1][x1] == '(':
5407  _, y2, x2 = CloseExpression(clean_lines, y1, x1)
5408  if x2 >= 0:
5409  extended_line = clean_lines.elided[y2][x2:]
5410  if y2 < clean_lines.NumLines() - 1:
5411  extended_line += clean_lines.elided[y2 + 1]
5412  if Match(r'\s*(?:->|\[)', extended_line):
5413  parenthesis_error = True
5414 
5415  if parenthesis_error:
5416  error(filename, linenum, 'readability/casting', 4,
5417  ('Are you taking an address of something dereferenced '
5418  'from a cast? Wrapping the dereferenced expression in '
5419  'parentheses will make the binding more obvious'))
5420  else:
5421  error(filename, linenum, 'runtime/casting', 4,
5422  ('Are you taking an address of a cast? '
5423  'This is dangerous: could be a temp var. '
5424  'Take the address before doing the cast, rather than after'))
5425 
5426 
5427 def CheckCStyleCast(filename, clean_lines, linenum, cast_type, pattern, error):
5428  """Checks for a C-style cast by looking for the pattern.
5429 
5430  Args:
5431  filename: The name of the current file.
5432  clean_lines: A CleansedLines instance containing the file.
5433  linenum: The number of the line to check.
5434  cast_type: The string for the C++ cast to recommend. This is either
5435  reinterpret_cast, static_cast, or const_cast, depending.
5436  pattern: The regular expression used to find C-style casts.
5437  error: The function to call with any errors found.
5438 
5439  Returns:
5440  True if an error was emitted.
5441  False otherwise.
5442  """
5443  line = clean_lines.elided[linenum]
5444  match = Search(pattern, line)
5445  if not match:
5446  return False
5447 
5448  # Exclude lines with keywords that tend to look like casts
5449  context = line[0:match.start(1) - 1]
5450  if Match(r'.*\b(?:sizeof|alignof|alignas|[_A-Z][_A-Z0-9]*)\s*$', context):
5451  return False
5452 
5453  # Try expanding current context to see if we are one level of
5454  # parentheses inside a macro.
5455  if linenum > 0:
5456  for i in xrange(linenum - 1, max(0, linenum - 5), -1):
5457  context = clean_lines.elided[i] + context
5458  if Match(r'.*\b[_A-Z][_A-Z0-9]*\s*\((?:\([^()]*\)|[^()])*$', context):
5459  return False
5460 
5461  # operator++(int) and operator--(int)
5462  if context.endswith(' operator++') or context.endswith(' operator--'):
5463  return False
5464 
5465  # A single unnamed argument for a function tends to look like old style cast.
5466  # If we see those, don't issue warnings for deprecated casts.
5467  remainder = line[match.end(0):]
5468  if Match(r'^\s*(?:;|const\b|throw\b|final\b|override\b|[=>{),]|->)',
5469  remainder):
5470  return False
5471 
5472  # At this point, all that should be left is actual casts.
5473  error(filename, linenum, 'readability/casting', 4,
5474  'Using C-style cast. Use %s<%s>(...) instead' %
5475  (cast_type, match.group(1)))
5476 
5477  return True
5478 
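# Illustrative casts and the suggestions CheckCasts/CheckCStyleCast produce
# (sketch only, not part of cpplint):
#   int x = int(f);              -> readability/casting, use static_cast<int>(...)
#   int y = (int)f;              -> readability/casting, use static_cast<int>(...)
#   char* p = (char*)"hello";    -> readability/casting, use const_cast
#   uint8* b = (uint8*)buf;      -> readability/casting, use reinterpret_cast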
5479 
5480 def ExpectingFunctionArgs(clean_lines, linenum):
5481  """Checks whether function type arguments are expected.
5482 
5483  Args:
5484  clean_lines: A CleansedLines instance containing the file.
5485  linenum: The number of the line to check.
5486 
5487  Returns:
5488  True if the line at 'linenum' is inside something that expects arguments
5489  of function types.
5490  """
5491  line = clean_lines.elided[linenum]
5492  return (Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line) or
5493  (linenum >= 2 and
5494  (Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\((?:\S+,)?\s*$',
5495  clean_lines.elided[linenum - 1]) or
5496  Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\(\s*$',
5497  clean_lines.elided[linenum - 2]) or
5498  Search(r'\bstd::m?function\s*<\s*$',
5499  clean_lines.elided[linenum - 1]))))
5500 
5501 
5502 _HEADERS_CONTAINING_TEMPLATES = (
5503  ('<deque>', ('deque',)),
5504  ('<functional>', ('unary_function', 'binary_function',
5505  'plus', 'minus', 'multiplies', 'divides', 'modulus',
5506  'negate',
5507  'equal_to', 'not_equal_to', 'greater', 'less',
5508  'greater_equal', 'less_equal',
5509  'logical_and', 'logical_or', 'logical_not',
5510  'unary_negate', 'not1', 'binary_negate', 'not2',
5511  'bind1st', 'bind2nd',
5512  'pointer_to_unary_function',
5513  'pointer_to_binary_function',
5514  'ptr_fun',
5515  'mem_fun_t', 'mem_fun', 'mem_fun1_t', 'mem_fun1_ref_t',
5516  'mem_fun_ref_t',
5517  'const_mem_fun_t', 'const_mem_fun1_t',
5518  'const_mem_fun_ref_t', 'const_mem_fun1_ref_t',
5519  'mem_fun_ref',
5520  )),
5521  ('<limits>', ('numeric_limits',)),
5522  ('<list>', ('list',)),
5523  ('<map>', ('map', 'multimap',)),
5524  ('<memory>', ('allocator', 'make_shared', 'make_unique', 'shared_ptr',
5525  'unique_ptr', 'weak_ptr')),
5526  ('<queue>', ('queue', 'priority_queue',)),
5527  ('<set>', ('set', 'multiset',)),
5528  ('<stack>', ('stack',)),
5529  ('<string>', ('char_traits', 'basic_string',)),
5530  ('<tuple>', ('tuple',)),
5531  ('<unordered_map>', ('unordered_map', 'unordered_multimap')),
5532  ('<unordered_set>', ('unordered_set', 'unordered_multiset')),
5533  ('<utility>', ('pair',)),
5534  ('<vector>', ('vector',)),
5535 
5536  # gcc extensions.
5537  # Note: std::hash is their hash, ::hash is our hash
5538  ('<hash_map>', ('hash_map', 'hash_multimap',)),
5539  ('<hash_set>', ('hash_set', 'hash_multiset',)),
5540  ('<slist>', ('slist',)),
5541  )
5542 
5543 _HEADERS_MAYBE_TEMPLATES = (
5544  ('<algorithm>', ('copy', 'max', 'min', 'min_element', 'sort',
5545  'transform',
5546  )),
5547  ('<utility>', ('forward', 'make_pair', 'move', 'swap')),
5548  )
5549 
5550 _RE_PATTERN_STRING = re.compile(r'\bstring\b')
5551 
5552 _re_pattern_headers_maybe_templates = []
5553 for _header, _templates in _HEADERS_MAYBE_TEMPLATES:
5554  for _template in _templates:
5555  # Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max or
5556  # type::max().
5557  _re_pattern_headers_maybe_templates.append(
5558  (re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'),
5559  _template,
5560  _header))
5561 
5562 # Other scripts may reach in and modify this pattern.
5563 _re_pattern_templates = []
5564 for _header, _templates in _HEADERS_CONTAINING_TEMPLATES:
5565  for _template in _templates:
5566  _re_pattern_templates.append(
5567  (re.compile(r'(<|\b)' + _template + r'\s*<'),
5568  _template + '<>',
5569  _header))
5570 
5571 
5572 def FilesBelongToSameModule(filename_cc, filename_h):
5573  """Check if these two filenames belong to the same module.
5574 
5575  The concept of a 'module' here is as follows:
5576  foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the
5577  same 'module' if they are in the same directory.
5578  some/path/public/xyzzy and some/path/internal/xyzzy are also considered
5579  to belong to the same module here.
5580 
5581  If the filename_cc contains a longer path than the filename_h, for example,
5582  '/absolute/path/to/base/sysinfo.cc', and this file would include
5583  'base/sysinfo.h', this function also produces the prefix needed to open the
5584  header. This is used by the caller of this function to more robustly open the
5585  header file. We don't have access to the real include paths in this context,
5586  so we need this guesswork here.
5587 
5588  Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
5589  according to this implementation. Because of this, this function gives
5590  some false positives. This should be sufficiently rare in practice.
5591 
5592  Args:
5593  filename_cc: is the path for the source (e.g. .cc) file
5594  filename_h: is the path for the header (e.g. .h) file
5595 
5596  Returns:
5597  Tuple with a bool and a string:
5598  bool: True if filename_cc and filename_h belong to the same module.
5599  string: the additional prefix needed to open the header file.
5600  """
5601  fileinfo_cc = FileInfo(filename_cc)
5602  if fileinfo_cc.Extension().lstrip('.') not in GetNonHeaderExtensions():
5603  return (False, '')
5604 
5605  fileinfo_h = FileInfo(filename_h)
5606  if fileinfo_h.Extension().lstrip('.') not in GetHeaderExtensions():
5607  return (False, '')
5608 
5609  filename_cc = filename_cc[:-(len(fileinfo_cc.Extension()))]
5610  matched_test_suffix = Search(_TEST_FILE_SUFFIX, fileinfo_cc.BaseName())
5611  if matched_test_suffix:
5612  filename_cc = filename_cc[:-len(matched_test_suffix.group(1))]
5613 
5614  filename_cc = filename_cc.replace('/public/', '/')
5615  filename_cc = filename_cc.replace('/internal/', '/')
5616 
5617  filename_h = filename_h[:-(len(fileinfo_h.Extension()))]
5618  if filename_h.endswith('-inl'):
5619  filename_h = filename_h[:-len('-inl')]
5620  filename_h = filename_h.replace('/public/', '/')
5621  filename_h = filename_h.replace('/internal/', '/')
5622 
5623  files_belong_to_same_module = filename_cc.endswith(filename_h)
5624  common_path = ''
5625  if files_belong_to_same_module:
5626  common_path = filename_cc[:-len(filename_h)]
5627  return files_belong_to_same_module, common_path
5628 
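# Illustrative results, mirroring the docstring above and assuming the usual
# _test/_unittest suffix handling (sketch only, not executed):
#   FilesBelongToSameModule('a/b/foo_test.cc', 'a/b/foo.h')
#       -> (True, '')
#   FilesBelongToSameModule('/abs/path/to/base/sysinfo.cc', 'base/sysinfo.h')
#       -> (True, '/abs/path/to/')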
5629 
5630 def UpdateIncludeState(filename, include_dict, io=codecs):
5631  """Fill up the include_dict with new includes found from the file.
5632 
5633  Args:
5634  filename: the name of the header to read.
5635  include_dict: a dictionary in which the headers are inserted.
5636  io: The io factory to use to read the file. Provided for testability.
5637 
5638  Returns:
5639  True if a header was successfully added. False otherwise.
5640  """
5641  headerfile = None
5642  try:
5643  headerfile = io.open(filename, 'r', 'utf8', 'replace')
5644  except IOError:
5645  return False
5646  linenum = 0
5647  for line in headerfile:
5648  linenum += 1
5649  clean_line = CleanseComments(line)
5650  match = _RE_PATTERN_INCLUDE.search(clean_line)
5651  if match:
5652  include = match.group(2)
5653  include_dict.setdefault(include, linenum)
5654  return True
5655 
5656 
5657 def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
5658  io=codecs):
5659  """Reports missing STL includes.
5660 
5661  This function will output warnings to make sure you are including the headers
5662  necessary for the STL containers and functions that you use. We only give one
5663  reason to include a header. For example, if you use both equal_to<> and
5664  less<> in a .h file, only one (the latter in the file) of these will be
5665  reported as a reason to include <functional>.
5666 
5667  Args:
5668  filename: The name of the current file.
5669  clean_lines: A CleansedLines instance containing the file.
5670  include_state: An _IncludeState instance.
5671  error: The function to call with any errors found.
5672  io: The IO factory to use to read the header file. Provided for unittest
5673  injection.
5674  """
5675  required = {} # A map of header name to linenumber and the template entity.
5676  # Example of required: { '<functional>': (1219, 'less<>') }
5677 
5678  for linenum in range(clean_lines.NumLines()):
5679  line = clean_lines.elided[linenum]
5680  if not line or line[0] == '#':
5681  continue
5682 
5683  # String is special -- it is a non-templatized type in STL.
5684  matched = _RE_PATTERN_STRING.search(line)
5685  if matched:
5686  # Don't warn about strings in non-STL namespaces:
5687  # (We check only the first match per line; good enough.)
5688  prefix = line[:matched.start()]
5689  if prefix.endswith('std::') or not prefix.endswith('::'):
5690  required['<string>'] = (linenum, 'string')
5691 
5692  for pattern, template, header in _re_pattern_headers_maybe_templates:
5693  if pattern.search(line):
5694  required[header] = (linenum, template)
5695 
5696  # The following function is just a speed up, no semantics are changed.
5697  if '<' not in line: # Reduces the cpu time usage by skipping lines.
5698  continue
5699 
5700  for pattern, template, header in _re_pattern_templates:
5701  matched = pattern.search(line)
5702  if matched:
5703  # Don't warn about IWYU in non-STL namespaces:
5704  # (We check only the first match per line; good enough.)
5705  prefix = line[:matched.start()]
5706  if prefix.endswith('std::') or not prefix.endswith('::'):
5707  required[header] = (linenum, template)
5708 
5709  # The policy is that if you #include something in foo.h you don't need to
5710  # include it again in foo.cc. Here, we will look at possible includes.
5711  # Let's flatten the include_state include_list and copy it into a dictionary.
5712  include_dict = dict([item for sublist in include_state.include_list
5713  for item in sublist])
5714 
5715  # Did we find the header for this file (if any) and successfully load it?
5716  header_found = False
5717 
5718  # Use the absolute path so that matching works properly.
5719  abs_filename = FileInfo(filename).FullName()
5720 
5721  # For Emacs's flymake.
5722  # If cpplint is invoked from Emacs's flymake, a temporary file is generated
5723  # by flymake and that file name might end with '_flymake.cc'. In that case,
5724  # restore original file name here so that the corresponding header file can be
5725  # found.
5726  # e.g. If the file name is 'foo_flymake.cc', we should search for 'foo.h'
5727  # instead of 'foo_flymake.h'
5728  abs_filename = re.sub(r'_flymake\.cc$', '.cc', abs_filename)
5729 
5730  # include_dict is modified during iteration, so we iterate over a copy of
5731  # the keys.
5732  header_keys = list(include_dict.keys())
5733  for header in header_keys:
5734  (same_module, common_path) = FilesBelongToSameModule(abs_filename, header)
5735  fullpath = common_path + header
5736  if same_module and UpdateIncludeState(fullpath, include_dict, io):
5737  header_found = True
5738 
5739  # If we can't find the header file for a .cc, assume it's because we don't
5740  # know where to look. In that case we'll give up as we're not sure they
5741  # didn't include it in the .h file.
5742  # TODO(unknown): Do a better job of finding .h files so we are confident that
5743  # not having the .h file means there isn't one.
5744  if not header_found:
5745  for extension in GetNonHeaderExtensions():
5746  if filename.endswith('.' + extension):
5747  return
5748 
5749  # All the lines have been processed, report the errors found.
5750  for required_header_unstripped in sorted(required, key=required.__getitem__):
5751  template = required[required_header_unstripped][1]
5752  if required_header_unstripped.strip('<>"') not in include_dict:
5753  error(filename, required[required_header_unstripped][0],
5754  'build/include_what_you_use', 4,
5755  'Add #include ' + required_header_unstripped + ' for ' + template)
5756 
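For orientation, here is a minimal sketch of the data CheckForIncludeWhatYouUse builds and the message it emits; the file name and line numbers are made up, while the shape of the mapping, the category and the message format come from the code above.

    # Hypothetical `required` after a file uses std::string on line 12 and
    # std::less<> on line 40:
    #   required = {'<string>': (12, 'string'), '<functional>': (40, 'less<>')}
    # If a required header is not already in include_dict (directly or via the
    # file's own header), the error callback fires once per missing header:
    #   error('foo.cc', 12, 'build/include_what_you_use', 4,
    #         'Add #include <string> for string')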
5757 
5758 _RE_PATTERN_EXPLICIT_MAKEPAIR = re.compile(r'\bmake_pair\s*<')
5759 
5760 
5761 def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error):
5762  """Check that make_pair's template arguments are deduced.
5763 
5764  G++ 4.6 in C++11 mode fails badly if make_pair's template arguments are
5765  specified explicitly, and such use isn't intended in any case.
5766 
5767  Args:
5768  filename: The name of the current file.
5769  clean_lines: A CleansedLines instance containing the file.
5770  linenum: The number of the line to check.
5771  error: The function to call with any errors found.
5772  """
5773  line = clean_lines.elided[linenum]
5774  match = _RE_PATTERN_EXPLICIT_MAKEPAIR.search(line)
5775  if match:
5776  error(filename, linenum, 'build/explicit_make_pair',
5777  4, # 4 = high confidence
5778  'For C++11-compatibility, omit template arguments from make_pair'
5779  ' OR use pair directly OR if appropriate, construct a pair directly')
5780 
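As a quick illustration of what _RE_PATTERN_EXPLICIT_MAKEPAIR above matches, this standalone sketch (plain re, outside the linter) shows a C++ line that triggers the warning and one that does not; the C++ fragments are hypothetical.

    import re
    explicit_make_pair = re.compile(r'\bmake_pair\s*<')  # same pattern as above
    assert explicit_make_pair.search('return make_pair<int, int>(1, 2);')  # flagged
    assert not explicit_make_pair.search('return make_pair(1, 2);')        # deduced, OK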
5781 
5782 def CheckRedundantVirtual(filename, clean_lines, linenum, error):
5783  """Check if line contains a redundant "virtual" function-specifier.
5784 
5785  Args:
5786  filename: The name of the current file.
5787  clean_lines: A CleansedLines instance containing the file.
5788  linenum: The number of the line to check.
5789  error: The function to call with any errors found.
5790  """
5791  # Look for "virtual" on current line.
5792  line = clean_lines.elided[linenum]
5793  virtual = Match(r'^(.*)(\bvirtual\b)(.*)$', line)
5794  if not virtual: return
5795 
5796  # Ignore "virtual" keywords that are near access-specifiers. These
5797  # are only used in class base-specifier and do not apply to member
5798  # functions.
5799  if (Search(r'\b(public|protected|private)\s+$', virtual.group(1)) or
5800  Match(r'^\s+(public|protected|private)\b', virtual.group(3))):
5801  return
5802 
5803  # Ignore the "virtual" keyword from virtual base classes. Usually
5804  # there is a colon on the same line in these cases (virtual base
5805  # classes are rare in google3 because multiple inheritance is rare).
5806  if Match(r'^.*[^:]:[^:].*$', line): return
5807 
5808  # Look for the next opening parenthesis. This is the start of the
5809  # parameter list (possibly on the next line shortly after virtual).
5810  # TODO(unknown): doesn't work if there are virtual functions with
5811  # decltype() or other things that use parentheses, but csearch suggests
5812  # that this is rare.
5813  end_col = -1
5814  end_line = -1
5815  start_col = len(virtual.group(2))
5816  for start_line in xrange(linenum, min(linenum + 3, clean_lines.NumLines())):
5817  line = clean_lines.elided[start_line][start_col:]
5818  parameter_list = Match(r'^([^(]*)\(', line)
5819  if parameter_list:
5820  # Match parentheses to find the end of the parameter list
5821  (_, end_line, end_col) = CloseExpression(
5822  clean_lines, start_line, start_col + len(parameter_list.group(1)))
5823  break
5824  start_col = 0
5825 
5826  if end_col < 0:
5827  return # Couldn't find end of parameter list, give up
5828 
5829  # Look for "override" or "final" after the parameter list
5830  # (possibly on the next few lines).
5831  for i in xrange(end_line, min(end_line + 3, clean_lines.NumLines())):
5832  line = clean_lines.elided[i][end_col:]
5833  match = Search(r'\b(override|final)\b', line)
5834  if match:
5835  error(filename, linenum, 'readability/inheritance', 4,
5836  ('"virtual" is redundant since function is '
5837  'already declared as "%s"' % match.group(1)))
5838 
5839  # Set end_col to check whole lines after we are done with the
5840  # first line.
5841  end_col = 0
5842  if Search(r'[^\w]\s*$', line):
5843  break
5844 
5845 
5846 def CheckRedundantOverrideOrFinal(filename, clean_lines, linenum, error):
5847  """Check if line contains a redundant "override" or "final" virt-specifier.
5848 
5849  Args:
5850  filename: The name of the current file.
5851  clean_lines: A CleansedLines instance containing the file.
5852  linenum: The number of the line to check.
5853  error: The function to call with any errors found.
5854  """
5855  # Look for closing parenthesis nearby. We need one to confirm where
5856  # the declarator ends and where the virt-specifier starts to avoid
5857  # false positives.
5858  line = clean_lines.elided[linenum]
5859  declarator_end = line.rfind(')')
5860  if declarator_end >= 0:
5861  fragment = line[declarator_end:]
5862  else:
5863  if linenum > 1 and clean_lines.elided[linenum - 1].rfind(')') >= 0:
5864  fragment = line
5865  else:
5866  return
5867 
5868  # Check that at most one of "override" or "final" is present, not both
5869  if Search(r'\boverride\b', fragment) and Search(r'\bfinal\b', fragment):
5870  error(filename, linenum, 'readability/inheritance', 4,
5871  ('"override" is redundant since function is '
5872  'already declared as "final"'))
5873 
5874 
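To make the two redundancy checks above concrete, the sketch below runs the same regular expressions (via plain re rather than the file's Search/Match helpers, and without the CloseExpression bookkeeping) on hypothetical declarations that would be flagged.

    import re
    # CheckRedundantVirtual: "virtual" on a declaration that also says "override".
    line = 'virtual void Draw() const override;'
    assert re.match(r'^(.*)(\bvirtual\b)(.*)$', line)
    assert re.search(r'\b(override|final)\b', line)
    # CheckRedundantOverrideOrFinal: both virt-specifiers after the declarator.
    fragment = ') override final;'
    assert re.search(r'\boverride\b', fragment) and re.search(r'\bfinal\b', fragment)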
5875 
5876 
5877 # Returns true if we are at a new block, and it is directly
5878 # inside of a namespace.
5879 def IsBlockInNameSpace(nesting_state, is_forward_declaration):
5880  """Checks that the new block is directly in a namespace.
5881 
5882  Args:
5883  nesting_state: The _NestingState object that contains info about our state.
5884  is_forward_declaration: If the class is a forward declared class.
5885  Returns:
5886  Whether or not the new block is directly in a namespace.
5887  """
5888  if is_forward_declaration:
5889  return len(nesting_state.stack) >= 1 and (
5890  isinstance(nesting_state.stack[-1], _NamespaceInfo))
5891 
5892 
5893  return (len(nesting_state.stack) > 1 and
5894  nesting_state.stack[-1].check_namespace_indentation and
5895  isinstance(nesting_state.stack[-2], _NamespaceInfo))
5896 
5897 
5898 def ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item,
5899  raw_lines_no_comments, linenum):
5900  """This method determines if we should apply our namespace indentation check.
5901 
5902  Args:
5903  nesting_state: The current nesting state.
5904  is_namespace_indent_item: If we just put a new class on the stack, True.
5905  If the top of the stack is not a class, or we did not recently
5906  add the class, False.
5907  raw_lines_no_comments: The lines without the comments.
5908  linenum: The current line number we are processing.
5909 
5910  Returns:
5911  True if we should apply our namespace indentation check. Currently, it
5912  only works for classes and namespaces inside of a namespace.
5913  """
5914 
5915  is_forward_declaration = IsForwardClassDeclaration(raw_lines_no_comments,
5916  linenum)
5917 
5918  if not (is_namespace_indent_item or is_forward_declaration):
5919  return False
5920 
5921  # If we are in a macro, we do not want to check the namespace indentation.
5922  if IsMacroDefinition(raw_lines_no_comments, linenum):
5923  return False
5924 
5925  return IsBlockInNameSpace(nesting_state, is_forward_declaration)
5926 
5927 
5928 # Call this method if the line is directly inside of a namespace.
5929 # If the line above is blank (excluding comments) or the start of
5930 # an inner namespace, it cannot be indented.
5931 def CheckItemIndentationInNamespace(filename, raw_lines_no_comments, linenum,
5932  error):
5933  line = raw_lines_no_comments[linenum]
5934  if Match(r'^\s+', line):
5935  error(filename, linenum, 'runtime/indentation_namespace', 4,
5936  'Do not indent within a namespace')
5937 
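A short illustration of what CheckItemIndentationInNamespace flags, assuming ShouldCheckNamespaceIndentation above has already decided the check applies; the C++ lines are hypothetical.

    #   namespace foo {
    #   class Bar;      <- no leading whitespace: OK
    #     class Baz;    <- indented: 'Do not indent within a namespace'
    #   }  // namespace foo
    # The check itself is only the leading-whitespace Match(r'^\s+', line) above.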
5938 
5939 def ProcessLine(filename, file_extension, clean_lines, line,
5940  include_state, function_state, nesting_state, error,
5941  extra_check_functions=None):
5942  """Processes a single line in the file.
5943 
5944  Args:
5945  filename: Filename of the file that is being processed.
5946  file_extension: The extension (dot not included) of the file.
5947  clean_lines: An array of strings, each representing a line of the file,
5948  with comments stripped.
5949  line: Number of line being processed.
5950  include_state: An _IncludeState instance in which the headers are inserted.
5951  function_state: A _FunctionState instance which counts function lines, etc.
5952  nesting_state: A NestingState instance which maintains information about
5953  the current stack of nested blocks being parsed.
5954  error: A callable to which errors are reported, which takes 4 arguments:
5955  filename, line number, error level, and message
5956  extra_check_functions: An array of additional check functions that will be
5957  run on each source line. Each function takes 4
5958  arguments: filename, clean_lines, line, error
5959  """
5960  raw_lines = clean_lines.raw_lines
5961  ParseNolintSuppressions(filename, raw_lines[line], line, error)
5962  nesting_state.Update(filename, clean_lines, line, error)
5963 # CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line,
5964 # error)
5965  if nesting_state.InAsmBlock(): return
5966  CheckForFunctionLengths(filename, clean_lines, line, function_state, error)
5967  CheckForMultilineCommentsAndStrings(filename, clean_lines, line, error)
5968  CheckStyle(filename, clean_lines, line, file_extension, nesting_state, error)
5969  CheckLanguage(filename, clean_lines, line, file_extension, include_state,
5970  nesting_state, error)
5971  CheckForNonConstReference(filename, clean_lines, line, nesting_state, error)
5972  CheckForNonStandardConstructs(filename, clean_lines, line,
5973  nesting_state, error)
5974  CheckVlogArguments(filename, clean_lines, line, error)
5975  CheckPosixThreading(filename, clean_lines, line, error)
5976  CheckInvalidIncrement(filename, clean_lines, line, error)
5977  CheckMakePairUsesDeduction(filename, clean_lines, line, error)
5978  CheckRedundantVirtual(filename, clean_lines, line, error)
5979  CheckRedundantOverrideOrFinal(filename, clean_lines, line, error)
5980  if extra_check_functions:
5981  for check_fn in extra_check_functions:
5982  check_fn(filename, clean_lines, line, error)
5983 
5984 def FlagCxx11Features(filename, clean_lines, linenum, error):
5985  """Flag those c++11 features that we only allow in certain places.
5986 
5987  Args:
5988  filename: The name of the current file.
5989  clean_lines: A CleansedLines instance containing the file.
5990  linenum: The number of the line to check.
5991  error: The function to call with any errors found.
5992  """
5993  line = clean_lines.elided[linenum]
5994 
5995  include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line)
5996 
5997  # Flag unapproved C++ TR1 headers.
5998  if include and include.group(1).startswith('tr1/'):
5999  error(filename, linenum, 'build/c++tr1', 5,
6000  ('C++ TR1 headers such as <%s> are unapproved.') % include.group(1))
6001 
6002  # Flag unapproved C++11 headers.
6003  if include and include.group(1) in ('condition_variable',
6004  'fenv.h',
6005  'future',
6006  'mutex',
6007  'thread',
6008  'chrono',
6009  'ratio',
6010  'system_error',
6011  ):
6012  error(filename, linenum, 'build/c++11', 5,
6013  ('<%s> is an unapproved C++11 header.') % include.group(1))
6014 
6015  # The only place where we need to worry about C++11 keywords and library
6016  # features in preprocessor directives is in macro definitions.
6017  if Match(r'\s*#', line) and not Match(r'\s*#\s*define\b', line): return
6018 
6019  # These are classes and free functions. The classes are always
6020  # mentioned as std::*, but we only catch the free functions if
6021  # they're not found by ADL. They're alphabetical by header.
6022  for top_name in (
6023  # type_traits
6024  'alignment_of',
6025  'aligned_union',
6026  ):
6027  if Search(r'\bstd::%s\b' % top_name, line):
6028  error(filename, linenum, 'build/c++11', 5,
6029  ('std::%s is an unapproved C++11 class or function. Send c-style '
6030  'an example of where it would make your code more readable, and '
6031  'they may let you use it.') % top_name)
6032 
6033 
6034 def FlagCxx14Features(filename, clean_lines, linenum, error):
6035  """Flag those C++14 features that we restrict.
6036 
6037  Args:
6038  filename: The name of the current file.
6039  clean_lines: A CleansedLines instance containing the file.
6040  linenum: The number of the line to check.
6041  error: The function to call with any errors found.
6042  """
6043  line = clean_lines.elided[linenum]
6044 
6045  include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line)
6046 
6047  # Flag unapproved C++14 headers.
6048  if include and include.group(1) in ('scoped_allocator', 'shared_mutex'):
6049  error(filename, linenum, 'build/c++14', 5,
6050  ('<%s> is an unapproved C++14 header.') % include.group(1))
6051 
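For reference, these are the kinds of include lines the two flagging functions above report, using headers taken from the lists in the code; the source lines themselves are hypothetical.

    #   #include <tr1/memory>    -> build/c++tr1: 'C++ TR1 headers such as <tr1/memory> are unapproved.'
    #   #include <mutex>         -> build/c++11:  '<mutex> is an unapproved C++11 header.'
    #   #include <shared_mutex>  -> build/c++14:  '<shared_mutex> is an unapproved C++14 header.'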
6052 
6053 def ProcessFileData(filename, file_extension, lines, error,
6054  extra_check_functions=None):
6055  """Performs lint checks and reports any errors to the given error function.
6056 
6057  Args:
6058  filename: Filename of the file that is being processed.
6059  file_extension: The extension (dot not included) of the file.
6060  lines: An array of strings, each representing a line of the file, with the
6061  last element being empty if the file is terminated with a newline.
6062  error: A callable to which errors are reported, which takes 4 arguments:
6063  filename, line number, error level, and message
6064  extra_check_functions: An array of additional check functions that will be
6065  run on each source line. Each function takes 4
6066  arguments: filename, clean_lines, line, error
6067  """
6068  lines = (['// marker so line numbers and indices both start at 1'] + lines +
6069  ['// marker so line numbers end in a known way'])
6070 
6071  include_state = _IncludeState()
6072  function_state = _FunctionState()
6073  nesting_state = NestingState()
6074 
6075  ResetNolintSuppressions()
6076 
6077  CheckForCopyright(filename, lines, error)
6078  ProcessGlobalSuppresions(lines)
6079  RemoveMultiLineComments(filename, lines, error)
6080  clean_lines = CleansedLines(lines)
6081 
6082  if file_extension in GetHeaderExtensions():
6083  CheckForHeaderGuard(filename, clean_lines, error)
6084 
6085  for line in range(clean_lines.NumLines()):
6086  ProcessLine(filename, file_extension, clean_lines, line,
6087  include_state, function_state, nesting_state, error,
6088  extra_check_functions)
6089  FlagCxx11Features(filename, clean_lines, line, error)
6090  nesting_state.CheckCompletedBlocks(filename, error)
6091 
6092  CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error)
6093 
6094  # Check that the .cc file has included its header if it exists.
6095  if _IsSourceExtension(file_extension):
6096  CheckHeaderFileIncluded(filename, include_state, error)
6097 
6098  # We check here rather than inside ProcessLine so that we see raw
6099  # lines rather than "cleaned" lines.
6100  CheckForBadCharacters(filename, lines, error)
6101 
6102  CheckForNewlineAtEOF(filename, lines, error)
6103 
6104 def ProcessConfigOverrides(filename):
6105  """Loads the configuration files and processes the config overrides.
6106 
6107  Args:
6108  filename: The name of the file being processed by the linter.
6109 
6110  Returns:
6111  False if the current |filename| should not be processed further.
6112  """
6113 
6114  abs_filename = os.path.abspath(filename)
6115  cfg_filters = []
6116  keep_looking = True
6117  while keep_looking:
6118  abs_path, base_name = os.path.split(abs_filename)
6119  if not base_name:
6120  break # Reached the root directory.
6121 
6122  cfg_file = os.path.join(abs_path, "CPPLINT.cfg")
6123  abs_filename = abs_path
6124  if not os.path.isfile(cfg_file):
6125  continue
6126 
6127  try:
6128  with open(cfg_file) as file_handle:
6129  for line in file_handle:
6130  line, _, _ = line.partition('#') # Remove comments.
6131  if not line.strip():
6132  continue
6133 
6134  name, _, val = line.partition('=')
6135  name = name.strip()
6136  val = val.strip()
6137  if name == 'set noparent':
6138  keep_looking = False
6139  elif name == 'filter':
6140  cfg_filters.append(val)
6141  elif name == 'exclude_files':
6142  # When matching exclude_files pattern, use the base_name of
6143  # the current file name or the directory name we are processing.
6144  # For example, if we are checking for lint errors in /foo/bar/baz.cc
6145  # and we found the .cfg file at /foo/CPPLINT.cfg, then the config
6146  # file's "exclude_files" filter is meant to be checked against "bar"
6147  # and not "baz" nor "bar/baz.cc".
6148  if base_name:
6149  pattern = re.compile(val)
6150  if pattern.match(base_name):
6151  _cpplint_state.PrintInfo('Ignoring "%s": file excluded by '
6152  '"%s". File path component "%s" matches pattern "%s"\n' %
6153  (filename, cfg_file, base_name, val))
6154  return False
6155  elif name == 'linelength':
6156  global _line_length
6157  try:
6158  _line_length = int(val)
6159  except ValueError:
6160  _cpplint_state.PrintError('Line length must be numeric.')
6161  elif name == 'extensions':
6162  global _valid_extensions
6163  try:
6164  extensions = [ext.strip() for ext in val.split(',')]
6165  _valid_extensions = set(extensions)
6166  except ValueError:
6167  sys.stderr.write('Extensions should be a comma-separated list of values; '
6168  'for example: extensions=hpp,cpp\n'
6169  'This could not be parsed: "%s"' % (val,))
6170  elif name == 'headers':
6171  global _header_extensions
6172  try:
6173  extensions = [ext.strip() for ext in val.split(',')]
6174  _header_extensions = set(extensions)
6175  except ValueError:
6176  sys.stderr.write('Header extensions should be a comma-separated list of values; '
6177  'for example: headers=h,hpp\n'
6178  'This could not be parsed: "%s"' % (val,))
6179  elif name == 'root':
6180  global _root
6181  _root = val
6182  else:
6183  _cpplint_state.PrintError(
6184  'Invalid configuration option (%s) in file %s\n' %
6185  (name, cfg_file))
6186 
6187  except IOError:
6188  _cpplint_state.PrintError(
6189  "Skipping config file '%s': Can't open for reading\n" % cfg_file)
6190  keep_looking = False
6191 
6192  # Apply all the accumulated filters in reverse order (top-level directory
6193  # config options having the least priority).
6194  for cfg_filter in reversed(cfg_filters):
6195  _AddFilters(cfg_filter)
6196 
6197  return True
6198 
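Pulling the options parsed above together, a hypothetical CPPLINT.cfg could look like the following; every value is illustrative, but the option names, the '=' syntax and the '#' comments are exactly what the parser above handles, and filter categories such as build/include_what_you_use appear elsewhere in this file.

    # CPPLINT.cfg -- combined with cfg files found in parent directories
    # unless 'set noparent' is present.
    set noparent
    filter=-build/include_what_you_use,-readability/inheritance
    exclude_files=third_party
    linelength=100
    root=src
    extensions=cc,cpp,h,hpp
    headers=h,hpp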
6199 
6200 def ProcessFile(filename, vlevel, extra_check_functions=None):
6201  """Does google-lint on a single file.
6202 
6203  Args:
6204  filename: The name of the file to parse.
6205 
6206  vlevel: The level of errors to report. Every error of confidence
6207  >= verbose_level will be reported. 0 is a good default.
6208 
6209  extra_check_functions: An array of additional check functions that will be
6210  run on each source line. Each function takes 4
6211  arguments: filename, clean_lines, line, error
6212  """
6213 
6214  _SetVerboseLevel(vlevel)
6215  _BackupFilters()
6216 
6217  if not ProcessConfigOverrides(filename):
6218  _RestoreFilters()
6219  return
6220 
6221  lf_lines = []
6222  crlf_lines = []
6223  try:
6224  # Support the UNIX convention of using "-" for stdin. Note that
6225  # we are not opening the file with universal newline support
6226  # (which codecs doesn't support anyway), so the resulting lines do
6227  # contain trailing '\r' characters if we are reading a file that
6228  # has CRLF endings.
6229  # If after the split a trailing '\r' is present, it is removed
6230  # below.
6231  if filename == '-':
6232  lines = codecs.StreamReaderWriter(sys.stdin,
6233  codecs.getreader('utf8'),
6234  codecs.getwriter('utf8'),
6235  'replace').read().split('\n')
6236  else:
6237  lines = codecs.open(filename, 'r', 'utf8', 'replace').read().split('\n')
6238 
6239  # Remove trailing '\r'.
6240  # The -1 accounts for the extra trailing blank line we get from split()
6241  for linenum in range(len(lines) - 1):
6242  if lines[linenum].endswith('\r'):
6243  lines[linenum] = lines[linenum].rstrip('\r')
6244  crlf_lines.append(linenum + 1)
6245  else:
6246  lf_lines.append(linenum + 1)
6247 
6248  except IOError:
6249  _cpplint_state.PrintError(
6250  "Skipping input '%s': Can't open for reading\n" % filename)
6251  _RestoreFilters()
6252  return
6253 
6254  # Note, if no dot is found, this will give the entire filename as the ext.
6255  file_extension = filename[filename.rfind('.') + 1:]
6256 
6257  # When reading from stdin, the extension is unknown, so no cpplint tests
6258  # should rely on the extension.
6259  if filename != '-' and file_extension not in GetAllExtensions():
6260  _cpplint_state.PrintError('Ignoring %s; not a valid file name '
6261  '(%s)\n' % (filename, ', '.join(GetAllExtensions())))
6262  else:
6263  ProcessFileData(filename, file_extension, lines, Error,
6264  extra_check_functions)
6265 
6266  # If end-of-line sequences are a mix of LF and CR-LF, issue
6267  # warnings on the lines with CR.
6268  #
6269  # Don't issue any warnings if all lines are uniformly LF or CR-LF,
6270  # since critique can handle these just fine, and the style guide
6271  # doesn't dictate a particular end of line sequence.
6272  #
6273  # We can't depend on os.linesep to determine what the desired
6274  # end-of-line sequence should be, since that will return the
6275  # server-side end-of-line sequence.
6276  if lf_lines and crlf_lines:
6277  # Warn on every line with CR. An alternative approach might be to
6278  # check whether the file is mostly CRLF or just LF, and warn on the
6279  # minority; we bias toward LF here since most tools prefer LF.
6280  for linenum in crlf_lines:
6281  Error(filename, linenum, 'whitespace/newline', 1,
6282  'Unexpected \\r (^M) found; better to use only \\n')
6283 
6284  _cpplint_state.PrintInfo('Done processing %s\n' % filename)
6285  _RestoreFilters()
6286 
6287 
6288 def PrintUsage(message):
6289  """Prints a brief usage string and exits, optionally with an error message.
6290 
6291  Args:
6292  message: The optional error message.
6293  """
6294  sys.stderr.write(_USAGE)
6295 
6296  if message:
6297  sys.exit('\nFATAL ERROR: ' + message)
6298  else:
6299  sys.exit(0)
6300 
6301 
6302 def PrintCategories():
6303  """Prints a list of all the error-categories used by error messages.
6304 
6305  These are the categories used to filter messages via --filter.
6306  """
6307  sys.stderr.write(''.join(' %s\n' % cat for cat in _ERROR_CATEGORIES))
6308  sys.exit(0)
6309 
6310 
6311 def ParseArguments(args):
6312  """Parses the command line arguments.
6313 
6314  This may set the output format and verbosity level as side-effects.
6315 
6316  Args:
6317  args: The command line arguments.
6318 
6319  Returns:
6320  The list of filenames to lint.
6321  """
6322  try:
6323  (opts, filenames) = getopt.getopt(args, '', ['help', 'output=', 'verbose=',
6324  'counting=',
6325  'filter=',
6326  'root=',
6327  'repository=',
6328  'linelength=',
6329  'extensions=',
6330  'exclude=',
6331  'headers=',
6332  'quiet',
6333  'recursive'])
6334  except getopt.GetoptError:
6335  PrintUsage('Invalid arguments.')
6336 
6337  verbosity = _VerboseLevel()
6338  output_format = _OutputFormat()
6339  filters = ''
6340  counting_style = ''
6341  recursive = False
6342 
6343  for (opt, val) in opts:
6344  if opt == '--help':
6345  PrintUsage(None)
6346  elif opt == '--output':
6347  if val not in ('emacs', 'vs7', 'eclipse', 'junit'):
6348  PrintUsage('The only allowed output formats are emacs, vs7, eclipse '
6349  'and junit.')
6350  output_format = val
6351  elif opt == '--verbose':
6352  verbosity = int(val)
6353  elif opt == '--filter':
6354  filters = val
6355  if not filters:
6356  PrintCategories()
6357  elif opt == '--counting':
6358  if val not in ('total', 'toplevel', 'detailed'):
6359  PrintUsage('Valid counting options are total, toplevel, and detailed')
6360  counting_style = val
6361  elif opt == '--root':
6362  global _root
6363  _root = val
6364  elif opt == '--repository':
6365  global _repository
6366  _repository = val
6367  elif opt == '--linelength':
6368  global _line_length
6369  try:
6370  _line_length = int(val)
6371  except ValueError:
6372  PrintUsage('Line length must be digits.')
6373  elif opt == '--exclude':
6374  global _excludes
6375  if not _excludes:
6376  _excludes = set()
6377  _excludes.update(glob.glob(val))
6378  elif opt == '--extensions':
6379  global _valid_extensions
6380  try:
6381  _valid_extensions = set(val.split(','))
6382  except ValueError:
6383  PrintUsage('Extensions must be a comma-separated list.')
6384  elif opt == '--headers':
6385  global _header_extensions
6386  try:
6387  _header_extensions = set(val.split(','))
6388  except ValueError:
6389  PrintUsage('Header extensions must be a comma-separated list.')
6390  elif opt == '--recursive':
6391  recursive = True
6392  elif opt == '--quiet':
6393  global _quiet
6394  _quiet = True
6395 
6396  if not filenames:
6397  PrintUsage('No files were specified.')
6398 
6399  if recursive:
6400  filenames = _ExpandDirectories(filenames)
6401 
6402  if _excludes:
6403  filenames = _FilterExcludedFiles(filenames)
6404 
6405  _SetOutputFormat(output_format)
6406  _SetVerboseLevel(verbosity)
6407  _SetFilters(filters)
6408  _SetCountingStyle(counting_style)
6409 
6410  return filenames
6411 
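As a usage sketch, the flags handled by ParseArguments combine into invocations such as the ones below; the paths are hypothetical, and --exclude globs are expanded relative to the current working directory (see _FilterExcludedFiles below).

    # Lint one file with a larger line-length limit and emacs-style output:
    #   python saber_cpplint.py --output=emacs --linelength=100 foo/bar.cc
    # Recursively lint a tree while silencing one category and skipping
    # generated files:
    #   python saber_cpplint.py --recursive --filter=-build/include_what_you_use \
    #       --exclude=src/*_generated.cc src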
6412 def _ExpandDirectories(filenames):
6413  """Searches a list of filenames and replaces directories in the list with
6414  all files descending from those directories. Files with extensions not in
6415  the valid extensions list are excluded.
6416 
6417  Args:
6418  filenames: A list of files or directories
6419 
6420  Returns:
6421  A list of all files that are members of filenames or descended from a
6422  directory in filenames
6423  """
6424  expanded = set()
6425  for filename in filenames:
6426  if not os.path.isdir(filename):
6427  expanded.add(filename)
6428  continue
6429 
6430  for root, _, files in os.walk(filename):
6431  for loopfile in files:
6432  fullname = os.path.join(root, loopfile)
6433  if fullname.startswith('.' + os.path.sep):
6434  fullname = fullname[len('.' + os.path.sep):]
6435  expanded.add(fullname)
6436 
6437  filtered = []
6438  for filename in expanded:
6439  if os.path.splitext(filename)[1][1:] in GetAllExtensions():
6440  filtered.append(filename)
6441 
6442  return filtered
6443 
6444 def _FilterExcludedFiles(filenames):
6445  """Filters out files listed in the --exclude command line switch. File paths
6446  in the switch are evaluated relative to the current working directory.
6447  """
6448  exclude_paths = [os.path.abspath(f) for f in _excludes]
6449  return [f for f in filenames if os.path.abspath(f) not in exclude_paths]
6450 
6451 def main():
6452  filenames = ParseArguments(sys.argv[1:])
6453  backup_err = sys.stderr
6454  try:
6455  # Change stderr to write with replacement characters so we don't die
6456  # if we try to print something containing non-ASCII characters.
6457  sys.stderr = codecs.StreamReader(sys.stderr, 'replace')
6458 
6459  _cpplint_state.ResetErrorCounts()
6460  for filename in filenames:
6461  ProcessFile(filename, _cpplint_state.verbose_level)
6462  _cpplint_state.PrintErrorCounts()
6463 
6464  if _cpplint_state.output_format == 'junit':
6465  sys.stderr.write(_cpplint_state.FormatJUnitXML())
6466 
6467  finally:
6468  sys.stderr = backup_err
6469 
6470  sys.exit(_cpplint_state.error_count > 0)
6471 
6472 
6473 if __name__ == '__main__':
6474  main()
saber_cpplint.CheckForFunctionLengths
def CheckForFunctionLengths(filename, clean_lines, linenum, function_state, error)
Definition: saber_cpplint.py:3158
saber_cpplint.FileInfo.Extension
def Extension(self)
Definition: saber_cpplint.py:1343
saber_cpplint.NestingState.SeenOpenBrace
def SeenOpenBrace(self)
Definition: saber_cpplint.py:2541
saber_cpplint.IsOutOfLineMethodDefinition
def IsOutOfLineMethodDefinition(clean_lines, linenum)
Definition: saber_cpplint.py:5111
saber_cpplint._ExternCInfo.__init__
def __init__(self, linenum)
Definition: saber_cpplint.py:2367
saber_cpplint._NamespaceInfo.__init__
def __init__(self, name, linenum)
Definition: saber_cpplint.py:2443
saber_cpplint.ReverseCloseExpression
def ReverseCloseExpression(clean_lines, linenum, pos)
Definition: saber_cpplint.py:1889
saber_cpplint.FindNextMultiLineCommentEnd
def FindNextMultiLineCommentEnd(lines, lineix)
Definition: saber_cpplint.py:1546
saber_cpplint._CppLintState.ResetErrorCounts
def ResetErrorCounts(self)
Definition: saber_cpplint.py:1040
saber_cpplint.CleanseRawStrings
def CleanseRawStrings(raw_lines)
Definition: saber_cpplint.py:1459
saber_cpplint._ShouldPrintError
def _ShouldPrintError(category, confidence, linenum)
Definition: saber_cpplint.py:1356
saber_cpplint._NamespaceInfo
Definition: saber_cpplint.py:2440
saber_cpplint._PreprocessorInfo.stack_before_else
stack_before_else
Definition: saber_cpplint.py:2509
saber_cpplint.ParseArguments
def ParseArguments(args)
Definition: saber_cpplint.py:6311
saber_cpplint.PrintUsage
def PrintUsage(message)
Definition: saber_cpplint.py:6288
saber_cpplint.CheckOperatorSpacing
def CheckOperatorSpacing(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:3441
saber_cpplint._CppLintState.PrintInfo
def PrintInfo(self, message)
Definition: saber_cpplint.py:1063
saber_cpplint.CheckSectionSpacing
def CheckSectionSpacing(filename, clean_lines, class_info, linenum, error)
Definition: saber_cpplint.py:3799
saber_cpplint._CppLintState.PrintErrorCounts
def PrintErrorCounts(self)
Definition: saber_cpplint.py:1055
saber_cpplint.FlagCxx14Features
def FlagCxx14Features(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:6034
saber_cpplint._CppLintState.IncrementErrorCount
def IncrementErrorCount(self, category)
Definition: saber_cpplint.py:1045
saber_cpplint.itervalues
itervalues
Definition: saber_cpplint.py:663
saber_cpplint.CheckRedundantVirtual
def CheckRedundantVirtual(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:5782
saber_cpplint.FileInfo.FullName
def FullName(self)
Definition: saber_cpplint.py:1260
saber_cpplint._CppLintState._filters_backup
_filters_backup
Definition: saber_cpplint.py:973
saber_cpplint._SetCountingStyle
def _SetCountingStyle(level)
Definition: saber_cpplint.py:1144
saber_cpplint.NestingState.previous_stack_top
previous_stack_top
Definition: saber_cpplint.py:2536
saber_cpplint.Search
def Search(pattern, s)
Definition: saber_cpplint.py:785
saber_cpplint.ProcessLine
def ProcessLine(filename, file_extension, clean_lines, line, include_state, function_state, nesting_state, error, extra_check_functions=None)
Definition: saber_cpplint.py:5939
saber_cpplint.CheckForNewlineAtEOF
def CheckForNewlineAtEOF(filename, lines, error)
Definition: saber_cpplint.py:2142
saber_cpplint._IncludeState._CPP_SECTION
int _CPP_SECTION
Definition: saber_cpplint.py:814
saber_cpplint.FindCheckMacro
def FindCheckMacro(line)
Definition: saber_cpplint.py:4247
saber_cpplint.PrintCategories
def PrintCategories()
Definition: saber_cpplint.py:6302
saber_cpplint._BackupFilters
def _BackupFilters()
Definition: saber_cpplint.py:1178
saber_cpplint._IncludeState._SECTION_NAMES
dictionary _SECTION_NAMES
Definition: saber_cpplint.py:824
saber_cpplint.CheckForMultilineCommentsAndStrings
def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:2160
saber_cpplint.FileInfo
Definition: saber_cpplint.py:1250
saber_cpplint.NestingState.__init__
def __init__(self)
Definition: saber_cpplint.py:2518
saber_cpplint.NestingState.InExternC
def InExternC(self)
Definition: saber_cpplint.py:2558
saber_cpplint._ClassifyInclude
def _ClassifyInclude(fileinfo, include, is_system)
Definition: saber_cpplint.py:4608
saber_cpplint.CheckEmptyBlockBody
def CheckEmptyBlockBody(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:4143
saber_cpplint.CheckBracesSpacing
def CheckBracesSpacing(filename, clean_lines, linenum, nesting_state, error)
Definition: saber_cpplint.py:3693
saber_cpplint._CppLintState.SetOutputFormat
def SetOutputFormat(self, output_format)
Definition: saber_cpplint.py:989
saber_cpplint.FileInfo.BaseName
def BaseName(self)
Definition: saber_cpplint.py:1339
saber_cpplint.CheckCheck
def CheckCheck(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:4270
saber_cpplint.ProcessFileData
def ProcessFileData(filename, file_extension, lines, error, extra_check_functions=None)
Definition: saber_cpplint.py:6053
saber_cpplint.CleansedLines.NumLines
def NumLines(self)
Definition: saber_cpplint.py:1618
saber_cpplint.GetIndentLevel
def GetIndentLevel(line)
Definition: saber_cpplint.py:1940
saber_cpplint.CleansedLines.__init__
def __init__(self, lines)
Definition: saber_cpplint.py:1606
saber_cpplint._CppLintState.error_count
error_count
Definition: saber_cpplint.py:969
saber_cpplint._ClassInfo.name
name
Definition: saber_cpplint.py:2376
saber_cpplint._AddFilters
def _AddFilters(filters)
Definition: saber_cpplint.py:1166
saber_cpplint.CheckVlogArguments
def CheckVlogArguments(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:2254
saber_cpplint._BlockInfo.check_namespace_indentation
check_namespace_indentation
Definition: saber_cpplint.py:2322
saber_cpplint._FunctionState.Begin
def Begin(self, function_name)
Definition: saber_cpplint.py:1197
saber_cpplint.ShouldCheckNamespaceIndentation
def ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item, raw_lines_no_comments, linenum)
Definition: saber_cpplint.py:5898
saber_cpplint.NestingState
Definition: saber_cpplint.py:2515
saber_cpplint.CheckIncludeLine
def CheckIncludeLine(filename, clean_lines, linenum, include_state, error)
Definition: saber_cpplint.py:4674
saber_cpplint._SetOutputFormat
def _SetOutputFormat(output_format)
Definition: saber_cpplint.py:1129
saber_cpplint.ParseNolintSuppressions
def ParseNolintSuppressions(filename, raw_line, linenum, error)
Definition: saber_cpplint.py:683
saber_cpplint._CppLintState.BackupFilters
def BackupFilters(self)
Definition: saber_cpplint.py:1032
saber_cpplint.ProcessConfigOverrides
def ProcessConfigOverrides(filename)
Definition: saber_cpplint.py:6104
saber_cpplint.Error
def Error(filename, linenum, category, confidence, message)
Definition: saber_cpplint.py:1384
saber_cpplint._GetTextInside
def _GetTextInside(text, start_pattern)
Definition: saber_cpplint.py:4753
saber_cpplint.ExpectingFunctionArgs
def ExpectingFunctionArgs(clean_lines, linenum)
Definition: saber_cpplint.py:5480
saber_cpplint._FunctionState.in_a_function
in_a_function
Definition: saber_cpplint.py:1193
saber_cpplint._FunctionState.__init__
def __init__(self)
Definition: saber_cpplint.py:1192
saber_cpplint._BlockInfo.open_parentheses
open_parentheses
Definition: saber_cpplint.py:2320
saber_cpplint.CheckAccess
def CheckAccess(filename, clean_lines, linenum, nesting_state, error)
Definition: saber_cpplint.py:3283
saber_cpplint.CheckAltTokens
def CheckAltTokens(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:4388
saber_cpplint._NamespaceInfo.name
name
Definition: saber_cpplint.py:2445
saber_cpplint._IncludeState.IsInAlphabeticalOrder
def IsInAlphabeticalOrder(self, clean_lines, linenum, header_path)
Definition: saber_cpplint.py:889
saber_cpplint.CleansedLines._CollapseStrings
def _CollapseStrings(elided)
Definition: saber_cpplint.py:1623
saber_cpplint.FileInfo.IsSource
def IsSource(self)
Definition: saber_cpplint.py:1351
saber_cpplint.NestingState.Update
def Update(self, filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:2691
saber_cpplint._BlockInfo.IsBlockInfo
def IsBlockInfo(self)
Definition: saber_cpplint.py:2352
saber_cpplint.FileInfo._filename
_filename
Definition: saber_cpplint.py:1258
saber_cpplint.CheckSpacing
def CheckSpacing(filename, clean_lines, linenum, nesting_state, error)
Definition: saber_cpplint.py:3313
saber_cpplint.unicode_escape_decode
def unicode_escape_decode(x)
Definition: saber_cpplint.py:670
saber_cpplint.NestingState.stack
stack
Definition: saber_cpplint.py:2525
saber_cpplint.CheckItemIndentationInNamespace
def CheckItemIndentationInNamespace(filename, raw_lines_no_comments, linenum, error)
Definition: saber_cpplint.py:5931
saber_cpplint.long
long
Definition: saber_cpplint.py:658
saber_cpplint._FunctionState.End
def End(self)
Definition: saber_cpplint.py:1240
saber_cpplint.CleansedLines.raw_lines
raw_lines
Definition: saber_cpplint.py:1609
saber_cpplint._BlockInfo.__class__
__class__
Definition: saber_cpplint.py:2361
saber_cpplint._IncludeState._TYPE_NAMES
dictionary _TYPE_NAMES
Definition: saber_cpplint.py:817
saber_cpplint._PreprocessorInfo
Definition: saber_cpplint.py:2501
saber_cpplint.FileInfo.RepositoryName
def RepositoryName(self)
Definition: saber_cpplint.py:1264
saber_cpplint._BlockInfo.__init__
def __init__(self, linenum, seen_open_brace)
Definition: saber_cpplint.py:2317
saber_cpplint._ExternCInfo
Definition: saber_cpplint.py:2364
saber_cpplint._ClassInfo.__init__
def __init__(self, name, class_or_struct, clean_lines, linenum)
Definition: saber_cpplint.py:2374
saber_cpplint._IncludeState.include_list
include_list
Definition: saber_cpplint.py:833
saber_cpplint._CppLintState.SetFilters
def SetFilters(self, filters)
Definition: saber_cpplint.py:1003
saber_cpplint._SetVerboseLevel
def _SetVerboseLevel(level)
Definition: saber_cpplint.py:1139
saber_cpplint._FunctionState._TEST_TRIGGER
int _TEST_TRIGGER
Definition: saber_cpplint.py:1190
saber_cpplint._PreprocessorInfo.seen_else
seen_else
Definition: saber_cpplint.py:2512
saber_cpplint.CheckForNamespaceIndentation
def CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line, error)
Definition: saber_cpplint.py:3144
saber_cpplint.FilesBelongToSameModule
def FilesBelongToSameModule(filename_cc, filename_h)
Definition: saber_cpplint.py:5572
saber_cpplint.GetNonHeaderExtensions
def GetNonHeaderExtensions()
Definition: saber_cpplint.py:81
saber_cpplint.CheckGlobalStatic
def CheckGlobalStatic(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:4999
saber_cpplint._IsType
def _IsType(clean_lines, nesting_state, expr)
Definition: saber_cpplint.py:3630
saber_cpplint.CheckLanguage
def CheckLanguage(filename, clean_lines, linenum, file_extension, include_state, nesting_state, error)
Definition: saber_cpplint.py:4838
saber_cpplint._IncludeState.FindHeader
def FindHeader(self, header)
Definition: saber_cpplint.py:838
saber_cpplint.FileInfo.__init__
def __init__(self, filename)
Definition: saber_cpplint.py:1257
saber_cpplint._Filters
def _Filters()
Definition: saber_cpplint.py:1149
saber_cpplint.IsErrorSuppressedByNolint
def IsErrorSuppressedByNolint(category, linenum)
Definition: saber_cpplint.py:739
saber_cpplint._IncludeState._last_header
_last_header
Definition: saber_cpplint.py:835
saber_cpplint._IncludeState._INITIAL_SECTION
int _INITIAL_SECTION
Definition: saber_cpplint.py:811
saber_cpplint.FindStartOfExpressionInLine
def FindStartOfExpressionInLine(line, endpos, stack)
Definition: saber_cpplint.py:1812
saber_cpplint._ClassInfo.is_derived
is_derived
Definition: saber_cpplint.py:2377
saber_cpplint._FunctionState
Definition: saber_cpplint.py:1186
saber_cpplint._FunctionState._NORMAL_TRIGGER
int _NORMAL_TRIGGER
Definition: saber_cpplint.py:1189
saber_cpplint._BlockInfo.starting_linenum
starting_linenum
Definition: saber_cpplint.py:2318
saber_cpplint.CheckCStyleCast
def CheckCStyleCast(filename, clean_lines, linenum, cast_type, pattern, error)
Definition: saber_cpplint.py:5427
saber_cpplint.NestingState.InAsmBlock
def InAsmBlock(self)
Definition: saber_cpplint.py:2574
saber_cpplint.ResetNolintSuppressions
def ResetNolintSuppressions()
Definition: saber_cpplint.py:733
saber_cpplint.NestingState.InNamespaceBody
def InNamespaceBody(self)
Definition: saber_cpplint.py:2550
saber_cpplint._FilterExcludedFiles
def _FilterExcludedFiles(filenames)
Definition: saber_cpplint.py:6444
saber_cpplint.CheckForCopyright
def CheckForCopyright(filename, lines, error)
Definition: saber_cpplint.py:1927
saber_cpplint.IsForwardClassDeclaration
def IsForwardClassDeclaration(clean_lines, linenum)
Definition: saber_cpplint.py:2310
saber_cpplint.CleansedLines
Definition: saber_cpplint.py:1595
saber_cpplint.CleansedLines.lines_without_raw_strings
lines_without_raw_strings
Definition: saber_cpplint.py:1611
saber_cpplint._BlockInfo
Definition: saber_cpplint.py:2314
saber_cpplint.Match
def Match(pattern, s)
Definition: saber_cpplint.py:757
saber_cpplint.CheckInvalidIncrement
def CheckInvalidIncrement(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:2278
saber_cpplint.CheckBraces
def CheckBraces(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:3877
saber_cpplint.CheckCasts
def CheckCasts(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:5308
saber_cpplint._CppLintState
Definition: saber_cpplint.py:964
saber_cpplint._IncludeState.SetLastHeader
def SetLastHeader(self, header_path)
Definition: saber_cpplint.py:871
saber_cpplint.CheckForHeaderGuard
def CheckForHeaderGuard(filename, clean_lines, error)
Definition: saber_cpplint.py:1987
saber_cpplint.UpdateIncludeState
def UpdateIncludeState(filename, include_dict, io=codecs)
Definition: saber_cpplint.py:5630
saber_cpplint._CppLintState.AddJUnitFailure
def AddJUnitFailure(self, filename, linenum, message, category, confidence)
Definition: saber_cpplint.py:1073
saber_cpplint.CheckCommaSpacing
def CheckCommaSpacing(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:3594
saber_cpplint.RemoveMultiLineCommentsFromRange
def RemoveMultiLineCommentsFromRange(lines, begin, end)
Definition: saber_cpplint.py:1555
saber_cpplint.CheckParenthesisSpacing
def CheckParenthesisSpacing(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:3556
saber_cpplint._CppLintState._junit_failures
_junit_failures
Definition: saber_cpplint.py:987
saber_cpplint._IncludeError
Definition: saber_cpplint.py:1245
saber_cpplint.FileInfo.NoExtension
def NoExtension(self)
Definition: saber_cpplint.py:1347
saber_cpplint._IncludeState.ResetSection
def ResetSection(self, directive)
Definition: saber_cpplint.py:853
saber_cpplint.CheckForIncludeWhatYouUse
def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error, io=codecs)
Definition: saber_cpplint.py:5657
saber_cpplint.CheckForNonStandardConstructs
def CheckForNonStandardConstructs(filename, clean_lines, linenum, nesting_state, error)
Definition: saber_cpplint.py:2889
saber_cpplint.GetHeaderExtensions
def GetHeaderExtensions()
Definition: saber_cpplint.py:69
saber_cpplint.CleansedLines.num_lines
num_lines
Definition: saber_cpplint.py:1610
saber_cpplint._BlockInfo.inline_asm
inline_asm
Definition: saber_cpplint.py:2321
saber_cpplint._ExpandDirectories
def _ExpandDirectories(filenames)
Definition: saber_cpplint.py:6412
saber_cpplint._IncludeState._MY_H_SECTION
int _MY_H_SECTION
Definition: saber_cpplint.py:812
saber_cpplint.CheckTrailingSemicolon
def CheckTrailingSemicolon(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:3996
saber_cpplint._CppLintState.SetVerboseLevel
def SetVerboseLevel(self, level)
Definition: saber_cpplint.py:993
saber_cpplint._CppLintState.filters
filters
Definition: saber_cpplint.py:971
saber_cpplint._CppLintState.RestoreFilters
def RestoreFilters(self)
Definition: saber_cpplint.py:1036
saber_cpplint.CheckStyle
def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state, error)
Definition: saber_cpplint.py:4442
saber_cpplint._SetFilters
def _SetFilters(filters)
Definition: saber_cpplint.py:1154
saber_cpplint.CheckForBadCharacters
def CheckForBadCharacters(filename, lines, error)
Definition: saber_cpplint.py:2117
saber_cpplint.FindNextMultiLineCommentStart
def FindNextMultiLineCommentStart(lines, lineix)
Definition: saber_cpplint.py:1535
saber_cpplint.ReplaceAll
def ReplaceAll(pattern, rep, s)
Definition: saber_cpplint.py:767
saber_cpplint.IsBlankLine
def IsBlankLine(line)
Definition: saber_cpplint.py:3129
saber_cpplint._IncludeState.CheckNextIncludeOrder
def CheckNextIncludeOrder(self, header_type)
Definition: saber_cpplint.py:910
saber_cpplint._ClassInfo.CheckEnd
def CheckEnd(self, filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:2409
saber_cpplint.GetLineWidth
def GetLineWidth(line)
Definition: saber_cpplint.py:4420
saber_cpplint._IncludeState.__init__
def __init__(self)
Definition: saber_cpplint.py:832
saber_cpplint._CppLintState.__init__
def __init__(self)
Definition: saber_cpplint.py:967
saber_cpplint._CppLintState.output_format
output_format
Definition: saber_cpplint.py:982
saber_cpplint.IsBlockInNameSpace
def IsBlockInNameSpace(nesting_state, is_forward_declaration)
Definition: saber_cpplint.py:5879
saber_cpplint._FunctionState.current_function
current_function
Definition: saber_cpplint.py:1195
saber_cpplint.main
def main()
Definition: saber_cpplint.py:6451
saber_cpplint._BlockInfo.seen_open_brace
seen_open_brace
Definition: saber_cpplint.py:2319
saber_cpplint.iteritems
iteritems
Definition: saber_cpplint.py:664
saber_cpplint._CppLintState.FormatJUnitXML
def FormatJUnitXML(self)
Definition: saber_cpplint.py:1077
saber_cpplint._NamespaceInfo.CheckEnd
def CheckEnd(self, filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:2448
saber_cpplint.CleansedLines.lines
lines
Definition: saber_cpplint.py:1608
saber_cpplint.CloseExpression
def CloseExpression(clean_lines, linenum, pos)
Definition: saber_cpplint.py:1768
saber_cpplint.NestingState.InTemplateArgumentList
def InTemplateArgumentList(self, clean_lines, linenum, pos)
Definition: saber_cpplint.py:2582
saber_cpplint.FindEndOfExpressionInLine
def FindEndOfExpressionInLine(line, startpos, stack)
Definition: saber_cpplint.py:1690
saber_cpplint._IncludeState
Definition: saber_cpplint.py:797
saber_cpplint.IsDecltype
def IsDecltype(clean_lines, linenum, column)
Definition: saber_cpplint.py:3782
saber_cpplint._RestoreFilters
def _RestoreFilters()
Definition: saber_cpplint.py:1182
saber_cpplint._IncludeState._C_SECTION
int _C_SECTION
Definition: saber_cpplint.py:813
saber_cpplint._BlockInfo.CheckBegin
def CheckBegin(self, filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:2324
saber_cpplint._IncludeState._section
_section
Definition: saber_cpplint.py:834
saber_cpplint.CleansedLines.elided
elided
Definition: saber_cpplint.py:1607
saber_cpplint._ClassInfo.class_indent
class_indent
Definition: saber_cpplint.py:2388
saber_cpplint.FlagCxx11Features
def FlagCxx11Features(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:5984
saber_cpplint.NestingState.InClassDeclaration
def InClassDeclaration(self)
Definition: saber_cpplint.py:2566
saber_cpplint._DropCommonSuffixes
def _DropCommonSuffixes(filename)
Definition: saber_cpplint.py:4578
saber_cpplint._CppLintState._junit_errors
_junit_errors
Definition: saber_cpplint.py:986
saber_cpplint._ClassInfo.access
access
Definition: saber_cpplint.py:2380
saber_cpplint.IsInitializerList
def IsInitializerList(clean_lines, linenum)
Definition: saber_cpplint.py:5127
saber_cpplint._CppLintState.errors_by_category
errors_by_category
Definition: saber_cpplint.py:975
saber_cpplint._ClassInfo.last_line
last_line
Definition: saber_cpplint.py:2395
saber_cpplint._PreprocessorInfo.__init__
def __init__(self, stack_before_if)
Definition: saber_cpplint.py:2504
saber_cpplint.xrange
xrange
Definition: saber_cpplint.py:646
saber_cpplint.CheckPosixThreading
def CheckPosixThreading(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:2228
saber_cpplint.CheckComment
def CheckComment(line, filename, linenum, next_line_start, error)
Definition: saber_cpplint.py:3229
saber_cpplint.NestingState.CheckCompletedBlocks
def CheckCompletedBlocks(self, filename, error)
Definition: saber_cpplint.py:2867
saber_cpplint._ClassInfo
Definition: saber_cpplint.py:2371
saber_cpplint.IsMacroDefinition
def IsMacroDefinition(clean_lines, linenum)
Definition: saber_cpplint.py:2300
saber_cpplint._CppLintState.AddFilters
def AddFilters(self, filters)
Definition: saber_cpplint.py:1021
saber_cpplint._FunctionState.lines_in_function
lines_in_function
Definition: saber_cpplint.py:1194
saber_cpplint.ProcessFile
def ProcessFile(filename, vlevel, extra_check_functions=None)
Definition: saber_cpplint.py:6200
saber_cpplint._FunctionState.Check
def Check(self, error, filename, linenum)
Definition: saber_cpplint.py:1212
saber_cpplint._CppLintState.verbose_level
verbose_level
Definition: saber_cpplint.py:968
saber_cpplint._BlockInfo.CheckEnd
def CheckEnd(self, filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:2339
saber_cpplint._CppLintState.PrintError
def PrintError(self, message)
Definition: saber_cpplint.py:1067
saber_cpplint.NestingState.pp_stack
pp_stack
Definition: saber_cpplint.py:2539
saber_cpplint.NestingState.InnermostClass
def InnermostClass(self)
Definition: saber_cpplint.py:2855
saber_cpplint.IsDerivedFunction
def IsDerivedFunction(clean_lines, linenum)
Definition: saber_cpplint.py:5089
saber_cpplint.CheckHeaderFileIncluded
def CheckHeaderFileIncluded(filename, include_state, error)
Definition: saber_cpplint.py:2090
saber_cpplint.CleanseComments
def CleanseComments(line)
Definition: saber_cpplint.py:1579
saber_cpplint.ProcessGlobalSuppresions
def ProcessGlobalSuppresions(lines)
Definition: saber_cpplint.py:715
saber_cpplint.NestingState.UpdatePreprocessor
def UpdatePreprocessor(self, line)
Definition: saber_cpplint.py:2634
saber_cpplint._CppLintState.counting
counting
Definition: saber_cpplint.py:974
saber_cpplint.CheckPrintf
def CheckPrintf(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:5060
saber_cpplint._ClassInfo.CheckBegin
def CheckBegin(self, filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:2404
saber_cpplint.GetPreviousNonBlankLine
def GetPreviousNonBlankLine(clean_lines, linenum)
Definition: saber_cpplint.py:3854
saber_cpplint._OutputFormat
def _OutputFormat()
Definition: saber_cpplint.py:1124
saber_cpplint.CheckSpacingForFunctionCall
def CheckSpacingForFunctionCall(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:3052
saber_cpplint.IsCppString
def IsCppString(line)
Definition: saber_cpplint.py:1442
saber_cpplint.GetHeaderGuardCPPVariable
def GetHeaderGuardCPPVariable(filename)
Definition: saber_cpplint.py:1956
saber_cpplint._VerboseLevel
def _VerboseLevel()
Definition: saber_cpplint.py:1134
saber_cpplint.CheckMakePairUsesDeduction
def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:5761
saber_cpplint._PreprocessorInfo.stack_before_if
stack_before_if
Definition: saber_cpplint.py:2506
saber_cpplint.GetAllExtensions
def GetAllExtensions()
Definition: saber_cpplint.py:76
saber_cpplint._IncludeState.CanonicalizeAlphabeticalOrder
def CanonicalizeAlphabeticalOrder(self, header_path)
Definition: saber_cpplint.py:874
saber_cpplint._FunctionState.Count
def Count(self)
Definition: saber_cpplint.py:1207
saber_cpplint.CheckRedundantOverrideOrFinal
def CheckRedundantOverrideOrFinal(filename, clean_lines, linenum, error)
Definition: saber_cpplint.py:5846
saber_cpplint._ClassInfo.is_struct
is_struct
Definition: saber_cpplint.py:2381
saber_cpplint._IncludeState._OTHER_H_SECTION
int _OTHER_H_SECTION
Definition: saber_cpplint.py:815
saber_cpplint.RemoveMultiLineComments
def RemoveMultiLineComments(filename, lines, error)
Definition: saber_cpplint.py:1563
saber_cpplint._IsSourceExtension
def _IsSourceExtension(s)
Definition: saber_cpplint.py:792
saber_cpplint._CppLintState.SetCountingStyle
def SetCountingStyle(self, counting_style)
Definition: saber_cpplint.py:999
saber_cpplint.CheckForNonConstReference
def CheckForNonConstReference(filename, clean_lines, linenum, nesting_state, error)
Definition: saber_cpplint.py:5169
saber_cpplint.FileInfo.Split
def Split(self)
Definition: saber_cpplint.py:1325