# Home | History | Annotate | Download | only in catapult_build
      1 # Copyright 2015 The Chromium Authors. All rights reserved.
      2 # Use of this source code is governed by a BSD-style license that can be
      3 # found in the LICENSE file.
      4 
      5 """Checks to use in PRESUBMIT.py for HTML style violations."""
      6 
      7 import collections
      8 import difflib
      9 import re
     10 
     11 import bs4
     12 
     13 from catapult_build import parse_html
     14 
     15 
     16 def RunChecks(input_api, output_api, excluded_paths=None):
     17 
     18   def ShouldCheck(affected_file):
     19     path = affected_file.LocalPath()
     20     if not path.endswith('.html'):
     21       return False
     22     if not excluded_paths:
     23       return True
     24     return not any(re.match(pattern, path) for pattern in excluded_paths)
     25 
     26   affected_files = input_api.AffectedFiles(
     27       file_filter=ShouldCheck, include_deletes=False)
     28   results = []
     29   for f in affected_files:
     30     CheckAffectedFile(f, results, output_api)
     31   return results
     32 
     33 
     34 def CheckAffectedFile(affected_file, results, output_api):
     35   path = affected_file.LocalPath()
     36   soup = parse_html.BeautifulSoup('\n'.join(affected_file.NewContents()))
     37   for check in [CheckDoctype, CheckImportOrder]:
     38     check(path, soup, results, output_api)
     39 
     40 
     41 def CheckDoctype(path, soup, results, output_api):
     42   if _HasHtml5Declaration(soup):
     43     return
     44   error_text = 'Could not find "<!DOCTYPE html>" in %s.' % path
     45   results.append(output_api.PresubmitError(error_text))
     46 
     47 
     48 def _HasHtml5Declaration(soup):
     49   for item in soup.contents:
     50     if isinstance(item, bs4.Doctype) and item.lower() == 'html':
     51       return True
     52   return False
     53 
     54 
     55 def CheckImportOrder(path, soup, results, output_api):
     56   grouped_hrefs = collections.defaultdict(list)  # Link rel -> [link hrefs].
     57   for link in soup.find_all('link'):
     58     if link.get('data-suppress-import-order') is not None:
     59       continue
     60 
     61     grouped_hrefs[','.join(link.get('rel'))].append(link.get('href'))
     62 
     63   for rel, actual_hrefs in grouped_hrefs.iteritems():
     64     expected_hrefs = list(sorted(set(actual_hrefs)))
     65     if actual_hrefs != expected_hrefs:
     66       error_text = (
     67           'Invalid "%s" link sort order in %s:\n' % (rel, path) + ' ' +
     68           '\n  '.join(difflib.ndiff(actual_hrefs, expected_hrefs)) +
     69           '\nIf this error is invalid, you can suppress it by adding a ' +
     70           '"data-suppress-import-order" attribute to the out-of-order <link> ' +
     71           'element.')
     72       results.append(output_api.PresubmitError(error_text))
     73