# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


"""Top-level presubmit script for Skia.

See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""

import collections
import csv
import fnmatch
import os
import re
import subprocess
import sys
import traceback


REVERT_CL_SUBJECT_PREFIX = 'Revert '

SKIA_TREE_STATUS_URL = 'http://skia-tree-status.appspot.com'

# Please add the complete email address here (and not just 'xyz@' or 'xyz').
PUBLIC_API_OWNERS = (
    'reed@chromium.org',
    'reed@google.com',
    'bsalomon@chromium.org',
    'bsalomon@google.com',
    'djsollen@chromium.org',
    'djsollen@google.com',
    'hcm@chromium.org',
    'hcm@google.com',
)

AUTHORS_FILE_NAME = 'AUTHORS'

DOCS_PREVIEW_URL = 'https://skia.org/?cl='
GOLD_TRYBOT_URL = 'https://gold.skia.org/search?issue='

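# Email suffixes identifying service accounts; these accounts are exempt from
# the AUTHORS file check and from PostUploadHook processing.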
SERVICE_ACCOUNT_SUFFIX = [
    '@%s.iam.gserviceaccount.com' % project for project in [
        'skia-buildbots.google.com', 'skia-swarming-bots', 'skia-public',
        'skia-corp.google.com']]


def _CheckChangeHasEol(input_api, output_api, source_file_filter=None):
  """Checks that files end with at least one \n (LF)."""
  eof_files = []
  for f in input_api.AffectedSourceFiles(source_file_filter):
    contents = input_api.ReadFile(f, 'rb')
    # Check that the file ends in at least one newline character.
    if len(contents) > 1 and contents[-1:] != '\n':
      eof_files.append(f.LocalPath())

  if eof_files:
    return [output_api.PresubmitPromptWarning(
      'These files should end in a newline character:',
      items=eof_files)]
  return []


def _PythonChecks(input_api, output_api):
  """Run checks on any modified Python files."""
  blacklist = [
      r'infra[\\\/]bots[\\\/]recipes.py',

      # Blacklist DEPS. Those under third_party are already covered by
      # input_api.DEFAULT_BLACK_LIST.
      r'common[\\\/].*',
      r'buildtools[\\\/].*',
      r'.*[\\\/]\.recipe_deps[\\\/].*',
  ]
  blacklist.extend(input_api.DEFAULT_BLACK_LIST)

  pylint_disabled_warnings = (
      'F0401',  # Unable to import.
      'E0611',  # No name in module.
      'W0232',  # Class has no __init__ method.
      'E1002',  # Use of super on an old style class.
      'W0403',  # Relative import used.
      'R0201',  # Method could be a function.
      'E1003',  # Using class name in super.
      'W0613',  # Unused argument.
      'W0105',  # String statement has no effect.
  )
  return input_api.canned_checks.RunPylint(
      input_api, output_api,
      disabled_warnings=pylint_disabled_warnings,
      black_list=blacklist)


def _JsonChecks(input_api, output_api):
  """Run checks on any modified json files."""
  failing_files = []
  for affected_file in input_api.AffectedFiles(None):
    affected_file_path = affected_file.LocalPath()
    is_json = affected_file_path.endswith('.json')
    is_metadata = (affected_file_path.startswith('site/') and
                   affected_file_path.endswith('/METADATA'))
    if is_json or is_metadata:
      try:
        input_api.json.load(open(affected_file_path, 'r'))
      except ValueError:
        failing_files.append(affected_file_path)

  results = []
  if failing_files:
    results.append(
        output_api.PresubmitError(
            'The following files contain invalid json:\n%s\n\n' %
            '\n'.join(failing_files)))
  return results


def _IfDefChecks(input_api, output_api):
  """Ensures if/ifdef are not before includes. See skbug/3362 for details."""
  comment_block_start_pattern = re.compile(r'^\s*/\*.*$')
  comment_block_middle_pattern = re.compile(r'^\s+\*.*')
  comment_block_end_pattern = re.compile(r'^\s+\*/.*$')
  single_line_comment_pattern = re.compile(r'^\s*//.*$')
  def is_comment(line):
    return (comment_block_start_pattern.match(line) or
            comment_block_middle_pattern.match(line) or
            comment_block_end_pattern.match(line) or
            single_line_comment_pattern.match(line))

  empty_line_pattern = re.compile(r'^\s*$')
  def is_empty_line(line):
    return empty_line_pattern.match(line)

  failing_files = []
  for affected_file in input_api.AffectedSourceFiles(None):
    affected_file_path = affected_file.LocalPath()
    if affected_file_path.endswith('.cpp') or affected_file_path.endswith('.h'):
      f = open(affected_file_path)
      for line in f.xreadlines():
        if is_comment(line) or is_empty_line(line):
          continue
        # The below will be the first real line after comments and newlines.
        if line.startswith('#if 0 '):
          pass
        elif line.startswith('#if ') or line.startswith('#ifdef '):
          failing_files.append(affected_file_path)
        break

  results = []
  if failing_files:
    results.append(
        output_api.PresubmitError(
            'The following files have #if or #ifdef before includes:\n%s\n\n'
            'See https://bug.skia.org/3362 for why this should be fixed.' %
            '\n'.join(failing_files)))
  return results


def _CopyrightChecks(input_api, output_api, source_file_filter=None):
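  """Check that each affected source file contains a copyright header.

  Files under third_party are skipped.
  """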
  results = []
  year_pattern = r'\d{4}'
  year_range_pattern = r'%s(-%s)?' % (year_pattern, year_pattern)
  years_pattern = r'%s(,%s)*,?' % (year_range_pattern, year_range_pattern)
  copyright_pattern = (
      r'Copyright (\([cC]\) )?%s \w+' % years_pattern)

  for affected_file in input_api.AffectedSourceFiles(source_file_filter):
    if 'third_party' in affected_file.LocalPath():
      continue
    contents = input_api.ReadFile(affected_file, 'rb')
    if not re.search(copyright_pattern, contents):
      results.append(output_api.PresubmitError(
          '%s is missing a correct copyright header.' % affected_file))
  return results


def _ToolFlags(input_api, output_api):
  """Make sure `{dm,nanobench}_flags.py test` passes if modified."""
  results = []
  sources = lambda x: ('dm_flags.py' in x.LocalPath() or
                       'nanobench_flags.py' in x.LocalPath())
  for f in input_api.AffectedSourceFiles(sources):
    if 0 != subprocess.call(['python', f.LocalPath(), 'test']):
      results.append(output_api.PresubmitError('`python %s test` failed' % f))
  return results


def _InfraTests(input_api, output_api):
  """Run the infra tests."""
  results = []
  if not any(f.LocalPath().startswith('infra')
             for f in input_api.AffectedFiles()):
    return results

  cmd = ['python', os.path.join('infra', 'bots', 'infra_tests.py')]
  try:
    subprocess.check_output(cmd)
  except subprocess.CalledProcessError as e:
    results.append(output_api.PresubmitError(
        '`%s` failed:\n%s' % (' '.join(cmd), e.output)))
  return results


def _CheckGNFormatted(input_api, output_api):
  """Make sure any .gn files we're changing have been formatted."""
  results = []
  for f in input_api.AffectedFiles():
    if (not f.LocalPath().endswith('.gn') and
        not f.LocalPath().endswith('.gni')):
      continue

    gn = 'gn.bat' if 'win32' in sys.platform else 'gn'
    cmd = [gn, 'format', '--dry-run', f.LocalPath()]
    try:
      subprocess.check_output(cmd)
    except subprocess.CalledProcessError:
      fix = 'gn format ' + f.LocalPath()
      results.append(output_api.PresubmitError(
          '`%s` failed, try\n\t%s' % (' '.join(cmd), fix)))
  return results


class _WarningsAsErrors():
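  """Context manager that temporarily upgrades presubmit warnings to errors.

  While active, output_api.PresubmitPromptWarning is replaced with
  output_api.PresubmitError; the original is restored on exit.
  """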
  def __init__(self, output_api):
    self.output_api = output_api
    self.old_warning = None
  def __enter__(self):
    self.old_warning = self.output_api.PresubmitPromptWarning
    self.output_api.PresubmitPromptWarning = self.output_api.PresubmitError
    return self.output_api
  def __exit__(self, ex_type, ex_value, ex_traceback):
    self.output_api.PresubmitPromptWarning = self.old_warning


def _CommonChecks(input_api, output_api):
  """Presubmit checks common to upload and commit."""
  results = []
  sources = lambda x: (x.LocalPath().endswith('.h') or
                       x.LocalPath().endswith('.py') or
                       x.LocalPath().endswith('.sh') or
                       x.LocalPath().endswith('.m') or
                       x.LocalPath().endswith('.mm') or
                       x.LocalPath().endswith('.go') or
                       x.LocalPath().endswith('.c') or
                       x.LocalPath().endswith('.cc') or
                       x.LocalPath().endswith('.cpp'))
  results.extend(_CheckChangeHasEol(
      input_api, output_api, source_file_filter=sources))
  with _WarningsAsErrors(output_api):
    results.extend(input_api.canned_checks.CheckChangeHasNoCR(
        input_api, output_api, source_file_filter=sources))
    results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
        input_api, output_api, source_file_filter=sources))
  results.extend(_PythonChecks(input_api, output_api))
  results.extend(_JsonChecks(input_api, output_api))
  results.extend(_IfDefChecks(input_api, output_api))
  results.extend(_CopyrightChecks(input_api, output_api,
                                  source_file_filter=sources))
  results.extend(_ToolFlags(input_api, output_api))
  return results


def CheckChangeOnUpload(input_api, output_api):
  """Presubmit checks for the change on upload.

  The following are the presubmit checks:
  * Checks that files end with a newline (EOL).
  """
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  # Run on upload, not commit, since the presubmit bot apparently doesn't have
  # coverage or Go installed.
  results.extend(_InfraTests(input_api, output_api))

  results.extend(_CheckGNFormatted(input_api, output_api))
  return results


def _CheckTreeStatus(input_api, output_api, json_url):
  """Check whether to allow commit.

  Args:
    input_api: input-related APIs.
    output_api: output-related APIs.
    json_url: URL from which to download the JSON-formatted tree status.
  """
  tree_status_results = input_api.canned_checks.CheckTreeIsOpen(
      input_api, output_api, json_url=json_url)
  if not tree_status_results:
    # Check for caution state only if tree is not closed.
    connection = input_api.urllib2.urlopen(json_url)
    status = input_api.json.loads(connection.read())
    connection.close()
    if ('caution' in status['message'].lower() and
        os.isatty(sys.stdout.fileno())):
      # Display a prompt only if we are in an interactive shell. Without this
      # check the commit queue behaves incorrectly because it considers
      # prompts to be failures.
      short_text = 'Tree state is: ' + status['general_state']
      long_text = status['message'] + '\n' + json_url
      tree_status_results.append(
          output_api.PresubmitPromptWarning(
              message=short_text, long_text=long_text))
  else:
    # Tree status is closed. Put in a message about contacting the sheriff.
    connection = input_api.urllib2.urlopen(
        SKIA_TREE_STATUS_URL + '/current-sheriff')
    sheriff_details = input_api.json.loads(connection.read())
    if sheriff_details:
      tree_status_results[0]._message += (
          '\n\nPlease contact the current Skia sheriff (%s) if you are trying '
          'to submit a build fix\nand do not know how to submit because the '
          'tree is closed') % sheriff_details['username']
  return tree_status_results


class CodeReview(object):
  """Abstracts which codereview tool is used for the specified issue."""

  def __init__(self, input_api):
    self._issue = input_api.change.issue
    self._gerrit = input_api.gerrit

  def GetOwnerEmail(self):
    return self._gerrit.GetChangeOwner(self._issue)

  def GetSubject(self):
    return self._gerrit.GetChangeInfo(self._issue)['subject']

  def GetDescription(self):
    return self._gerrit.GetChangeDescription(self._issue)

  def IsDryRun(self):
    return self._gerrit.GetChangeInfo(
        self._issue)['labels']['Commit-Queue'].get('value', 0) == 1

  def GetReviewers(self):
    code_review_label = (
        self._gerrit.GetChangeInfo(self._issue)['labels']['Code-Review'])
    return [r['email'] for r in code_review_label.get('all', [])]

  def GetApprovers(self):
    approvers = []
    code_review_label = (
        self._gerrit.GetChangeInfo(self._issue)['labels']['Code-Review'])
    for m in code_review_label.get('all', []):
      if m.get("value") == 1:
        approvers.append(m["email"])
    return approvers


def _CheckOwnerIsInAuthorsFile(input_api, output_api):
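  """Check that the issue owner is listed in Skia's AUTHORS file.

  Service accounts are exempt. If the AUTHORS file cannot be read, the check
  logs the error and passes instead of failing the presubmit.
  """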
  results = []
  if input_api.change.issue:
    cr = CodeReview(input_api)

    owner_email = cr.GetOwnerEmail()

    # Service accounts don't need to be in AUTHORS.
    for suffix in SERVICE_ACCOUNT_SUFFIX:
      if owner_email.endswith(suffix):
        return results

    try:
      authors_content = ''
      for line in open(AUTHORS_FILE_NAME):
        if not line.startswith('#'):
          authors_content += line
      email_fnmatches = re.findall('<(.*)>', authors_content)
      for email_fnmatch in email_fnmatches:
        if fnmatch.fnmatch(owner_email, email_fnmatch):
          # Found a match; the user is in the AUTHORS file. Break out of the
          # loop.
          break
      else:
        results.append(
          output_api.PresubmitError(
            'The email %s is not in Skia\'s AUTHORS file.\n'
            'Issue owner, this CL must include an addition to the Skia AUTHORS '
            'file.'
            % owner_email))
    except IOError:
      # Do not fail if the AUTHORS file cannot be found.
      traceback.print_exc()
      input_api.logging.error('AUTHORS file not found!')

  return results


def _CheckLGTMsForPublicAPI(input_api, output_api):
  """Check LGTMs for public API changes.

  For public API files make sure there is an LGTM from the list of owners in
  PUBLIC_API_OWNERS.
  """
  results = []
  requires_owner_check = False
  for affected_file in input_api.AffectedFiles():
    affected_file_path = affected_file.LocalPath()
    file_path, file_ext = os.path.splitext(affected_file_path)
    # We only care about files that end in .h and are under the top-level
    # include dir, but not include/private.
    if (file_ext == '.h' and
        'include' == file_path.split(os.path.sep)[0] and
        'private' not in file_path):
      requires_owner_check = True

  if not requires_owner_check:
    return results

  lgtm_from_owner = False
  if input_api.change.issue:
    cr = CodeReview(input_api)

    if re.match(REVERT_CL_SUBJECT_PREFIX, cr.GetSubject(), re.I):
      # It is a revert CL; ignore the public API owners check.
      return results

    if cr.IsDryRun():
      # Ignore the public API owners check for dry-run CLs since they are not
      # going to be committed.
      return results

    if input_api.gerrit:
      for reviewer in cr.GetReviewers():
        if reviewer in PUBLIC_API_OWNERS:
          # If an owner is specified as a reviewer in Gerrit then ignore the
          # public API owners check.
          return results
    else:
      match = re.search(r'^TBR=(.*)$', cr.GetDescription(), re.M)
      if match:
        tbr_section = match.group(1).strip().split(' ')[0]
        tbr_entries = tbr_section.split(',')
        for owner in PUBLIC_API_OWNERS:
          if owner in tbr_entries or owner.split('@')[0] in tbr_entries:
            # If an owner is specified in the TBR= line then ignore the public
            # API owners check.
            return results

    if cr.GetOwnerEmail() in PUBLIC_API_OWNERS:
      # An owner created the CL; that is an automatic LGTM.
      lgtm_from_owner = True

    for approver in cr.GetApprovers():
      if approver in PUBLIC_API_OWNERS:
        # Found an LGTM in a message from an owner.
        lgtm_from_owner = True
        break

  if not lgtm_from_owner:
    results.append(
        output_api.PresubmitError(
            "If this CL adds to or changes Skia's public API, you need an LGTM "
            "from any of %s. If this CL only removes from or doesn't change "
            "Skia's public API, please add a short note to the CL saying so. "
            "Add one of the owners as a reviewer to your CL as well as to the "
            "TBR= line. If you don't know if this CL affects Skia's public "
            "API, treat it like it does." % str(PUBLIC_API_OWNERS)))
  return results


def _FooterExists(footers, key, value):
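  """Return True if the given (key, value) footer is present in footers."""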
  for k, v in footers:
    if k == key and v == value:
      return True
  return False


def PostUploadHook(cl, change, output_api):
  """git cl upload will call this hook after the issue is created/modified.

  This hook does the following:
  * Adds a link to preview docs changes if there are any docs changes in the CL.
  * Adds 'No-Try: true' if the CL contains only docs changes.
  * Adds 'No-Tree-Checks: true' for non-master branch changes since they do not
    need to be gated on the master branch's tree.
  * Adds 'No-Try: true' for non-master branch changes since trybots do not yet
    work on them.
  * Adds 'No-Presubmit: true' for non-master branch changes since those don't
    run the presubmit checks.
  """

  results = []
  atleast_one_docs_change = False
  all_docs_changes = True
  for affected_file in change.AffectedFiles():
    affected_file_path = affected_file.LocalPath()
    file_path, _ = os.path.splitext(affected_file_path)
    if 'site' == file_path.split(os.path.sep)[0]:
      atleast_one_docs_change = True
    else:
      all_docs_changes = False
    if atleast_one_docs_change and not all_docs_changes:
      break

  issue = cl.issue
  if issue:
    # Skip PostUploadHooks for all auto-commit service account bots. New
    # patchsets (caused by PostUploadHooks) invalidate the CQ+2 vote from
    # the "--use-commit-queue" flag to "git cl upload".
    for suffix in SERVICE_ACCOUNT_SUFFIX:
      if cl.GetIssueOwner().endswith(suffix):
        return results

    original_description_lines, footers = cl.GetDescriptionFooters()
    new_description_lines = list(original_description_lines)

    # If the change includes only doc changes then add 'No-Try: true' to the
    # CL's description if it is not already there.
    if all_docs_changes and not _FooterExists(footers, 'No-Try', 'true'):
      new_description_lines.append('No-Try: true')
      results.append(
          output_api.PresubmitNotifyResult(
              'This change has only doc changes. Automatically added '
              '\'No-Try: true\' to the CL\'s description'))

    # If there is at least one docs change then add a preview link to the CL's
    # description if it is not already there.
    docs_preview_link = '%s%s' % (DOCS_PREVIEW_URL, issue)
    docs_preview_line = 'Docs-Preview: %s' % docs_preview_link
    if (atleast_one_docs_change and
        not _FooterExists(footers, 'Docs-Preview', docs_preview_link)):
      # Automatically add a link to where the docs can be previewed.
      new_description_lines.append(docs_preview_line)
      results.append(
          output_api.PresubmitNotifyResult(
              'Automatically added a link to preview the docs changes to the '
              'CL\'s description'))

    # If the target ref is not master then add 'No-Tree-Checks: true',
    # 'No-Try: true' and 'No-Presubmit: true' to the CL's description if they
    # are not already there.
    target_ref = cl.GetRemoteBranch()[1]
    if target_ref != 'refs/remotes/origin/master':
      if not _FooterExists(footers, 'No-Tree-Checks', 'true'):
        new_description_lines.append('No-Tree-Checks: true')
        results.append(
            output_api.PresubmitNotifyResult(
                'Branch changes do not need to rely on the master branch\'s '
                'tree status. Automatically added \'No-Tree-Checks: true\' to '
                'the CL\'s description'))
      if not _FooterExists(footers, 'No-Try', 'true'):
        new_description_lines.append('No-Try: true')
        results.append(
            output_api.PresubmitNotifyResult(
                'Trybots do not yet work for non-master branches. '
                'Automatically added \'No-Try: true\' to the CL\'s '
                'description'))
      if not _FooterExists(footers, 'No-Presubmit', 'true'):
        new_description_lines.append('No-Presubmit: true')
        results.append(
            output_api.PresubmitNotifyResult(
                'Branch changes do not run the presubmit checks.'))

    # If the description has changed, update it.
    if new_description_lines != original_description_lines:
      # Add a new line separating the new contents from the old contents.
      new_description_lines.insert(len(original_description_lines), '')
      cl.UpdateDescriptionFooters(new_description_lines, footers)

  return results


def CheckChangeOnCommit(input_api, output_api):
  """Presubmit checks for the change on commit.

  The following are the presubmit checks:
  * Checks that files end with a newline (EOL).
  * Ensures that the Skia tree is open in
    http://skia-tree-status.appspot.com/. Shows a warning if it is in 'Caution'
    state and an error if it is in 'Closed' state.
  """
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  results.extend(
      _CheckTreeStatus(input_api, output_api, json_url=(
          SKIA_TREE_STATUS_URL + '/banner-status?format=json')))
  results.extend(_CheckLGTMsForPublicAPI(input_api, output_api))
  results.extend(_CheckOwnerIsInAuthorsFile(input_api, output_api))
  # Checks for the presence of 'DO NOT''SUBMIT' in CL description and in
  # content of files.
  results.extend(
      input_api.canned_checks.CheckDoNotSubmit(input_api, output_api))
  return results