[test] Refactoring presubmit for readability
No-Try: true
Bug: v8:9396
Change-Id: Ife254c964a418b5a2c666acf618b66e5273f31d7
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1800284
Commit-Queue: Liviu Rau <liviurau@chromium.org>
Reviewed-by: Tamer Tas <tmrts@chromium.org>
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63778}
commit 64cc3c5c21
parent 7bbe5322b4
PRESUBMIT.py | 106 lines changed
@@ -301,39 +301,43 @@ def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
   return []
 
 
+def _CheckGenderNeutralInLicenses(input_api, output_api):
+  # License files are taken as is, even if they include gendered pronouns.
+  def LicenseFilter(path):
+    input_api.FilterSourceFile(path, black_list=_LICENSE_FILE)
+
+  return input_api.canned_checks.CheckGenderNeutral(
+      input_api, output_api, source_file_filter=LicenseFilter)
+
+
+def _RunTestsWithVPythonSpec(input_api, output_api):
+  return input_api.RunTests(
+      input_api.canned_checks.CheckVPythonSpec(input_api, output_api))
+
+
 def _CommonChecks(input_api, output_api):
   """Checks common to both upload and commit."""
-  results = []
   # TODO(machenbach): Replace some of those checks, e.g. owners and copyright,
   # with the canned PanProjectChecks. Need to make sure that the checks all
   # pass on all existing files.
-  results.extend(input_api.canned_checks.CheckOwnersFormat(
-      input_api, output_api))
-  results.extend(input_api.canned_checks.CheckOwners(
-      input_api, output_api))
-  results.extend(_CheckCommitMessageBugEntry(input_api, output_api))
-  results.extend(input_api.canned_checks.CheckPatchFormatted(
-      input_api, output_api))
-
-  # License files are taken as is, even if they include gendered pronouns.
-  license_filter = lambda path: input_api.FilterSourceFile(
-      path, black_list=_LICENSE_FILE)
-  results.extend(input_api.canned_checks.CheckGenderNeutral(
-      input_api, output_api, source_file_filter=license_filter))
-
-  results.extend(_V8PresubmitChecks(input_api, output_api))
-  results.extend(_CheckUnwantedDependencies(input_api, output_api))
-  results.extend(
-      _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
-  results.extend(_CheckHeadersHaveIncludeGuards(input_api, output_api))
-  results.extend(
-      _CheckNoInlineHeaderIncludesInNormalHeaders(input_api, output_api))
-  results.extend(_CheckJSONFiles(input_api, output_api))
-  results.extend(_CheckMacroUndefs(input_api, output_api))
-  results.extend(_CheckNoexceptAnnotations(input_api, output_api))
-  results.extend(input_api.RunTests(
-      input_api.canned_checks.CheckVPythonSpec(input_api, output_api)))
-  return results
+  checks = [
+    input_api.canned_checks.CheckOwnersFormat,
+    input_api.canned_checks.CheckOwners,
+    _CheckCommitMessageBugEntry,
+    input_api.canned_checks.CheckPatchFormatted,
+    _CheckGenderNeutralInLicenses,
+    _V8PresubmitChecks,
+    _CheckUnwantedDependencies,
+    _CheckNoProductionCodeUsingTestOnlyFunctions,
+    _CheckHeadersHaveIncludeGuards,
+    _CheckNoInlineHeaderIncludesInNormalHeaders,
+    _CheckJSONFiles,
+    _CheckMacroUndefs,
+    _CheckNoexceptAnnotations,
+    _RunTestsWithVPythonSpec,
+  ]
+
+  return sum([check(input_api, output_api) for check in checks], [])
 
 
 def _SkipTreeCheck(input_api, output_api):
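For reference, here is a minimal standalone sketch of the aggregation idiom the
rewritten _CommonChecks relies on: every check is a callable that returns a
list of findings, and sum(..., []) concatenates those per-check lists into one
result list. The check_a/check_b names and the None arguments are placeholders
for illustration only, not part of the presubmit API.

    # Hypothetical checks; real checks take (input_api, output_api) and
    # return a list of presubmit results.
    def check_a(input_api, output_api):
      return ['finding from check_a']

    def check_b(input_api, output_api):
      return []  # nothing to report

    checks = [check_a, check_b]

    def run_all(input_api, output_api):
      # sum() with an empty-list start value flattens the list of lists.
      return sum([check(input_api, output_api) for check in checks], [])

    print(run_all(None, None))  # ['finding from check_a']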
@@ -404,13 +408,29 @@ def _CheckMacroUndefs(input_api, output_api):
     white_list = (r'.+\.cc',r'.+\.cpp',r'.+\.c')
     return input_api.FilterSourceFile(affected_file, white_list=white_list)
 
-  def TouchesMacros(f):
-    for line in f.GenerateScmDiff().splitlines():
-      if not line.startswith('+') and not line.startswith('-'):
-        continue
-      if define_pattern.match(line[1:]) or undef_pattern.match(line[1:]):
-        return True
-    return False
+  def Touches(line):
+    return line.startswith('+') or line.startswith('-')
+
+  def InvolvesMacros(text):
+    return define_pattern.match(text) or undef_pattern.match(text)
+
+  def TouchesMacros(f):
+    return any(Touches(line) and InvolvesMacros(line[1:])
+               for line in f.GenerateScmDiff().splitlines())
+
+  def CollectUndefsWithNoDef(defined_macros, errors, f, line, line_nr):
+    define_match = define_pattern.match(line)
+    if define_match:
+      name = define_match.group(1)
+      defined_macros[name] = line_nr
+    undef_match = undef_pattern.match(line)
+    if undef_match and not "// NOLINT" in line:
+      name = undef_match.group(1)
+      if name in defined_macros:
+        del defined_macros[name]
+      else:
+        errors.append('{}:{}: Macro named \'{}\' was not defined before.'
+                      .format(f.LocalPath(), line_nr, name))
 
   define_pattern = input_api.re.compile(r'#define (\w+)')
   undef_pattern = input_api.re.compile(r'#undef (\w+)')
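To make the new helpers concrete, here is a self-contained sketch that runs the
TouchesMacros logic over a made-up diff; the sample_diff text and the plain re
module stand in for f.GenerateScmDiff() and input_api.re, and are assumptions
for illustration only.

    import re

    define_pattern = re.compile(r'#define (\w+)')
    undef_pattern = re.compile(r'#undef (\w+)')

    def Touches(line):
      # A diff line that adds or removes content.
      return line.startswith('+') or line.startswith('-')

    def InvolvesMacros(text):
      # Truthy (a match object) when the line defines or undefines a macro.
      return define_pattern.match(text) or undef_pattern.match(text)

    sample_diff = '\n'.join([
        ' int unchanged;',
        '+#define FOO 1',
        '-#undef BAR',
    ])

    # Equivalent to TouchesMacros for a file whose SCM diff is sample_diff.
    print(any(Touches(line) and InvolvesMacros(line[1:])
              for line in sample_diff.splitlines()))  # True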
@@ -422,25 +442,9 @@ def _CheckMacroUndefs(input_api, output_api):
 
     defined_macros = dict()
     with open(f.LocalPath()) as fh:
-      line_nr = 0
-      for line in fh:
-        line_nr += 1
-
-        define_match = define_pattern.match(line)
-        if define_match:
-          name = define_match.group(1)
-          defined_macros[name] = line_nr
-
-        undef_match = undef_pattern.match(line)
-        if undef_match:
-          if "// NOLINT" in line:
-            continue
-          name = undef_match.group(1)
-          if not name in defined_macros:
-            errors.append('{}:{}: Macro named \'{}\' was not defined before.'
-                          .format(f.LocalPath(), line_nr, name))
-          else:
-            del defined_macros[name]
+      for line_nr, line in enumerate(fh, start=1):
+        CollectUndefsWithNoDef(defined_macros, errors, f, line, line_nr)
+
     for name, line_nr in sorted(defined_macros.items(), key=lambda e: e[1]):
       errors.append('{}:{}: Macro missing #undef: {}'
                     .format(f.LocalPath(), line_nr, name))
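The remaining change replaces the hand-maintained line counter with
enumerate(fh, start=1). A small sketch of that replacement over an in-memory
file (io.StringIO and the sample content are illustrative assumptions):

    import io

    fh = io.StringIO('#define FOO 1\nint x;\n#undef FOO\n')
    for line_nr, line in enumerate(fh, start=1):
      # line_nr starts at 1, matching the old "line_nr = 0; line_nr += 1" code.
      print(line_nr, line.rstrip())
    # 1 #define FOO 1
    # 2 int x;
    # 3 #undef FOO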