[test] Include shard info in test records
This makes it easier on the infra side to link a test failure to the
shard it ran on. Without that information it is a hassle to find out
which shard ran the failing test.

This also simplifies how the global test_config stores information:
some information was duplicated, but is now shared through properties
when the owning object is already present.

Bug: v8:13681
Change-Id: I52f01a4fac74627575d80f25923faba99eb6a1fb
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4181030
Reviewed-by: Liviu Rau <liviurau@google.com>
Commit-Queue: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85429}
parent 7a3a6e88bd
commit 29b3bd3826
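For orientation before the diff hunks, here is a minimal sketch of the two ideas in this change, using simplified, hypothetical class shapes rather than the actual V8 test-runner code: test cases reach run-wide settings through properties on their owning suite instead of storing a duplicate reference, and the shard id/count flow from the single TestConfig object into every emitted test record.

class TestConfig:
  """Single run-wide settings object; now also carries shard info."""

  def __init__(self, framework_name, shard_id, shard_count):
    self.framework_name = framework_name
    self.shard_id = shard_id
    self.shard_count = shard_count


class TestSuite:
  def __init__(self, name, test_config):
    self.name = name
    self.test_config = test_config

  @property
  def framework_name(self):
    # Shared through a property instead of being stored a second time.
    return self.test_config.framework_name


class TestCase:
  def __init__(self, suite, name):
    self.suite = suite
    self.name = name

  @property
  def test_config(self):
    # The owning suite already holds the config; don't duplicate it.
    return self.suite.test_config

  @property
  def shard_id(self):
    return self.test_config.shard_id

  @property
  def shard_count(self):
    return self.test_config.shard_count


def base_test_record(test):
  # Shard info becomes part of every test record, so a failure can be
  # traced back to the shard it ran on.
  return {
      'name': test.name,
      'framework_name': test.suite.framework_name,
      'shard_id': test.shard_id,
      'shard_count': test.shard_count,
  }


if __name__ == '__main__':
  config = TestConfig('standard_runner', shard_id=0, shard_count=4)
  suite = TestSuite('mjsunit', config)
  print(base_test_record(TestCase(suite, 'array-sort')))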
@@ -55,7 +55,7 @@ class TestCase(testcase.D8TestCase):

   def _get_files_params(self):
     files = self._source_files
-    if self._test_config.isolates:
+    if self.test_config.isolates:
       files = files + ['--isolate'] + files
     return files
@@ -23,7 +23,11 @@ class PYTestCase(testcase.TestCase):
     return super(PYTestCase, self).get_command()

   def _get_cmd_params(self):
-    return self._get_files_params() + ['--', os.path.join(self._test_config.shell_dir, 'd8')] + self._get_source_flags()
+    return (
+        self._get_files_params() +
+        ['--', os.path.join(self.test_config.shell_dir, 'd8')] +
+        self._get_source_flags()
+    )

   def _get_shell_flags(self):
     return []
@@ -67,4 +67,4 @@ class TestCase(testcase.TestCase):
     return outproc.ExpectedOutProc(
         self.expected_outcomes,
         os.path.join(self.suite.root, self.path) + EXPECTED_SUFFIX,
-        self.suite.test_config.regenerate_expected_files)
+        self.test_config.regenerate_expected_files)
@@ -80,7 +80,7 @@ class TestCase(testcase.D8TestCase):
       ]
     ]

-    if self._test_config.isolates:
+    if self.test_config.isolates:
       files += ['--isolate'] + files
     return files
@@ -98,4 +98,4 @@ class TestCase(testcase.D8TestCase):
         self._base_path,
         self._expected_fail(),
         self._base_path + '.out',
-        self.suite.test_config.regenerate_expected_files)
+        self.test_config.regenerate_expected_files)
@@ -105,7 +105,7 @@ class TestCase(testcase.D8TestCase):
     else:
       mjsunit_files = [os.path.join(self.suite.root, "mjsunit.js")]

-    if self.suite.framework_name == 'num_fuzzer':
+    if self.framework_name == 'num_fuzzer':
       mjsunit_files.append(os.path.join(self.suite.root, "mjsunit_numfuzz.js"))

     self._source_files = files
@@ -128,10 +128,10 @@ class TestCase(testcase.D8TestCase):

   def _get_files_params(self):
     files = list(self._source_files)
-    if not self._test_config.no_harness:
+    if not self.test_config.no_harness:
       files += self._mjsunit_files
     files += self._files_suffix
-    if self._test_config.isolates:
+    if self.test_config.isolates:
       files += ['--isolate'] + files

     return files
@@ -190,8 +190,7 @@ class CombinedTest(testcase.D8TestCase):
   passed as arguments.
   """
   def __init__(self, name, tests):
-    super(CombinedTest, self).__init__(tests[0].suite, '', name,
-                                       tests[0]._test_config)
+    super(CombinedTest, self).__init__(tests[0].suite, '', name)
     self._tests = tests

   def _prepare_outcomes(self, force_update=True):
@@ -81,7 +81,7 @@ class TestCase(testcase.D8TestCase):

   def _get_files_params(self):
     files = self._source_files
-    if self._test_config.isolates:
+    if self.test_config.isolates:
       files = files + ['--isolate'] + files
     return files
@@ -528,8 +528,7 @@ class BaseTestRunner(object):
       if self.options.verbose:
         print('>>> Loading test suite: %s' % name)
       suite = testsuite.TestSuite.Load(
-          ctx, os.path.join(self.options.test_root, name), test_config,
-          self.framework_name)
+          ctx, os.path.join(self.options.test_root, name), test_config)

       if self._is_testsuite_supported(suite):
         tests = suite.load_tests_from_disk(variables)
@@ -659,17 +658,21 @@ class BaseTestRunner(object):
     return [] # pragma: no cover

   def _create_test_config(self):
+    shard_id, shard_count = self.options.shard_info
     timeout = self.build_config.timeout_scalefactor(
         self.options.timeout * self.mode_options.timeout_scalefactor)
     return TestConfig(
         command_prefix=self.options.command_prefix,
         extra_flags=self.options.extra_flags,
+        framework_name=self.framework_name,
         isolates=self.options.isolates,
         mode_flags=self.mode_options.flags + self._runner_flags(),
         no_harness=self.options.no_harness,
         noi18n=self.build_config.no_i18n,
         random_seed=self.options.random_seed,
         run_skipped=self.options.run_skipped,
+        shard_count=shard_count,
+        shard_id=shard_id,
         shell_dir=self.outdir,
         timeout=timeout,
         verbose=self.options.verbose,
@@ -115,9 +115,7 @@ class TestLoader(object):

   def _create_test(self, path, suite, **kwargs):
     """Converts paths into test objects using the given options"""
-    return self.test_class(suite, path, self._path_to_name(path),
-                           self.test_config, self.suite.framework_name,
-                           **kwargs)
+    return self.test_class(suite, path, self._path_to_name(path), **kwargs)

   def list_tests(self):
     """Loads and returns the test objects for a TestSuite"""
@@ -248,22 +246,25 @@ def _load_testsuite_module(name, root):

 class TestSuite(object):
   @staticmethod
-  def Load(ctx, root, test_config, framework_name):
+  def Load(ctx, root, test_config):
     name = root.split(os.path.sep)[-1]
     with _load_testsuite_module(name, root) as module:
-      return module.TestSuite(ctx, name, root, test_config, framework_name)
+      return module.TestSuite(ctx, name, root, test_config)

-  def __init__(self, ctx, name, root, test_config, framework_name):
+  def __init__(self, ctx, name, root, test_config):
     self.name = name # string
     self.root = root # string containing path
     self.test_config = test_config
-    self.framework_name = framework_name # name of the test runner impl
     self.tests = None # list of TestCase objects
     self.statusfile = None

     self._test_loader = self._test_loader_class()(ctx, self, self._test_class(),
                                                   self.test_config, self.root)

+  @property
+  def framework_name(self):
+    return self.test_config.framework_name
+
   def status_file(self):
     return "%s/%s.status" % (self.root, self.name)
@@ -26,20 +26,22 @@ class TestSuiteTest(unittest.TestCase):
     self.test_config = TestConfig(
         command_prefix=[],
         extra_flags=[],
+        framework_name='standard_runner',
         isolates=False,
         mode_flags=[],
         no_harness=False,
         noi18n=False,
         random_seed=0,
         run_skipped=False,
+        shard_count=1,
+        shard_id=0,
         shell_dir='fake_testsuite/fake_d8',
         timeout=10,
         verbose=False,
     )

     self.suite = TestSuite.Load(
-        DefaultOSContext(PosixCommand), self.test_root, self.test_config,
-        "standard_runner")
+        DefaultOSContext(PosixCommand), self.test_root, self.test_config)

   def testLoadingTestSuites(self):
     self.assertEqual(self.suite.name, "fake_testsuite")
@@ -79,7 +79,7 @@ def read_file(file):

 class TestCase(object):

-  def __init__(self, suite, path, name, test_config, framework_name):
+  def __init__(self, suite, path, name):
     self.suite = suite # TestSuite object

     self.path = path # string, e.g. 'div-mod', 'test-api/foo'
@@ -95,9 +95,6 @@ class TestCase(object):
     self.processor = DuckProcessor()
     self.procid = '%s/%s' % (self.suite.name, self.name) # unique id
     self.keep_output = False # Can output of this test be dropped
-
-    # Test config contains information needed to build the command.
-    self._test_config = test_config
     self._random_seed = None # Overrides test config value if not None

     # Outcomes
@@ -107,8 +104,6 @@ class TestCase(object):
     self._statusfile_flags = None
     self.expected_failure_reason = None

-    self.framework_name = framework_name
-
     self._prepare_outcomes()

   def create_subtest(self, processor, subtest_id, variant=None, flags=None,
@@ -271,10 +266,26 @@ class TestCase(object):
           "INCOMPATIBLE_FLAGS_PER_EXTRA_FLAG[\"" + extra_flag + "\"]")
     return self._expected_outcomes

+  @property
+  def test_config(self):
+    return self.suite.test_config
+
+  @property
+  def framework_name(self):
+    return self.test_config.framework_name
+
+  @property
+  def shard_id(self):
+    return self.test_config.shard_id
+
+  @property
+  def shard_count(self):
+    return self.test_config.shard_count
+
   @property
   def do_skip(self):
     return (statusfile.SKIP in self._statusfile_outcomes and
-            not self.suite.test_config.run_skipped)
+            not self.test_config.run_skipped)

   @property
   def is_heavy(self):
@@ -357,10 +368,10 @@ class TestCase(object):

   @property
   def random_seed(self):
-    return self._random_seed or self._test_config.random_seed
+    return self._random_seed or self.test_config.random_seed

   def _get_extra_flags(self):
-    return self._test_config.extra_flags
+    return self.test_config.extra_flags

   def _get_variant_flags(self):
     return self.variant_flags
@@ -373,7 +384,7 @@ class TestCase(object):
     return self._statusfile_flags

   def _get_mode_flags(self):
-    return self._test_config.mode_flags
+    return self.test_config.mode_flags

   def _get_source_flags(self):
     return []
@@ -385,7 +396,7 @@ class TestCase(object):
     return []

   def _get_timeout(self, params):
-    timeout = self._test_config.timeout
+    timeout = self.test_config.timeout
     if "--jitless" in params:
       timeout *= 2
     if "--no-turbofan" in params:
@@ -406,12 +417,12 @@ class TestCase(object):

   def _create_cmd(self, ctx, shell, params, env, timeout):
     return ctx.command(
-        cmd_prefix=self._test_config.command_prefix,
-        shell=os.path.abspath(os.path.join(self._test_config.shell_dir, shell)),
+        cmd_prefix=self.test_config.command_prefix,
+        shell=os.path.abspath(os.path.join(self.test_config.shell_dir, shell)),
         args=params,
         env=env,
         timeout=timeout,
-        verbose=self._test_config.verbose,
+        verbose=self.test_config.verbose,
         resources_func=self._get_resources,
         handle_sigterm=True,
     )
@@ -21,9 +21,7 @@ class TestCaseTest(unittest.TestCase):
     test = TestCase(
         suite=FakeSuite(),
         path='far/away',
-        name='parent',
-        test_config=None,
-        framework_name='none')
+        name='parent')
     self.assertEqual(test.rdb_test_id, 'fakeSuite/parent')
     # provide by DuckProcessor
     self.assertEqual(test.processor.name, None)
@@ -9,18 +9,22 @@ class TestConfig(object):
   def __init__(self,
                command_prefix,
                extra_flags,
+               framework_name,
                isolates,
                mode_flags,
                no_harness,
                noi18n,
                random_seed,
                run_skipped,
+               shard_count,
+               shard_id,
                shell_dir,
                timeout,
                verbose,
                regenerate_expected_files=False):
     self.command_prefix = command_prefix
     self.extra_flags = extra_flags
+    self.framework_name = framework_name
     self.isolates = isolates
     self.mode_flags = mode_flags
     self.no_harness = no_harness
@@ -28,6 +32,8 @@ class TestConfig(object):
     # random_seed is always not None.
     self.random_seed = random_seed or random_utils.random_seed()
     self.run_skipped = run_skipped
+    self.shard_count = shard_count
+    self.shard_id = shard_id
     self.shell_dir = shell_dir
     self.timeout = timeout
     self.verbose = verbose
@@ -23,6 +23,8 @@
       "random_seed": 123,
       "result": "FAIL",
       "run": 1,
+      "shard_count": 1,
+      "shard_id": 0,
       "stderr": "",
       "stdout": "--test strawberries --random-seed=123 --nohard-abort --testing-d8-test-runner\n",
       "target_name": "d8_mocked.py",
@@ -51,6 +53,8 @@
       "random_seed": 123,
       "result": "FAIL",
       "run": 2,
+      "shard_count": 1,
+      "shard_id": 0,
       "stderr": "",
       "stdout": "--test strawberries --random-seed=123 --nohard-abort --testing-d8-test-runner\n",
       "target_name": "d8_mocked.py",
@@ -79,6 +83,8 @@
       "random_seed": 123,
       "result": "FAIL",
       "run": 3,
+      "shard_count": 1,
+      "shard_id": 0,
       "stderr": "",
       "stdout": "--test strawberries --random-seed=123 --nohard-abort --testing-d8-test-runner\n",
       "target_name": "d8_mocked.py",
@@ -107,6 +113,8 @@
       "random_seed": 123,
       "result": "FAIL",
       "run": 1,
+      "shard_count": 1,
+      "shard_id": 0,
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
@@ -131,6 +139,8 @@
       "random_seed": 123,
       "result": "FAIL",
       "run": 2,
+      "shard_count": 1,
+      "shard_id": 0,
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
@@ -155,6 +165,8 @@
       "random_seed": 123,
       "result": "FAIL",
       "run": 3,
+      "shard_count": 1,
+      "shard_id": 0,
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
@@ -22,6 +22,8 @@
       "random_seed": 123,
       "result": "FAIL",
       "run": 1,
+      "shard_count": 1,
+      "shard_id": 0,
       "stderr": "",
       "stdout": "bananaflakes --random-seed=123 --nohard-abort --testing-d8-test-runner\n",
       "target_name": "d8_mocked.py",
@@ -49,6 +51,8 @@
       "random_seed": 123,
       "result": "PASS",
       "run": 2,
+      "shard_count": 1,
+      "shard_id": 0,
       "stderr": "",
       "stdout": "bananaflakes --random-seed=123 --nohard-abort --testing-d8-test-runner\n",
       "target_name": "d8_mocked.py",
@@ -76,6 +80,8 @@
       "random_seed": 123,
       "result": "",
       "run": 2,
+      "shard_count": 1,
+      "shard_id": 0,
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
@@ -99,6 +105,8 @@
       "random_seed": 123,
       "result": "FAIL",
       "run": 1,
+      "shard_count": 1,
+      "shard_id": 0,
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
@@ -20,7 +20,7 @@ def radix_hash(capacity, key):
 class ShardProc(base.TestProcFilter):
   @staticmethod
   def create(options):
-    myid, count = options.shard_info()
+    myid, count = options.shard_info
     if count == 1:
       return None
     return ShardProc(myid, count)
@@ -56,15 +56,17 @@ def kill_processes_linux():

 def base_test_record(test, result, run):
   record = {
-      'name': test.full_name,
-      'flags': result.cmd.args,
-      'run': run + 1,
       'expected': test.expected_outcomes,
+      'flags': result.cmd.args,
+      'framework_name': test.framework_name,
+      'name': test.full_name,
       'random_seed': test.random_seed,
+      'run': run + 1,
+      'shard_id': test.shard_id,
+      'shard_count': test.shard_count,
       'target_name': test.get_shell(),
       'variant': test.variant,
       'variant_flags': test.variant_flags,
-      'framework_name': test.framework_name,
   }
   if result.output:
     record.update(
@@ -5,8 +5,12 @@
 import optparse
 import os
 import random

+from functools import cached_property
+
 from testrunner.testproc import fuzzer


 class AugmentedOptions(optparse.Values):
   """This class will augment exiting options object with
   a couple of convenient methods and properties.
@@ -21,6 +25,7 @@ class AugmentedOptions(optparse.Values):
     self._fuzzer_rng = random.Random(self.fuzzer_random_seed)
     return self._fuzzer_rng

+  @cached_property
   def shard_info(self):
     """
     Returns pair: