[test] Include shard info in test records

This allows the infra side to link more easily to the respective
shard on a test failure. Without it, it is a hassle to find out on
which shard the failing test ran.
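
For illustration, a minimal sketch of how a consumer of the resulting
JSON test records could surface this (the helper below is hypothetical;
the record keys match base_test_record in the diff):

    # Hypothetical helper: point at the shard a failing test ran on.
    def describe_shard(record):
      return 'shard %d of %d' % (record['shard_id'] + 1, record['shard_count'])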

This also simplifies how the global test_config stores information.
Some previously duplicated information is now shared through
properties when the owning object is already present.
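
Roughly, the sharing pattern is (a simplified sketch of the properties
added in the diff below):

    class TestCase(object):
      @property
      def test_config(self):
        # No longer stored per test; looked up on the owning suite.
        return self.suite.test_config

      @property
      def framework_name(self):
        return self.test_config.framework_name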

Bug: v8:13681
Change-Id: I52f01a4fac74627575d80f25923faba99eb6a1fb
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4181030
Reviewed-by: Liviu Rau <liviurau@google.com>
Commit-Queue: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85429}
Michael Achenbach 2023-01-20 17:22:22 +01:00 committed by V8 LUCI CQ
parent 7a3a6e88bd
commit 29b3bd3826
18 changed files with 281 additions and 230 deletions

View File

@@ -55,7 +55,7 @@ class TestCase(testcase.D8TestCase):
   def _get_files_params(self):
     files = self._source_files
-    if self._test_config.isolates:
+    if self.test_config.isolates:
       files = files + ['--isolate'] + files
     return files

View File

@@ -23,7 +23,11 @@ class PYTestCase(testcase.TestCase):
     return super(PYTestCase, self).get_command()

   def _get_cmd_params(self):
-    return self._get_files_params() + ['--', os.path.join(self._test_config.shell_dir, 'd8')] + self._get_source_flags()
+    return (
+        self._get_files_params() +
+        ['--', os.path.join(self.test_config.shell_dir, 'd8')] +
+        self._get_source_flags()
+    )

   def _get_shell_flags(self):
     return []

View File

@@ -67,4 +67,4 @@ class TestCase(testcase.TestCase):
     return outproc.ExpectedOutProc(
         self.expected_outcomes,
         os.path.join(self.suite.root, self.path) + EXPECTED_SUFFIX,
-        self.suite.test_config.regenerate_expected_files)
+        self.test_config.regenerate_expected_files)

View File

@@ -80,7 +80,7 @@ class TestCase(testcase.D8TestCase):
       ]
     ]
-    if self._test_config.isolates:
+    if self.test_config.isolates:
       files += ['--isolate'] + files
     return files

View File

@@ -98,4 +98,4 @@ class TestCase(testcase.D8TestCase):
         self._base_path,
         self._expected_fail(),
         self._base_path + '.out',
-        self.suite.test_config.regenerate_expected_files)
+        self.test_config.regenerate_expected_files)

View File

@@ -105,7 +105,7 @@ class TestCase(testcase.D8TestCase):
     else:
       mjsunit_files = [os.path.join(self.suite.root, "mjsunit.js")]
-    if self.suite.framework_name == 'num_fuzzer':
+    if self.framework_name == 'num_fuzzer':
       mjsunit_files.append(os.path.join(self.suite.root, "mjsunit_numfuzz.js"))

     self._source_files = files
@@ -128,10 +128,10 @@ class TestCase(testcase.D8TestCase):
   def _get_files_params(self):
     files = list(self._source_files)
-    if not self._test_config.no_harness:
+    if not self.test_config.no_harness:
       files += self._mjsunit_files
     files += self._files_suffix
-    if self._test_config.isolates:
+    if self.test_config.isolates:
       files += ['--isolate'] + files
     return files
@@ -190,8 +190,7 @@ class CombinedTest(testcase.D8TestCase):
   passed as arguments.
   """

   def __init__(self, name, tests):
-    super(CombinedTest, self).__init__(tests[0].suite, '', name,
-                                       tests[0]._test_config)
+    super(CombinedTest, self).__init__(tests[0].suite, '', name)
     self._tests = tests

   def _prepare_outcomes(self, force_update=True):

View File

@@ -81,7 +81,7 @@ class TestCase(testcase.D8TestCase):
   def _get_files_params(self):
     files = self._source_files
-    if self._test_config.isolates:
+    if self.test_config.isolates:
       files = files + ['--isolate'] + files
     return files

View File

@@ -528,8 +528,7 @@ class BaseTestRunner(object):
       if self.options.verbose:
         print('>>> Loading test suite: %s' % name)
       suite = testsuite.TestSuite.Load(
-          ctx, os.path.join(self.options.test_root, name), test_config,
-          self.framework_name)
+          ctx, os.path.join(self.options.test_root, name), test_config)

       if self._is_testsuite_supported(suite):
         tests = suite.load_tests_from_disk(variables)
@@ -659,17 +658,21 @@ class BaseTestRunner(object):
     return [] # pragma: no cover

   def _create_test_config(self):
+    shard_id, shard_count = self.options.shard_info
     timeout = self.build_config.timeout_scalefactor(
         self.options.timeout * self.mode_options.timeout_scalefactor)
     return TestConfig(
         command_prefix=self.options.command_prefix,
         extra_flags=self.options.extra_flags,
+        framework_name=self.framework_name,
         isolates=self.options.isolates,
         mode_flags=self.mode_options.flags + self._runner_flags(),
         no_harness=self.options.no_harness,
         noi18n=self.build_config.no_i18n,
         random_seed=self.options.random_seed,
         run_skipped=self.options.run_skipped,
+        shard_count=shard_count,
+        shard_id=shard_id,
         shell_dir=self.outdir,
         timeout=timeout,
         verbose=self.options.verbose,

View File

@@ -115,9 +115,7 @@ class TestLoader(object):
   def _create_test(self, path, suite, **kwargs):
     """Converts paths into test objects using the given options"""
-    return self.test_class(suite, path, self._path_to_name(path),
-                           self.test_config, self.suite.framework_name,
-                           **kwargs)
+    return self.test_class(suite, path, self._path_to_name(path), **kwargs)

   def list_tests(self):
     """Loads and returns the test objects for a TestSuite"""
@@ -248,22 +246,25 @@ def _load_testsuite_module(name, root):
 class TestSuite(object):
   @staticmethod
-  def Load(ctx, root, test_config, framework_name):
+  def Load(ctx, root, test_config):
     name = root.split(os.path.sep)[-1]
     with _load_testsuite_module(name, root) as module:
-      return module.TestSuite(ctx, name, root, test_config, framework_name)
+      return module.TestSuite(ctx, name, root, test_config)

-  def __init__(self, ctx, name, root, test_config, framework_name):
+  def __init__(self, ctx, name, root, test_config):
     self.name = name # string
     self.root = root # string containing path
     self.test_config = test_config
-    self.framework_name = framework_name # name of the test runner impl
     self.tests = None # list of TestCase objects
     self.statusfile = None
     self._test_loader = self._test_loader_class()(ctx, self, self._test_class(),
                                                   self.test_config, self.root)

+  @property
+  def framework_name(self):
+    return self.test_config.framework_name
+
   def status_file(self):
     return "%s/%s.status" % (self.root, self.name)

View File

@@ -26,20 +26,22 @@ class TestSuiteTest(unittest.TestCase):
     self.test_config = TestConfig(
         command_prefix=[],
         extra_flags=[],
+        framework_name='standard_runner',
         isolates=False,
         mode_flags=[],
         no_harness=False,
         noi18n=False,
         random_seed=0,
         run_skipped=False,
+        shard_count=1,
+        shard_id=0,
         shell_dir='fake_testsuite/fake_d8',
         timeout=10,
         verbose=False,
     )

     self.suite = TestSuite.Load(
-        DefaultOSContext(PosixCommand), self.test_root, self.test_config,
-        "standard_runner")
+        DefaultOSContext(PosixCommand), self.test_root, self.test_config)

   def testLoadingTestSuites(self):
     self.assertEqual(self.suite.name, "fake_testsuite")

View File

@@ -79,7 +79,7 @@ def read_file(file):
 class TestCase(object):

-  def __init__(self, suite, path, name, test_config, framework_name):
+  def __init__(self, suite, path, name):
     self.suite = suite # TestSuite object
     self.path = path # string, e.g. 'div-mod', 'test-api/foo'
@@ -95,9 +95,6 @@ class TestCase(object):
     self.processor = DuckProcessor()
     self.procid = '%s/%s' % (self.suite.name, self.name) # unique id
     self.keep_output = False # Can output of this test be dropped
-    # Test config contains information needed to build the command.
-    self._test_config = test_config
-
     self._random_seed = None # Overrides test config value if not None

     # Outcomes
@@ -107,8 +104,6 @@ class TestCase(object):
     self._statusfile_flags = None
     self.expected_failure_reason = None

-    self.framework_name = framework_name
-
     self._prepare_outcomes()

   def create_subtest(self, processor, subtest_id, variant=None, flags=None,
@@ -271,10 +266,26 @@ class TestCase(object):
             "INCOMPATIBLE_FLAGS_PER_EXTRA_FLAG[\"" + extra_flag + "\"]")
     return self._expected_outcomes

+  @property
+  def test_config(self):
+    return self.suite.test_config
+
+  @property
+  def framework_name(self):
+    return self.test_config.framework_name
+
+  @property
+  def shard_id(self):
+    return self.test_config.shard_id
+
+  @property
+  def shard_count(self):
+    return self.test_config.shard_count
+
   @property
   def do_skip(self):
     return (statusfile.SKIP in self._statusfile_outcomes and
-            not self.suite.test_config.run_skipped)
+            not self.test_config.run_skipped)

   @property
   def is_heavy(self):
@@ -357,10 +368,10 @@ class TestCase(object):
   @property
   def random_seed(self):
-    return self._random_seed or self._test_config.random_seed
+    return self._random_seed or self.test_config.random_seed

   def _get_extra_flags(self):
-    return self._test_config.extra_flags
+    return self.test_config.extra_flags

   def _get_variant_flags(self):
     return self.variant_flags
@@ -373,7 +384,7 @@ class TestCase(object):
     return self._statusfile_flags

   def _get_mode_flags(self):
-    return self._test_config.mode_flags
+    return self.test_config.mode_flags

   def _get_source_flags(self):
     return []
@@ -385,7 +396,7 @@ class TestCase(object):
     return []

   def _get_timeout(self, params):
-    timeout = self._test_config.timeout
+    timeout = self.test_config.timeout
     if "--jitless" in params:
       timeout *= 2
     if "--no-turbofan" in params:
@@ -406,12 +417,12 @@ class TestCase(object):
   def _create_cmd(self, ctx, shell, params, env, timeout):
     return ctx.command(
-        cmd_prefix=self._test_config.command_prefix,
-        shell=os.path.abspath(os.path.join(self._test_config.shell_dir, shell)),
+        cmd_prefix=self.test_config.command_prefix,
+        shell=os.path.abspath(os.path.join(self.test_config.shell_dir, shell)),
         args=params,
         env=env,
         timeout=timeout,
-        verbose=self._test_config.verbose,
+        verbose=self.test_config.verbose,
         resources_func=self._get_resources,
         handle_sigterm=True,
     )

View File

@@ -21,9 +21,7 @@ class TestCaseTest(unittest.TestCase):
     test = TestCase(
         suite=FakeSuite(),
         path='far/away',
-        name='parent',
-        test_config=None,
-        framework_name='none')
+        name='parent')
     self.assertEqual(test.rdb_test_id, 'fakeSuite/parent')
     # provide by DuckProcessor
     self.assertEqual(test.processor.name, None)

View File

@@ -9,18 +9,22 @@ class TestConfig(object):
   def __init__(self,
               command_prefix,
               extra_flags,
+              framework_name,
               isolates,
               mode_flags,
               no_harness,
               noi18n,
               random_seed,
               run_skipped,
+              shard_count,
+              shard_id,
               shell_dir,
               timeout,
               verbose,
               regenerate_expected_files=False):
     self.command_prefix = command_prefix
     self.extra_flags = extra_flags
+    self.framework_name = framework_name
     self.isolates = isolates
     self.mode_flags = mode_flags
     self.no_harness = no_harness
@@ -28,6 +32,8 @@ class TestConfig(object):
     # random_seed is always not None.
     self.random_seed = random_seed or random_utils.random_seed()
     self.run_skipped = run_skipped
+    self.shard_count = shard_count
+    self.shard_id = shard_id
     self.shell_dir = shell_dir
     self.timeout = timeout
     self.verbose = verbose

View File

@@ -1,164 +1,176 @@
 {
   "duration_mean": 1,
   "results": [
     {
       "command": "/usr/bin/python out/build/d8_mocked.py --test strawberries --random-seed=123 --nohard-abort --testing-d8-test-runner",
       "crash_state": "",
       "crash_type": "",
       "duration": 1,
       "error_details": "+Mock diff",
       "exit_code": 1,
       "expected": [
         "PASS"
       ],
       "flags": [
         "--test",
         "strawberries",
         "--random-seed=123",
         "--nohard-abort",
         "--testing-d8-test-runner"
       ],
       "framework_name": "standard_runner",
       "name": "sweet/strawberries",
       "random_seed": 123,
       "result": "FAIL",
       "run": 1,
+      "shard_count": 1,
+      "shard_id": 0,
       "stderr": "",
       "stdout": "--test strawberries --random-seed=123 --nohard-abort --testing-d8-test-runner\n",
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
     },
     {
       "command": "/usr/bin/python out/build/d8_mocked.py --test strawberries --random-seed=123 --nohard-abort --testing-d8-test-runner",
       "crash_state": "",
       "crash_type": "",
       "duration": 1,
       "error_details": "+Mock diff",
       "exit_code": 1,
       "expected": [
         "PASS"
       ],
       "flags": [
         "--test",
         "strawberries",
         "--random-seed=123",
         "--nohard-abort",
         "--testing-d8-test-runner"
       ],
       "framework_name": "standard_runner",
       "name": "sweet/strawberries",
       "random_seed": 123,
       "result": "FAIL",
       "run": 2,
+      "shard_count": 1,
+      "shard_id": 0,
       "stderr": "",
       "stdout": "--test strawberries --random-seed=123 --nohard-abort --testing-d8-test-runner\n",
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
     },
     {
       "command": "/usr/bin/python out/build/d8_mocked.py --test strawberries --random-seed=123 --nohard-abort --testing-d8-test-runner",
       "crash_state": "",
       "crash_type": "",
       "duration": 1,
       "error_details": "+Mock diff",
       "exit_code": 1,
       "expected": [
         "PASS"
       ],
       "flags": [
         "--test",
         "strawberries",
         "--random-seed=123",
         "--nohard-abort",
         "--testing-d8-test-runner"
       ],
       "framework_name": "standard_runner",
       "name": "sweet/strawberries",
       "random_seed": 123,
       "result": "FAIL",
       "run": 3,
+      "shard_count": 1,
+      "shard_id": 0,
       "stderr": "",
       "stdout": "--test strawberries --random-seed=123 --nohard-abort --testing-d8-test-runner\n",
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
     }
   ],
   "slowest_tests": [
     {
       "command": "/usr/bin/python out/build/d8_mocked.py --test strawberries --random-seed=123 --nohard-abort --testing-d8-test-runner",
       "duration": 1,
       "exit_code": 1,
       "expected": [
         "PASS"
       ],
       "flags": [
         "--test",
         "strawberries",
         "--random-seed=123",
         "--nohard-abort",
         "--testing-d8-test-runner"
       ],
       "framework_name": "standard_runner",
       "marked_slow": true,
       "name": "sweet/strawberries",
       "random_seed": 123,
       "result": "FAIL",
       "run": 1,
+      "shard_count": 1,
+      "shard_id": 0,
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
     },
     {
       "command": "/usr/bin/python out/build/d8_mocked.py --test strawberries --random-seed=123 --nohard-abort --testing-d8-test-runner",
       "duration": 1,
       "exit_code": 1,
       "expected": [
         "PASS"
       ],
       "flags": [
         "--test",
         "strawberries",
         "--random-seed=123",
         "--nohard-abort",
         "--testing-d8-test-runner"
       ],
       "framework_name": "standard_runner",
       "marked_slow": true,
       "name": "sweet/strawberries",
       "random_seed": 123,
       "result": "FAIL",
       "run": 2,
+      "shard_count": 1,
+      "shard_id": 0,
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
     },
     {
       "command": "/usr/bin/python out/build/d8_mocked.py --test strawberries --random-seed=123 --nohard-abort --testing-d8-test-runner",
       "duration": 1,
       "exit_code": 1,
       "expected": [
         "PASS"
       ],
       "flags": [
         "--test",
         "strawberries",
         "--random-seed=123",
         "--nohard-abort",
         "--testing-d8-test-runner"
       ],
       "framework_name": "standard_runner",
       "marked_slow": true,
       "name": "sweet/strawberries",
       "random_seed": 123,
       "result": "FAIL",
       "run": 3,
+      "shard_count": 1,
+      "shard_id": 0,
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
     }
   ],
   "test_total": 3
 }

View File

@@ -1,108 +1,116 @@
 {
   "duration_mean": 1,
   "results": [
     {
       "command": "/usr/bin/python out/build/d8_mocked.py bananaflakes --random-seed=123 --nohard-abort --testing-d8-test-runner",
       "crash_state": "",
       "crash_type": "",
       "duration": 1,
       "error_details": null,
       "exit_code": 1,
       "expected": [
         "PASS"
       ],
       "flags": [
         "bananaflakes",
         "--random-seed=123",
         "--nohard-abort",
         "--testing-d8-test-runner"
       ],
       "framework_name": "standard_runner",
       "name": "sweet/bananaflakes",
       "random_seed": 123,
       "result": "FAIL",
       "run": 1,
+      "shard_count": 1,
+      "shard_id": 0,
       "stderr": "",
       "stdout": "bananaflakes --random-seed=123 --nohard-abort --testing-d8-test-runner\n",
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
     },
     {
       "command": "/usr/bin/python out/build/d8_mocked.py bananaflakes --random-seed=123 --nohard-abort --testing-d8-test-runner",
       "crash_state": "",
       "crash_type": "",
       "duration": 1,
       "error_details": null,
       "exit_code": 0,
       "expected": [
         "PASS"
       ],
       "flags": [
         "bananaflakes",
         "--random-seed=123",
         "--nohard-abort",
         "--testing-d8-test-runner"
       ],
       "framework_name": "standard_runner",
       "name": "sweet/bananaflakes",
       "random_seed": 123,
       "result": "PASS",
       "run": 2,
+      "shard_count": 1,
+      "shard_id": 0,
       "stderr": "",
       "stdout": "bananaflakes --random-seed=123 --nohard-abort --testing-d8-test-runner\n",
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
     }
   ],
   "slowest_tests": [
     {
       "command": "/usr/bin/python out/build/d8_mocked.py bananaflakes --random-seed=123 --nohard-abort --testing-d8-test-runner",
       "duration": 1,
       "exit_code": 0,
       "expected": [
         "PASS"
       ],
       "flags": [
         "bananaflakes",
         "--random-seed=123",
         "--nohard-abort",
         "--testing-d8-test-runner"
       ],
       "framework_name": "standard_runner",
       "marked_slow": false,
       "name": "sweet/bananaflakes",
       "random_seed": 123,
       "result": "",
       "run": 2,
+      "shard_count": 1,
+      "shard_id": 0,
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
     },
     {
       "command": "/usr/bin/python out/build/d8_mocked.py bananaflakes --random-seed=123 --nohard-abort --testing-d8-test-runner",
       "duration": 1,
       "exit_code": 1,
       "expected": [
         "PASS"
       ],
       "flags": [
         "bananaflakes",
         "--random-seed=123",
         "--nohard-abort",
         "--testing-d8-test-runner"
       ],
       "framework_name": "standard_runner",
       "marked_slow": false,
       "name": "sweet/bananaflakes",
       "random_seed": 123,
       "result": "FAIL",
       "run": 1,
+      "shard_count": 1,
+      "shard_id": 0,
       "target_name": "d8_mocked.py",
       "variant": "default",
       "variant_flags": []
     }
   ],
   "test_total": 2
 }

View File

@@ -20,7 +20,7 @@ def radix_hash(capacity, key):
 class ShardProc(base.TestProcFilter):
   @staticmethod
   def create(options):
-    myid, count = options.shard_info()
+    myid, count = options.shard_info
     if count == 1:
       return None
     return ShardProc(myid, count)

View File

@@ -56,15 +56,17 @@ def kill_processes_linux():

 def base_test_record(test, result, run):
   record = {
-      'name': test.full_name,
-      'flags': result.cmd.args,
-      'run': run + 1,
       'expected': test.expected_outcomes,
+      'flags': result.cmd.args,
+      'framework_name': test.framework_name,
+      'name': test.full_name,
       'random_seed': test.random_seed,
+      'run': run + 1,
+      'shard_id': test.shard_id,
+      'shard_count': test.shard_count,
       'target_name': test.get_shell(),
       'variant': test.variant,
       'variant_flags': test.variant_flags,
-      'framework_name': test.framework_name,
   }
   if result.output:
     record.update(

View File

@@ -5,8 +5,12 @@
 import optparse
 import os
 import random
+
+from functools import cached_property
+
 from testrunner.testproc import fuzzer

 class AugmentedOptions(optparse.Values):
   """This class will augment exiting options object with
   a couple of convenient methods and properties.
@@ -21,6 +25,7 @@ class AugmentedOptions(optparse.Values):
       self._fuzzer_rng = random.Random(self.fuzzer_random_seed)
     return self._fuzzer_rng

+  @cached_property
   def shard_info(self):
     """
     Returns pair: