[test][cleanup] Revive --time, speed up some tests

This reimplements the "--time" option of run-tests.py to print the
20 slowest tests, on top of the json_test_results infrastructure,
just as the bots use it.
Additionally, this CL speeds up a number of slow tests.

Bug: v8:9396
Change-Id: I40797d2c8c3bfdd310b72f15cd1a035844b7c6f3
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1803635
Commit-Queue: Jakob Kummerow <jkummerow@chromium.org>
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63786}
Jakob Kummerow 2019-09-16 12:43:46 +02:00 committed by Commit Bot
parent 2804a4a9f1
commit b823bf1ba6
14 changed files with 81 additions and 17 deletions
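As an orientation aid (an editor's sketch, not part of the CL): --time piggybacks on the file written via --json-test-results, so the core of the feature amounts to reading that file back and reporting the slowest entries. The field names follow the _print_durations() code added to standard_runner.py below; the limit of 20 entries is assumed to come from the existing JSON writer.

import json

def print_slowest(json_path, count=20):
  # The JSON results file holds a list of result records; the first record
  # carries a 'slowest_tests' list with name/flags/command/duration entries.
  with open(json_path) as f:
    output = json.load(f)[0]
  for test in output['slowest_tests'][:count]:
    # Tests not already marked SLOW in a status file are flagged with '*'.
    marker = ' *' if test.get('marked_slow') is False else ''
    print('%9.3fs  %s%s' % (test['duration'], test['name'], marker))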

View File

@@ -36,9 +36,6 @@
# https://code.google.com/p/v8/issues/detail?id=9312
'regress-9312': [FAIL],
- # Slow tests.
- 'regress-903566': [PASS, SLOW],
}], # ALWAYS
['variant == no_wasm_traps', {
@@ -71,12 +68,6 @@
'relative-time-format/resolved-options-nu-extended': [FAIL],
}], # 'system == android'
##############################################################################
- ['variant == stress', {
-   # Too slow.
-   'regress-903566': [SKIP],
- }], # 'variant == stress'
##############################################################################
['variant == jitless and not embedded_builtins', {
'*': [SKIP],

View File

@@ -13,7 +13,9 @@ let arr = ["a","b","c"];
// Test under no HasHoleyElements();
assertFalse(%HasHoleyElements(arr));
assertDoesNotThrow(()=>(new Intl.ListFormat()).format(arr));
- for (var i = 0; i < 10000; i++) {
+ // ICU uses bubblesort, so keep the array reasonably small (as of mid-2019:
+ // 100 entries -> 1ms, 1,000 entries -> 64ms, 10,000 entries -> 5s).
+ for (var i = 0; i < 100; i++) {
arr.push("xx");
}
assertFalse(%HasHoleyElements(arr));

View File

@@ -3,6 +3,8 @@
// found in the LICENSE file.
// Flags: --allow-natives-syntax --noturbo-inlining --noturbo-verify-allocation
+ // This test invokes optimization manually, no need for stress modes:
+ // Flags: --nostress-opt --noalways-opt
// Ensure that very large stack frames can be used successfully.
// The flag --noturbo-verify-allocation is to make this run a little faster; it

View File

@@ -3,6 +3,8 @@
// found in the LICENSE file.
// Test for conflicting variable bindings.
+ // Stress-testing this test is very slow and provides no useful coverage.
+ // Flags: --nostress-opt --noalways-opt
function CheckException(e) {
var string = e.toString();

View File

@@ -29,6 +29,9 @@
// when using an immutable binding in an assigment or with
// prefix/postfix decrement/increment operators.
+ // Optimization stress is not useful for early syntax errors.
+ // Flags: --nostress-opt --noalways-opt
"use strict";
const decls = [
@@ -135,7 +138,8 @@ let usecontexts = [
function Test(program, error) {
program = "'use strict'; " + program;
try {
- print(program, " // throw " + error.name);
+ // If you need to debug this test, enable the following line:
+ // print(program, " // throw " + error.name);
eval(program);
} catch (e) {
assertInstanceof(e, error);

View File

@@ -3,6 +3,8 @@
// found in the LICENSE file.
// Flags: --allow-natives-syntax
+ // This gets very slow with stress flags, and triggers optimization anyway:
+ // Flags: --nostress-opt --noalways-opt
(function testLargeClassesProperties(){
// This is to test for dictionary mode when there more than

View File

@@ -26,6 +26,8 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax
+ // This test manually triggers optimization, no need for stress modes.
+ // Flags: --nostress-opt --noalways-opt
// Different ways to create an object.

View File

@@ -3,6 +3,8 @@
// found in the LICENSE file.
// Flags: --allow-natives-syntax --nostress-incremental-marking
+ // Stress-testing this test is very slow and provides no useful coverage.
+ // Flags: --nostress-opt --noalways-opt
// This test uses a lot of memory and fails with flaky OOM when run
// with --stress-incremental-marking on TSAN.

View File

@@ -10,4 +10,4 @@ for (var i = 0; i < 17; i++) {
} catch (e) {
}
}
- s.replace(/[a]/g);
+ s.replace(/a/g);

View File

@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
- // Flags: --verify-heap
+ // Flags: --verify-heap --expose-gc
function __f_3(f) {
arguments.__defineGetter__('length', f);
@@ -13,6 +13,7 @@ function __f_4() { return "boom"; }
__v_4 = [];
__v_13 = "";
- for (var i = 0; i < 12800; ++i) {
+ for (var i = 0; i < 128; ++i) {
__v_13 += __v_4.__proto__ = __f_3(__f_4);
}
+ gc();

View File

@@ -3,6 +3,8 @@
// found in the LICENSE file.
// Flags: --expose-gc
+ // Stress-testing this test is very slow and doesn't provide useful coverage.
+ // Flags: --nostress-opt --noalways-opt
const f = eval(`(function f(i) {
if (i == 0) {

View File

@@ -3,6 +3,8 @@
// found in the LICENSE file.
// Flags: --allow-natives-syntax --noenable-slow-asserts
+ // This test triggers optimization manually, no stress mode necessary.
+ // Flags: --nostress-opt --noalways-opt
// This call ensures that TurboFan won't inline array constructors.
Array(2 ** 30);

View File

@@ -30,7 +30,8 @@ USE_PTY = "linux" in sys.platform
if USE_PTY:
import pty
BUILD_TARGETS_TEST = ["d8", "cctest", "unittests"]
BUILD_TARGETS_TEST = ["d8", "cctest", "inspector-test", "unittests",
"wasm_api_tests"]
BUILD_TARGETS_ALL = ["all"]
# All arches that this script understands.

View File

@@ -8,8 +8,11 @@
from __future__ import print_function
from functools import reduce
+ import datetime
+ import json
import os
import sys
+ import tempfile
# Adds testrunner to the path hence it has to be imported at the beggining.
import base_runner
@@ -120,6 +123,10 @@ class StandardTestRunner(base_runner.BaseTestRunner):
'with test processors: 0 means infinite '
'generation.')
# Extra features.
+ parser.add_option('--time', help='Print timing information after running',
+                   default=False, action='store_true')
# Noop
parser.add_option('--cfi-vptr',
help='Run tests with UBSAN cfi_vptr option.',
@@ -146,8 +153,6 @@ class StandardTestRunner(base_runner.BaseTestRunner):
default=False, action='store_true')
parser.add_option('--flakiness-results',
help='Path to a file for storing flakiness json.')
- parser.add_option('--time', help='Print timing information after running',
-                   default=False, action='store_true')
parser.add_option('--warn-unused', help='Report unused rules',
default=False, action='store_true')
parser.add_option('--report', default=False, action='store_true',
@@ -231,6 +236,14 @@
# TODO(machenbach): uncomment after infra side lands.
# base_runner.TEST_MAP['d8_default'].remove('intl')
+ if options.time and not options.json_test_results:
+   # We retrieve the slowest tests from the JSON output file, so create
+   # a temporary output file (which will automatically get deleted on exit)
+   # if the user didn't specify one.
+   self._temporary_json_output_file = tempfile.NamedTemporaryFile(
+       prefix="v8-test-runner-")
+   options.json_test_results = self._temporary_json_output_file.name
def _parse_variants(self, aliases_str):
# Use developer defaults if no variant was specified.
aliases_str = aliases_str or 'dev'
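A side note on the fallback above (editor's sketch, not part of the CL): tempfile.NamedTemporaryFile creates the file immediately and, with the default delete=True, removes it when the object is closed (including at interpreter shutdown), which is why keeping a reference on self is enough to preserve the temporary results file for the whole run.

import os
import tempfile

# Demonstrates the lifetime the runner relies on: the file exists while the
# NamedTemporaryFile object is open and disappears once it is closed.
tmp = tempfile.NamedTemporaryFile(prefix="v8-test-runner-")
path = tmp.name
print(os.path.exists(path))   # True while `tmp` is open
tmp.close()
print(os.path.exists(path))   # False: closing the object deleted the file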
@@ -341,9 +354,47 @@
if not results.total:
exit_code = utils.EXIT_CODE_NO_TESTS
+ if options.time:
+   self._print_durations(options)
# Indicate if a SIGINT or SIGTERM happened.
return max(exit_code, sigproc.exit_code)
+ def _print_durations(self, options):
+   def format_duration(duration_in_seconds):
+     duration = datetime.timedelta(seconds=duration_in_seconds)
+     time = (datetime.datetime.min + duration).time()
+     return time.strftime('%M:%S:') + '%03i' % int(time.microsecond / 1000)
+   def _duration_results_text(test):
+     return [
+       'Test: %s' % test['name'],
+       'Flags: %s' % ' '.join(test['flags']),
+       'Command: %s' % test['command'],
+       'Duration: %s' % format_duration(test['duration']),
+     ]
+   assert os.path.exists(options.json_test_results)
+   complete_results = []
+   with open(options.json_test_results, "r") as f:
+     complete_results = json.loads(f.read())
+   output = complete_results[0]
+   lines = []
+   for test in output['slowest_tests']:
+     suffix = ''
+     if test.get('marked_slow') is False:
+       suffix = ' *'
+     lines.append(
+         '%s %s%s' % (format_duration(test['duration']),
+                      test['name'], suffix))
+   # Slowest tests duration details.
+   lines.extend(['', 'Details:', ''])
+   for test in output['slowest_tests']:
+     lines.extend(_duration_results_text(test))
+   print("\n".join(lines))
def _create_predictable_filter(self):
if not self.build_config.predictable:
return None
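For reference, a self-contained sketch of the duration format produced by format_duration() above (minutes:seconds:milliseconds); the sample values are illustrative only:

import datetime

def format_duration(duration_in_seconds):
  # Same arithmetic as in _print_durations above: render seconds as MM:SS:mmm.
  duration = datetime.timedelta(seconds=duration_in_seconds)
  time = (datetime.datetime.min + duration).time()
  return time.strftime('%M:%S:') + '%03i' % int(time.microsecond / 1000)

print(format_duration(83.5))    # 01:23:500
print(format_duration(0.042))   # 00:00:042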