[testrunner] enable the progress indicator
Switching to test generators meant the progress indicator had to be removed, since the total number of tests isn't known beforehand. This CL reimplements the progress indicator using test-count estimations. For cctest and unittests the indicator is exact; for the other suites the estimate means the reported progress can finish above 100% in big test suites and sometimes below 100%.

R=machenbach@chromium.org
CC=sergiyb@chromium.org,yangguo@chromium.org

Bug: v8:8769
Change-Id: I40ca5b40f9b1223376d33707f0945900ea98cea3
Reviewed-on: https://chromium-review.googlesource.com/c/1460471
Commit-Queue: Tamer Tas <tmrts@chromium.org>
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#59538}
parent 9d863741fd
commit baeb4e324d
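For illustration only (not part of the diff below): a minimal sketch of why an estimated total lets the reported progress end above or below 100%. It reuses the integer formula that CompactProgressIndicator._print_progress adopts in this change; the helper name progress_percent is hypothetical.

def progress_percent(passed, failed, total_estimate):
  # Same arithmetic as the indicator: completed tests over the estimated total.
  if total_estimate:
    return (passed + failed) * 100 // total_estimate
  return 0

# An exact estimate ends at 100%; a low estimate overshoots, a high one undershoots.
assert progress_percent(950, 50, 1000) == 100
assert progress_percent(1100, 0, 1000) == 110
assert progress_percent(800, 0, 1000) == 80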
@@ -38,7 +38,10 @@ class VariantsGenerator(testsuite.VariantsGenerator):


 class TestLoader(testsuite.TestLoader):
-  pass
+  def _list_test_filenames(self):
+    for file in os.listdir(self.suite.root):
+      if file.endswith(".pyt"):
+        yield file[:-4]


 # TODO(tmrts): refactor the python template parsing then use the TestLoader.
@@ -68,11 +71,12 @@ class TestSuite(testsuite.TestSuite):
   def ListTests(self):
     result = []

-    # Find all .pyt files in this directory.
-    filenames = [f[:-4] for f in os.listdir(self.root) if f.endswith(".pyt")]
-    filenames.sort()
-    for f in filenames:
+    filenames = self._test_loader._list_test_filenames()
+    for f in sorted(filenames):
       self._ParsePythonTestTemplates(result, f)
+
+    # TODO: remove after converting to use a full TestLoader
+    self._test_loader.test_count_estimation = len(result)
     return result

   def _create_test(self, path, source, template_flags):
@@ -610,7 +610,9 @@ class BaseTestRunner(object):
     names = self._args_to_suite_names(args, options.test_root)
     test_config = self._create_test_config(options)
     variables = self._get_statusfile_variables(options)
-    slow_chain, fast_chain = [], []
+
+    # Head generator with no elements
+    test_chain = testsuite.TestGenerator(0, [], [])
     for name in names:
       if options.verbose:
         print '>>> Loading test suite: %s' % name
@@ -618,17 +620,10 @@ class BaseTestRunner(object):
           os.path.join(options.test_root, name), test_config)

       if self._is_testsuite_supported(suite, options):
-        slow_tests, fast_tests = suite.load_tests_from_disk(variables)
-        slow_chain.append(slow_tests)
-        fast_chain.append(fast_tests)
+        tests = suite.load_tests_from_disk(variables)
+        test_chain.merge(tests)

-    for tests in slow_chain:
-      for test in tests:
-        yield test
-
-    for tests in fast_chain:
-      for test in tests:
-        yield test
+    return test_chain

   def _is_testsuite_supported(self, suite, options):
     """A predicate that can be overridden to filter out unsupported TestSuite
@@ -762,13 +757,20 @@ class BaseTestRunner(object):

     return shard_run, shard_count

-  def _create_progress_indicators(self, options):
+  def _create_progress_indicators(self, test_count, options):
     procs = [PROGRESS_INDICATORS[options.progress]()]
     if options.json_test_results:
       procs.append(progress.JsonTestProgressIndicator(
         options.json_test_results,
         self.build_config.arch,
         self.mode_options.execution_mode))
+
+    for proc in procs:
+      try:
+        proc.set_test_count(test_count)
+      except AttributeError:
+        pass
+
     return procs

   def _create_result_tracker(self, options):
@@ -9,7 +9,18 @@ from testrunner.local import testsuite, statusfile


 class TestLoader(testsuite.TestLoader):
-  pass
+  def _list_test_filenames(self):
+    return ["fast", "slow"]
+
+  def list_tests(self):
+    self.test_count_estimation = 2
+    fast = self._create_test("fast", self.suite)
+    slow = self._create_test("slow", self.suite)
+
+    slow._statusfile_outcomes.append(statusfile.SLOW)
+    yield fast
+    yield slow


 class TestSuite(testsuite.TestSuite):
   def _test_loader_class(self):
@@ -18,12 +29,5 @@ class TestSuite(testsuite.TestSuite):
   def _test_class(self):
     return testsuite.TestCase

-  def ListTests(self):
-    fast = self._test_loader._create_test("fast", self)
-    slow = self._test_loader._create_test("slow", self)
-    slow._statusfile_outcomes.append(statusfile.SLOW)
-    yield fast
-    yield slow

 def GetSuite(*args, **kwargs):
   return TestSuite(*args, **kwargs)
@@ -28,6 +28,7 @@

 import fnmatch
 import imp
+import itertools
 import os
 from contextlib import contextmanager

@@ -88,6 +89,7 @@ class TestLoader(object):
     self.test_class = test_class
     self.test_config = test_config
     self.test_root = test_root
+    self.test_count_estimation = len(list(self._list_test_filenames()))

   def _list_test_filenames(self):
     """Implemented by the subclassed TestLoaders to list filenames.
@@ -199,6 +201,34 @@ class JSTestLoader(GenericTestLoader):
     return ".js"


+class TestGenerator(object):
+  def __init__(self, test_count_estimate, slow_tests, fast_tests):
+    self.test_count_estimate = test_count_estimate
+    self.slow_tests = slow_tests
+    self.fast_tests = fast_tests
+    self._rebuild_iterator()
+
+  def _rebuild_iterator(self):
+    self._iterator = itertools.chain(self.slow_tests, self.fast_tests)
+
+  def __iter__(self):
+    return self
+
+  def __next__(self):
+    return self.next()
+
+  def next(self):
+    return next(self._iterator)
+
+  def merge(self, test_generator):
+    self.test_count_estimate += test_generator.test_count_estimate
+    self.slow_tests = itertools.chain(
+        self.slow_tests, test_generator.slow_tests)
+    self.fast_tests = itertools.chain(
+        self.fast_tests, test_generator.fast_tests)
+    self._rebuild_iterator()
+
+
 @contextmanager
 def _load_testsuite_module(name, root):
   f = None
@@ -236,14 +266,22 @@ class TestSuite(object):
   def ListTests(self):
     return self._test_loader.list_tests()

+  def __initialize_test_count_estimation(self):
+    # Retrieves a single test to initialize the test generator.
+    next(iter(self.ListTests()))
+
+  def __calculate_test_count(self):
+    self.__initialize_test_count_estimation()
+    return self._test_loader.test_count_estimation
+
   def load_tests_from_disk(self, statusfile_variables):
     self.statusfile = statusfile.StatusFile(
         self.status_file(), statusfile_variables)

+    test_count = self.__calculate_test_count()
     slow_tests = (test for test in self.ListTests() if test.is_slow)
     fast_tests = (test for test in self.ListTests() if not test.is_slow)

-    return slow_tests, fast_tests
+    return TestGenerator(test_count, slow_tests, fast_tests)

   def get_variants_gen(self, variants):
     return self._variants_gen_class()(variants)
@@ -3,6 +3,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

+import itertools
 import os
 import sys
 import tempfile
@@ -13,7 +14,7 @@ TOOLS_PATH = os.path.dirname(os.path.dirname(os.path.dirname(
     os.path.abspath(__file__))))
 sys.path.append(TOOLS_PATH)

-from testrunner.local.testsuite import TestSuite
+from testrunner.local.testsuite import TestSuite, TestGenerator
 from testrunner.objects.testcase import TestCase
 from testrunner.test_config import TestConfig
@@ -47,21 +48,38 @@ class TestSuiteTest(unittest.TestCase):
     self.assertIsNone(self.suite.statusfile)

   def testLoadingTestsFromDisk(self):
-    slow_tests, fast_tests = self.suite.load_tests_from_disk(
+    tests = self.suite.load_tests_from_disk(
       statusfile_variables={})
     def is_generator(iterator):
       return iterator == iter(iterator)

-    self.assertTrue(is_generator(slow_tests))
-    self.assertTrue(is_generator(fast_tests))
+    self.assertTrue(is_generator(tests))
+    self.assertEquals(tests.test_count_estimate, 2)

-    slow_tests, fast_tests = list(slow_tests), list(fast_tests)
+    slow_tests, fast_tests = list(tests.slow_tests), list(tests.fast_tests)
     # Verify that the components of the TestSuite are loaded.
     self.assertTrue(len(slow_tests) == len(fast_tests) == 1)
     self.assertTrue(all(test.is_slow for test in slow_tests))
     self.assertFalse(any(test.is_slow for test in fast_tests))
     self.assertIsNotNone(self.suite.statusfile)

+  def testMergingTestGenerators(self):
+    tests = self.suite.load_tests_from_disk(
+      statusfile_variables={})
+    more_tests = self.suite.load_tests_from_disk(
+      statusfile_variables={})
+
+    # Merge the test generators
+    tests.merge(more_tests)
+    self.assertEquals(tests.test_count_estimate, 4)
+
+    # Check the tests are sorted by speed
+    test_speeds = []
+    for test in tests:
+      test_speeds.append(test.is_slow)
+
+    self.assertEquals(test_speeds, [True, True, False, False])


 if __name__ == '__main__':
   unittest.main()
@@ -134,7 +134,8 @@ class NumFuzzer(base_runner.BaseTestRunner):
     results = self._create_result_tracker(options)
     execproc = ExecutionProc(options.j)
     sigproc = self._create_signal_proc()
-    indicators = self._create_progress_indicators(options)
+    indicators = self._create_progress_indicators(
+      tests.test_count_estimate, options)
     procs = [
       loader,
       NameFilterProc(args) if args else None,
@@ -283,7 +283,8 @@ class StandardTestRunner(base_runner.BaseTestRunner):
       print '>>> Running with test processors'
     loader = LoadProc(tests)
     results = self._create_result_tracker(options)
-    indicators = self._create_progress_indicators(options)
+    indicators = self._create_progress_indicators(
+      tests.test_count_estimate, options)

     outproc_factory = None
     if self.build_config.predictable:
@@ -162,6 +162,9 @@ class CompactProgressIndicator(ProgressIndicator):
     self._passed = 0
     self._failed = 0

+  def set_test_count(self, test_count):
+    self._total = test_count
+
   def _on_result_for(self, test, result):
     # TODO(majeski): Support for dummy/grouped results
     if result.has_unexpected_output:
@@ -195,8 +198,13 @@ class CompactProgressIndicator(ProgressIndicator):
   def _print_progress(self, name):
     self._clear_line(self._last_status_length)
     elapsed = time.time() - self._start_time
+    if self._total:
+      progress = (self._passed + self._failed) * 100 // self._total
+    else:
+      progress = 0
     status = self._templates['status_line'] % {
       'passed': self._passed,
+      'progress': progress,
       'failed': self._failed,
       'test': name,
       'mins': int(elapsed) / 60,
@@ -221,6 +229,7 @@ class ColorProgressIndicator(CompactProgressIndicator):
   def __init__(self):
     templates = {
       'status_line': ("[%(mins)02i:%(secs)02i|"
+                      "\033[34m%%%(progress) 4d\033[0m|"
                       "\033[32m+%(passed) 4d\033[0m|"
                       "\033[31m-%(failed) 4d\033[0m]: %(test)s"),
       'stdout': "\033[1m%s\033[0m",
@@ -235,7 +244,7 @@ class ColorProgressIndicator(CompactProgressIndicator):
 class MonochromeProgressIndicator(CompactProgressIndicator):
   def __init__(self):
     templates = {
-      'status_line': ("[%(mins)02i:%(secs)02i|"
+      'status_line': ("[%(mins)02i:%(secs)02i|%%%(progress) 4d|"
                       "+%(passed) 4d|-%(failed) 4d]: %(test)s"),
       'stdout': '%s',
       'stderr': '%s',
|
@ -622,10 +622,11 @@ class SystemTest(unittest.TestCase):
|
||||
infra_staging=False,
|
||||
)
|
||||
if name == 'color':
|
||||
expected = ('\033[32m+ 1\033[0m|'
|
||||
expected = ('\033[34m% 28\033[0m|'
|
||||
'\033[32m+ 1\033[0m|'
|
||||
'\033[31m- 1\033[0m]: Done')
|
||||
else:
|
||||
expected = '+ 1|- 1]: Done'
|
||||
expected = '% 28|+ 1|- 1]: Done'
|
||||
self.assertIn(expected, result.stdout)
|
||||
self.assertIn('sweet/cherries', result.stdout)
|
||||
self.assertIn('sweet/bananas', result.stdout)
|
||||
|