#!/usr/bin/env python3
# Copyright 2014 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Performance runner for d8.

Call e.g. with tools/run-perf.py --arch ia32 some_suite.json

The suite json format is expected to be:
{
  "path": <relative path chunks to perf resources and main file>,
  "owners": [<list of email addresses of benchmark owners (required)>],
  "name": <optional suite name, file name is default>,
  "archs": [<architecture name for which this suite is run>, ...],
  "binary": <name of binary to run, default "d8">,
  "flags": [<flag to d8>, ...],
  "test_flags": [<flag to the test file>, ...],
  "run_count": <how often will this suite run (optional)>,
  "run_count_XXX": <how often will this suite run for arch XXX (optional)>,
  "timeout": <how long the test is allowed to run>,
  "timeout_XXX": <how long the test is allowed to run for arch XXX>,
  "retry_count": <how many times to retry failures (in addition to first try)>,
  "retry_count_XXX": <how many times to retry failures for arch XXX>
  "resources": [<js file to be moved to android device>, ...]
  "variants": [
    {
      "name": <name of the variant>,
      "flags": [<flag to the test file>, ...],
      <other suite properties>
    }, ...
  ]
  "main": <main js perf runner file>,
  "results_regexp": <optional regexp>,
  "results_processor": <optional python results processor script>,
  "units": <the unit specification for the performance dashboard>,
  "process_size": <flag - collect maximum memory used by the process>,
  "tests": [
    {
      "name": <name of the trace>,
      "results_regexp": <optional more specific regexp>,
      "results_processor": <optional python results processor script>,
      "units": <the unit specification for the performance dashboard>,
      "process_size": <flag - collect maximum memory used by the process>,
    }, ...
  ]
}

The tests field can also nest other suites in arbitrary depth. A suite
with a "main" file is a leaf suite that can contain one more level of
tests.

A suite's results_regexp is expected to have one string place holder
"%s" for the trace name. A trace's results_regexp overwrites suite
defaults.

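For example, a suite results_regexp of "^%s: (.+)$" combined with a trace
named "Richards" is expanded to "^Richards: (.+)$" before matching.
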
A suite's results_processor may point to an optional python script. If
specified, it is called after running the tests (with a path relative to the
suite level's path). It is expected to read the measurement's output text
on stdin and print the processed output to stdout.

The results_regexp will be applied to the processed output.

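A minimal sketch of such a processor (illustrative only; the
"HeapSize: <bytes>" line format is hypothetical and not produced by any
real suite) that rewrites byte values to KB could be:

  #!/usr/bin/env python3
  # Read the raw benchmark output on stdin and write the processed output
  # to stdout, converting hypothetical "HeapSize: <bytes>" lines to KB.
  import sys
  for line in sys.stdin:
    line = line.rstrip()
    if line.startswith('HeapSize: '):
      line = 'HeapSize: ' + str(int(line.split(': ')[1]) / 1024.0)
    print(line)
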
A suite without "tests" is considered a performance test itself.

Variants can be used to run different configurations at the current level. This
essentially copies the sub suites at the current level and can be used to avoid
duplicating a lot of nested "tests" where for instance only the "flags" change.

Full example (suite with one runner):
{
  "path": ["."],
  "owners": ["username@chromium.org"],
  "flags": ["--expose-gc"],
  "test_flags": ["5"],
  "archs": ["ia32", "x64"],
  "run_count": 5,
  "run_count_ia32": 3,
  "main": "run.js",
  "results_regexp": "^%s: (.+)$",
  "units": "score",
  "tests": [
    {"name": "Richards"},
    {"name": "DeltaBlue"},
    {"name": "NavierStokes",
     "results_regexp": "^NavierStokes: (.+)$"}
  ]
}

Full example (suite with several runners):
{
  "path": ["."],
  "owners": ["username@chromium.org", "otherowner@google.com"],
  "archs": ["ia32", "x64"],
  "flags": ["--expose-gc"],
  "run_count": 5,
  "units": "score",
  "variants": [
    {"name": "default", "flags": []},
    {"name": "future", "flags": ["--future"]},
    {"name": "noopt", "flags": ["--noopt"]}
  ],
  "tests": [
    {"name": "Richards",
     "path": ["richards"],
     "main": "run.js",
     "run_count": 3,
     "results_regexp": "^Richards: (.+)$"},
    {"name": "NavierStokes",
     "path": ["navier_stokes"],
     "main": "run.js",
     "results_regexp": "^NavierStokes: (.+)$"}
  ]
}

Path pieces are concatenated. D8 is always run with the suite's path as cwd.

The test flags are passed to the js test file after '--'.
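
For the single-runner example above this results in an invocation roughly
like (illustrative; the d8 binary is taken from the build output directory):

  d8 --expose-gc run.js -- 5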
"""

from collections import OrderedDict
from math import sqrt
from statistics import mean, stdev
import copy
import json
import logging
import math
import argparse
import os
import re
import subprocess
import sys
import time
import traceback

from testrunner.local import android
from testrunner.local import command
from testrunner.local import utils
from testrunner.objects.output import Output, NULL_OUTPUT


SUPPORTED_ARCHS = ['arm',
                   'ia32',
                   'mips',
                   'mipsel',
                   'x64',
                   'arm64',
                   'riscv64']

GENERIC_RESULTS_RE = re.compile(r'^RESULT ([^:]+): ([^=]+)= ([^ ]+) ([^ ]*)$')
RESULT_STDDEV_RE = re.compile(r'^\{([^\}]+)\}$')
RESULT_LIST_RE = re.compile(r'^\[([^\]]+)\]$')
TOOLS_BASE = os.path.abspath(os.path.dirname(__file__))
INFRA_FAILURE_RETCODE = 87
MIN_RUNS_FOR_CONFIDENCE = 10


def GeometricMean(values):
  """Returns the geometric mean of a list of values.

  The mean is calculated using log to avoid overflow.

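  For example, the geometric mean of [1, 100] is
  exp((log(1) + log(100)) / 2) = 10.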
  """
  values = list(map(float, values))
  return math.exp(sum(map(math.log, values)) / len(values))


class ResultTracker(object):
  """Class that tracks trace/runnable results and produces script output.

  The output is structured like this:
  {
    "traces": [
      {
        "graphs": ["path", "to", "trace", "config"],
        "units": <string describing units, e.g. "ms" or "KB">,
        "results": [<list of values measured over several runs>],
        "stddev": <stddev of the value if measured by script or ''>
      },
      ...
    ],
    "runnables": [
      {
        "graphs": ["path", "to", "runnable", "config"],
        "durations": [<list of durations of each runnable run in seconds>],
        "timeout": <timeout configured for runnable in seconds>,
      },
      ...
    ],
    "errors": [<list of strings describing errors>],
  }
  """
  def __init__(self):
    self.traces = {}
    self.errors = []
    self.runnables = {}

  def AddTraceResult(self, trace, result, stddev):
    if trace.name not in self.traces:
      self.traces[trace.name] = {
          'graphs': trace.graphs,
          'units': trace.units,
          'results': [result],
          'stddev': stddev or '',
      }
    else:
      existing_entry = self.traces[trace.name]
      assert trace.graphs == existing_entry['graphs']
      assert trace.units == existing_entry['units']
      if stddev:
        existing_entry['stddev'] = stddev
      existing_entry['results'].append(result)

  def TraceHasStdDev(self, trace):
    return trace.name in self.traces and self.traces[trace.name]['stddev'] != ''

  def AddError(self, error):
    self.errors.append(error)

  def AddRunnableDuration(self, runnable, duration):
    """Records a duration of a specific run of the runnable."""
    if runnable.name not in self.runnables:
      self.runnables[runnable.name] = {
          'graphs': runnable.graphs,
          'durations': [duration],
          'timeout': runnable.timeout,
      }
    else:
      existing_entry = self.runnables[runnable.name]
      assert runnable.timeout == existing_entry['timeout']
      assert runnable.graphs == existing_entry['graphs']
      existing_entry['durations'].append(duration)

  def ToDict(self):
    return {
        'traces': list(self.traces.values()),
        'errors': self.errors,
        'runnables': list(self.runnables.values()),
    }

  def WriteToFile(self, file_name):
    with open(file_name, 'w') as f:
      f.write(json.dumps(self.ToDict()))

  def HasEnoughRuns(self, graph_config, confidence_level):
    """Checks if the mean of the results for a given trace config is within
    0.1% of the true value with the specified confidence level.

    This assumes a Gaussian distribution of the noise and is based on
    https://en.wikipedia.org/wiki/68%E2%80%9395%E2%80%9399.7_rule.

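    For example (illustrative numbers): with confidence_level=2, a mean of
    100.0 and a standard error of the mean of 0.04, the check performed is
    2 * 0.04 = 0.08 < 100.0 / 1000 = 0.1, i.e. the trace has enough runs
    (provided at least MIN_RUNS_FOR_CONFIDENCE results were collected).
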
    Args:
      graph_config: An instance of GraphConfig.
      confidence_level: Number of standard deviations from the mean that all
          values must lie within. Typical values are 1, 2 and 3 and correspond
          to 68%, 95% and 99.7% probability that the measured value is within
          0.1% of the true value.

    Returns:
      True if the specified confidence level has been achieved.
    """
    if not isinstance(graph_config, LeafTraceConfig):
      return all(self.HasEnoughRuns(child, confidence_level)
                 for child in graph_config.children)

    trace = self.traces.get(graph_config.name, {})
    results = trace.get('results', [])
    logging.debug('HasEnoughRuns for %s', graph_config.name)

    if len(results) < MIN_RUNS_FOR_CONFIDENCE:
      logging.debug('  Ran %d times, need at least %d',
                    len(results), MIN_RUNS_FOR_CONFIDENCE)
      return False

    logging.debug('  Results: %d entries', len(results))
    avg = mean(results)
    avg_stderr = stdev(results) / sqrt(len(results))
    logging.debug('  Mean: %.2f, mean_stderr: %.2f', avg, avg_stderr)
    logging.info('>>> Confidence level is %.2f',
                 avg / max(1000.0 * avg_stderr, .1))
    return confidence_level * avg_stderr < avg / 1000.0

  def __str__(self):  # pragma: no cover
    return json.dumps(self.ToDict(), indent=2, separators=(',', ': '))


def RunResultsProcessor(results_processor, output, count):
  # Dummy pass through for null-runs.
  if output.stdout is None:
    return output

  # We assume the results processor is relative to the suite.
  assert os.path.exists(results_processor)
  p = subprocess.Popen(
      [sys.executable, results_processor],
      stdin=subprocess.PIPE,
      stdout=subprocess.PIPE,
      stderr=subprocess.PIPE,
  )
  new_output = copy.copy(output)
  new_output.stdout = p.communicate(
      input=output.stdout.encode('utf-8'))[0].decode('utf-8')
  logging.info('>>> Processed stdout (#%d):\n%s', count, output.stdout)
  return new_output


class Node(object):
  """Represents a node in the suite tree structure."""
  def __init__(self, *args):
    self._children = []

  def AppendChild(self, child):
    self._children.append(child)

  @property
  def children(self):
    return self._children

  def __iter__(self):
    yield self
    for child in self.children:
      yield from iter(child)


class DefaultSentinel(Node):
  """Fake parent node with all default values."""
  def __init__(self, binary = 'd8'):
    super(DefaultSentinel, self).__init__()
    self.binary = binary
    self.run_count = 10
    self.timeout = 60
    self.retry_count = 4
    self.path = []
    self.graphs = []
    self.flags = []
    self.test_flags = []
    self.process_size = False
    self.resources = []
    self.results_processor = None
    self.results_regexp = None
    self.stddev_regexp = None
    self.units = 'score'
    self.total = False
    self.owners = []
    self.main = None

  def __str__(self):
    return type(self).__name__


class GraphConfig(Node):
  """Represents a suite definition.

  Can either be a leaf or an inner node that provides default values.
  """
  def __init__(self, suite, parent, arch):
    super(GraphConfig, self).__init__()
    self._suite = suite

    assert isinstance(suite.get('path', []), list)
    assert isinstance(suite.get('owners', []), list)
    assert isinstance(suite['name'], str)
    assert isinstance(suite.get('flags', []), list)
    assert isinstance(suite.get('test_flags', []), list)
    assert isinstance(suite.get('resources', []), list)

    # Only used by child classes
    self.main = suite.get('main', parent.main)
    # Keep parent for easier debugging
    self.parent = parent

    # Accumulated values.
    self.path = parent.path[:] + suite.get('path', [])
    self.graphs = parent.graphs[:] + [suite['name']]
    self.flags = parent.flags[:] + suite.get('flags', [])
    self.test_flags = parent.test_flags[:] + suite.get('test_flags', [])
    self.owners = parent.owners[:] + suite.get('owners', [])

    # Values independent of parent node.
    self.resources = suite.get('resources', [])

    # Discrete values (with parent defaults).
    self.binary = suite.get('binary', parent.binary)
    self.run_count = suite.get('run_count', parent.run_count)
    self.run_count = suite.get('run_count_%s' % arch, self.run_count)
    self.retry_count = suite.get('retry_count', parent.retry_count)
    self.retry_count = suite.get('retry_count_%s' % arch, self.retry_count)
    self.timeout = suite.get('timeout', parent.timeout)
    self.timeout = suite.get('timeout_%s' % arch, self.timeout)
    self.units = suite.get('units', parent.units)
    self.total = suite.get('total', parent.total)
    self.results_processor = suite.get(
        'results_processor', parent.results_processor)
    self.process_size = suite.get('process_size', parent.process_size)

    # A regular expression for results. If the parent graph provides a
    # regexp and the current suite has none, a string place holder for the
    # suite name is expected.
    # TODO(machenbach): Currently that makes only sense for the leaf level.
    # Multiple place holders for multiple levels are not supported.
    self.results_regexp = suite.get('results_regexp', None)
    if self.results_regexp is None and parent.results_regexp:
      try:
        self.results_regexp = parent.results_regexp % re.escape(suite['name'])
      except TypeError as e:
        raise TypeError(
            "Got error while preparing results_regexp: "
            "parent.results_regexp='%s' suite.name='%s' suite='%s', error: %s" %
            (parent.results_regexp, suite['name'], str(suite)[:100], e))

    # A similar regular expression for the standard deviation (optional).
    if parent.stddev_regexp:
      stddev_default = parent.stddev_regexp % re.escape(suite['name'])
    else:
      stddev_default = None
    self.stddev_regexp = suite.get('stddev_regexp', stddev_default)

  @property
  def name(self):
    return '/'.join(self.graphs)

  def __str__(self):
    return "%s(%s)" % (type(self).__name__, self.name)


class VariantConfig(GraphConfig):
  """Represents an intermediate node that has children that are all
  variants of each other"""

  def __init__(self, suite, parent, arch):
    super(VariantConfig, self).__init__(suite, parent, arch)
    assert "variants" in suite
    for variant in suite.get('variants'):
      assert "variants" not in variant, \
          "Cannot directly nest variants:" + str(variant)[:100]
      assert "name" in variant, \
          "Variant must have 'name' property: " + str(variant)[:100]
      assert len(variant) >= 2, \
          "Variant must define other properties than 'name': " + str(variant)


class LeafTraceConfig(GraphConfig):
  """Represents a leaf in the suite tree structure."""
  def __init__(self, suite, parent, arch):
    super(LeafTraceConfig, self).__init__(suite, parent, arch)
    assert self.results_regexp
    if '%s' in self.results_regexp:
      raise Exception(
          "results_regexp at the wrong level. "
          "Regexp should not contain '%%s': results_regexp='%s' name=%s" %
          (self.results_regexp, self.name))

  def AppendChild(self, node):
    raise Exception("%s cannot have child configs." % type(self).__name__)

  def ConsumeOutput(self, output, result_tracker):
    """Extracts trace results from the output.

    Args:
      output: Output object from the test run.
      result_tracker: Result tracker to be updated.

    Returns:
      The raw extracted result value or None if an error occurred.
    """
    result = None
    stddev = None

    try:
      result = float(
          re.search(self.results_regexp, output.stdout, re.M).group(1))
    except ValueError:
      result_tracker.AddError(
          'Regexp "%s" returned a non-numeric for test %s.' %
          (self.results_regexp, self.name))
    except:
      result_tracker.AddError(
          'Regexp "%s" did not match for test %s.' %
          (self.results_regexp, self.name))

    try:
      if self.stddev_regexp:
        if result_tracker.TraceHasStdDev(self):
          result_tracker.AddError(
              'Test %s should only run once since a stddev is provided by the '
              'test.' % self.name)
        stddev = re.search(self.stddev_regexp, output.stdout, re.M).group(1)
    except:
      result_tracker.AddError(
          'Regexp "%s" did not match for test %s.' %
          (self.stddev_regexp, self.name))

    if result:
      result_tracker.AddTraceResult(self, result, stddev)
    return result


class TraceConfig(GraphConfig):
  """
  A TraceConfig contains either TraceConfigs or LeafTraceConfigs
  """

  def ConsumeOutput(self, output, result_tracker):
    """Processes test run output and updates result tracker.

    Args:
      output: Output object from the test run.
      result_tracker: ResultTracker object to be updated.
    """
    results_for_total = []
    for trace in self.children:
      result = trace.ConsumeOutput(output, result_tracker)
      if result:
        results_for_total.append(result)

    if self.total:
      # Produce total metric only when all traces have produced results.
      if len(self.children) != len(results_for_total):
        result_tracker.AddError(
            'Not all traces have produced results. Can not compute total for '
            '%s.' % self.name)
        return

      # Calculate total as the geometric mean for results from all traces.
      total_trace = LeafTraceConfig(
          {
              'name': 'Total',
              'units': self.children[0].units
          }, self, self.arch)
      result_tracker.AddTraceResult(total_trace,
                                    GeometricMean(results_for_total), '')

  def AppendChild(self, node):
    if node.__class__ not in (TraceConfig, LeafTraceConfig):
      raise Exception(
          "%s only allows TraceConfig and LeafTraceConfig as child configs." %
          type(self).__name__)
    super(TraceConfig, self).AppendChild(node)


class RunnableConfig(TraceConfig):
  """Represents a runnable suite definition (i.e. has a main file).
  """
  def __init__(self, suite, parent, arch):
    super(RunnableConfig, self).__init__(suite, parent, arch)
    self.arch = arch
    assert self.main, "No main js file provided"
    if not self.owners:
      logging.error("No owners provided for %s" % self.name)

  def ChangeCWD(self, suite_path):
    """Changes the cwd to the path defined in the current graph.

    The tests are supposed to be relative to the suite configuration.
    """
    suite_dir = os.path.abspath(os.path.dirname(suite_path))
    bench_dir = os.path.normpath(os.path.join(*self.path))
    cwd = os.path.join(suite_dir, bench_dir)
    logging.debug('Changing CWD to: %s' % cwd)
    os.chdir(cwd)

  def GetCommandFlags(self, extra_flags=None):
    suffix = ['--'] + self.test_flags if self.test_flags else []
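    # With the values from the module docstring example this evaluates to
    # (illustrative): ['--expose-gc'] + [] + ['run.js'] + ['--', '5'].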
    return self.flags + (extra_flags or []) + [self.main] + suffix

  def GetCommand(self, cmd_prefix, shell_dir, extra_flags=None):
    # TODO(machenbach): This requires +.exe if run on windows.
    extra_flags = extra_flags or []
    if self.binary != 'd8' and '--prof' in extra_flags:
      logging.info('Profiler supported only on a benchmark run with d8')

    if self.process_size:
      cmd_prefix = ['/usr/bin/time', '--format=MaxMemory: %MKB'] + cmd_prefix
    if self.binary.endswith('.py'):
      # Copy cmd_prefix instead of update (+=).
      cmd_prefix = cmd_prefix + [sys.executable]

    return command.Command(
        cmd_prefix=cmd_prefix,
        shell=os.path.join(shell_dir, self.binary),
        args=self.GetCommandFlags(extra_flags=extra_flags),
        timeout=self.timeout or 60,
        handle_sigterm=True)

  def ProcessOutput(self, output, result_tracker, count):
    """Processes test run output and updates result tracker.

    Args:
      output: Output object from the test run.
      result_tracker: ResultTracker object to be updated.
      count: Index of the test run (used for better logging).
    """
    if self.results_processor:
      output = RunResultsProcessor(self.results_processor, output, count)

    self.ConsumeOutput(output, result_tracker)


class RunnableLeafTraceConfig(LeafTraceConfig, RunnableConfig):
  """Represents a runnable suite definition that is a leaf."""
  def __init__(self, suite, parent, arch):
    super(RunnableLeafTraceConfig, self).__init__(suite, parent, arch)
    if not self.owners:
      logging.error("No owners provided for %s" % self.name)

  def ProcessOutput(self, output, result_tracker, count):
    result_tracker.AddRunnableDuration(self, output.duration)
    self.ConsumeOutput(output, result_tracker)


def MakeGraphConfig(suite, parent, arch):
  cls = GetGraphConfigClass(suite, parent)
  return cls(suite, parent, arch)


def GetGraphConfigClass(suite, parent):
  """Factory method for making graph configuration objects."""
  if isinstance(parent, TraceConfig):
    if suite.get("tests"):
      return TraceConfig
    return LeafTraceConfig
  elif suite.get('main') is not None:
    # A main file makes this graph runnable. Empty strings are accepted.
    if suite.get('tests'):
      # This graph has subgraphs (traces).
      return RunnableConfig
    else:
      # This graph has no subgraphs, it's a leaf.
      return RunnableLeafTraceConfig
  elif suite.get('tests'):
    # This is neither a leaf nor a runnable.
    return GraphConfig
  else:  # pragma: no cover
    raise Exception('Invalid suite configuration.' + str(suite)[:200])


def BuildGraphConfigs(suite, parent, arch):
  """Builds a tree structure of graph objects that corresponds to the suite
  configuration.

  - GraphConfig:
    - Can have arbitrary children
    - can be used to store properties used by its children

  - VariantConfig
    - Has variants of the same (any) type as children

  For all other configs see the overridden AppendChild methods.

  Example 1:
  - GraphConfig
    - RunnableLeafTraceConfig (no children)
    - ...

  Example 2:
  - RunnableConfig
    - LeafTraceConfig (no children)
    - ...

  Example 3:
  - RunnableConfig
    - LeafTraceConfig (optional)
    - TraceConfig
      - LeafTraceConfig (no children)
      - ...
    - TraceConfig (optional)
      - ...
    - ...

  Example 4:
  - VariantConfig
    - RunnableConfig
      - ...
    - RunnableConfig
      - ...
  """
  # TODO(machenbach): Implement notion of cpu type?
  if arch not in suite.get('archs', SUPPORTED_ARCHS):
    return None

  variants = suite.get('variants', [])
  if len(variants) == 0:
    graph = MakeGraphConfig(suite, parent, arch)
    for subsuite in suite.get('tests', []):
      BuildGraphConfigs(subsuite, graph, arch)
  else:
    graph = VariantConfig(suite, parent, arch)
    variant_class = GetGraphConfigClass(suite, parent)
    for variant_suite in variants:
      # Propagate down the results_regexp if it's not overridden in the
      # variant.
      variant_suite.setdefault('results_regexp',
                               suite.get('results_regexp', None))
      variant_graph = variant_class(variant_suite, graph, arch)
      graph.AppendChild(variant_graph)
      for subsuite in suite.get('tests', []):
        BuildGraphConfigs(subsuite, variant_graph, arch)
  parent.AppendChild(graph)
  return graph


def FlattenRunnables(node, node_cb):
  """Generator that traverses the tree structure and iterates over all
  runnables.
  """
  node_cb(node)
  if isinstance(node, RunnableConfig):
    yield node
  elif isinstance(node, Node):
    for child in node._children:
      for result in FlattenRunnables(child, node_cb):
        yield result
  else:  # pragma: no cover
    raise Exception('Invalid suite configuration.')


def find_build_directory(base_path, arch):
  """Returns the location of d8 or node in the build output directory.

  This supports a seamless transition between legacy build location
  (out/Release) and new build location (out/build).
  """
  def is_build(path):
    # We support d8 or node as executables. We don't support testing on
    # Windows.
    return (os.path.isfile(os.path.join(path, 'd8')) or
            os.path.isfile(os.path.join(path, 'node')))
  possible_paths = [
    # Location the developer wrapper scripts are using.
    '%s.release' % arch,
    # Current build location on bots.
    'build',
    # Legacy build location on bots.
    'Release',
  ]
  possible_paths = [os.path.join(base_path, p) for p in possible_paths]
  actual_paths = list(filter(is_build, possible_paths))
  assert actual_paths, 'No build directory found.'
  assert len(
      actual_paths
  ) == 1, 'Found ambiguous build directories, use --binary-override-path.'
  return actual_paths[0]


class Platform(object):
  def __init__(self, args):
    self.shell_dir = args.shell_dir
    self.shell_dir_secondary = args.shell_dir_secondary
    self.is_dry_run = args.dry_run
    self.extra_flags = args.extra_flags.split()
    self.args = args

  @staticmethod
  def ReadBuildConfig(args):
    config_path = os.path.join(args.shell_dir, 'v8_build_config.json')
    if not os.path.isfile(config_path):
      return {}
    with open(config_path) as f:
      return json.load(f)

  @staticmethod
  def GetPlatform(args):
    if Platform.ReadBuildConfig(args).get('is_android', False):
      return AndroidPlatform(args)
    else:
      return DesktopPlatform(args)

  def _Run(self, runnable, count, secondary=False):
    raise NotImplementedError()  # pragma: no cover

  def _LoggedRun(self, runnable, count, secondary=False):
    suffix = ' - secondary' if secondary else ''
    title = '>>> %%s (#%d)%s:' % ((count + 1), suffix)
    try:
      output = self._Run(runnable, count, secondary)
    except OSError:
      logging.exception(title % 'OSError')
      raise
    if output.stdout:
      logging.info(title % 'Stdout' + '\n%s', output.stdout)
    if output.stderr:  # pragma: no cover
      # Print stderr for debugging.
      logging.info(title % 'Stderr' + '\n%s', output.stderr)
    if output.timed_out:
      logging.warning('>>> Test timed out after %ss.', runnable.timeout)
    if output.exit_code != 0:
      logging.warning('>>> Test crashed with exit code %d.', output.exit_code)
    return output

  def Run(self, runnable, count, secondary):
    """Execute the benchmark's main file.

    Args:
      runnable: A Runnable benchmark instance.
      count: The number of this (repeated) run.
      secondary: True if secondary run should be executed.

    Returns:
      A tuple with the two benchmark outputs. The latter will be NULL_OUTPUT if
      secondary is False.
    """
    output = self._LoggedRun(runnable, count, secondary=False)
    if secondary:
      return output, self._LoggedRun(runnable, count, secondary=True)
    else:
      return output, NULL_OUTPUT


class DesktopPlatform(Platform):
  def __init__(self, args):
    super(DesktopPlatform, self).__init__(args)
    self.command_prefix = []

    # Setup command class to OS specific version.
    command.setup(utils.GuessOS(), args.device)

    if args.prioritize or args.affinitize != None:
      self.command_prefix = ['schedtool']
      if args.prioritize:
        self.command_prefix += ['-n', '-20']
      if args.affinitize != None:
        # schedtool expects a bit pattern when setting affinity, where each
        # bit set to '1' corresponds to a core where the process may run on.
        # First bit corresponds to CPU 0. Since the 'affinitize' parameter is
        # a core number, we need to map to said bit pattern.
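        # For example (illustrative): --affinitize=3 yields core = 1 << 3 = 8,
        # so schedtool is invoked with '-a 0x8'.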
        cpu = int(args.affinitize)
        core = 1 << cpu
        self.command_prefix += ['-a', ('0x%x' % core)]
      self.command_prefix += ['-e']

  def PreExecution(self):
    pass

  def PostExecution(self):
    pass

  def PreTests(self, node, path):
    if isinstance(node, RunnableConfig):
      node.ChangeCWD(path)

  def _Run(self, runnable, count, secondary=False):
    shell_dir = self.shell_dir_secondary if secondary else self.shell_dir
    cmd = runnable.GetCommand(self.command_prefix, shell_dir, self.extra_flags)
    logging.debug('Running command: %s' % cmd)
    output = Output() if self.is_dry_run else cmd.execute()

    if output.IsSuccess() and '--prof' in self.extra_flags:
      os_prefix = {'linux': 'linux', 'macos': 'mac'}.get(utils.GuessOS())
      if os_prefix:
        if not self.is_dry_run:
          tick_tools = os.path.join(TOOLS_BASE, '%s-tick-processor' % os_prefix)
          subprocess.check_call(tick_tools + ' --only-summary', shell=True)
      else:  # pragma: no cover
        logging.warning(
            'Profiler option currently supported on Linux and Mac OS.')

    # /usr/bin/time outputs to stderr
    if runnable.process_size:
      output.stdout += output.stderr
    return output


class AndroidPlatform(Platform):  # pragma: no cover

  def __init__(self, args):
    super(AndroidPlatform, self).__init__(args)
    self.driver = android.Driver.instance(args.device)

  def PreExecution(self):
    self.driver.set_high_perf_mode()

  def PostExecution(self):
    self.driver.set_default_perf_mode()
    self.driver.tear_down()

  def PreTests(self, node, path):
    if isinstance(node, RunnableConfig):
      node.ChangeCWD(path)
    suite_dir = os.path.abspath(os.path.dirname(path))
    if node.path:
      bench_rel = os.path.normpath(os.path.join(*node.path))
      bench_abs = os.path.join(suite_dir, bench_rel)
    else:
      bench_rel = '.'
      bench_abs = suite_dir

    self.driver.push_executable(self.shell_dir, 'bin', node.binary)
    if self.shell_dir_secondary:
      self.driver.push_executable(
          self.shell_dir_secondary, 'bin_secondary', node.binary)

    if isinstance(node, RunnableConfig):
      self.driver.push_file(bench_abs, node.main, bench_rel)
      for resource in node.resources:
        self.driver.push_file(bench_abs, resource, bench_rel)

  def _Run(self, runnable, count, secondary=False):
    target_dir = 'bin_secondary' if secondary else 'bin'
    self.driver.drop_ram_caches()

    # Relative path to benchmark directory.
    if runnable.path:
      bench_rel = os.path.normpath(os.path.join(*runnable.path))
    else:
      bench_rel = '.'

    logcat_file = None
    if self.args.dump_logcats_to:
      runnable_name = '-'.join(runnable.graphs)
      logcat_file = os.path.join(
          self.args.dump_logcats_to, 'logcat-%s-#%d%s.log' % (
              runnable_name, count + 1, '-secondary' if secondary else ''))
      logging.debug('Dumping logcat into %s', logcat_file)

    output = Output()
    start = time.time()
    try:
      if not self.is_dry_run:
        output.stdout = self.driver.run(
            target_dir=target_dir,
            binary=runnable.binary,
            args=runnable.GetCommandFlags(self.extra_flags),
            rel_path=bench_rel,
            timeout=runnable.timeout,
            logcat_file=logcat_file,
        )
    except android.CommandFailedException as e:
      output.stdout = e.output
      output.exit_code = e.status
    except android.TimeoutException as e:
      output.stdout = e.output
      output.timed_out = True
    if runnable.process_size:
      output.stdout += 'MaxMemory: Unsupported'
    output.duration = time.time() - start
    return output


class CustomMachineConfiguration:
  def __init__(self, disable_aslr = False, governor = None):
    self.aslr_backup = None
    self.governor_backup = None
    self.disable_aslr = disable_aslr
    self.governor = governor

  def __enter__(self):
    if self.disable_aslr:
      self.aslr_backup = CustomMachineConfiguration.GetASLR()
      CustomMachineConfiguration.SetASLR(0)
    if self.governor != None:
      self.governor_backup = CustomMachineConfiguration.GetCPUGovernor()
      CustomMachineConfiguration.SetCPUGovernor(self.governor)
    return self

  def __exit__(self, type, value, traceback):
    if self.aslr_backup != None:
      CustomMachineConfiguration.SetASLR(self.aslr_backup)
    if self.governor_backup != None:
      CustomMachineConfiguration.SetCPUGovernor(self.governor_backup)

  @staticmethod
  def GetASLR():
    try:
      with open('/proc/sys/kernel/randomize_va_space', 'r') as f:
        return int(f.readline().strip())
    except Exception:
      logging.exception('Failed to get current ASLR settings.')
      raise

  @staticmethod
  def SetASLR(value):
    try:
      with open('/proc/sys/kernel/randomize_va_space', 'w') as f:
        f.write(str(value))
    except Exception:
      logging.exception(
          'Failed to update ASLR to %s. Are we running under sudo?', value)
      raise

    new_value = CustomMachineConfiguration.GetASLR()
    if value != new_value:
      raise Exception('Present value is %s' % new_value)

  @staticmethod
  def GetCPUCoresRange():
    try:
      with open('/sys/devices/system/cpu/present', 'r') as f:
        indexes = f.readline()
        r = list(map(int, indexes.split('-')))
        if len(r) == 1:
          return list(range(r[0], r[0] + 1))
        return list(range(r[0], r[1] + 1))
    except Exception:
      logging.exception('Failed to retrieve number of CPUs.')
      raise

  @staticmethod
  def GetCPUPathForId(cpu_index):
    ret = '/sys/devices/system/cpu/cpu'
    ret += str(cpu_index)
    ret += '/cpufreq/scaling_governor'
    return ret

  @staticmethod
  def GetCPUGovernor():
    try:
      cpu_indices = CustomMachineConfiguration.GetCPUCoresRange()
      ret = None
      for cpu_index in cpu_indices:
        cpu_device = CustomMachineConfiguration.GetCPUPathForId(cpu_index)
        with open(cpu_device, 'r') as f:
          # We assume the governors of all CPUs are set to the same value
          val = f.readline().strip()
          if ret == None:
            ret = val
          elif ret != val:
            raise Exception('CPU cores have differing governor settings')
      return ret
    except Exception:
      logging.exception('Failed to get the current CPU governor. Is the CPU '
                        'governor disabled? Check BIOS.')
      raise

  @staticmethod
  def SetCPUGovernor(value):
    try:
      cpu_indices = CustomMachineConfiguration.GetCPUCoresRange()
      for cpu_index in cpu_indices:
        cpu_device = CustomMachineConfiguration.GetCPUPathForId(cpu_index)
        with open(cpu_device, 'w') as f:
          f.write(value)

    except Exception:
      logging.exception('Failed to change CPU governor to %s. Are we '
                        'running under sudo?', value)
      raise

    cur_value = CustomMachineConfiguration.GetCPUGovernor()
    if cur_value != value:
      raise Exception('Could not set CPU governor. Present value is %s'
                      % cur_value )


class MaxTotalDurationReachedError(Exception):
  """Exception used to stop running tests when max total duration is reached."""
  pass


def Main(argv):
  parser = argparse.ArgumentParser()
  parser.add_argument('--arch',
                      help='The architecture to run tests for. Pass "auto" '
                           'to auto-detect.', default='x64',
                      choices=SUPPORTED_ARCHS + ['auto'])
  parser.add_argument('--buildbot',
                      help='Deprecated',
                      default=False, action='store_true')
  parser.add_argument('-d', '--device',
                      help='The device ID to run Android tests on. If not '
                           'given it will be autodetected.')
  parser.add_argument('--extra-flags',
                      help='Additional flags to pass to the test executable',
                      default='')
  parser.add_argument('--json-test-results',
                      help='Path to a file for storing json results.')
  parser.add_argument('--json-test-results-secondary',
                      help='Path to a file for storing json results from run '
                           'without patch or for reference build run.')
  parser.add_argument('--outdir', help='Base directory with compile output',
                      default='out')
  parser.add_argument('--outdir-secondary',
                      help='Base directory with compile output without patch '
                           'or for reference build')
  parser.add_argument('--binary-override-path',
                      help='JavaScript engine binary. By default, d8 under '
                           'architecture-specific build dir. '
                           'Not supported in conjunction with outdir-secondary.')
  parser.add_argument('--prioritize',
                      help='Raise the priority to nice -20 for the '
                           'benchmarking process. Requires Linux, schedtool, and '
                           'sudo privileges.', default=False, action='store_true')
  parser.add_argument('--affinitize',
                      help='Run benchmarking process on the specified core. '
                           'For example: --affinitize=0 will run the benchmark '
                           'process on core 0. --affinitize=3 will run the '
                           'benchmark process on core 3. Requires Linux, '
                           'schedtool, and sudo privileges.', default=None)
  parser.add_argument('--noaslr',
                      help='Disable ASLR for the duration of the benchmarked '
                           'process. Requires Linux and sudo privileges.',
                      default=False, action='store_true')
  parser.add_argument('--cpu-governor',
                      help='Set cpu governor to specified policy for the '
                           'duration of the benchmarked process. Typical options: '
                           '"powersave" for more stable results, or "performance" '
                           'for shorter completion time of suite, with potentially '
                           'more noise in results.')
  parser.add_argument(
      '--filter',
      help='Only run the benchmarks matching with this '
      'regex. For example: '
      '--filter=JSTests/TypedArrays/ will run only TypedArray '
      'benchmarks from the JSTests suite.')
  parser.add_argument('--confidence-level', type=float,
                      help='Repeatedly runs each benchmark until specified '
                           'confidence level is reached. The value is interpreted '
                           'as the number of standard deviations from the mean that '
                           'all values must lie within. Typical values are 1, 2 and '
                           '3 and correspond to 68%%, 95%% and 99.7%% probability '
                           'that the measured value is within 0.1%% of the true '
                           'value. Larger values result in more retries and thus '
                           'longer runtime, but also provide more reliable results. '
                           'Also see --max-total-duration flag.')
  parser.add_argument('--max-total-duration', type=int, default=7140,  # 1h 59m
                      help='Max total duration in seconds allowed for retries '
                           'across all tests. This is especially useful in '
                           'combination with the --confidence-level flag.')
  parser.add_argument('--dump-logcats-to',
                      help='Writes logcat output from each test into specified '
                           'directory. Only supported for android targets.')
  parser.add_argument('--run-count', type=int, default=0,
                      help='Override the run count specified by the test '
                           'suite. The default 0 uses the suite\'s config.')
  parser.add_argument(
      '--dry-run',
      default=False,
      action='store_true',
      help='Do not run any actual tests.')
  parser.add_argument('-v', '--verbose', default=False, action='store_true',
                      help='Be verbose and print debug output.')
  parser.add_argument('suite', nargs='+', help='Path to the suite config file.')

  try:
    args = parser.parse_args(argv)
  except SystemExit:
    return INFRA_FAILURE_RETCODE

  logging.basicConfig(
      level=logging.DEBUG if args.verbose else logging.INFO,
      format='%(asctime)s %(levelname)-8s %(message)s')

  if args.arch == 'auto':  # pragma: no cover
    args.arch = utils.DefaultArch()
    if args.arch not in SUPPORTED_ARCHS:
      logging.error(
          'Auto-detected architecture "%s" is not supported.', args.arch)
      return INFRA_FAILURE_RETCODE

  if (args.json_test_results_secondary and
      not args.outdir_secondary):  # pragma: no cover
    logging.error('For writing secondary json test results, a secondary outdir '
                  'path must be specified.')
    return INFRA_FAILURE_RETCODE

  workspace = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))

  if args.binary_override_path == None:
    args.shell_dir = find_build_directory(
        os.path.join(workspace, args.outdir), args.arch)
    default_binary_name = 'd8'
  else:
    if not os.path.isfile(args.binary_override_path):
      logging.error('binary-override-path must be a file name')
      return INFRA_FAILURE_RETCODE
    if args.outdir_secondary:
      logging.error('specify either binary-override-path or outdir-secondary')
      return INFRA_FAILURE_RETCODE
    args.shell_dir = os.path.abspath(
        os.path.dirname(args.binary_override_path))
    default_binary_name = os.path.basename(args.binary_override_path)

  if args.outdir_secondary:
    args.shell_dir_secondary = find_build_directory(
        os.path.join(workspace, args.outdir_secondary), args.arch)
  else:
    args.shell_dir_secondary = None

  if args.json_test_results:
    args.json_test_results = os.path.abspath(args.json_test_results)

  if args.json_test_results_secondary:
    args.json_test_results_secondary = os.path.abspath(
        args.json_test_results_secondary)

  try:
    if args.filter:
      args.filter = re.compile(args.filter)
  except re.error:
    logging.error("Invalid regular expression for --filter=%s" % args.filter)
    return INFRA_FAILURE_RETCODE

  # Ensure all arguments have absolute path before we start changing current
  # directory.
  args.suite = list(map(os.path.abspath, args.suite))

  prev_aslr = None
  prev_cpu_gov = None
  platform = Platform.GetPlatform(args)

  result_tracker = ResultTracker()
  result_tracker_secondary = ResultTracker()
  have_failed_tests = False
  with CustomMachineConfiguration(governor = args.cpu_governor,
                                  disable_aslr = args.noaslr) as conf:
    for path in args.suite:
      if not os.path.exists(path):  # pragma: no cover
        result_tracker.AddError('Configuration file %s does not exist.' % path)
        continue

      with open(path) as f:
        suite = json.loads(f.read())

      # If no name is given, default to the file name without .json.
      suite.setdefault('name', os.path.splitext(os.path.basename(path))[0])

      # Setup things common to one test suite.
      platform.PreExecution()

      # Build the graph/trace tree structure.
      default_parent = DefaultSentinel(default_binary_name)
      root = BuildGraphConfigs(suite, default_parent, args.arch)

      if logging.DEBUG >= logging.root.level:
        logging.debug("Config tree:")
        for node in iter(root):
          logging.debug("  %s", node)

      # Callback to be called on each node on traversal.
      def NodeCB(node):
        platform.PreTests(node, path)

      # Traverse graph/trace tree and iterate over all runnables.
      start = time.time()
      try:
        for runnable in FlattenRunnables(root, NodeCB):
          runnable_name = '/'.join(runnable.graphs)
          if args.filter and not args.filter.match(runnable_name):
            logging.info('Skipping suite "%s" due to filter', runnable_name)
            continue
          logging.info('>>> Running suite: %s', runnable_name)

          def RunGenerator(runnable):
            if args.confidence_level:
              counter = 0
              while not result_tracker.HasEnoughRuns(
                  runnable, args.confidence_level):
                yield counter
                counter += 1
            else:
              for i in range(0, max(1, args.run_count or runnable.run_count)):
                yield i

          for i in RunGenerator(runnable):
            attempts_left = runnable.retry_count + 1
            while attempts_left:
              total_duration = time.time() - start
              if total_duration > args.max_total_duration:
                logging.info(
                    '>>> Stopping now since running for too long (%ds > %ds)',
                    total_duration, args.max_total_duration)
                raise MaxTotalDurationReachedError()

              output, output_secondary = platform.Run(
                  runnable, i, secondary=args.shell_dir_secondary)
              result_tracker.AddRunnableDuration(runnable, output.duration)
              result_tracker_secondary.AddRunnableDuration(
                  runnable, output_secondary.duration)

              if output.IsSuccess() and output_secondary.IsSuccess():
                runnable.ProcessOutput(output, result_tracker, i)
                if output_secondary is not NULL_OUTPUT:
                  runnable.ProcessOutput(
                      output_secondary, result_tracker_secondary, i)
                break

              attempts_left -= 1
              if not attempts_left:
                logging.info('>>> Suite %s failed after %d retries',
                             runnable_name, runnable.retry_count + 1)
                have_failed_tests = True
              else:
                logging.info('>>> Retrying suite: %s', runnable_name)
      except MaxTotalDurationReachedError:
        have_failed_tests = True

      platform.PostExecution()

  if args.json_test_results:
    result_tracker.WriteToFile(args.json_test_results)
  else:  # pragma: no cover
    print('Primary results:', result_tracker)

  if args.shell_dir_secondary:
    if args.json_test_results_secondary:
      result_tracker_secondary.WriteToFile(args.json_test_results_secondary)
    else:  # pragma: no cover
      print('Secondary results:', result_tracker_secondary)

  if (result_tracker.errors or result_tracker_secondary.errors or
      have_failed_tests):
    return 1

  return 0


def MainWrapper():
  try:
    return Main(sys.argv[1:])
  except:
    # Log uncaptured exceptions and report infra failure to the caller.
    traceback.print_exc()
    return INFRA_FAILURE_RETCODE


if __name__ == '__main__':  # pragma: no cover
  sys.exit(MainWrapper())