#!/usr/bin/env python
# Copyright 2016 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''
Usage: callstats.py [-h] <command> ...

Optional arguments:
  -h, --help  show this help message and exit

Commands:
  run         run chrome with --runtime-call-stats and generate logs
  stats       process logs and print statistics
  json        process logs from several versions and generate JSON
  help        help information

For each command, you can try ./callstats.py help <command>.
'''
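
# A typical workflow, as an illustrative sketch (the paths, URLs, and version
# directory names below are hypothetical, not part of the script):
#
#   ./callstats.py run -n 5 --sites "http://example.com"
#   ./callstats.py stats --aggregate example.com#1.txt example.com#2.txt
#   ./callstats.py json --aggregate logs/before logs/after > results.json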

# for py2/py3 compatibility
from __future__ import print_function

import argparse
import json
import operator
import os
import re
import shutil
import subprocess
import sys
import tempfile

import numpy
import scipy
import scipy.stats
from math import sqrt


MAX_NOF_RETRIES = 5


# Run benchmarks.

def print_command(cmd_args):
  def fix_for_printing(arg):
    m = re.match(r'^--([^=]+)=(.*)$', arg)
    if m and (' ' in m.group(2) or m.group(2).startswith('-')):
      arg = "--{}='{}'".format(m.group(1), m.group(2))
    elif ' ' in arg:
      arg = "'{}'".format(arg)
    return arg
  print(" ".join(map(fix_for_printing, cmd_args)))


def start_replay_server(args, sites, discard_output=True):
  with tempfile.NamedTemporaryFile(prefix='callstats-inject-', suffix='.js',
                                   mode='wt', delete=False) as f:
    injection = f.name
    generate_injection(f, sites, args.refresh)
  http_port = 4080 + args.port_offset
  https_port = 4443 + args.port_offset
  cmd_args = [
      args.replay_bin,
      "--port=%s" % http_port,
      "--ssl_port=%s" % https_port,
      "--no-dns_forwarding",
      "--use_closest_match",
      "--no-diff_unknown_requests",
      "--inject_scripts=deterministic.js,{}".format(injection),
      args.replay_wpr,
  ]
  print("=" * 80)
  print_command(cmd_args)
  if discard_output:
    with open(os.devnull, 'w') as null:
      server = subprocess.Popen(cmd_args, stdout=null, stderr=null)
  else:
    server = subprocess.Popen(cmd_args)
  print("RUNNING REPLAY SERVER: %s with PID=%s" % (args.replay_bin, server.pid))
  print("=" * 80)
  return {'process': server, 'injection': injection}


def stop_replay_server(server):
  print("SHUTTING DOWN REPLAY SERVER %s" % server['process'].pid)
  server['process'].terminate()
  os.remove(server['injection'])


def generate_injection(f, sites, refreshes=0):
  print("""\
(function() {
  var s = window.sessionStorage.getItem("refreshCounter");
  var refreshTotal = """, refreshes, """;
  var refreshCounter = s ? parseInt(s) : refreshTotal;
  var refreshId = refreshTotal - refreshCounter;
  if (refreshCounter > 0) {
    window.sessionStorage.setItem("refreshCounter", refreshCounter-1);
  }
  function match(url, item) {
    if ('regexp' in item) { return url.match(item.regexp) !== null };
    var url_wanted = item.url;
    /* Allow automatic redirections from http to https. */
    if (url_wanted.startsWith("http://") && url.startsWith("https://")) {
      url_wanted = "https://" + url_wanted.substr(7);
    }
    return url.startsWith(url_wanted);
  };
  function onLoad(url) {
    for (var item of sites) {
      if (!match(url, item)) continue;
      var timeout = 'timeline' in item ? 2000 * item.timeline
                  : 'timeout' in item ? 1000 * (item.timeout - 3)
                  : 10000;
      console.log("Setting time out of " + timeout + " for: " + url);
      window.setTimeout(function() {
        console.log("Time is out for: " + url);
        var msg = "STATS: (" + refreshId + ") " + url;
        %GetAndResetRuntimeCallStats(1, msg);
        if (refreshCounter > 0) {
          console.log(
              "Refresh counter is " + refreshCounter + ", refreshing: " + url);
          window.location.reload();
        }
      }, timeout);
      return;
    }
    console.log("Ignoring: " + url);
  };
  var sites =
      """, json.dumps(sites), """;
  onLoad(window.location.href);
})();""", file=f)


def get_chrome_flags(js_flags, user_data_dir, arg_delimiter=""):
  return [
      "--no-default-browser-check",
      "--no-sandbox",
      "--disable-translate",
      "--enable-benchmarking",
      "--enable-stats-table",
      "--js-flags={}{}{}".format(arg_delimiter, js_flags, arg_delimiter),
      "--no-first-run",
      "--user-data-dir={}{}{}".format(arg_delimiter, user_data_dir,
                                      arg_delimiter),
      "--data-path={}{}{}".format(
          arg_delimiter, os.path.join(user_data_dir, 'content-shell-data'),
          arg_delimiter),
  ]


def get_chrome_replay_flags(args, arg_delimiter=""):
  http_port = 4080 + args.port_offset
  https_port = 4443 + args.port_offset
  return [
      "--host-resolver-rules=%sMAP *:80 localhost:%s, "
          "MAP *:443 localhost:%s, "
          "EXCLUDE localhost%s" % (
              arg_delimiter, http_port, https_port, arg_delimiter),
      "--ignore-certificate-errors",
      "--disable-seccomp-sandbox",
      "--disable-web-security",
      "--reduce-security-for-testing",
      "--allow-insecure-localhost",
  ]
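
# For example, with the default port offset of 0 and no delimiter, the
# resolver rule passed to chrome reads:
#   MAP *:80 localhost:4080, MAP *:443 localhost:4443, EXCLUDE localhost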


def run_site(site, domain, args, timeout=None):
  print("=" * 80)
  print("RUNNING DOMAIN %s" % domain)
  print("=" * 80)
  result_template = "{domain}#{count}.txt" if args.repeat else "{domain}.txt"
  count = 0
  if timeout is None: timeout = args.timeout
  if args.replay_wpr:
    timeout *= 1 + args.refresh
    timeout += 1
  retries_since_good_run = 0
  while count == 0 or (args.repeat is not None and count < args.repeat):
    count += 1
    result = result_template.format(domain=domain, count=count)
    retries = 0
    while args.retries is None or retries < args.retries:
      retries += 1
      try:
        if args.user_data_dir:
          user_data_dir = args.user_data_dir
        else:
          user_data_dir = tempfile.mkdtemp(prefix="chr_")
        js_flags = "--runtime-call-stats"
        if args.replay_wpr: js_flags += " --allow-natives-syntax"
        if args.js_flags: js_flags += " " + args.js_flags
        chrome_flags = get_chrome_flags(js_flags, user_data_dir)
        if args.replay_wpr:
          chrome_flags += get_chrome_replay_flags(args)
        else:
          chrome_flags += ["--single-process"]
        if args.chrome_flags:
          chrome_flags += args.chrome_flags.split()
        cmd_args = [
            "timeout", str(timeout),
            args.with_chrome
        ] + chrome_flags + [site]
        print("- " * 40)
        print_command(cmd_args)
        print("- " * 40)
        with open(result, "wt") as f:
          with open(args.log_stderr or os.devnull, 'at') as err:
            status = subprocess.call(cmd_args, stdout=f, stderr=err)
          # 124 means the timeout wrapper killed chrome; 0 means the user was
          # bored first! If neither of these happened, chrome apparently
          # crashed, so it must be called again.
          if status != 124 and status != 0:
            print("CHROME CRASHED, REPEATING RUN")
            continue
        # If the stats file is empty, chrome must be called again.
        if os.path.isfile(result) and os.path.getsize(result) > 0:
          if args.print_url:
            with open(result, "at") as f:
              print(file=f)
              print("URL: {}".format(site), file=f)
          retries_since_good_run = 0
          break
        if retries_since_good_run > MAX_NOF_RETRIES:
          # Abort after too many retries; there is no point in ever increasing
          # the timeout.
          print("TOO MANY EMPTY RESULTS ABORTING RUN")
          return
        # Back off exponentially before the next attempt.
        timeout += 2 ** retries_since_good_run
        retries_since_good_run += 1
        print("EMPTY RESULT, REPEATING RUN ({})".format(retries_since_good_run))
      finally:
        if not args.user_data_dir:
          shutil.rmtree(user_data_dir)


def read_sites_file(args):
  try:
    sites = []
    try:
      with open(args.sites_file, "rt") as f:
        for item in json.load(f):
          if 'timeout' not in item:
            # This is more-or-less arbitrary.
            item['timeout'] = int(1.5 * item['timeline'] + 7)
          if item['timeout'] > args.timeout: item['timeout'] = args.timeout
          sites.append(item)
    except ValueError:
      args.error("Warning: Could not read sites file as JSON, falling back to "
                 "primitive file")
      with open(args.sites_file, "rt") as f:
        for line in f:
          line = line.strip()
          if not line or line.startswith('#'): continue
          sites.append({'url': line, 'timeout': args.timeout})
    return sites
  except IOError as e:
    args.error("Cannot read from {}. {}.".format(args.sites_file, e.strerror))
    sys.exit(1)
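
# The sites file is either JSON, e.g. (an illustrative sketch, not a file
# shipped with the script):
#   [{"url": "http://example.com", "timeout": 20},
#    {"url": "http://example.org", "timeline": 5}]
# where a missing "timeout" is derived from "timeline", or a plain text file
# with one URL per line (lines starting with '#' are comments).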


def read_sites(args):
  # Determine the websites to benchmark.
  if args.sites_file:
    return read_sites_file(args)
  return [{'url': site, 'timeout': args.timeout} for site in args.sites]


def do_run(args):
  sites = read_sites(args)
  replay_server = start_replay_server(args, sites) if args.replay_wpr else None
  # Disambiguate domains, if needed.
  L = []
  domains = {}
  for item in sites:
    site = item['url']
    domain = None
    if args.domain:
      domain = args.domain
    elif 'domain' in item:
      domain = item['domain']
    else:
      m = re.match(r'^(https?://)?([^/]+)(/.*)?$', site)
      if not m:
        args.error("Invalid URL {}.".format(site))
        continue
      domain = m.group(2)
    entry = [site, domain, None, item['timeout']]
    if domain not in domains:
      domains[domain] = entry
    else:
      if not isinstance(domains[domain], int):
        domains[domain][2] = 1
        domains[domain] = 1
      domains[domain] += 1
      entry[2] = domains[domain]
    L.append(entry)
  try:
    # Run them.
    for site, domain, count, timeout in L:
      if count is not None: domain = "{}%{}".format(domain, count)
      print((site, domain, timeout))
      run_site(site, domain, args, timeout)
  finally:
    if replay_server:
      stop_replay_server(replay_server)
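
# Example of the disambiguation above (hypothetical input): two sites that
# share the domain example.com are run as "example.com%1" and "example.com%2",
# so their result files do not overwrite each other.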


def do_run_replay_server(args):
  sites = read_sites(args)
  print("- " * 40)
  print("Available URLs:")
  for site in sites:
    print(" " + site['url'])
  print("- " * 40)
  print("Launch chromium with the following commands for debugging:")
  flags = get_chrome_flags("--runtime-call-stats --allow-natives-syntax",
                           "/var/tmp/`date +%s`", '"')
  flags += get_chrome_replay_flags(args, "'")
  print(" $CHROMIUM_DIR/out/Release/chrome " + (" ".join(flags)) + " <URL>")
  print("- " * 40)
  replay_server = start_replay_server(args, sites, discard_output=False)
  try:
    replay_server['process'].wait()
  finally:
    stop_replay_server(replay_server)


# Calculate statistics.

def statistics(data):
  N = len(data)
  average = numpy.average(data)
  median = numpy.median(data)
  low = numpy.min(data)
  high = numpy.max(data)
  if N > 1:
    # Evaluate the sample variance by setting the delta degrees of freedom
    # (ddof) to 1. The divisor used in the calculation is N - ddof.
    stddev = numpy.std(data, ddof=1)
    # Get the endpoints of the range that contains 95% of the distribution.
    t_bounds = scipy.stats.t.interval(0.95, N - 1)
    # assert abs(t_bounds[0] + t_bounds[1]) < 1e-6
    # Add the mean to the confidence interval.
    ci = {
        'abs': t_bounds[1] * stddev / sqrt(N),
        'low': average + t_bounds[0] * stddev / sqrt(N),
        'high': average + t_bounds[1] * stddev / sqrt(N)
    }
  else:
    stddev = 0
    ci = {'abs': 0, 'low': average, 'high': average}
  if abs(stddev) > 0.0001 and abs(average) > 0.0001:
    ci['perc'] = t_bounds[1] * stddev / sqrt(N) / average * 100
  else:
    ci['perc'] = 0
  return {'samples': N, 'average': average, 'median': median,
          'stddev': stddev, 'min': low, 'max': high, 'ci': ci}
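
# Worked example (illustrative): for data = [10.0, 11.0, 12.0] we get N = 3,
# average = 11.0 and sample stddev = 1.0; the two-sided 95% t-interval with
# N-1 = 2 degrees of freedom has t ~ 4.30, so ci['abs'] ~ 4.30/sqrt(3) ~ 2.48
# and the confidence interval is roughly [8.52, 13.48].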


def add_category_total(entries, groups, category_prefix):
  group_data = {'time': 0, 'count': 0}
  for group_name, regexp in groups:
    if not group_name.startswith('Group-' + category_prefix): continue
    group_data['time'] += entries[group_name]['time']
    group_data['count'] += entries[group_name]['count']
  entries['Group-' + category_prefix + '-Total'] = group_data


def read_stats(path, domain, args):
  groups = []
  if args.aggregate:
    groups = [
        ('Group-IC', re.compile(".*IC_.*")),
        ('Group-OptimizeBackground', re.compile(".*OptimizeBackground.*")),
        ('Group-Optimize',
         re.compile("StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*")),
        ('Group-CompileBackground', re.compile("(.*CompileBackground.*)")),
        ('Group-Compile', re.compile("(^Compile.*)|(.*_Compile.*)")),
        ('Group-ParseBackground', re.compile(".*ParseBackground.*")),
        ('Group-Parse', re.compile(".*Parse.*")),
        ('Group-Callback', re.compile(".*Callback.*")),
        ('Group-API', re.compile(".*API.*")),
        ('Group-GC-Custom', re.compile("GC_Custom_.*")),
        ('Group-GC-Background', re.compile(".*GC.*BACKGROUND.*")),
        ('Group-GC', re.compile("GC_.*|AllocateInTargetSpace")),
        ('Group-JavaScript', re.compile("JS_Execution")),
        ('Group-Runtime', re.compile(".*"))]
  with open(path, "rt") as f:
    # Process the whole file and sum repeating entries.
    entries = {'Sum': {'time': 0, 'count': 0}}
    for group_name, regexp in groups:
      entries[group_name] = {'time': 0, 'count': 0}
    for line in f:
      line = line.strip()
      # Discard headers and footers.
      if not line: continue
      if line.startswith("Runtime Function"): continue
      if line.startswith("===="): continue
      if line.startswith("----"): continue
      if line.startswith("URL:"): continue
      if line.startswith("STATS:"): continue
      # We have a regular line.
      fields = line.split()
      key = fields[0]
      time = float(fields[1].replace("ms", ""))
      count = int(fields[3])
      if key not in entries: entries[key] = {'time': 0, 'count': 0}
      entries[key]['time'] += time
      entries[key]['count'] += count
      # Add the line to the sum, unless it is the "Total" line itself.
      if key != "Total":
        entries['Sum']['time'] += time
        entries['Sum']['count'] += count
        for group_name, regexp in groups:
          if not regexp.match(key): continue
          entries[group_name]['time'] += time
          entries[group_name]['count'] += count
          break
    # Calculate the V8-Total (all groups except Callback).
    group_data = {'time': 0, 'count': 0}
    for group_name, regexp in groups:
      if group_name == 'Group-Callback': continue
      group_data['time'] += entries[group_name]['time']
      group_data['count'] += entries[group_name]['count']
    entries['Group-Total-V8'] = group_data
    # Calculate the Parse-Total, Compile-Total and Optimize-Total groups.
    add_category_total(entries, groups, 'Parse')
    add_category_total(entries, groups, 'Compile')
    add_category_total(entries, groups, 'Optimize')
    # Append the sums as single entries to domain.
    for key in entries:
      if key not in domain: domain[key] = {'time_list': [], 'count_list': []}
      domain[key]['time_list'].append(entries[key]['time'])
      domain[key]['count_list'].append(entries[key]['count'])
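
# read_stats above assumes whitespace-separated --runtime-call-stats table
# rows; a hypothetical row might look like:
#   JS_Execution    123.45ms    42.10%    1234    5.60%
# Only fields[0] (the entry name), fields[1] (the time in ms) and fields[3]
# (the invocation count) are used.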


def print_stats(S, args):
  # Sort by ascending/descending time average, then by ascending/descending
  # count average, then by ascending name.
  def sort_asc_func(item):
    return (item[1]['time_stat']['average'],
            item[1]['count_stat']['average'],
            item[0])
  def sort_desc_func(item):
    return (-item[1]['time_stat']['average'],
            -item[1]['count_stat']['average'],
            item[0])
  # The sorting order is given in the command-line arguments.
  sort_func = sort_asc_func if args.sort == "asc" else sort_desc_func
  # Possibly limit how many elements to print.
  L = [item for item in sorted(S.items(), key=sort_func)
       if item[0] not in ["Total", "Sum"]]
  N = len(L)
  if args.limit == 0:
    low, high = 0, N
  elif args.sort == "desc":
    low, high = 0, args.limit
  else:
    low, high = N - args.limit, N
  # How to print entries.
  def print_entry(key, value):
    def stats(s, units=""):
      conf = "{:0.1f}({:0.2f}%)".format(s['ci']['abs'], s['ci']['perc'])
      return "{:8.1f}{} +/- {:15s}".format(s['average'], units, conf)
    print("{:>50s} {} {}".format(
        key,
        stats(value['time_stat'], units="ms"),
        stats(value['count_stat'])
    ))
  # Print and calculate partial sums, if necessary.
  for i in range(low, high):
    print_entry(*L[i])
    if args.totals and args.limit != 0 and not args.aggregate:
      if i == low:
        partial = {'time_list': [0] * len(L[i][1]['time_list']),
                   'count_list': [0] * len(L[i][1]['count_list'])}
      assert len(partial['time_list']) == len(L[i][1]['time_list'])
      assert len(partial['count_list']) == len(L[i][1]['count_list'])
      for j, v in enumerate(L[i][1]['time_list']):
        partial['time_list'][j] += v
      for j, v in enumerate(L[i][1]['count_list']):
        partial['count_list'][j] += v
  # Print totals, if necessary.
  if args.totals:
    print('-' * 80)
    if args.limit != 0 and not args.aggregate:
      partial['time_stat'] = statistics(partial['time_list'])
      partial['count_stat'] = statistics(partial['count_list'])
      print_entry("Partial", partial)
    print_entry("Sum", S["Sum"])
    print_entry("Total", S["Total"])


def do_stats(args):
  domains = {}
  for path in args.logfiles:
    filename = os.path.basename(path)
    m = re.match(r'^([^#]+)(#.*)?$', filename)
    domain = m.group(1)
    if domain not in domains: domains[domain] = {}
    read_stats(path, domains[domain], args)
  if args.aggregate:
    create_total_page_stats(domains, args)
  for i, domain in enumerate(sorted(domains)):
    if len(domains) > 1:
      if i > 0: print()
      print("{}:".format(domain))
      print('=' * 80)
    domain_stats = domains[domain]
    for key in domain_stats:
      domain_stats[key]['time_stat'] = \
          statistics(domain_stats[key]['time_list'])
      domain_stats[key]['count_stat'] = \
          statistics(domain_stats[key]['count_list'])
    print_stats(domain_stats, args)


# Create a Total page with all entries summed up.
def create_total_page_stats(domains, args):
  total = {}
  def sum_up(parent, key, other):
    sums = parent[key]
    for i, item in enumerate(other[key]):
      if i >= len(sums):
        sums.extend([0] * (i - len(sums) + 1))
      if item is not None:
        sums[i] += item
  # Exclude adwords and speedometer pages from the aggregated total, since
  # adwords dominates execution time and speedometer is measured elsewhere.
  excluded_domains = ['adwords.google.com', 'speedometer-angular',
                      'speedometer-jquery', 'speedometer-backbone',
                      'speedometer-ember', 'speedometer-vanilla']
  # Sum up all the entries/metrics from all non-excluded domains.
  for domain, entries in domains.items():
    if domain in excluded_domains:
      continue
    for key, domain_stats in entries.items():
      if key not in total:
        total[key] = {}
        total[key]['time_list'] = list(domain_stats['time_list'])
        total[key]['count_list'] = list(domain_stats['count_list'])
      else:
        sum_up(total[key], 'time_list', domain_stats)
        sum_up(total[key], 'count_list', domain_stats)
  # Add a new "Total" page containing the summed up metrics.
  domains['Total'] = total


# Generate raw JSON file.

def _read_logs(args):
  versions = {}
  for path in args.logdirs:
    if os.path.isdir(path):
      for root, dirs, files in os.walk(path):
        version = os.path.basename(root)
        if version not in versions: versions[version] = {}
        for filename in files:
          if filename.endswith(".txt"):
            m = re.match(r'^([^#]+)(#.*)?\.txt$', filename)
            domain = m.group(1)
            if domain not in versions[version]: versions[version][domain] = {}
            read_stats(os.path.join(root, filename),
                       versions[version][domain], args)
  return versions


def do_raw_json(args):
  versions = _read_logs(args)

  for version, domains in versions.items():
    if args.aggregate:
      create_total_page_stats(domains, args)
    for domain, entries in domains.items():
      raw_entries = []
      for name, value in entries.items():
        # We don't want the calculated sum in the JSON file.
        if name == "Sum": continue
        raw_entries.append({
            'name': name,
            'duration': value['time_list'],
            'count': value['count_list'],
        })
      domains[domain] = raw_entries

  print(json.dumps(versions, separators=(',', ':')))
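
# The emitted raw JSON has the shape (sketch):
#   {version: {domain: [{"name": ..., "duration": [ms, ...],
#                        "count": [n, ...]}, ...]}}
# with one list element per entry, and one value per parsed log file in each
# "duration"/"count" list.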


# Generate JSON file.

def do_json(args):
  versions = _read_logs(args)

  for version, domains in versions.items():
    if args.aggregate:
      create_total_page_stats(domains, args)
    for domain, entries in domains.items():
      stats = []
      for name, value in entries.items():
        # We don't want the calculated sum in the JSON file.
        if name == "Sum": continue
        entry = [name]
        for x in ['time_list', 'count_list']:
          s = statistics(entries[name][x])
          entry.append(round(s['average'], 1))
          entry.append(round(s['ci']['abs'], 1))
          entry.append(round(s['ci']['perc'], 2))
        stats.append(entry)
      domains[domain] = stats
  print(json.dumps(versions, separators=(',', ':')))
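
# Each JSON entry here is a flat list (sketch):
#   [name, time_average, time_ci_abs, time_ci_perc,
#          count_average, count_ci_abs, count_ci_perc]
# rounded to one decimal (averages, absolute CI) or two (percent CI).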


# Help.

def do_help(parser, subparsers, args):
  if args.help_cmd:
    if args.help_cmd in subparsers:
      subparsers[args.help_cmd].print_help()
    else:
      args.error("Unknown command '{}'".format(args.help_cmd))
  else:
    parser.print_help()


# Main program, parse command line and execute.

def coexist(*l):
  # True when either all of the arguments are set or none of them are.
  given = sum(1 for x in l if x)
  return given == 0 or given == len(l)


def main():
  parser = argparse.ArgumentParser()
  subparser_adder = parser.add_subparsers(title="commands", dest="command",
                                          metavar="<command>")
  subparsers = {}
  # Command: run.
  subparsers["run"] = subparser_adder.add_parser(
      "run", help="Replay websites and collect runtime stats data.")
  subparsers["run"].set_defaults(
      func=do_run, error=subparsers["run"].error)
  subparsers["run"].add_argument(
      "--chrome-flags", type=str, default="",
      help="specify additional chrome flags")
  subparsers["run"].add_argument(
      "--js-flags", type=str, default="",
      help="specify additional V8 flags")
  subparsers["run"].add_argument(
      "-u", "--user-data-dir", type=str, metavar="<path>",
      help="specify user data dir (default is temporary)")
  subparsers["run"].add_argument(
      "-c", "--with-chrome", type=str, metavar="<path>",
      default="/usr/bin/google-chrome",
      help="specify chrome executable to use")
  subparsers["run"].add_argument(
      "-r", "--retries", type=int, metavar="<num>",
      help="specify retries if website is down (default: forever)")
  subparsers["run"].add_argument(
      "--no-url", dest="print_url", action="store_false", default=True,
      help="do not include url in statistics file")
  subparsers["run"].add_argument(
      "--domain", type=str, default="",
      help="specify the output file domain name")
  subparsers["run"].add_argument(
      "-n", "--repeat", type=int, metavar="<num>",
      help="specify iterations for each website (default: once)")

  def add_replay_args(subparser):
    subparser.add_argument(
        "-k", "--refresh", type=int, metavar="<num>", default=0,
        help="specify refreshes for each iteration (default: 0)")
    subparser.add_argument(
        "--replay-wpr", type=str, metavar="<path>",
        help="use the specified web page replay (.wpr) archive")
    subparser.add_argument(
        "--replay-bin", type=str, metavar="<path>",
        help="specify the replay.py script typically located in "
             "$CHROMIUM/src/third_party/webpagereplay/replay.py")
    subparser.add_argument(
        "-f", "--sites-file", type=str, metavar="<path>",
        help="specify file containing benchmark websites")
    subparser.add_argument(
        "-t", "--timeout", type=int, metavar="<seconds>", default=60,
        help="specify seconds before chrome is killed")
    subparser.add_argument(
        "-p", "--port-offset", type=int, metavar="<offset>", default=0,
        help="specify the offset for the replay server's default ports")
    subparser.add_argument(
        "-l", "--log-stderr", type=str, metavar="<path>",
        help="specify where chrome's stderr should go (default: /dev/null)")
    subparser.add_argument(
        "--sites", type=str, metavar="<URL>", nargs="*",
        help="specify benchmark websites")
  add_replay_args(subparsers["run"])

  # Command: replay-server.
  subparsers["replay"] = subparser_adder.add_parser(
      "replay", help="Run the replay server for debugging purposes")
  subparsers["replay"].set_defaults(
      func=do_run_replay_server, error=subparsers["replay"].error)
  add_replay_args(subparsers["replay"])

  # Command: stats.
  subparsers["stats"] = subparser_adder.add_parser(
      "stats", help="Analyze the results files created by the 'run' command.")
  subparsers["stats"].set_defaults(
      func=do_stats, error=subparsers["stats"].error)
  subparsers["stats"].add_argument(
      "-l", "--limit", type=int, metavar="<num>", default=0,
      help="limit how many items to print (default: none)")
  subparsers["stats"].add_argument(
      "-s", "--sort", choices=["asc", "desc"], default="asc",
      help="specify sorting order (default: ascending)")
  subparsers["stats"].add_argument(
      "-n", "--no-total", dest="totals", action="store_false", default=True,
      help="do not print totals")
  subparsers["stats"].add_argument(
      "logfiles", type=str, metavar="<logfile>", nargs="*",
      help="specify log files to parse")
  subparsers["stats"].add_argument(
      "--aggregate", dest="aggregate", action="store_true", default=False,
      help="Create aggregated entries. Adds Group-* entries at the toplevel. "
           "Additionally creates a Total page with all entries.")

  # Command: json.
  subparsers["json"] = subparser_adder.add_parser(
      "json", help="Collect the results files created by the 'run' command "
                   "into a single JSON file.")
  subparsers["json"].set_defaults(
      func=do_json, error=subparsers["json"].error)
  subparsers["json"].add_argument(
      "logdirs", type=str, metavar="<logdir>", nargs="*",
      help="specify directories with log files to parse")
  subparsers["json"].add_argument(
      "--aggregate", dest="aggregate", action="store_true", default=False,
      help="Create aggregated entries. Adds Group-* entries at the toplevel. "
           "Additionally creates a Total page with all entries.")

  # Command: raw-json.
  subparsers["raw-json"] = subparser_adder.add_parser(
      "raw-json", help="Collect the raw results from the 'run' command "
                       "into a single JSON file.")
  subparsers["raw-json"].set_defaults(
      func=do_raw_json, error=subparsers["raw-json"].error)
  subparsers["raw-json"].add_argument(
      "logdirs", type=str, metavar="<logdir>", nargs="*",
      help="specify directories with log files to parse")
  subparsers["raw-json"].add_argument(
      "--aggregate", dest="aggregate", action="store_true", default=False,
      help="Create aggregated entries. Adds Group-* entries at the toplevel. "
           "Additionally creates a Total page with all entries.")

  # Command: help.
  subparsers["help"] = subparser_adder.add_parser(
      "help", help="help information")
  subparsers["help"].set_defaults(
      func=lambda args: do_help(parser, subparsers, args),
      error=subparsers["help"].error)
  subparsers["help"].add_argument(
      "help_cmd", type=str, metavar="<command>", nargs="?",
      help="command for which to display help")

  # Execute the command.
  args = parser.parse_args()
  setattr(args, 'script_path', os.path.dirname(sys.argv[0]))
  if args.command == "run" and coexist(args.sites_file, args.sites):
    args.error("use either option --sites-file or site URLs")
    sys.exit(1)
  elif args.command == "run" and not coexist(args.replay_wpr, args.replay_bin):
    args.error("options --replay-wpr and --replay-bin must be used together")
    sys.exit(1)
  else:
    args.func(args)


if __name__ == "__main__":
  sys.exit(main())