[RCS / TurboFan] Enable basic off-thread RCS for Concurrent Optimization.

Also removes the --noconcurrent-recompilation flag from the callstats.py script.

BUG=v8:7790
Change-Id: I7e652dd273fd12565e086fe579bdf5974a876d9c
Reviewed-on: https://chromium-review.googlesource.com/1202402
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55709}
Parent: dc5c741612
Commit: b3a480a65e
optimizing-compile-dispatcher.cc

@@ -41,7 +41,11 @@ class OptimizingCompileDispatcher::CompileTask : public CancelableTask {
  public:
   explicit CompileTask(Isolate* isolate,
                        OptimizingCompileDispatcher* dispatcher)
-      : CancelableTask(isolate), isolate_(isolate), dispatcher_(dispatcher) {
+      : CancelableTask(isolate),
+        isolate_(isolate),
+        worker_thread_runtime_call_stats_(
+            isolate->counters()->worker_thread_runtime_call_stats()),
+        dispatcher_(dispatcher) {
     base::LockGuard<base::Mutex> lock_guard(&dispatcher_->ref_count_mutex_);
     ++dispatcher_->ref_count_;
   }
@@ -56,8 +60,13 @@ class OptimizingCompileDispatcher::CompileTask : public CancelableTask {
     DisallowHandleDereference no_deref;

     {
-      TimerEventScope<TimerEventRecompileConcurrent> timer(isolate_);
+      WorkerThreadRuntimeCallStatsScope runtime_call_stats_scope(
+          worker_thread_runtime_call_stats_);
+      RuntimeCallTimerScope runtimeTimer(
+          runtime_call_stats_scope.Get(),
+          RuntimeCallCounterId::kRecompileConcurrent);

+      TimerEventScope<TimerEventRecompileConcurrent> timer(isolate_);
       TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
                    "V8.RecompileConcurrent");

@@ -77,6 +86,7 @@ class OptimizingCompileDispatcher::CompileTask : public CancelableTask {
   }

   Isolate* isolate_;
+  WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats_;
   OptimizingCompileDispatcher* dispatcher_;

   DISALLOW_COPY_AND_ASSIGN(CompileTask);
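Note on the dispatcher change: each background CompileTask now keeps a pointer to the isolate's WorkerThreadRuntimeCallStats table and, while running, opens a WorkerThreadRuntimeCallStatsScope plus a RuntimeCallTimerScope for kRecompileConcurrent, so time spent in concurrent recompilation is attributed to a per-worker RCS table instead of being lost. The sketch below is a minimal, hypothetical Python model of that idea only; the class and method names (WorkerThreadStats, scope, timer) are illustrative, not V8 API. A thread gets its own table for the duration of a scope and the table is merged into a shared one when the scope closes.

import threading
import time
from collections import defaultdict
from contextlib import contextmanager

# Hypothetical stand-in for a worker-thread stats table: per-thread tables
# are merged into a shared table under a lock when their scope ends.
class WorkerThreadStats:
    def __init__(self):
        self._lock = threading.Lock()
        self.totals = defaultdict(lambda: {"count": 0, "time": 0.0})
        self._tls = threading.local()

    @contextmanager
    def scope(self):
        # Analogue of WorkerThreadRuntimeCallStatsScope: install a private
        # table for this worker, then merge it back on exit.
        self._tls.table = defaultdict(lambda: {"count": 0, "time": 0.0})
        try:
            yield self._tls.table
        finally:
            with self._lock:
                for name, entry in self._tls.table.items():
                    self.totals[name]["count"] += entry["count"]
                    self.totals[name]["time"] += entry["time"]

    @contextmanager
    def timer(self, counter_name):
        # Analogue of RuntimeCallTimerScope: time one counter on the
        # worker-local table.
        start = time.perf_counter()
        try:
            yield
        finally:
            entry = self._tls.table[counter_name]
            entry["count"] += 1
            entry["time"] += time.perf_counter() - start

stats = WorkerThreadStats()

def compile_task():
    with stats.scope():
        with stats.timer("RecompileConcurrent"):
            time.sleep(0.01)  # stand-in for the actual optimization work

threads = [threading.Thread(target=compile_task) for _ in range(4)]
for t in threads: t.start()
for t in threads: t.join()
print(dict(stats.totals))  # e.g. {'RecompileConcurrent': {'count': 4, 'time': ...}}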
callstats.html

@@ -1507,6 +1507,7 @@ code is governed by a BSD-style license that can be found in the LICENSE file.
       this.groups = [
         this.total,
         Group.groups.get('ic').entry(),
+        Group.groups.get('optimize-background').entry(),
         Group.groups.get('optimize').entry(),
         Group.groups.get('compile-background').entry(),
         Group.groups.get('compile').entry(),
@@ -1715,14 +1716,16 @@ code is governed by a BSD-style license that can be found in the LICENSE file.
     }
     Group.add('total', new Group('Total', /.*Total.*/, '#BBB'));
     Group.add('ic', new Group('IC', /.*IC_.*/, "#3366CC"));
+    Group.add('optimize-background', new Group('Optimize-Background',
+        /(.*OptimizeConcurrent.*)|RecompileConcurrent.*/, "#702000"));
     Group.add('optimize', new Group('Optimize',
         /StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*/, "#DC3912"));
     Group.add('compile-background', new Group('Compile-Background',
-        /(.*CompileBackground.*)/, "#b9a720"));
+        /(.*CompileBackground.*)/, "#b08000"));
     Group.add('compile', new Group('Compile',
         /(^Compile.*)|(.*_Compile.*)/, "#FFAA00"));
     Group.add('parse-background',
-        new Group('Parse-Background', /.*ParseBackground.*/, "#af744d"));
+        new Group('Parse-Background', /.*ParseBackground.*/, "#c05000"));
     Group.add('parse', new Group('Parse', /.*Parse.*/, "#FF6600"));
     Group.add('callback', new Group('Blink C++', /.*Callback.*/, "#109618"));
     Group.add('api', new Group('API', /.*API.*/, "#990099"));
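Note on the grouping: the new 'Optimize-Background' group is registered before 'Optimize', so counters for concurrent work (anything matching OptimizeConcurrent or RecompileConcurrent) land in their own bucket instead of being folded into the main-thread Optimize group. A small Python sketch of this first-match bucketing; the counter names in the loop are made up for illustration and are not an exact list of V8 counters:

import re

# Ordered (group, regexp) pairs; first match wins, mirroring the ordering in
# callstats.html where Optimize-Background comes before Optimize.
GROUPS = [
    ("IC", re.compile(r".*IC_.*")),
    ("Optimize-Background",
     re.compile(r"(.*OptimizeConcurrent.*)|RecompileConcurrent.*")),
    ("Optimize",
     re.compile(r"StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*")),
]

def bucket(counter_name):
    for group, regexp in GROUPS:
        if regexp.match(counter_name):
            return group
    return "Other"

for name in ["RecompileConcurrent", "OptimizeConcurrentPrepare",
             "OptimizeCode", "LoadIC_Miss"]:
    print(name, "->", bucket(name))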
callstats.py

@@ -180,7 +180,7 @@ def run_site(site, domain, args, timeout=None):
       user_data_dir = args.user_data_dir
     else:
       user_data_dir = tempfile.mkdtemp(prefix="chr_")
-    js_flags = "--runtime-call-stats --noconcurrent-recompilation"
+    js_flags = "--runtime-call-stats"
     if args.replay_wpr: js_flags += " --allow-natives-syntax"
     if args.js_flags: js_flags += " " + args.js_flags
     chrome_flags = get_chrome_flags(js_flags, user_data_dir)
@@ -240,6 +240,8 @@ def read_sites_file(args):
          if item['timeout'] > args.timeout: item['timeout'] = args.timeout
          sites.append(item)
    except ValueError:
+      args.error("Warning: Could not read sites file as JSON, falling back to "
+                 "primitive file")
      with open(args.sites_file, "rt") as f:
        for line in f:
          line = line.strip()
@@ -349,11 +351,22 @@ def statistics(data):
           'stddev': stddev, 'min': low, 'max': high, 'ci': ci }


+def add_category_total(entries, groups, category_prefix):
+  group_data = { 'time': 0, 'count': 0 }
+  for group_name, regexp in groups:
+    if not group_name.startswith('Group-' + category_prefix): continue
+    group_data['time'] += entries[group_name]['time']
+    group_data['count'] += entries[group_name]['count']
+  entries['Group-' + category_prefix + '-Total'] = group_data
+
+
 def read_stats(path, domain, args):
   groups = [];
   if args.aggregate:
     groups = [
         ('Group-IC', re.compile(".*IC_.*")),
+        ('Group-OptimizeBackground',
+         re.compile(".*OptimizeConcurrent.*|RecompileConcurrent.*")),
         ('Group-Optimize',
          re.compile("StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*")),
         ('Group-CompileBackground', re.compile("(.*CompileBackground.*)")),
@@ -405,20 +418,10 @@ def read_stats(path, domain, args):
       group_data['time'] += entries[group_name]['time']
       group_data['count'] += entries[group_name]['count']
     entries['Group-Total-V8'] = group_data
-    # Calculate the Parse-Total group
-    group_data = { 'time': 0, 'count': 0 }
-    for group_name, regexp in groups:
-      if not group_name.startswith('Group-Parse'): continue
-      group_data['time'] += entries[group_name]['time']
-      group_data['count'] += entries[group_name]['count']
-    entries['Group-Parse-Total'] = group_data
-    # Calculate the Compile-Total group
-    group_data = { 'time': 0, 'count': 0 }
-    for group_name, regexp in groups:
-      if not group_name.startswith('Group-Compile'): continue
-      group_data['time'] += entries[group_name]['time']
-      group_data['count'] += entries[group_name]['count']
-    entries['Group-Compile-Total'] = group_data
+    # Calculate the Parse-Total, Compile-Total and Optimize-Total groups
+    add_category_total(entries, groups, 'Parse')
+    add_category_total(entries, groups, 'Compile')
+    add_category_total(entries, groups, 'Optimize')
     # Append the sums as single entries to domain.
     for key in entries:
       if key not in domain: domain[key] = { 'time_list': [], 'count_list': [] }
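Note on the refactor: add_category_total replaces the copy-pasted aggregation loops with one helper that sums 'time' and 'count' over every group whose name starts with 'Group-<prefix>' and stores the result under 'Group-<prefix>-Total', which also lets the new Optimize category get a total. A quick standalone check of that behaviour; the helper is copied from the diff, while the sample groups, entries, and numbers are invented:

import re

def add_category_total(entries, groups, category_prefix):
  group_data = { 'time': 0, 'count': 0 }
  for group_name, regexp in groups:
    if not group_name.startswith('Group-' + category_prefix): continue
    group_data['time'] += entries[group_name]['time']
    group_data['count'] += entries[group_name]['count']
  entries['Group-' + category_prefix + '-Total'] = group_data

# Invented sample data: two Compile-related groups plus an unrelated one.
groups = [
    ('Group-Compile', re.compile("(^Compile.*)|(.*_Compile.*)")),
    ('Group-CompileBackground', re.compile("(.*CompileBackground.*)")),
    ('Group-IC', re.compile(".*IC_.*")),
]
entries = {
    'Group-Compile':           {'time': 12.0, 'count': 3},
    'Group-CompileBackground': {'time':  5.0, 'count': 2},
    'Group-IC':                {'time':  7.5, 'count': 9},
}
add_category_total(entries, groups, 'Compile')
print(entries['Group-Compile-Total'])  # {'time': 17.0, 'count': 5}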
@@ -651,7 +654,7 @@ def main():
       "-l", "--log-stderr", type=str, metavar="<path>",
       help="specify where chrome's stderr should go (default: /dev/null)")
   subparser.add_argument(
-      "sites", type=str, metavar="<URL>", nargs="*",
+      "--sites", type=str, metavar="<URL>", nargs="*",
       help="specify benchmark website")
   add_replay_args(subparsers["run"])

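Note on the argparse change: the run subcommand's sites argument switches from a positional argument to an explicit --sites option, so invocations that previously passed URLs positionally now need the flag. A minimal illustration of the difference using the standard library argparse; the URL is a placeholder:

import argparse

# Before: positional, e.g.  callstats.py run https://example.com
old = argparse.ArgumentParser(prog="callstats.py run")
old.add_argument("sites", type=str, metavar="<URL>", nargs="*",
                 help="specify benchmark website")

# After: an option, e.g.  callstats.py run --sites https://example.com
new = argparse.ArgumentParser(prog="callstats.py run")
new.add_argument("--sites", type=str, metavar="<URL>", nargs="*",
                 help="specify benchmark website")

print(old.parse_args(["https://example.com"]).sites)             # ['https://example.com']
print(new.parse_args(["--sites", "https://example.com"]).sites)  # ['https://example.com']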