[RCS / TurboFan] Enable basic off-thread RCS for Concurrent Optimization.

Also removes the --noconcurrent-recompilation flag from the callstats.py script.

BUG=v8:7790

Change-Id: I7e652dd273fd12565e086fe579bdf5974a876d9c
Reviewed-on: https://chromium-review.googlesource.com/1202402
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55709}
This commit is contained in:
Ross McIlroy 2018-09-06 15:23:36 +01:00 committed by Commit Bot
parent dc5c741612
commit b3a480a65e
3 changed files with 36 additions and 20 deletions

View File

@ -41,7 +41,11 @@ class OptimizingCompileDispatcher::CompileTask : public CancelableTask {
public: public:
explicit CompileTask(Isolate* isolate, explicit CompileTask(Isolate* isolate,
OptimizingCompileDispatcher* dispatcher) OptimizingCompileDispatcher* dispatcher)
: CancelableTask(isolate), isolate_(isolate), dispatcher_(dispatcher) { : CancelableTask(isolate),
isolate_(isolate),
worker_thread_runtime_call_stats_(
isolate->counters()->worker_thread_runtime_call_stats()),
dispatcher_(dispatcher) {
base::LockGuard<base::Mutex> lock_guard(&dispatcher_->ref_count_mutex_); base::LockGuard<base::Mutex> lock_guard(&dispatcher_->ref_count_mutex_);
++dispatcher_->ref_count_; ++dispatcher_->ref_count_;
} }
@ -56,8 +60,13 @@ class OptimizingCompileDispatcher::CompileTask : public CancelableTask {
DisallowHandleDereference no_deref; DisallowHandleDereference no_deref;
{ {
TimerEventScope<TimerEventRecompileConcurrent> timer(isolate_); WorkerThreadRuntimeCallStatsScope runtime_call_stats_scope(
worker_thread_runtime_call_stats_);
RuntimeCallTimerScope runtimeTimer(
runtime_call_stats_scope.Get(),
RuntimeCallCounterId::kRecompileConcurrent);
TimerEventScope<TimerEventRecompileConcurrent> timer(isolate_);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.RecompileConcurrent"); "V8.RecompileConcurrent");
@ -77,6 +86,7 @@ class OptimizingCompileDispatcher::CompileTask : public CancelableTask {
} }
Isolate* isolate_; Isolate* isolate_;
WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats_;
OptimizingCompileDispatcher* dispatcher_; OptimizingCompileDispatcher* dispatcher_;
DISALLOW_COPY_AND_ASSIGN(CompileTask); DISALLOW_COPY_AND_ASSIGN(CompileTask);

View File

@ -1507,6 +1507,7 @@ code is governed by a BSD-style license that can be found in the LICENSE file.
this.groups = [ this.groups = [
this.total, this.total,
Group.groups.get('ic').entry(), Group.groups.get('ic').entry(),
Group.groups.get('optimize-background').entry(),
Group.groups.get('optimize').entry(), Group.groups.get('optimize').entry(),
Group.groups.get('compile-background').entry(), Group.groups.get('compile-background').entry(),
Group.groups.get('compile').entry(), Group.groups.get('compile').entry(),
@ -1715,14 +1716,16 @@ code is governed by a BSD-style license that can be found in the LICENSE file.
} }
Group.add('total', new Group('Total', /.*Total.*/, '#BBB')); Group.add('total', new Group('Total', /.*Total.*/, '#BBB'));
Group.add('ic', new Group('IC', /.*IC_.*/, "#3366CC")); Group.add('ic', new Group('IC', /.*IC_.*/, "#3366CC"));
Group.add('optimize-background', new Group('Optimize-Background',
/(.*OptimizeConcurrent.*)|RecompileConcurrent.*/, "#702000"));
Group.add('optimize', new Group('Optimize', Group.add('optimize', new Group('Optimize',
/StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*/, "#DC3912")); /StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*/, "#DC3912"));
Group.add('compile-background', new Group('Compile-Background', Group.add('compile-background', new Group('Compile-Background',
/(.*CompileBackground.*)/, "#b9a720")); /(.*CompileBackground.*)/, "#b08000"));
Group.add('compile', new Group('Compile', Group.add('compile', new Group('Compile',
/(^Compile.*)|(.*_Compile.*)/, "#FFAA00")); /(^Compile.*)|(.*_Compile.*)/, "#FFAA00"));
Group.add('parse-background', Group.add('parse-background',
new Group('Parse-Background', /.*ParseBackground.*/, "#af744d")); new Group('Parse-Background', /.*ParseBackground.*/, "#c05000"));
Group.add('parse', new Group('Parse', /.*Parse.*/, "#FF6600")); Group.add('parse', new Group('Parse', /.*Parse.*/, "#FF6600"));
Group.add('callback', new Group('Blink C++', /.*Callback.*/, "#109618")); Group.add('callback', new Group('Blink C++', /.*Callback.*/, "#109618"));
Group.add('api', new Group('API', /.*API.*/, "#990099")); Group.add('api', new Group('API', /.*API.*/, "#990099"));

View File

@ -180,7 +180,7 @@ def run_site(site, domain, args, timeout=None):
user_data_dir = args.user_data_dir user_data_dir = args.user_data_dir
else: else:
user_data_dir = tempfile.mkdtemp(prefix="chr_") user_data_dir = tempfile.mkdtemp(prefix="chr_")
js_flags = "--runtime-call-stats --noconcurrent-recompilation" js_flags = "--runtime-call-stats"
if args.replay_wpr: js_flags += " --allow-natives-syntax" if args.replay_wpr: js_flags += " --allow-natives-syntax"
if args.js_flags: js_flags += " " + args.js_flags if args.js_flags: js_flags += " " + args.js_flags
chrome_flags = get_chrome_flags(js_flags, user_data_dir) chrome_flags = get_chrome_flags(js_flags, user_data_dir)
@ -240,6 +240,8 @@ def read_sites_file(args):
if item['timeout'] > args.timeout: item['timeout'] = args.timeout if item['timeout'] > args.timeout: item['timeout'] = args.timeout
sites.append(item) sites.append(item)
except ValueError: except ValueError:
args.error("Warning: Could not read sites file as JSON, falling back to "
"primitive file")
with open(args.sites_file, "rt") as f: with open(args.sites_file, "rt") as f:
for line in f: for line in f:
line = line.strip() line = line.strip()
@ -349,11 +351,22 @@ def statistics(data):
'stddev': stddev, 'min': low, 'max': high, 'ci': ci } 'stddev': stddev, 'min': low, 'max': high, 'ci': ci }
def add_category_total(entries, groups, category_prefix):
group_data = { 'time': 0, 'count': 0 }
for group_name, regexp in groups:
if not group_name.startswith('Group-' + category_prefix): continue
group_data['time'] += entries[group_name]['time']
group_data['count'] += entries[group_name]['count']
entries['Group-' + category_prefix + '-Total'] = group_data
def read_stats(path, domain, args): def read_stats(path, domain, args):
groups = []; groups = [];
if args.aggregate: if args.aggregate:
groups = [ groups = [
('Group-IC', re.compile(".*IC_.*")), ('Group-IC', re.compile(".*IC_.*")),
('Group-OptimizeBackground',
re.compile(".*OptimizeConcurrent.*|RecompileConcurrent.*")),
('Group-Optimize', ('Group-Optimize',
re.compile("StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*")), re.compile("StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*")),
('Group-CompileBackground', re.compile("(.*CompileBackground.*)")), ('Group-CompileBackground', re.compile("(.*CompileBackground.*)")),
@ -405,20 +418,10 @@ def read_stats(path, domain, args):
group_data['time'] += entries[group_name]['time'] group_data['time'] += entries[group_name]['time']
group_data['count'] += entries[group_name]['count'] group_data['count'] += entries[group_name]['count']
entries['Group-Total-V8'] = group_data entries['Group-Total-V8'] = group_data
# Calculate the Parse-Total group # Calculate the Parse-Total, Compile-Total and Optimize-Total groups
group_data = { 'time': 0, 'count': 0 } add_category_total(entries, groups, 'Parse')
for group_name, regexp in groups: add_category_total(entries, groups, 'Compile')
if not group_name.startswith('Group-Parse'): continue add_category_total(entries, groups, 'Optimize')
group_data['time'] += entries[group_name]['time']
group_data['count'] += entries[group_name]['count']
entries['Group-Parse-Total'] = group_data
# Calculate the Compile-Total group
group_data = { 'time': 0, 'count': 0 }
for group_name, regexp in groups:
if not group_name.startswith('Group-Compile'): continue
group_data['time'] += entries[group_name]['time']
group_data['count'] += entries[group_name]['count']
entries['Group-Compile-Total'] = group_data
# Append the sums as single entries to domain. # Append the sums as single entries to domain.
for key in entries: for key in entries:
if key not in domain: domain[key] = { 'time_list': [], 'count_list': [] } if key not in domain: domain[key] = { 'time_list': [], 'count_list': [] }
@ -651,7 +654,7 @@ def main():
"-l", "--log-stderr", type=str, metavar="<path>", "-l", "--log-stderr", type=str, metavar="<path>",
help="specify where chrome's stderr should go (default: /dev/null)") help="specify where chrome's stderr should go (default: /dev/null)")
subparser.add_argument( subparser.add_argument(
"sites", type=str, metavar="<URL>", nargs="*", "--sites", type=str, metavar="<URL>", nargs="*",
help="specify benchmark website") help="specify benchmark website")
add_replay_args(subparsers["run"]) add_replay_args(subparsers["run"])