Split experimental profiler flags
Review URL: https://chromiumcodereview.appspot.com/9374015

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10660 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
commit de510c3a58 (parent 1588f7224e)
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -114,7 +114,7 @@ void CompilationInfo::DisableOptimization() {
 // profiler, so they trigger their own optimization when they're called
 // for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
 bool CompilationInfo::ShouldSelfOptimize() {
-  return FLAG_counting_profiler &&
+  return FLAG_self_optimization &&
       FLAG_crankshaft &&
       !Serializer::enabled() &&
       !function()->flags()->Contains(kDontSelfOptimize) &&
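Not part of the patch, but useful context for the hunk above: the comment explains why a dedicated FLAG_self_optimization exists — small "primitive" functions rarely show up in stack samples, so when the flag is on they count their own calls and request optimization themselves. The standalone C++ sketch below only illustrates that idea; the counter plumbing and the threshold value are assumptions, and only the name kCallsUntilPrimitiveOptimization comes from the diff.

#include <cstdio>

// Assumed value for illustration; the real constant lives on SharedFunctionInfo.
static const int kCallsUntilPrimitiveOptimization = 200;

struct PrimitiveFunction {
  int calls = 0;
  bool optimized = false;

  void Invoke() {
    // Each call bumps a per-function counter; at the threshold the function
    // asks to be optimized itself instead of waiting to be sampled.
    if (!optimized && ++calls >= kCallsUntilPrimitiveOptimization) {
      optimized = true;
      std::printf("self-optimizing after %d calls\n", calls);
    }
  }
};

int main() {
  PrimitiveFunction f;
  for (int i = 0; i < 300; ++i) f.Invoke();
  return 0;
}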
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -165,8 +165,14 @@ DEFINE_int(stress_runs, 0, "number of stress runs")
 DEFINE_bool(optimize_closures, true, "optimize closures")
 DEFINE_int(loop_weight, 1, "loop weight for representation inference")
 
-// Count-based optimization decisions.
-DEFINE_bool(counting_profiler, false, "use experimental counter-based profiler")
+// Experimental profiler changes.
+DEFINE_bool(experimental_profiler, false, "enable all profiler experiments")
+DEFINE_bool(watch_ic_patching, false, "profiler considers IC stability")
+DEFINE_bool(self_optimization, false,
+            "primitive functions trigger their own optimization")
+
+DEFINE_implication(experimental_profiler, watch_ic_patching)
+DEFINE_implication(experimental_profiler, self_optimization)
 
 // assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
 DEFINE_bool(debug_code, false,
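Not part of the patch, but it helps when reading the hunk above: DEFINE_implication ties the new umbrella flag to the two concrete experiments, so --experimental_profiler turns on both --watch_ic_patching and --self_optimization, while each sub-flag can still be toggled on its own. The standalone sketch below shows the intended effect in plain C++; the EnforceFlagImplications plumbing is an assumption, and only the three flag names come from the diff.

#include <cstdio>

static bool FLAG_experimental_profiler = false;
static bool FLAG_watch_ic_patching = false;
static bool FLAG_self_optimization = false;

// Assumed to run once after command-line parsing, mirroring what the
// DEFINE_implication(whenflag, thenflag) entries are meant to achieve.
static void EnforceFlagImplications() {
  if (FLAG_experimental_profiler) FLAG_watch_ic_patching = true;
  if (FLAG_experimental_profiler) FLAG_self_optimization = true;
}

int main() {
  FLAG_experimental_profiler = true;  // e.g. passing --experimental_profiler
  EnforceFlagImplications();
  std::printf("watch_ic_patching=%d self_optimization=%d\n",
              FLAG_watch_ic_patching, FLAG_self_optimization);
  return 0;
}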
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -1201,7 +1201,7 @@ void Heap::Scavenge() {
   promotion_queue_.Destroy();
 
   LiveObjectList::UpdateReferencesForScavengeGC();
-  if (!FLAG_counting_profiler) {
+  if (!FLAG_watch_ic_patching) {
     isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
   }
   incremental_marking()->UpdateMarkingDequeAfterScavenge();
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -293,7 +293,7 @@ Failure* IC::ReferenceError(const char* type, Handle<String> name) {
 
 
 void IC::PostPatching() {
-  if (FLAG_counting_profiler) {
+  if (FLAG_watch_ic_patching) {
     Isolate::Current()->runtime_profiler()->NotifyICChanged();
     // We do not want to optimize until the ICs have settled down,
     // so when they are patched, we postpone optimization for the
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -2373,7 +2373,7 @@ void MarkCompactCollector::AfterMarking() {
     code_flusher_->ProcessCandidates();
   }
 
-  if (!FLAG_counting_profiler) {
+  if (!FLAG_watch_ic_patching) {
     // Clean up dead objects from the runtime profiler.
     heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
   }
@@ -3383,7 +3383,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
   heap_->UpdateReferencesInExternalStringTable(
       &UpdateReferenceInExternalStringTableEntry);
 
-  if (!FLAG_counting_profiler) {
+  if (!FLAG_watch_ic_patching) {
     // Update JSFunction pointers from the runtime profiler.
     heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
         &updating_visitor);
--- a/src/runtime-profiler.cc
+++ b/src/runtime-profiler.cc
@@ -204,7 +204,7 @@ void RuntimeProfiler::OptimizeNow() {
     JavaScriptFrame* frame = it.frame();
     JSFunction* function = JSFunction::cast(frame->function());
 
-    if (!FLAG_counting_profiler) {
+    if (!FLAG_watch_ic_patching) {
       // Adjust threshold each time we have processed
      // a certain number of ticks.
      if (sampler_ticks_until_threshold_adjustment_ > 0) {
@@ -232,7 +232,7 @@ void RuntimeProfiler::OptimizeNow() {
     // Do not record non-optimizable functions.
     if (!function->IsOptimizable()) continue;
 
-    if (FLAG_counting_profiler) {
+    if (FLAG_watch_ic_patching) {
       int ticks = function->shared()->profiler_ticks();
 
       if (ticks >= kProfilerTicksBeforeOptimization) {
@@ -270,7 +270,7 @@ void RuntimeProfiler::OptimizeNow() {
       }
     }
   }
-  if (FLAG_counting_profiler) {
+  if (FLAG_watch_ic_patching) {
    any_ic_changed_ = false;
    code_generated_ = false;
  } else {  // !FLAG_counting_profiler
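Not part of the patch: taken together, the OptimizeNow hunks show that FLAG_watch_ic_patching now selects between two heuristics — the new counter-based decision (profiler_ticks against kProfilerTicksBeforeOptimization, with IC-patching activity resetting the any_ic_changed_/code_generated_ bookkeeping) and the old sampler-based one with its adaptive threshold. The standalone sketch below condenses that branch structure into plain C++; the threshold values and the shape of the old path are assumptions for illustration.

#include <cstdio>

static bool FLAG_watch_ic_patching = true;
static const int kProfilerTicksBeforeOptimization = 2;  // assumed value

struct CandidateFunction {
  bool is_optimizable;
  int profiler_ticks;   // ticks attributed to this function
  int sample_weight;    // stand-in for the old sampler bookkeeping
};

static bool ShouldOptimize(const CandidateFunction& f, int sampler_threshold) {
  if (!f.is_optimizable) return false;  // "Do not record non-optimizable functions."
  if (FLAG_watch_ic_patching) {
    // New counter-based path: enough ticks accumulated once the ICs settled.
    return f.profiler_ticks >= kProfilerTicksBeforeOptimization;
  }
  // Old sampler-based path: weight compared against an adaptive threshold.
  return f.sample_weight >= sampler_threshold;
}

int main() {
  CandidateFunction f = {true, 3, 0};
  std::printf("optimize: %d\n", ShouldOptimize(f, 10));
  return 0;
}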
@@ -291,7 +291,7 @@ void RuntimeProfiler::NotifyTick() {
 
 void RuntimeProfiler::SetUp() {
   ASSERT(has_been_globally_set_up_);
-  if (!FLAG_counting_profiler) {
+  if (!FLAG_watch_ic_patching) {
     ClearSampleBuffer();
   }
   // If the ticker hasn't already started, make sure to do so to get
@@ -301,7 +301,7 @@ void RuntimeProfiler::SetUp() {
 
 
 void RuntimeProfiler::Reset() {
-  if (FLAG_counting_profiler) {
+  if (FLAG_watch_ic_patching) {
    total_code_generated_ = 0;
  } else {  // !FLAG_counting_profiler
   sampler_threshold_ = kSamplerThresholdInit;
--- a/src/runtime-profiler.h
+++ b/src/runtime-profiler.h
@@ -64,7 +64,7 @@ class RuntimeProfiler {
   void NotifyICChanged() { any_ic_changed_ = true; }
 
   void NotifyCodeGenerated(int generated_code_size) {
-    if (FLAG_counting_profiler) {
+    if (FLAG_watch_ic_patching) {
      code_generated_ = true;
      total_code_generated_ += generated_code_size;
    }