Split experimental profiler flags
Review URL: https://chromiumcodereview.appspot.com/9374015 git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10660 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
parent
1588f7224e
commit
de510c3a58
@ -114,7 +114,7 @@ void CompilationInfo::DisableOptimization() {
|
|||||||
// profiler, so they trigger their own optimization when they're called
|
// profiler, so they trigger their own optimization when they're called
|
||||||
// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
|
// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
|
||||||
bool CompilationInfo::ShouldSelfOptimize() {
|
bool CompilationInfo::ShouldSelfOptimize() {
|
||||||
return FLAG_counting_profiler &&
|
return FLAG_self_optimization &&
|
||||||
FLAG_crankshaft &&
|
FLAG_crankshaft &&
|
||||||
!Serializer::enabled() &&
|
!Serializer::enabled() &&
|
||||||
!function()->flags()->Contains(kDontSelfOptimize) &&
|
!function()->flags()->Contains(kDontSelfOptimize) &&
|
||||||
|
@ -165,8 +165,14 @@ DEFINE_int(stress_runs, 0, "number of stress runs")
|
|||||||
DEFINE_bool(optimize_closures, true, "optimize closures")
|
DEFINE_bool(optimize_closures, true, "optimize closures")
|
||||||
DEFINE_int(loop_weight, 1, "loop weight for representation inference")
|
DEFINE_int(loop_weight, 1, "loop weight for representation inference")
|
||||||
|
|
||||||
// Count-based optimization decisions.
|
// Experimental profiler changes.
|
||||||
DEFINE_bool(counting_profiler, false, "use experimental counter-based profiler")
|
DEFINE_bool(experimental_profiler, false, "enable all profiler experiments")
|
||||||
|
DEFINE_bool(watch_ic_patching, false, "profiler considers IC stability")
|
||||||
|
DEFINE_bool(self_optimization, false,
|
||||||
|
"primitive functions trigger their own optimization")
|
||||||
|
|
||||||
|
DEFINE_implication(experimental_profiler, watch_ic_patching)
|
||||||
|
DEFINE_implication(experimental_profiler, self_optimization)
|
||||||
|
|
||||||
// assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
|
// assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
|
||||||
DEFINE_bool(debug_code, false,
|
DEFINE_bool(debug_code, false,
|
||||||
|
@ -1201,7 +1201,7 @@ void Heap::Scavenge() {
|
|||||||
promotion_queue_.Destroy();
|
promotion_queue_.Destroy();
|
||||||
|
|
||||||
LiveObjectList::UpdateReferencesForScavengeGC();
|
LiveObjectList::UpdateReferencesForScavengeGC();
|
||||||
if (!FLAG_counting_profiler) {
|
if (!FLAG_watch_ic_patching) {
|
||||||
isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
|
isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
|
||||||
}
|
}
|
||||||
incremental_marking()->UpdateMarkingDequeAfterScavenge();
|
incremental_marking()->UpdateMarkingDequeAfterScavenge();
|
||||||
|
@ -293,7 +293,7 @@ Failure* IC::ReferenceError(const char* type, Handle<String> name) {
|
|||||||
|
|
||||||
|
|
||||||
void IC::PostPatching() {
|
void IC::PostPatching() {
|
||||||
if (FLAG_counting_profiler) {
|
if (FLAG_watch_ic_patching) {
|
||||||
Isolate::Current()->runtime_profiler()->NotifyICChanged();
|
Isolate::Current()->runtime_profiler()->NotifyICChanged();
|
||||||
// We do not want to optimize until the ICs have settled down,
|
// We do not want to optimize until the ICs have settled down,
|
||||||
// so when they are patched, we postpone optimization for the
|
// so when they are patched, we postpone optimization for the
|
||||||
|
@ -2373,7 +2373,7 @@ void MarkCompactCollector::AfterMarking() {
|
|||||||
code_flusher_->ProcessCandidates();
|
code_flusher_->ProcessCandidates();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!FLAG_counting_profiler) {
|
if (!FLAG_watch_ic_patching) {
|
||||||
// Clean up dead objects from the runtime profiler.
|
// Clean up dead objects from the runtime profiler.
|
||||||
heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
|
heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
|
||||||
}
|
}
|
||||||
@ -3383,7 +3383,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
|
|||||||
heap_->UpdateReferencesInExternalStringTable(
|
heap_->UpdateReferencesInExternalStringTable(
|
||||||
&UpdateReferenceInExternalStringTableEntry);
|
&UpdateReferenceInExternalStringTableEntry);
|
||||||
|
|
||||||
if (!FLAG_counting_profiler) {
|
if (!FLAG_watch_ic_patching) {
|
||||||
// Update JSFunction pointers from the runtime profiler.
|
// Update JSFunction pointers from the runtime profiler.
|
||||||
heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
|
heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
|
||||||
&updating_visitor);
|
&updating_visitor);
|
||||||
|
@ -204,7 +204,7 @@ void RuntimeProfiler::OptimizeNow() {
|
|||||||
JavaScriptFrame* frame = it.frame();
|
JavaScriptFrame* frame = it.frame();
|
||||||
JSFunction* function = JSFunction::cast(frame->function());
|
JSFunction* function = JSFunction::cast(frame->function());
|
||||||
|
|
||||||
if (!FLAG_counting_profiler) {
|
if (!FLAG_watch_ic_patching) {
|
||||||
// Adjust threshold each time we have processed
|
// Adjust threshold each time we have processed
|
||||||
// a certain number of ticks.
|
// a certain number of ticks.
|
||||||
if (sampler_ticks_until_threshold_adjustment_ > 0) {
|
if (sampler_ticks_until_threshold_adjustment_ > 0) {
|
||||||
@ -232,7 +232,7 @@ void RuntimeProfiler::OptimizeNow() {
|
|||||||
// Do not record non-optimizable functions.
|
// Do not record non-optimizable functions.
|
||||||
if (!function->IsOptimizable()) continue;
|
if (!function->IsOptimizable()) continue;
|
||||||
|
|
||||||
if (FLAG_counting_profiler) {
|
if (FLAG_watch_ic_patching) {
|
||||||
int ticks = function->shared()->profiler_ticks();
|
int ticks = function->shared()->profiler_ticks();
|
||||||
|
|
||||||
if (ticks >= kProfilerTicksBeforeOptimization) {
|
if (ticks >= kProfilerTicksBeforeOptimization) {
|
||||||
@ -270,7 +270,7 @@ void RuntimeProfiler::OptimizeNow() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (FLAG_counting_profiler) {
|
if (FLAG_watch_ic_patching) {
|
||||||
any_ic_changed_ = false;
|
any_ic_changed_ = false;
|
||||||
code_generated_ = false;
|
code_generated_ = false;
|
||||||
} else { // !FLAG_counting_profiler
|
} else { // !FLAG_watch_ic_patching
|
||||||
@ -291,7 +291,7 @@ void RuntimeProfiler::NotifyTick() {
|
|||||||
|
|
||||||
void RuntimeProfiler::SetUp() {
|
void RuntimeProfiler::SetUp() {
|
||||||
ASSERT(has_been_globally_set_up_);
|
ASSERT(has_been_globally_set_up_);
|
||||||
if (!FLAG_counting_profiler) {
|
if (!FLAG_watch_ic_patching) {
|
||||||
ClearSampleBuffer();
|
ClearSampleBuffer();
|
||||||
}
|
}
|
||||||
// If the ticker hasn't already started, make sure to do so to get
|
// If the ticker hasn't already started, make sure to do so to get
|
||||||
@ -301,7 +301,7 @@ void RuntimeProfiler::SetUp() {
|
|||||||
|
|
||||||
|
|
||||||
void RuntimeProfiler::Reset() {
|
void RuntimeProfiler::Reset() {
|
||||||
if (FLAG_counting_profiler) {
|
if (FLAG_watch_ic_patching) {
|
||||||
total_code_generated_ = 0;
|
total_code_generated_ = 0;
|
||||||
} else { // !FLAG_counting_profiler
|
} else { // !FLAG_watch_ic_patching
|
||||||
sampler_threshold_ = kSamplerThresholdInit;
|
sampler_threshold_ = kSamplerThresholdInit;
|
||||||
|
@ -64,7 +64,7 @@ class RuntimeProfiler {
|
|||||||
void NotifyICChanged() { any_ic_changed_ = true; }
|
void NotifyICChanged() { any_ic_changed_ = true; }
|
||||||
|
|
||||||
void NotifyCodeGenerated(int generated_code_size) {
|
void NotifyCodeGenerated(int generated_code_size) {
|
||||||
if (FLAG_counting_profiler) {
|
if (FLAG_watch_ic_patching) {
|
||||||
code_generated_ = true;
|
code_generated_ = true;
|
||||||
total_code_generated_ += generated_code_size;
|
total_code_generated_ += generated_code_size;
|
||||||
}
|
}
|
||||||
|
Loading…
Reference in New Issue
Block a user