Profiler experiments: merge self-optimization code into interrupt-at-exit
Review URL: https://chromiumcodereview.appspot.com/9633012

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10991 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 15ffc2e249
commit 1d4221a4d3
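In brief: the dedicated self-optimization header previously emitted at function entry (a per-function counter cell decremented on every call, jumping to the LazyRecompile builtin when it reached zero) is removed. Self-optimizing functions instead reuse the interrupt-at-exit machinery: the shared profiling counter is decremented in the return sequence with a weight derived from self_opt_count, and when the counter is exhausted either the InterruptStub or, under direct_self_opt, Runtime::kOptimizeFunctionOnNextCall is invoked.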
src/compiler.cc

@@ -116,7 +116,6 @@ void CompilationInfo::DisableOptimization() {
 bool CompilationInfo::ShouldSelfOptimize() {
   return FLAG_self_optimization &&
       FLAG_crankshaft &&
-      !Serializer::enabled() &&
       !function()->flags()->Contains(kDontSelfOptimize) &&
       !function()->flags()->Contains(kDontOptimize) &&
       function()->scope()->allows_lazy_recompilation() &&
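Dropping !Serializer::enabled() fits the rest of the change: the counter updates are now funneled through helpers (see src/ia32/full-codegen-ia32.cc below) that always use the snapshot-safe mov ebx, <cell> / FieldOperand form rather than the faster but, per the deleted comment, not snapshot-safe Operand::Cell form, so self-optimization presumably no longer needs to be disabled while building a snapshot.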
src/flag-definitions.h

@@ -180,6 +180,9 @@ DEFINE_bool(watch_ic_patching, false, "profiler considers IC stability")
 DEFINE_int(frame_count, 2, "number of stack frames inspected by the profiler")
 DEFINE_bool(self_optimization, false,
             "primitive functions trigger their own optimization")
+DEFINE_bool(direct_self_opt, false,
+            "call recompile stub directly when self-optimizing")
+DEFINE_bool(retry_self_opt, true, "re-try self-optimization if it failed")
 DEFINE_bool(count_based_interrupts, false,
             "trigger profiler ticks based on counting instead of timing")
 DEFINE_bool(interrupt_at_exit, false,
@@ -190,6 +193,7 @@ DEFINE_int(interrupt_budget, 10000,
            "execution budget before interrupt is triggered")
 DEFINE_int(type_info_threshold, 0,
            "percentage of ICs that must have type info to allow optimization")
+DEFINE_int(self_opt_count, 170, "call count before self-optimization")
 
 DEFINE_implication(experimental_profiler, watch_ic_patching)
 DEFINE_implication(experimental_profiler, self_optimization)
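For context, the two DEFINE_implication lines mean that enabling experimental_profiler also enables watch_ic_patching and self_optimization. A minimal sketch of that implication semantics in plain C++ (not V8's actual flag machinery):

#include <cstdio>

// Stand-ins for the flags declared above.
static bool FLAG_experimental_profiler = false;
static bool FLAG_watch_ic_patching = false;
static bool FLAG_self_optimization = false;

// Rough equivalent of the two DEFINE_implication lines: enabling the
// antecedent flag forces the consequent flags on.
static void ComputeFlagImplications() {
  if (FLAG_experimental_profiler) FLAG_watch_ic_patching = true;
  if (FLAG_experimental_profiler) FLAG_self_optimization = true;
}

int main() {
  FLAG_experimental_profiler = true;  // e.g. parsed from the command line
  ComputeFlagImplications();
  std::printf("watch_ic_patching=%d self_optimization=%d\n",
              FLAG_watch_ic_patching, FLAG_self_optimization);  // both 1
  return 0;
}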
src/full-codegen.h

@@ -437,6 +437,9 @@ class FullCodeGenerator: public AstVisitor {
   // the offset of the start of the table.
   unsigned EmitStackCheckTable();
 
+  void EmitProfilingCounterDecrement(int delta);
+  void EmitProfilingCounterReset();
+
   // Platform-specific return sequence
   void EmitReturnSequence();
 
src/ia32/deoptimizer-ia32.cc

@@ -206,7 +206,7 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
 
 
 static const byte kJnsInstruction = 0x79;
-static const byte kJnsOffset = 0x11;
+static const byte kJnsOffset = 0x13;
 static const byte kJaeInstruction = 0x73;
 static const byte kJaeOffset = 0x07;
 static const byte kCallInstruction = 0xe8;
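The kJnsOffset bump from 0x11 to 0x13 matches the EmitStackCheck change below: the counter reset after the interrupt call is now emitted as mov ebx, <cell>; mov [ebx + offset], <value> (12 bytes on ia32) instead of mov [cell], <value> (10 bytes), so the jns that skips the call-and-reset sequence has to jump two bytes further.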
@@ -219,8 +219,8 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
                                         Code* check_code,
                                         Code* replacement_code) {
   Address call_target_address = pc_after - kIntSize;
-  ASSERT(check_code->entry() ==
-         Assembler::target_address_at(call_target_address));
+  ASSERT_EQ(check_code->entry(),
+            Assembler::target_address_at(call_target_address));
   // The stack check code matches the pattern:
   //
   //     cmp esp, <limit>
@@ -239,13 +239,13 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
   //     ok:
 
   if (FLAG_count_based_interrupts) {
-    ASSERT(*(call_target_address - 3) == kJnsInstruction);
-    ASSERT(*(call_target_address - 2) == kJnsOffset);
+    ASSERT_EQ(*(call_target_address - 3), kJnsInstruction);
+    ASSERT_EQ(*(call_target_address - 2), kJnsOffset);
   } else {
-    ASSERT(*(call_target_address - 3) == kJaeInstruction);
-    ASSERT(*(call_target_address - 2) == kJaeOffset);
+    ASSERT_EQ(*(call_target_address - 3), kJaeInstruction);
+    ASSERT_EQ(*(call_target_address - 2), kJaeOffset);
   }
-  ASSERT(*(call_target_address - 1) == kCallInstruction);
+  ASSERT_EQ(*(call_target_address - 1), kCallInstruction);
   *(call_target_address - 3) = kNopByteOne;
   *(call_target_address - 2) = kNopByteTwo;
   Assembler::set_target_address_at(call_target_address,
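What the patching does: in unoptimized code, the call to the stack-check (or interrupt) stub is guarded by a conditional branch: jns, taken while the decremented counter is still non-negative, under count-based interrupts, or jae, taken while esp is still above the stack limit, under the classic stack check. To push a running function into on-stack replacement, the deoptimizer overwrites the two branch bytes with a two-byte nop so the call is always taken, and retargets the call via Assembler::set_target_address_at; RevertStackCheckCodeAt undoes both. A minimal sketch of the byte-level idea in plain C++ (kNopByteOne/kNopByteTwo are assumed here to be the 0x66 0x90 two-byte nop defined earlier in the real file; this is an illustration, not V8's code):

#include <cassert>
#include <cstdint>

// Constants mirroring the excerpt above.
const uint8_t kJnsInstruction  = 0x79;  // jns <offset>
const uint8_t kJnsOffset       = 0x13;
const uint8_t kCallInstruction = 0xe8;  // call <target>
const uint8_t kNopByteOne      = 0x66;  // assumed: 2-byte nop, 0x66 0x90
const uint8_t kNopByteTwo      = 0x90;

// Patch: nop out the conditional branch so the call is always taken.
// call_target_address points at the call's 4-byte target operand.
void PatchAt(uint8_t* call_target_address) {
  assert(call_target_address[-3] == kJnsInstruction);
  assert(call_target_address[-2] == kJnsOffset);
  assert(call_target_address[-1] == kCallInstruction);
  call_target_address[-3] = kNopByteOne;
  call_target_address[-2] = kNopByteTwo;
}

// Revert: restore the conditional branch over the call.
void RevertAt(uint8_t* call_target_address) {
  assert(call_target_address[-3] == kNopByteOne);
  assert(call_target_address[-2] == kNopByteTwo);
  assert(call_target_address[-1] == kCallInstruction);
  call_target_address[-3] = kJnsInstruction;
  call_target_address[-2] = kJnsOffset;
}

int main() {
  // Fake code buffer: jns +0x13; call rel32 (target bytes zeroed).
  uint8_t code[] = { 0x79, 0x13, 0xe8, 0, 0, 0, 0 };
  uint8_t* call_target = code + 3;  // just past the 0xe8 opcode
  PatchAt(call_target);
  RevertAt(call_target);
  assert(code[0] == kJnsInstruction && code[1] == kJnsOffset);
  return 0;
}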
@@ -261,14 +261,14 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
                                          Code* check_code,
                                          Code* replacement_code) {
   Address call_target_address = pc_after - kIntSize;
-  ASSERT(replacement_code->entry() ==
-         Assembler::target_address_at(call_target_address));
+  ASSERT_EQ(replacement_code->entry(),
+            Assembler::target_address_at(call_target_address));
 
   // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
   // restore the conditional branch.
-  ASSERT(*(call_target_address - 3) == kNopByteOne &&
-         *(call_target_address - 2) == kNopByteTwo &&
-         *(call_target_address - 1) == kCallInstruction);
+  ASSERT_EQ(*(call_target_address - 3), kNopByteOne);
+  ASSERT_EQ(*(call_target_address - 2), kNopByteTwo);
+  ASSERT_EQ(*(call_target_address - 1), kCallInstruction);
   if (FLAG_count_based_interrupts) {
     *(call_target_address - 3) = kJnsInstruction;
     *(call_target_address - 2) = kJnsOffset;
src/ia32/full-codegen-ia32.cc

@@ -127,28 +127,6 @@ void FullCodeGenerator::Generate() {
   SetFunctionPosition(function());
   Comment cmnt(masm_, "[ function compiled by full code generator");
 
-  // We can optionally optimize based on counters rather than statistical
-  // sampling.
-  if (info->ShouldSelfOptimize()) {
-    if (FLAG_trace_opt_verbose) {
-      PrintF("[adding self-optimization header to %s]\n",
-             *info->function()->debug_name()->ToCString());
-    }
-    has_self_optimization_header_ = true;
-    MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
-        Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
-    JSGlobalPropertyCell* cell;
-    if (maybe_cell->To(&cell)) {
-      __ sub(Operand::Cell(Handle<JSGlobalPropertyCell>(cell)),
-             Immediate(Smi::FromInt(1)));
-      Handle<Code> compile_stub(
-          isolate()->builtins()->builtin(Builtins::kLazyRecompile));
-      STATIC_ASSERT(kSmiTag == 0);
-      __ j(zero, compile_stub);
-      ASSERT(masm_->pc_offset() == self_optimization_header_size());
-    }
-  }
-
 #ifdef DEBUG
   if (strlen(FLAG_stop_at) > 0 &&
       info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -330,6 +308,25 @@ void FullCodeGenerator::ClearAccumulator() {
 }
 
 
+void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
+  __ mov(ebx, Immediate(profiling_counter_));
+  __ sub(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
+         Immediate(Smi::FromInt(delta)));
+}
+
+
+void FullCodeGenerator::EmitProfilingCounterReset() {
+  int reset_value = FLAG_interrupt_budget;
+  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
+    // Self-optimization is a one-off thing: if it fails, don't try again.
+    reset_value = Smi::kMaxValue;
+  }
+  __ mov(ebx, Immediate(profiling_counter_));
+  __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
+         Immediate(Smi::FromInt(reset_value)));
+}
+
+
 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
                                        Label* back_edge_target) {
   Comment cmnt(masm_, "[ Stack check");
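The counter lives in a JSGlobalPropertyCell as a Smi. On ia32 a Smi is a 31-bit integer shifted left by one with a zero tag bit (the deleted entry-header code leaned on this via STATIC_ASSERT(kSmiTag == 0)), so tagged values can be added and subtracted directly without untagging, which is why the helpers subtract Immediate(Smi::FromInt(delta)) straight from the raw cell contents. A small standalone illustration (a sketch, not V8's Smi implementation):

#include <cassert>
#include <cstdint>

// Illustrative ia32-style Smi tagging: value << 1, low tag bit == 0.
static int32_t SmiFromInt(int32_t value) { return value << 1; }
static int32_t SmiToInt(int32_t smi) { return smi >> 1; }

int main() {
  int32_t cell = SmiFromInt(10000);       // counter starts at the budget
  cell -= SmiFromInt(127);                // EmitProfilingCounterDecrement(127)
  assert(SmiToInt(cell) == 10000 - 127);  // result is still a valid Smi
  return 0;
}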
@@ -342,15 +339,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
     int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
     weight = Min(127, Max(1, distance / 100));
   }
-  if (Serializer::enabled()) {
-    __ mov(ebx, Immediate(profiling_counter_));
-    __ sub(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
-           Immediate(Smi::FromInt(weight)));
-  } else {
-    // This version is slightly faster, but not snapshot safe.
-    __ sub(Operand::Cell(profiling_counter_),
-           Immediate(Smi::FromInt(weight)));
-  }
+  EmitProfilingCounterDecrement(weight);
   __ j(positive, &ok, Label::kNear);
   InterruptStub stub;
   __ CallStub(&stub);
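The weight heuristic above charges larger loop bodies more budget per back edge: one unit per 100 bytes of code generated since the back-edge target, clamped to [1, 127]. Equivalent standalone arithmetic (illustration only):

#include <algorithm>
#include <cassert>

// Mirrors the heuristic above: weight = Min(127, Max(1, distance / 100)).
int BackEdgeWeight(int distance_in_bytes) {
  return std::min(127, std::max(1, distance_in_bytes / 100));
}

int main() {
  assert(BackEdgeWeight(40) == 1);       // tiny loop body: minimum weight
  assert(BackEdgeWeight(250) == 2);      // one unit per 100 bytes of code
  assert(BackEdgeWeight(50000) == 127);  // huge body: clamped at 127
  return 0;
}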
@@ -379,15 +368,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
   __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
 
   if (FLAG_count_based_interrupts) {
     // Reset the countdown.
-    if (Serializer::enabled()) {
-      __ mov(ebx, Immediate(profiling_counter_));
-      __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
-             Immediate(Smi::FromInt(FLAG_interrupt_budget)));
-    } else {
-      __ mov(Operand::Cell(profiling_counter_),
-             Immediate(Smi::FromInt(FLAG_interrupt_budget)));
-    }
+    EmitProfilingCounterReset();
   }
 
   __ bind(&ok);
@@ -410,37 +391,28 @@ void FullCodeGenerator::EmitReturnSequence() {
       __ push(eax);
       __ CallRuntime(Runtime::kTraceExit, 1);
     }
-    if (FLAG_interrupt_at_exit) {
+    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
       // Pretend that the exit is a backwards jump to the entry.
       int weight = 1;
-      if (FLAG_weighted_back_edges) {
+      if (info_->ShouldSelfOptimize()) {
+        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(127, Max(1, distance / 100));
      }
-      if (Serializer::enabled()) {
-        __ mov(ebx, Immediate(profiling_counter_));
-        __ sub(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
-               Immediate(Smi::FromInt(weight)));
-      } else {
-        // This version is slightly faster, but not snapshot safe.
-        __ sub(Operand::Cell(profiling_counter_),
-               Immediate(Smi::FromInt(weight)));
-      }
+      EmitProfilingCounterDecrement(weight);
       Label ok;
       __ j(positive, &ok, Label::kNear);
       __ push(eax);
-      InterruptStub stub;
-      __ CallStub(&stub);
-      __ pop(eax);
-      // Reset the countdown.
-      if (Serializer::enabled()) {
-        __ mov(ebx, Immediate(profiling_counter_));
-        __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
-               Immediate(Smi::FromInt(FLAG_interrupt_budget)));
+      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
+        __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
       } else {
-        __ mov(Operand::Cell(profiling_counter_),
-               Immediate(Smi::FromInt(FLAG_interrupt_budget)));
+        InterruptStub stub;
+        __ CallStub(&stub);
       }
+      __ pop(eax);
+      EmitProfilingCounterReset();
       __ bind(&ok);
     }
 #ifdef DEBUG
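The arithmetic behind the new weight: a self-optimizing function burns interrupt_budget / self_opt_count units of budget per return, so the counter underflows after roughly self_opt_count calls. A standalone check with the default flag values (a sketch of the intended behavior, not V8 code):

#include <cstdio>

int main() {
  const int interrupt_budget = 10000;  // FLAG_interrupt_budget default
  const int self_opt_count = 170;      // FLAG_self_opt_count default
  const int weight = interrupt_budget / self_opt_count;  // 58 per return

  int counter = interrupt_budget;
  int calls = 0;
  for (;;) {
    ++calls;
    counter -= weight;       // EmitProfilingCounterDecrement(weight)
    if (counter < 0) break;  // jns not taken: interrupt / optimization fires
  }
  // Integer division makes this land slightly above self_opt_count.
  std::printf("triggered after %d calls\n", calls);  // 173
  return 0;
}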