diff --git a/src/mips/deoptimizer-mips.cc b/src/mips/deoptimizer-mips.cc
index 611fbaaf96..51c2e46778 100644
--- a/src/mips/deoptimizer-mips.cc
+++ b/src/mips/deoptimizer-mips.cc
@@ -119,7 +119,7 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
   const int kInstrSize = Assembler::kInstrSize;
   // This structure comes from FullCodeGenerator::EmitStackCheck.
   // The call of the stack guard check has the following form:
-  // sltu at, sp, t0
+  // sltu at, sp, t0 / slt at, a3, zero_reg (in case of count based interrupts)
   // beq at, zero_reg, ok
   // lui t9, upper
   // ori t9, lower
@@ -167,7 +167,11 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
 
   // Restore the sltu instruction so beq can be taken again.
   CodePatcher patcher(pc_after - 6 * kInstrSize, 1);
-  patcher.masm()->sltu(at, sp, t0);
+  if (FLAG_count_based_interrupts) {
+    patcher.masm()->slt(at, a3, zero_reg);
+  } else {
+    patcher.masm()->sltu(at, sp, t0);
+  }
 
   // Replace the on-stack replacement address in the load-immediate (lui/ori
   // pair) with the entry address of the normal stack-check code.
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index 3df2fc64ad..657bee8657 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -42,6 +42,7 @@
 #include "compiler.h"
 #include "debug.h"
 #include "full-codegen.h"
+#include "isolate-inl.h"
 #include "parser.h"
 #include "scopes.h"
 #include "stub-cache.h"
@@ -119,7 +120,9 @@ class JumpPatchSite BASE_EMBEDDED {
 };
 
 
+// TODO(jkummerow): Obsolete as soon as x64 is updated. Remove.
 int FullCodeGenerator::self_optimization_header_size() {
+  UNREACHABLE();
   return 10 * Instruction::kInstrSize;
 }
 
@@ -142,32 +145,11 @@ void FullCodeGenerator::Generate() {
   CompilationInfo* info = info_;
   handler_table_ =
       isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
+  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
+      Handle<Object>(Smi::FromInt(FLAG_interrupt_budget)));
   SetFunctionPosition(function());
   Comment cmnt(masm_, "[ function compiled by full code generator");
 
-  // We can optionally optimize based on counters rather than statistical
-  // sampling.
-  if (info->ShouldSelfOptimize()) {
-    if (FLAG_trace_opt_verbose) {
-      PrintF("[adding self-optimization header to %s]\n",
-             *info->function()->debug_name()->ToCString());
-    }
-    has_self_optimization_header_ = true;
-    MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
-        Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
-    JSGlobalPropertyCell* cell;
-    if (maybe_cell->To(&cell)) {
-      __ li(a2, Handle<JSGlobalPropertyCell>(cell));
-      __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
-      __ Subu(a3, a3, Operand(Smi::FromInt(1)));
-      __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
-      Handle<Code> compile_stub(
-          isolate()->builtins()->builtin(Builtins::kLazyRecompile));
-      __ Jump(compile_stub, RelocInfo::CODE_TARGET, eq, a3, Operand(zero_reg));
-      ASSERT_EQ(masm_->pc_offset(), self_optimization_header_size());
-    }
-  }
-
 #ifdef DEBUG
   if (strlen(FLAG_stop_at) > 0 &&
       info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -341,6 +323,34 @@ void FullCodeGenerator::ClearAccumulator() {
 }
 
 
+void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
+  __ li(a2, Operand(profiling_counter_));
+  __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
+  __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+}
+
+
+void FullCodeGenerator::EmitProfilingCounterReset() {
+  int reset_value = FLAG_interrupt_budget;
+  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
+    // Self-optimization is a one-off thing: if it fails, don't try again.
+    reset_value = Smi::kMaxValue;
+  }
+  if (isolate()->IsDebuggerActive()) {
+    // Detect debug break requests as soon as possible.
+    reset_value = 10;
+  }
+  __ li(a2, Operand(profiling_counter_));
+  __ li(a3, Operand(Smi::FromInt(reset_value)));
+  __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+}
+
+
+static const int kMaxBackEdgeWeight = 127;
+static const int kBackEdgeDistanceDivisor = 142;
+
+
 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
                                        Label* back_edge_target) {
   // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
@@ -351,16 +361,35 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
   Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
   Comment cmnt(masm_, "[ Stack check");
   Label ok;
-  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
-  __ sltu(at, sp, t0);
-  __ beq(at, zero_reg, &ok);
-  // CallStub will emit a li t9, ... first, so it is safe to use the delay slot.
-  StackCheckStub stub;
-  __ CallStub(&stub);
+  if (FLAG_count_based_interrupts) {
+    int weight = 1;
+    if (FLAG_weighted_back_edges) {
+      ASSERT(back_edge_target->is_bound());
+      int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
+      weight = Min(kMaxBackEdgeWeight,
+                   Max(1, distance / kBackEdgeDistanceDivisor));
+    }
+    EmitProfilingCounterDecrement(weight);
+    __ slt(at, a3, zero_reg);
+    __ beq(at, zero_reg, &ok);
+    // CallStub will emit a li t9 first, so it is safe to use the delay slot.
+    InterruptStub stub;
+    __ CallStub(&stub);
+  } else {
+    __ LoadRoot(t0, Heap::kStackLimitRootIndex);
+    __ sltu(at, sp, t0);
+    __ beq(at, zero_reg, &ok);
+    // CallStub will emit a li t9 first, so it is safe to use the delay slot.
+    StackCheckStub stub;
+    __ CallStub(&stub);
+  }
   // Record a mapping of this PC offset to the OSR id. This is used to find
   // the AST id from the unoptimized code in order to use it as a key into
   // the deoptimization input data found in the optimized code.
   RecordStackCheck(stmt->OsrEntryId());
+  if (FLAG_count_based_interrupts) {
+    EmitProfilingCounterReset();
+  }
   __ bind(&ok);
 
   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
@@ -383,6 +412,32 @@ void FullCodeGenerator::EmitReturnSequence() {
       __ push(v0);
       __ CallRuntime(Runtime::kTraceExit, 1);
     }
+    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
+      // Pretend that the exit is a backwards jump to the entry.
+      int weight = 1;
+      if (info_->ShouldSelfOptimize()) {
+        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+      } else if (FLAG_weighted_back_edges) {
+        int distance = masm_->pc_offset();
+        weight = Min(kMaxBackEdgeWeight,
+                     Max(1, distance / kBackEdgeDistanceDivisor));
+      }
+      EmitProfilingCounterDecrement(weight);
+      Label ok;
+      __ Branch(&ok, ge, a3, Operand(zero_reg));
+      __ push(v0);
+      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
+        __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+        __ push(a2);
+        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
+      } else {
+        InterruptStub stub;
+        __ CallStub(&stub);
+      }
+      __ pop(v0);
+      EmitProfilingCounterReset();
+      __ bind(&ok);
+    }
 
 #ifdef DEBUG
     // Add a label for checking the size of the code used for returning.
@@ -902,7 +957,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
     // Record position before stub call for type feedback.
     SetSourcePosition(clause->position());
     Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
-    __ Call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
+    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
     patch_site.EmitPatchInfo();
 
     __ Branch(&next_test, ne, v0, Operand(zero_reg));
@@ -1195,7 +1250,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
       ? RelocInfo::CODE_TARGET
       : RelocInfo::CODE_TARGET_CONTEXT;
   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-  __ Call(ic, mode);
+  CallIC(ic, mode);
 }
 
 
@@ -1279,7 +1334,7 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
       __ lw(a0, GlobalObjectOperand());
       __ li(a2, Operand(var->name()));
       Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-      __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
       context()->Plug(v0);
       break;
     }
@@ -1493,7 +1548,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
           Handle<Code> ic = is_classic_mode()
               ? isolate()->builtins()->StoreIC_Initialize()
              : isolate()->builtins()->StoreIC_Initialize_Strict();
-          __ Call(ic, RelocInfo::CODE_TARGET, key->id());
+          CallIC(ic, RelocInfo::CODE_TARGET, key->id());
           PrepareForBailoutForId(key->id(), NO_REGISTERS);
         } else {
           VisitForEffect(value);
@@ -1766,7 +1821,7 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   __ li(a2, Operand(key->handle()));
   // Call load IC. It has arguments receiver and property name a0 and a2.
   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-  __ Call(ic, RelocInfo::CODE_TARGET, prop->id());
+  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
 }
 
 
@@ -1775,7 +1830,7 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   __ mov(a0, result_register());
   // Call keyed load IC. It has arguments key and receiver in a0 and a1.
   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-  __ Call(ic, RelocInfo::CODE_TARGET, prop->id());
+  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
 }
 
 
@@ -1803,7 +1858,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
 
   __ bind(&stub_call);
   BinaryOpStub stub(op, mode);
-  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
   patch_site.EmitPatchInfo();
   __ jmp(&done);
 
@@ -1886,7 +1941,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
   __ pop(a1);
   BinaryOpStub stub(op, mode);
   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
-  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
   patch_site.EmitPatchInfo();
   context()->Plug(v0);
 }
@@ -1927,7 +1982,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
       Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
-      __ Call(ic);
+      CallIC(ic);
       break;
     }
     case KEYED_PROPERTY: {
@@ -1940,7 +1995,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
-      __ Call(ic);
+      CallIC(ic);
       break;
     }
   }
@@ -1958,7 +2013,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
     Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
-    __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
 
   } else if (op == Token::INIT_CONST) {
     // Const initializers need a write barrier.
@@ -2077,7 +2132,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
   Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
-  __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
 
   // If the assignment ends an initialization block, revert to fast case.
   if (expr->ends_initialization_block()) {
@@ -2129,7 +2184,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
-  __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
 
   // If the assignment ends an initialization block, revert to fast case.
   if (expr->ends_initialization_block()) {
@@ -2164,6 +2219,14 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
 }
 
 
+void FullCodeGenerator::CallIC(Handle<Code> code,
+                               RelocInfo::Mode rmode,
+                               unsigned ast_id) {
+  ic_total_count_++;
+  __ Call(code, rmode, ast_id);
+}
+
+
 void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                        Handle<Object> name,
                                        RelocInfo::Mode mode) {
@@ -2181,7 +2244,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
     // Call the IC initialization code.
     Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
-    __ Call(ic, mode, expr->id());
+    CallIC(ic, mode, expr->id());
     RecordJSReturnSite(expr);
     // Restore context register.
     __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2214,7 +2277,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
   Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
   __ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize));  // Key.
-  __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
   RecordJSReturnSite(expr);
   // Restore context register.
   __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -3828,7 +3891,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
     RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
     Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
-    __ Call(ic, mode, expr->id());
+    CallIC(ic, mode, expr->id());
     // Restore context register.
     __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   } else {
@@ -3984,7 +4047,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
   VisitForAccumulatorValue(expr->expression());
   SetSourcePosition(expr->position());
   __ mov(a0, result_register());
-  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
   context()->Plug(v0);
 }
 
@@ -4095,7 +4158,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   SetSourcePosition(expr->position());
 
   BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
-  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
   patch_site.EmitPatchInfo();
   __ bind(&done);
 
@@ -4128,7 +4191,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
       Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
-      __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -4146,7 +4209,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
-      __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -4172,7 +4235,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
     // Use a regular load, not a contextual load, to avoid a reference
     // error.
-    __ Call(ic);
+    CallIC(ic);
     PrepareForBailout(expr, TOS_REG);
     context()->Plug(v0);
   } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
@@ -4350,7 +4413,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
       // Record position and call the compare IC.
       SetSourcePosition(expr->position());
       Handle<Code> ic = CompareIC::GetUninitialized(op);
-      __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
       patch_site.EmitPatchInfo();
       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
       Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
diff --git a/src/runtime-profiler.cc b/src/runtime-profiler.cc
index 13cdfa09e6..04aa02edb3 100644
--- a/src/runtime-profiler.cc
+++ b/src/runtime-profiler.cc
@@ -175,7 +175,9 @@ void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
   // prepared to generate it, but we don't expect to have to.
   bool found_code = false;
   Code* stack_check_code = NULL;
-#if defined(V8_TARGET_ARCH_IA32) || defined(V8_TARGET_ARCH_ARM)
+#if defined(V8_TARGET_ARCH_IA32) || \
+    defined(V8_TARGET_ARCH_ARM) || \
+    defined(V8_TARGET_ARCH_MIPS)
   if (FLAG_count_based_interrupts) {
     InterruptStub interrupt_stub;
     found_code = interrupt_stub.FindCodeInCache(&stack_check_code);
@@ -351,7 +353,9 @@ void RuntimeProfiler::OptimizeNow() {
 
 
 void RuntimeProfiler::NotifyTick() {
-#if defined(V8_TARGET_ARCH_IA32) || defined(V8_TARGET_ARCH_ARM)
+#if defined(V8_TARGET_ARCH_IA32) || \
+    defined(V8_TARGET_ARCH_ARM) || \
+    defined(V8_TARGET_ARCH_MIPS)
   if (FLAG_count_based_interrupts) return;
 #endif
   isolate_->stack_guard()->RequestRuntimeProfilerTick();
diff --git a/src/runtime.cc b/src/runtime.cc
index a12dbc7f6c..fec1460e8f 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -8358,7 +8358,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
     PrintF("]\n");
   }
   Handle<Code> check_code;
-#if defined(V8_TARGET_ARCH_IA32) || defined(V8_TARGET_ARCH_ARM)
+#if defined(V8_TARGET_ARCH_IA32) || \
+    defined(V8_TARGET_ARCH_ARM) || \
+    defined(V8_TARGET_ARCH_MIPS)
   if (FLAG_count_based_interrupts) {
     InterruptStub interrupt_stub;
     check_code = interrupt_stub.GetCode();