Turn interrupt and stack check into builtins.

R=jkummerow@chromium.org
BUG=

Review URL: https://codereview.chromium.org/23480013

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@16444 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
yangguo@chromium.org 2013-08-30 11:24:58 +00:00
parent 9efb5cd23b
commit ecbfcd7e06
20 changed files with 60 additions and 120 deletions
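In outline, every port gets the same treatment. The sketch below is assembled from the hunks that follow (ARM-flavoured call site shown; ia32/x64 use __ call, and the MIPS site keeps its delay-slot comment); it is a condensed summary, not a verbatim excerpt of any single file:

// The per-architecture stubs are deleted; two isolate-wide builtins that
// tail-call into the runtime take their place (see the builtins.cc hunk below).
void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kStackGuard, 0, 1);
}

// Call sites stop instantiating a stub ...
//   StackCheckStub stub;
//   __ CallStub(&stub);
// ... and call the cached builtin code object instead:
__ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);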

View File

@@ -2491,16 +2491,6 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
 }
-void StackCheckStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
-}
-void InterruptStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
-}
 void MathPowStub::Generate(MacroAssembler* masm) {
   const Register base = r1;
   const Register exponent = r2;

View File

@@ -161,14 +161,13 @@ Deoptimizer::InterruptPatchState Deoptimizer::GetInterruptPatchState(
     return PATCHED_FOR_OSR;
   } else {
     // Get the interrupt stub code object to match against from cache.
-    Code* interrupt_code = NULL;
-    InterruptStub stub;
-    if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE();
+    Code* interrupt_builtin =
+        isolate->builtins()->builtin(Builtins::kInterruptCheck);
     ASSERT(Assembler::IsLdrPcImmediateOffset(
         Assembler::instr_at(pc_after - 2 * kInstrSize)));
     ASSERT_EQ(kBranchBeforeInterrupt,
               Memory::int32_at(pc_after - 3 * kInstrSize));
-    ASSERT(reinterpret_cast<uint32_t>(interrupt_code->entry()) ==
+    ASSERT(reinterpret_cast<uint32_t>(interrupt_builtin->entry()) ==
           Memory::uint32_at(interrupt_address_pointer));
     return NOT_PATCHED;
   }

View File

@@ -296,8 +296,7 @@ void FullCodeGenerator::Generate() {
     __ cmp(sp, Operand(ip));
     __ b(hs, &ok);
     PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
-    StackCheckStub stub;
-    __ CallStub(&stub);
+    __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
     __ bind(&ok);
   }
@@ -366,8 +365,7 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   }
   EmitProfilingCounterDecrement(weight);
   __ b(pl, &ok);
-  InterruptStub stub;
-  __ CallStub(&stub);
+  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
   // Record a mapping of this PC offset to the OSR id. This is used to find
   // the AST id from the unoptimized code in order to use it as a key into
@@ -416,8 +414,8 @@ void FullCodeGenerator::EmitReturnSequence() {
       __ push(r2);
       __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
     } else {
-      InterruptStub stub;
-      __ CallStub(&stub);
+      __ Call(isolate()->builtins()->InterruptCheck(),
+              RelocInfo::CODE_TARGET);
     }
     __ pop(r0);
     EmitProfilingCounterReset();

View File

@@ -5643,9 +5643,10 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     __ LoadRoot(ip, Heap::kStackLimitRootIndex);
     __ cmp(sp, Operand(ip));
     __ b(hs, &done);
-    StackCheckStub stub;
     PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
-    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+    CallCode(isolate()->builtins()->StackCheck(),
+             RelocInfo::CODE_TARGET,
+             instr);
     EnsureSpaceForLazyDeopt();
     last_lazy_deopt_pc_ = masm()->pc_offset();
     __ bind(&done);

View File

@@ -1813,6 +1813,16 @@ const char* Builtins::Lookup(byte* pc) {
 }
+void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
+  masm->TailCallRuntime(Runtime::kInterrupt, 0, 1);
+}
+void Builtins::Generate_StackCheck(MacroAssembler* masm) {
+  masm->TailCallRuntime(Runtime::kStackGuard, 0, 1);
+}
 #define DEFINE_BUILTIN_ACCESSOR_C(name, ignore) \
 Handle<Code> Builtins::name() { \
   Code** code_address = \

View File

@@ -211,6 +211,10 @@ enum BuiltinExtraArguments {
   \
   V(OnStackReplacement, BUILTIN, UNINITIALIZED, \
     Code::kNoExtraICState) \
+  V(InterruptCheck, BUILTIN, UNINITIALIZED, \
+    Code::kNoExtraICState) \
+  V(StackCheck, BUILTIN, UNINITIALIZED, \
+    Code::kNoExtraICState) \
   CODE_AGE_LIST_WITH_ARG(DECLARE_CODE_AGE_BUILTIN, V)
 #ifdef ENABLE_DEBUGGER_SUPPORT
@@ -395,6 +399,9 @@ class Builtins {
   static void Generate_StringConstructCode(MacroAssembler* masm);
   static void Generate_OnStackReplacement(MacroAssembler* masm);
+  static void Generate_InterruptCheck(MacroAssembler* masm);
+  static void Generate_StackCheck(MacroAssembler* masm);
 #define DECLARE_CODE_AGE_BUILTIN_GENERATOR(C) \
   static void Generate_Make##C##CodeYoungAgainEvenMarking( \
       MacroAssembler* masm); \
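The new builtins are reached in two ways in this commit; a short summary pulled from the hunks above and below (nothing here is new code, only a regrouping of call forms that appear verbatim in the diff):

// 1. Handle<Code> accessors (DEFINE_BUILTIN_ACCESSOR_C applied to the V(...)
//    entries above), used at the full-codegen and Lithium call sites:
__ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
CallCode(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET, instr);

// 2. Raw Code* lookup by enum value, used by the deoptimizer when matching
//    or restoring patched back-edge call targets:
Code* interrupt_builtin =
    isolate->builtins()->builtin(Builtins::kInterruptCheck);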

View File

@@ -449,30 +449,6 @@ class NopRuntimeCallHelper : public RuntimeCallHelper {
 };
-class StackCheckStub : public PlatformCodeStub {
- public:
-  StackCheckStub() { }
-
-  void Generate(MacroAssembler* masm);
-
- private:
-  Major MajorKey() { return StackCheck; }
-  int MinorKey() { return 0; }
-};
-
-
-class InterruptStub : public PlatformCodeStub {
- public:
-  InterruptStub() { }
-
-  void Generate(MacroAssembler* masm);
-
- private:
-  Major MajorKey() { return Interrupt; }
-  int MinorKey() { return 0; }
-};
 class ToNumberStub: public HydrogenCodeStub {
  public:
  ToNumberStub() { }

View File

@@ -2593,11 +2593,6 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
 void Deoptimizer::PatchInterruptCode(Isolate* isolate,
                                      Code* unoptimized_code) {
   DisallowHeapAllocation no_gc;
-  // Get the interrupt stub code object to match against. We aren't
-  // prepared to generate it, but we don't expect to have to.
-  Code* interrupt_code = NULL;
-  InterruptStub interrupt_stub;
-  CHECK(interrupt_stub.FindCodeInCache(&interrupt_code, isolate));
   Code* replacement_code =
       isolate->builtins()->builtin(Builtins::kOnStackReplacement);
@@ -2628,9 +2623,9 @@ void Deoptimizer::PatchInterruptCode(Isolate* isolate,
 void Deoptimizer::RevertInterruptCode(Isolate* isolate,
                                       Code* unoptimized_code) {
-  InterruptStub interrupt_stub;
-  Code* interrupt_code = *interrupt_stub.GetCode(isolate);
   DisallowHeapAllocation no_gc;
+  Code* interrupt_code =
+      isolate->builtins()->builtin(Builtins::kInterruptCheck);
   // Iterate over the back edge table and revert the patched interrupt calls.
   ASSERT(unoptimized_code->back_edges_patched_for_osr());

View File

@@ -4229,16 +4229,6 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
 }
-void StackCheckStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
-}
-void InterruptStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
-}
 static void GenerateRecordCallTarget(MacroAssembler* masm) {
   // Cache the called function in a global property cell. Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and

View File

@@ -246,10 +246,9 @@ Deoptimizer::InterruptPatchState Deoptimizer::GetInterruptPatchState(
     return PATCHED_FOR_OSR;
   } else {
     // Get the interrupt stub code object to match against from cache.
-    Code* interrupt_code = NULL;
-    InterruptStub stub;
-    if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE();
-    ASSERT_EQ(interrupt_code->entry(),
+    Code* interrupt_builtin =
+        isolate->builtins()->builtin(Builtins::kInterruptCheck);
+    ASSERT_EQ(interrupt_builtin->entry(),
              Assembler::target_address_at(call_target_address));
    ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
    ASSERT_EQ(kJnsOffset, *(call_target_address - 2));

View File

@@ -288,8 +288,7 @@ void FullCodeGenerator::Generate() {
         ExternalReference::address_of_stack_limit(isolate());
     __ cmp(esp, Operand::StaticVariable(stack_limit));
     __ j(above_equal, &ok, Label::kNear);
-    StackCheckStub stub;
-    __ CallStub(&stub);
+    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
     __ bind(&ok);
   }
@@ -347,8 +346,7 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   }
   EmitProfilingCounterDecrement(weight);
   __ j(positive, &ok, Label::kNear);
-  InterruptStub stub;
-  __ CallStub(&stub);
+  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
   // Record a mapping of this PC offset to the OSR id. This is used to find
   // the AST id from the unoptimized code in order to use it as a key into
@@ -395,8 +393,8 @@ void FullCodeGenerator::EmitReturnSequence() {
       __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
       __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
     } else {
-      InterruptStub stub;
-      __ CallStub(&stub);
+      __ call(isolate()->builtins()->InterruptCheck(),
+              RelocInfo::CODE_TARGET);
     }
     __ pop(eax);
     EmitProfilingCounterReset();

View File

@@ -6430,8 +6430,9 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     ASSERT(instr->context()->IsRegister());
     ASSERT(ToRegister(instr->context()).is(esi));
-    StackCheckStub stub;
-    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+    CallCode(isolate()->builtins()->StackCheck(),
+             RelocInfo::CODE_TARGET,
+             instr);
     EnsureSpaceForLazyDeopt();
     __ bind(&done);
     RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);

View File

@@ -2380,16 +2380,6 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
 }
-void StackCheckStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
-}
-void InterruptStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
-}
 void MathPowStub::Generate(MacroAssembler* masm) {
   const Register base = a1;
   const Register exponent = a2;

View File

@@ -149,12 +149,11 @@ Deoptimizer::InterruptPatchState Deoptimizer::GetInterruptPatchState(
     return PATCHED_FOR_OSR;
   } else {
     // Get the interrupt stub code object to match against from cache.
-    Code* interrupt_code = NULL;
-    InterruptStub stub;
-    if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE();
+    Code* interrupt_builtin =
+        isolate->builtins()->builtin(Builtins::kInterruptCheck);
     ASSERT(reinterpret_cast<uint32_t>(
         Assembler::target_address_at(pc_after - 4 * kInstrSize)) ==
-           reinterpret_cast<uint32_t>(interrupt_code->entry()));
+           reinterpret_cast<uint32_t>(interrupt_builtin->entry()));
     return NOT_PATCHED;
   }
 }

View File

@@ -298,8 +298,7 @@ void FullCodeGenerator::Generate() {
     Label ok;
     __ LoadRoot(t0, Heap::kStackLimitRootIndex);
     __ Branch(&ok, hs, sp, Operand(t0));
-    StackCheckStub stub;
-    __ CallStub(&stub);
+    __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
     __ bind(&ok);
   }
@@ -369,9 +368,8 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   EmitProfilingCounterDecrement(weight);
   __ slt(at, a3, zero_reg);
   __ beq(at, zero_reg, &ok);
-  // CallStub will emit a li t9 first, so it is safe to use the delay slot.
-  InterruptStub stub;
-  __ CallStub(&stub);
+  // Call will emit a li t9 first, so it is safe to use the delay slot.
+  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
   // Record a mapping of this PC offset to the OSR id. This is used to find
   // the AST id from the unoptimized code in order to use it as a key into
   // the deoptimization input data found in the optimized code.
@@ -418,8 +416,8 @@ void FullCodeGenerator::EmitReturnSequence() {
       __ push(a2);
       __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
     } else {
-      InterruptStub stub;
-      __ CallStub(&stub);
+      __ Call(isolate()->builtins()->InterruptCheck(),
+              RelocInfo::CODE_TARGET);
     }
     __ pop(v0);
     EmitProfilingCounterReset();

View File

@@ -5670,8 +5670,9 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     Label done;
     __ LoadRoot(at, Heap::kStackLimitRootIndex);
     __ Branch(&done, hs, sp, Operand(at));
-    StackCheckStub stub;
-    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+    CallCode(isolate()->builtins()->StackCheck(),
+             RelocInfo::CODE_TARGET,
+             instr);
     EnsureSpaceForLazyDeopt();
     last_lazy_deopt_pc_ = masm()->pc_offset();
     __ bind(&done);

View File

@@ -3320,16 +3320,6 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
 }
-void StackCheckStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
-}
-void InterruptStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
-}
 static void GenerateRecordCallTarget(MacroAssembler* masm) {
   // Cache the called function in a global property cell. Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and

View File

@@ -151,10 +151,9 @@ Deoptimizer::InterruptPatchState Deoptimizer::GetInterruptPatchState(
     return PATCHED_FOR_OSR;
   } else {
     // Get the interrupt stub code object to match against from cache.
-    Code* interrupt_code = NULL;
-    InterruptStub stub;
-    if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE();
-    ASSERT_EQ(interrupt_code->entry(),
+    Code* interrupt_builtin =
+        isolate->builtins()->builtin(Builtins::kInterruptCheck);
+    ASSERT_EQ(interrupt_builtin->entry(),
              Assembler::target_address_at(call_target_address));
    ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
    ASSERT_EQ(kJnsOffset, *(call_target_address - 2));

View File

@@ -280,8 +280,7 @@ void FullCodeGenerator::Generate() {
     Label ok;
     __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
     __ j(above_equal, &ok, Label::kNear);
-    StackCheckStub stub;
-    __ CallStub(&stub);
+    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
     __ bind(&ok);
   }
@@ -341,8 +340,7 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   }
   EmitProfilingCounterDecrement(weight);
   __ j(positive, &ok, Label::kNear);
-  InterruptStub stub;
-  __ CallStub(&stub);
+  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
   // Record a mapping of this PC offset to the OSR id. This is used to find
   // the AST id from the unoptimized code in order to use it as a key into
@@ -388,8 +386,8 @@ void FullCodeGenerator::EmitReturnSequence() {
       __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
      __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
     } else {
-      InterruptStub stub;
-      __ CallStub(&stub);
+      __ call(isolate()->builtins()->InterruptCheck(),
+              RelocInfo::CODE_TARGET);
     }
     __ pop(rax);
     EmitProfilingCounterReset();

View File

@@ -5419,8 +5419,9 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     Label done;
     __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
     __ j(above_equal, &done, Label::kNear);
-    StackCheckStub stub;
-    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+    CallCode(isolate()->builtins()->StackCheck(),
+             RelocInfo::CODE_TARGET,
+             instr);
     EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
     last_lazy_deopt_pc_ = masm()->pc_offset();
     __ bind(&done);