Simplify stack check instruction in Crankshaft.

So far we had two types of stack checks: one used at function entry
and one used at loop back edges; the latter uses a deferred code
object to avoid spilling registers in the loop.

After refactoring lazy deoptimization, the function-entry stack check
can also use deferred code. This change removes the separate
function-entry stack check instruction in Crankshaft and uses a
deferred stack check in all places.

Review URL: http://codereview.chromium.org/8775002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10118 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: fschneider@chromium.org
Date:   2011-12-01 09:54:05 +00:00
Parent: d9dc9104af
Commit: 4ca20a1d35

9 changed files with 50 additions and 131 deletions
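
For orientation, here is a minimal, self-contained sketch (plain C++, not V8 code; all names are illustrative stand-ins) of the deferred stack-check pattern described above, which the diffs below apply uniformly to function entries and loop back edges: an inline fast-path compare against the stack limit, with the slow path moved out of line so the surrounding code need not spill registers around a call.

// Minimal sketch of an inline stack check with a deferred slow path.
// Illustrative only: the globals and function names below are stand-ins,
// not V8 APIs.
#include <cstdint>
#include <cstdio>

namespace {

uintptr_t stack_limit = 0;    // stand-in for the heap's stack limit root
uintptr_t stack_pointer = 0;  // stand-in for sp/esp/rsp

// Out-of-line ("deferred") slow path: in Crankshaft this is the deferred
// code object that calls the stack check stub.
void DeferredStackCheck() {
  std::puts("slow path: handle interrupt request / stack overflow");
}

// Fast path emitted inline: one compare and a conditional branch to the
// deferred code; execution falls through on the common case.
void StackCheck() {
  if (stack_pointer < stack_limit) {  // cmp sp, limit; branch if below
    DeferredStackCheck();
  }
  // Execution continues here; this is the role of instr->done_label().
}

}  // namespace

int main() {
  stack_pointer = 100;
  stack_limit = 50;
  StackCheck();  // above the limit: fast path only
  stack_limit = 200;
  StackCheck();  // below the limit: takes the deferred slow path
  return 0;
}

In Crankshaft itself the fast path is the compare-and-branch sequence emitted in DoStackCheck, and the slow path is the deferred code that ends up calling the StackCheckStub.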


@@ -2202,12 +2202,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
-  if (instr->is_function_entry()) {
-    return MarkAsCall(new LStackCheck, instr);
-  } else {
-    ASSERT(instr->is_backwards_branch());
-    return AssignEnvironment(AssignPointerMap(new LStackCheck));
-  }
+  return AssignEnvironment(AssignPointerMap(new LStackCheck));
 }


@@ -4649,38 +4649,22 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     LStackCheck* instr_;
   };
 
-  ASSERT(instr->HasEnvironment());
-  LEnvironment* env = instr->environment();
+  DeferredStackCheck* deferred_stack_check =
+      new DeferredStackCheck(this, instr);
+  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
+  __ cmp(sp, Operand(ip));
+  __ b(lo, deferred_stack_check->entry());
+  EnsureSpaceForLazyDeopt();
+  __ bind(instr->done_label());
+  deferred_stack_check->SetExit(instr->done_label());
   // There is no LLazyBailout instruction for stack-checks. We have to
   // prepare for lazy deoptimization explicitly here.
-  if (instr->hydrogen()->is_function_entry()) {
-    // Perform stack overflow check.
-    Label done;
-    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
-    __ cmp(sp, Operand(ip));
-    __ b(hs, &done);
-    StackCheckStub stub;
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
-    EnsureSpaceForLazyDeopt();
-    __ bind(&done);
-    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
-    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
-  } else {
-    ASSERT(instr->hydrogen()->is_backwards_branch());
-    // Perform stack overflow check if this goto needs it before jumping.
-    DeferredStackCheck* deferred_stack_check =
-        new DeferredStackCheck(this, instr);
-    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
-    __ cmp(sp, Operand(ip));
-    __ b(lo, deferred_stack_check->entry());
-    EnsureSpaceForLazyDeopt();
-    __ bind(instr->done_label());
-    deferred_stack_check->SetExit(instr->done_label());
-    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
-    // Don't record a deoptimization index for the safepoint here.
-    // This will be done explicitly when emitting call and the safepoint in
-    // the deferred code.
-  }
+  ASSERT(instr->HasEnvironment());
+  LEnvironment* env = instr->environment();
+  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+  // Don't record a deoptimization index for the safepoint here.
+  // This will be done explicitly when emitting call and the safepoint in
+  // the deferred code.
 }


@@ -1271,12 +1271,7 @@ class HSimulate: public HInstruction {
 class HStackCheck: public HTemplateInstruction<1> {
  public:
-  enum Type {
-    kFunctionEntry,
-    kBackwardsBranch
-  };
-
-  HStackCheck(HValue* context, Type type) : type_(type) {
+  explicit HStackCheck(HValue* context) {
     SetOperandAt(0, context);
   }
@@ -1294,13 +1289,7 @@ class HStackCheck: public HTemplateInstruction<1> {
     }
   }
 
-  bool is_function_entry() { return type_ == kFunctionEntry; }
-  bool is_backwards_branch() { return type_ == kBackwardsBranch; }
-
   DECLARE_CONCRETE_INSTRUCTION(StackCheck)
-
- private:
-  Type type_;
 };


@@ -2333,8 +2333,7 @@ HGraph* HGraphBuilder::CreateGraph() {
     AddSimulate(AstNode::kDeclarationsId);
 
     HValue* context = environment()->LookupContext();
-    AddInstruction(
-        new(zone()) HStackCheck(context, HStackCheck::kFunctionEntry));
+    AddInstruction(new(zone()) HStackCheck(context));
 
     VisitStatements(info()->function()->body());
     if (HasStackOverflow()) return NULL;
@@ -2922,8 +2921,7 @@ void HGraphBuilder::VisitLoopBody(IterationStatement* stmt,
   BreakAndContinueScope push(break_info, this);
   AddSimulate(stmt->StackCheckId());
   HValue* context = environment()->LookupContext();
-  HStackCheck* stack_check =
-      new(zone()) HStackCheck(context, HStackCheck::kBackwardsBranch);
+  HStackCheck* stack_check = new(zone()) HStackCheck(context);
   AddInstruction(stack_check);
   ASSERT(loop_entry->IsLoopHeader());
   loop_entry->loop_information()->set_stack_check(stack_check);


@@ -4554,43 +4554,23 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     LStackCheck* instr_;
   };
 
-  ASSERT(instr->HasEnvironment());
-  LEnvironment* env = instr->environment();
+  DeferredStackCheck* deferred_stack_check =
+      new DeferredStackCheck(this, instr);
+  ExternalReference stack_limit =
+      ExternalReference::address_of_stack_limit(isolate());
+  __ cmp(esp, Operand::StaticVariable(stack_limit));
+  __ j(below, deferred_stack_check->entry());
+  EnsureSpaceForLazyDeopt();
+  __ bind(instr->done_label());
+  deferred_stack_check->SetExit(instr->done_label());
   // There is no LLazyBailout instruction for stack-checks. We have to
   // prepare for lazy deoptimization explicitly here.
-  if (instr->hydrogen()->is_function_entry()) {
-    // Perform stack overflow check.
-    Label done;
-    ExternalReference stack_limit =
-        ExternalReference::address_of_stack_limit(isolate());
-    __ cmp(esp, Operand::StaticVariable(stack_limit));
-    __ j(above_equal, &done, Label::kNear);
-    ASSERT(instr->context()->IsRegister());
-    ASSERT(ToRegister(instr->context()).is(esi));
-    StackCheckStub stub;
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
-    EnsureSpaceForLazyDeopt();
-    __ bind(&done);
-    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
-    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
-  } else {
-    ASSERT(instr->hydrogen()->is_backwards_branch());
-    // Perform stack overflow check if this goto needs it before jumping.
-    DeferredStackCheck* deferred_stack_check =
-        new DeferredStackCheck(this, instr);
-    ExternalReference stack_limit =
-        ExternalReference::address_of_stack_limit(isolate());
-    __ cmp(esp, Operand::StaticVariable(stack_limit));
-    __ j(below, deferred_stack_check->entry());
-    EnsureSpaceForLazyDeopt();
-    __ bind(instr->done_label());
-    deferred_stack_check->SetExit(instr->done_label());
-    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
-    // Don't record a deoptimization index for the safepoint here.
-    // This will be done explicitly when emitting call and the safepoint in
-    // the deferred code.
-  }
+  ASSERT(instr->HasEnvironment());
+  LEnvironment* env = instr->environment();
+  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+  // Don't record a deoptimization index for the safepoint here.
+  // This will be done explicitly when emitting call and the safepoint in
+  // the deferred code.
 }


@@ -2328,15 +2328,9 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
-  if (instr->is_function_entry()) {
-    LOperand* context = UseFixed(instr->context(), esi);
-    return MarkAsCall(new(zone()) LStackCheck(context), instr);
-  } else {
-    ASSERT(instr->is_backwards_branch());
-    LOperand* context = UseAny(instr->context());
-    return AssignEnvironment(
-        AssignPointerMap(new(zone()) LStackCheck(context)));
-  }
+  LOperand* context = UseAny(instr->context());
+  return AssignEnvironment(
+      AssignPointerMap(new(zone()) LStackCheck(context)));
 }


@@ -228,7 +228,6 @@ class LInstruction: public ZoneObject {
   LPointerMap* pointer_map() const { return pointer_map_.get(); }
   bool HasPointerMap() const { return pointer_map_.is_set(); }
 
   void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; }
   HValue* hydrogen_value() const { return hydrogen_value_; }


@@ -4261,36 +4261,21 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     LStackCheck* instr_;
   };
 
-  ASSERT(instr->HasEnvironment());
-  LEnvironment* env = instr->environment();
+  DeferredStackCheck* deferred_stack_check =
+      new DeferredStackCheck(this, instr);
+  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
+  __ j(below, deferred_stack_check->entry());
+  EnsureSpaceForLazyDeopt();
+  __ bind(instr->done_label());
+  deferred_stack_check->SetExit(instr->done_label());
   // There is no LLazyBailout instruction for stack-checks. We have to
   // prepare for lazy deoptimization explicitly here.
-  if (instr->hydrogen()->is_function_entry()) {
-    // Perform stack overflow check.
-    Label done;
-    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
-    __ j(above_equal, &done, Label::kNear);
-    StackCheckStub stub;
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
-    EnsureSpaceForLazyDeopt();
-    __ bind(&done);
-    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
-    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
-  } else {
-    ASSERT(instr->hydrogen()->is_backwards_branch());
-    // Perform stack overflow check if this goto needs it before jumping.
-    DeferredStackCheck* deferred_stack_check =
-        new DeferredStackCheck(this, instr);
-    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
-    __ j(below, deferred_stack_check->entry());
-    EnsureSpaceForLazyDeopt();
-    __ bind(instr->done_label());
-    deferred_stack_check->SetExit(instr->done_label());
-    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
-    // Don't record a deoptimization index for the safepoint here.
-    // This will be done explicitly when emitting call and the safepoint in
-    // the deferred code.
-  }
+  ASSERT(instr->HasEnvironment());
+  LEnvironment* env = instr->environment();
+  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+  // Don't record a deoptimization index for the safepoint here.
+  // This will be done explicitly when emitting call and the safepoint in
+  // the deferred code.
 }


@@ -2203,12 +2203,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
-  if (instr->is_function_entry()) {
-    return MarkAsCall(new LStackCheck, instr);
-  } else {
-    ASSERT(instr->is_backwards_branch());
-    return AssignEnvironment(AssignPointerMap(new LStackCheck));
-  }
+  return AssignEnvironment(AssignPointerMap(new LStackCheck));
 }