Prepare for optionally inlining smi cases in the code generated by the full codegens.
Review URL: http://codereview.chromium.org/3116042

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5343 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
kasperl@chromium.org 2010-08-25 14:22:03 +00:00
parent 6bd9d953dd
commit 1816e05dc2
4 changed files with 119 additions and 114 deletions

View File

@ -665,10 +665,10 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Compile the label expression. // Compile the label expression.
VisitForValue(clause->label(), kAccumulator); VisitForValue(clause->label(), kAccumulator);
// Perform the comparison as if via '==='. The comparison stub expects // Perform the comparison as if via '==='.
// the smi vs. smi case to be handled before it is called.
Label slow_case;
__ ldr(r1, MemOperand(sp, 0)); // Switch value. __ ldr(r1, MemOperand(sp, 0)); // Switch value.
if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
Label slow_case;
__ orr(r2, r1, r0); __ orr(r2, r1, r0);
__ tst(r2, Operand(kSmiTagMask)); __ tst(r2, Operand(kSmiTagMask));
__ b(ne, &slow_case); __ b(ne, &slow_case);
@ -676,8 +676,9 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
__ b(ne, &next_test); __ b(ne, &next_test);
__ Drop(1); // Switch value is no longer needed. __ Drop(1); // Switch value is no longer needed.
__ b(clause->body_target()->entry_label()); __ b(clause->body_target()->entry_label());
__ bind(&slow_case); __ bind(&slow_case);
}
CompareStub stub(eq, true, kBothCouldBeNaN, true, r1, r0); CompareStub stub(eq, true, kBothCouldBeNaN, true, r1, r0);
__ CallStub(&stub); __ CallStub(&stub);
__ cmp(r0, Operand(0)); __ cmp(r0, Operand(0));
@ -2672,26 +2673,26 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
case Token::BIT_NOT: { case Token::BIT_NOT: {
Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)"); Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); // The generic unary operation stub expects the argument to be
UnaryOverwriteMode overwrite = // in the accumulator register r0.
can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
// GenericUnaryOpStub expects the argument to be in the
// accumulator register r0.
VisitForValue(expr->expression(), kAccumulator); VisitForValue(expr->expression(), kAccumulator);
// Avoid calling the stub for Smis. Label done;
Label smi, done; if (ShouldInlineSmiCase(expr->op())) {
__ BranchOnSmi(result_register(), &smi); Label call_stub;
// Non-smi: call stub leaving result in accumulator register. __ BranchOnNotSmi(r0, &call_stub);
__ CallStub(&stub); __ mvn(r0, Operand(r0));
__ b(&done);
// Perform operation directly on Smis.
__ bind(&smi);
__ mvn(result_register(), Operand(result_register()));
// Bit-clear inverted smi-tag. // Bit-clear inverted smi-tag.
__ bic(result_register(), result_register(), Operand(kSmiTagMask)); __ bic(r0, r0, Operand(kSmiTagMask));
__ b(&done);
__ bind(&call_stub);
}
bool overwrite = expr->expression()->ResultOverwriteAllowed();
UnaryOverwriteMode mode =
overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
GenericUnaryOpStub stub(Token::BIT_NOT, mode);
__ CallStub(&stub);
__ bind(&done); __ bind(&done);
Apply(context_, result_register()); Apply(context_, r0);
break; break;
} }
@ -3055,16 +3056,15 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
UNREACHABLE(); UNREACHABLE();
} }
// The comparison stub expects the smi vs. smi case to be handled if (ShouldInlineSmiCase(op)) {
// before it is called.
Label slow_case; Label slow_case;
__ orr(r2, r0, Operand(r1)); __ orr(r2, r0, Operand(r1));
__ BranchOnNotSmi(r2, &slow_case); __ BranchOnNotSmi(r2, &slow_case);
__ cmp(r1, r0); __ cmp(r1, r0);
__ b(cc, if_true); Split(cc, if_true, if_false, NULL);
__ jmp(if_false);
__ bind(&slow_case); __ bind(&slow_case);
}
CompareStub stub(cc, strict, kBothCouldBeNaN, true, r1, r0); CompareStub stub(cc, strict, kBothCouldBeNaN, true, r1, r0);
__ CallStub(&stub); __ CallStub(&stub);
__ cmp(r0, Operand(0)); __ cmp(r0, Operand(0));

View File

@ -318,9 +318,15 @@ int FullCodeGenerator::SlotOffset(Slot* slot) {
bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) { bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
if (Debugger::IsDebuggerActive()) return false; // TODO(kasperl): Once the compare stub allows leaving out the
if (op == Token::DIV ||op == Token::MOD) return false; // inlined smi case, we should get rid of this check.
return loop_depth_ > 0; if (Token::IsCompareOp(op)) return true;
// TODO(kasperl): Once the unary bit not stub allows leaving out
// the inlined smi case, we should get rid of this check.
if (op == Token::BIT_NOT) return true;
// Inline smi case inside loops, but not division and modulo which
// are too complicated and take up too much space.
return (op != Token::DIV) && (op != Token::MOD) && (loop_depth_ > 0);
} }

View File

@ -668,10 +668,10 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Compile the label expression. // Compile the label expression.
VisitForValue(clause->label(), kAccumulator); VisitForValue(clause->label(), kAccumulator);
// Perform the comparison as if via '==='. The comparison stub expects // Perform the comparison as if via '==='.
// the smi vs. smi case to be handled before it is called.
Label slow_case;
__ mov(edx, Operand(esp, 0)); // Switch value. __ mov(edx, Operand(esp, 0)); // Switch value.
if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
Label slow_case;
__ mov(ecx, edx); __ mov(ecx, edx);
__ or_(ecx, Operand(eax)); __ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask)); __ test(ecx, Immediate(kSmiTagMask));
@ -680,8 +680,9 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
__ j(not_equal, &next_test); __ j(not_equal, &next_test);
__ Drop(1); // Switch value is no longer needed. __ Drop(1); // Switch value is no longer needed.
__ jmp(clause->body_target()->entry_label()); __ jmp(clause->body_target()->entry_label());
__ bind(&slow_case); __ bind(&slow_case);
}
CompareStub stub(equal, true); CompareStub stub(equal, true);
__ CallStub(&stub); __ CallStub(&stub);
__ test(eax, Operand(eax)); __ test(eax, Operand(eax));
@ -2672,26 +2673,26 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
case Token::BIT_NOT: { case Token::BIT_NOT: {
Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)"); Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); // The generic unary operation stub expects the argument to be
UnaryOverwriteMode overwrite = // in the accumulator register eax.
can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
// GenericUnaryOpStub expects the argument to be in the
// accumulator register eax.
VisitForValue(expr->expression(), kAccumulator); VisitForValue(expr->expression(), kAccumulator);
// Avoid calling the stub for Smis. Label done;
Label smi, done; if (ShouldInlineSmiCase(expr->op())) {
__ test(result_register(), Immediate(kSmiTagMask)); Label call_stub;
__ j(zero, &smi); __ test(eax, Immediate(kSmiTagMask));
// Non-smi: call stub leaving result in accumulator register. __ j(not_zero, &call_stub);
__ CallStub(&stub); __ lea(eax, Operand(eax, kSmiTagMask));
__ not_(eax);
__ jmp(&done); __ jmp(&done);
// Perform operation directly on Smis. __ bind(&call_stub);
__ bind(&smi); }
__ not_(result_register()); bool overwrite = expr->expression()->ResultOverwriteAllowed();
__ and_(result_register(), ~kSmiTagMask); // Remove inverted smi-tag. UnaryOverwriteMode mode =
overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
GenericUnaryOpStub stub(Token::BIT_NOT, mode);
__ CallStub(&stub);
__ bind(&done); __ bind(&done);
Apply(context_, result_register()); Apply(context_, eax);
break; break;
} }
@ -3066,18 +3067,17 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
UNREACHABLE(); UNREACHABLE();
} }
// The comparison stub expects the smi vs. smi case to be if (ShouldInlineSmiCase(op)) {
// handled before it is called.
Label slow_case; Label slow_case;
__ mov(ecx, Operand(edx)); __ mov(ecx, Operand(edx));
__ or_(ecx, Operand(eax)); __ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask)); __ test(ecx, Immediate(kSmiTagMask));
__ j(not_zero, &slow_case, not_taken); __ j(not_zero, &slow_case, not_taken);
__ cmp(edx, Operand(eax)); __ cmp(edx, Operand(eax));
__ j(cc, if_true); Split(cc, if_true, if_false, NULL);
__ jmp(if_false);
__ bind(&slow_case); __ bind(&slow_case);
}
CompareStub stub(cc, strict); CompareStub stub(cc, strict);
__ CallStub(&stub); __ CallStub(&stub);
__ test(eax, Operand(eax)); __ test(eax, Operand(eax));

View File

@ -672,8 +672,8 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Compile the label expression. // Compile the label expression.
VisitForValue(clause->label(), kAccumulator); VisitForValue(clause->label(), kAccumulator);
// Perform the comparison as if via '==='. The comparison stub expects // Perform the comparison as if via '==='.
// the smi vs. smi case to be handled before it is called. if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
Label slow_case; Label slow_case;
__ movq(rdx, Operand(rsp, 0)); // Switch value. __ movq(rdx, Operand(rsp, 0)); // Switch value.
__ JumpIfNotBothSmi(rdx, rax, &slow_case); __ JumpIfNotBothSmi(rdx, rax, &slow_case);
@ -681,8 +681,9 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
__ j(not_equal, &next_test); __ j(not_equal, &next_test);
__ Drop(1); // Switch value is no longer needed. __ Drop(1); // Switch value is no longer needed.
__ jmp(clause->body_target()->entry_label()); __ jmp(clause->body_target()->entry_label());
__ bind(&slow_case); __ bind(&slow_case);
}
CompareStub stub(equal, true); CompareStub stub(equal, true);
__ CallStub(&stub); __ CallStub(&stub);
__ testq(rax, rax); __ testq(rax, rax);
@ -2667,25 +2668,24 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
case Token::BIT_NOT: { case Token::BIT_NOT: {
Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)"); Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); // The generic unary operation stub expects the argument to be
UnaryOverwriteMode overwrite = // in the accumulator register rax.
can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
// GenericUnaryOpStub expects the argument to be in the
// accumulator register rax.
VisitForValue(expr->expression(), kAccumulator); VisitForValue(expr->expression(), kAccumulator);
// Avoid calling the stub for Smis. Label done;
Label smi, done; if (ShouldInlineSmiCase(expr->op())) {
Condition is_smi = masm_->CheckSmi(result_register()); Label call_stub;
__ j(is_smi, &smi); __ JumpIfNotSmi(rax, &call_stub);
// Non-smi: call stub leaving result in accumulator register. __ SmiNot(rax, rax);
__ CallStub(&stub);
__ jmp(&done); __ jmp(&done);
// Perform operation directly on Smis. __ bind(&call_stub);
__ bind(&smi); }
__ SmiNot(result_register(), result_register()); bool overwrite = expr->expression()->ResultOverwriteAllowed();
UnaryOverwriteMode mode =
overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
GenericUnaryOpStub stub(Token::BIT_NOT, mode);
__ CallStub(&stub);
__ bind(&done); __ bind(&done);
Apply(context_, result_register()); Apply(context_, rax);
break; break;
} }
@ -3054,15 +3054,14 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
UNREACHABLE(); UNREACHABLE();
} }
// The comparison stub expects the smi vs. smi case to be handled if (ShouldInlineSmiCase(op)) {
// before it is called.
Label slow_case; Label slow_case;
__ JumpIfNotBothSmi(rax, rdx, &slow_case); __ JumpIfNotBothSmi(rax, rdx, &slow_case);
__ SmiCompare(rdx, rax); __ SmiCompare(rdx, rax);
__ j(cc, if_true); Split(cc, if_true, if_false, NULL);
__ jmp(if_false);
__ bind(&slow_case); __ bind(&slow_case);
}
CompareStub stub(cc, strict); CompareStub stub(cc, strict);
__ CallStub(&stub); __ CallStub(&stub);
__ testq(rax, rax); __ testq(rax, rax);