Second attempt: Improve our type feedback by recognizing never-executed IC calls for binary operations.

This is an improved version of my earlier change r5970. It avoids degrading the
non-optimized code.

Initially we emit a conditional branch that is either always- or never-taken
after a smi-check (depending on whether we test for smi or for non-smi).
Since test-eax always sets the carry-flag to 0, we use jump-if-carry and
jump-if-not-carry.

The first invocation of the stub patches a jc with a jz and a
jnc with a jnz instruction, so that the code looks exactly as it would
without patching. The only difference is the test- or nop-instruction
after the IC-call.

Review URL: http://codereview.chromium.org/5763004

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6030 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
fschneider@chromium.org 2010-12-15 13:56:41 +00:00
parent 3ed6c2a12f
commit b93a0f5d9a
12 changed files with 226 additions and 57 deletions

View File

@ -2360,10 +2360,8 @@ Condition CompareIC::ComputeCondition(Token::Value op) {
void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) { void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
HandleScope scope; HandleScope scope;
Handle<Code> rewritten; Handle<Code> rewritten;
#ifdef DEBUG
State previous_state = GetState(); State previous_state = GetState();
#endif State state = TargetState(previous_state, false, x, y);
State state = TargetState(x, y);
if (state == GENERIC) { if (state == GENERIC) {
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0); CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0);
rewritten = stub.GetCode(); rewritten = stub.GetCode();
@ -2383,6 +2381,12 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
#endif #endif
} }
void PatchInlinedSmiCode(Address address) {
UNIMPLEMENTED();
}
} } // namespace v8::internal } } // namespace v8::internal
#endif // V8_TARGET_ARCH_ARM #endif // V8_TARGET_ARCH_ARM

View File

@ -38,6 +38,9 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
// Forward declarations.
class JumpPatchSite;
// AST node visitor which can tell whether a given statement will be breakable // AST node visitor which can tell whether a given statement will be breakable
// when the code is compiled by the full compiler in the debugger. This means // when the code is compiled by the full compiler in the debugger. This means
// that there will be an IC (load/store/call) in the code generated for the // that there will be an IC (load/store/call) in the code generated for the
@ -533,6 +536,10 @@ class FullCodeGenerator: public AstVisitor {
// Helper for calling an IC stub. // Helper for calling an IC stub.
void EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode); void EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode);
// Calling an IC stub with a patch site. Passing NULL for patch_site
// indicates no inlined smi code and emits a nop after the IC call.
void EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site);
// Set fields in the stack frame. Offsets are the frame pointer relative // Set fields in the stack frame. Offsets are the frame pointer relative
// offsets defined in, e.g., StandardFrameConstants. // offsets defined in, e.g., StandardFrameConstants.
void StoreToFrameField(int frame_offset, Register value); void StoreToFrameField(int frame_offset, Register value);

View File

@ -571,6 +571,15 @@ class Assembler : public Malloced {
static const byte kTestEaxByte = 0xA9; static const byte kTestEaxByte = 0xA9;
// One byte opcode for test al, 0xXX. // One byte opcode for test al, 0xXX.
static const byte kTestAlByte = 0xA8; static const byte kTestAlByte = 0xA8;
// One byte opcode for nop.
static const byte kNopByte = 0x90;
// One byte opcode for a short unconditional jump.
static const byte kJmpShortOpcode = 0xEB;
// One byte prefix for a short conditional jump.
static const byte kJccShortPrefix = 0x70;
static const byte kJncShortOpcode = kJccShortPrefix | not_carry;
static const byte kJcShortOpcode = kJccShortPrefix | carry;
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Code generation // Code generation

View File

@ -250,7 +250,8 @@ class TypeRecordingBinaryOpStub: public CodeStub {
ASSERT(OpBits::is_valid(Token::NUM_TOKENS)); ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
} }
TypeRecordingBinaryOpStub(int key, TypeRecordingBinaryOpStub(
int key,
TRBinaryOpIC::TypeInfo operands_type, TRBinaryOpIC::TypeInfo operands_type,
TRBinaryOpIC::TypeInfo result_type = TRBinaryOpIC::UNINITIALIZED) TRBinaryOpIC::TypeInfo result_type = TRBinaryOpIC::UNINITIALIZED)
: op_(OpBits::decode(key)), : op_(OpBits::decode(key)),
@ -258,8 +259,7 @@ class TypeRecordingBinaryOpStub: public CodeStub {
use_sse3_(SSE3Bits::decode(key)), use_sse3_(SSE3Bits::decode(key)),
operands_type_(operands_type), operands_type_(operands_type),
result_type_(result_type), result_type_(result_type),
name_(NULL) { name_(NULL) { }
}
// Generate code to call the stub with the supplied arguments. This will add // Generate code to call the stub with the supplied arguments. This will add
// code at the call site to prepare arguments either in registers or on the // code at the call site to prepare arguments either in registers or on the

View File

@ -41,8 +41,61 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
#define __ ACCESS_MASM(masm_) #define __ ACCESS_MASM(masm_)
class JumpPatchSite BASE_EMBEDDED {
public:
explicit JumpPatchSite(MacroAssembler* masm)
: masm_(masm) {
#ifdef DEBUG
info_emitted_ = false;
#endif
}
~JumpPatchSite() {
ASSERT(patch_site_.is_bound() == info_emitted_);
}
void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
__ test(reg, Immediate(kSmiTagMask));
EmitJump(not_carry, target); // Always taken before patched.
}
void EmitJumpIfSmi(Register reg, NearLabel* target) {
__ test(reg, Immediate(kSmiTagMask));
EmitJump(carry, target); // Never taken before patched.
}
void EmitPatchInfo() {
int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
ASSERT(is_int8(delta_to_patch_site));
__ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
info_emitted_ = true;
#endif
}
bool is_bound() const { return patch_site_.is_bound(); }
private:
// jc will be patched with jz, jnc will become jnz.
void EmitJump(Condition cc, NearLabel* target) {
ASSERT(!patch_site_.is_bound() && !info_emitted_);
ASSERT(cc == carry || cc == not_carry);
__ bind(&patch_site_);
__ j(cc, target);
}
MacroAssembler* masm_;
Label patch_site_;
#ifdef DEBUG
bool info_emitted_;
#endif
};
// Generate code for a JS function. On entry to the function the receiver // Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the // and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the // return address on top of them. The actual argument count matches the
@ -715,12 +768,13 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Perform the comparison as if via '==='. // Perform the comparison as if via '==='.
__ mov(edx, Operand(esp, 0)); // Switch value. __ mov(edx, Operand(esp, 0)); // Switch value.
bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
JumpPatchSite patch_site(masm_);
if (inline_smi_code) { if (inline_smi_code) {
NearLabel slow_case; NearLabel slow_case;
__ mov(ecx, edx); __ mov(ecx, edx);
__ or_(ecx, Operand(eax)); __ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask)); patch_site.EmitJumpIfNotSmi(ecx, &slow_case);
__ j(not_zero, &slow_case, not_taken);
__ cmp(edx, Operand(eax)); __ cmp(edx, Operand(eax));
__ j(not_equal, &next_test); __ j(not_equal, &next_test);
__ Drop(1); // Switch value is no longer needed. __ Drop(1); // Switch value is no longer needed.
@ -730,9 +784,8 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Record position before stub call for type feedback. // Record position before stub call for type feedback.
SetSourcePosition(clause->position()); SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT); Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
__ call(ic, RelocInfo::CODE_TARGET); EmitCallIC(ic, &patch_site);
__ test(eax, Operand(eax)); __ test(eax, Operand(eax));
__ j(not_equal, &next_test); __ j(not_equal, &next_test);
@ -1556,12 +1609,11 @@ void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
OverwriteMode mode, OverwriteMode mode,
bool left_is_constant_smi, bool left_is_constant_smi,
Smi* value) { Smi* value) {
NearLabel call_stub; NearLabel call_stub, done;
Label done;
__ add(Operand(eax), Immediate(value)); __ add(Operand(eax), Immediate(value));
__ j(overflow, &call_stub); __ j(overflow, &call_stub);
__ test(eax, Immediate(kSmiTagMask)); JumpPatchSite patch_site(masm_);
__ j(zero, &done); patch_site.EmitJumpIfSmi(eax, &done);
// Undo the optimistic add operation and call the shared stub. // Undo the optimistic add operation and call the shared stub.
__ bind(&call_stub); __ bind(&call_stub);
@ -1574,7 +1626,8 @@ void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
__ mov(edx, eax); __ mov(edx, eax);
__ mov(eax, Immediate(value)); __ mov(eax, Immediate(value));
} }
__ CallStub(&stub); EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done); __ bind(&done);
context()->Plug(eax); context()->Plug(eax);
} }
@ -1584,7 +1637,7 @@ void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
OverwriteMode mode, OverwriteMode mode,
bool left_is_constant_smi, bool left_is_constant_smi,
Smi* value) { Smi* value) {
Label call_stub, done; NearLabel call_stub, done;
if (left_is_constant_smi) { if (left_is_constant_smi) {
__ mov(ecx, eax); __ mov(ecx, eax);
__ mov(eax, Immediate(value)); __ mov(eax, Immediate(value));
@ -1593,8 +1646,8 @@ void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
__ sub(Operand(eax), Immediate(value)); __ sub(Operand(eax), Immediate(value));
} }
__ j(overflow, &call_stub); __ j(overflow, &call_stub);
__ test(eax, Immediate(kSmiTagMask)); JumpPatchSite patch_site(masm_);
__ j(zero, &done); patch_site.EmitJumpIfSmi(eax, &done);
__ bind(&call_stub); __ bind(&call_stub);
if (left_is_constant_smi) { if (left_is_constant_smi) {
@ -1607,7 +1660,8 @@ void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
} }
Token::Value op = Token::SUB; Token::Value op = Token::SUB;
TypeRecordingBinaryOpStub stub(op, mode); TypeRecordingBinaryOpStub stub(op, mode);
__ CallStub(&stub); EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done); __ bind(&done);
context()->Plug(eax); context()->Plug(eax);
} }
@ -1617,19 +1671,21 @@ void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
Token::Value op, Token::Value op,
OverwriteMode mode, OverwriteMode mode,
Smi* value) { Smi* value) {
Label call_stub, smi_case, done; NearLabel call_stub, smi_case, done;
int shift_value = value->value() & 0x1f; int shift_value = value->value() & 0x1f;
__ test(eax, Immediate(kSmiTagMask)); JumpPatchSite patch_site(masm_);
__ j(zero, &smi_case); patch_site.EmitJumpIfSmi(eax, &smi_case);
// Call stub.
__ bind(&call_stub); __ bind(&call_stub);
__ mov(edx, eax); __ mov(edx, eax);
__ mov(eax, Immediate(value)); __ mov(eax, Immediate(value));
TypeRecordingBinaryOpStub stub(op, mode); TypeRecordingBinaryOpStub stub(op, mode);
__ CallStub(&stub); EmitCallIC(stub.GetCode(), &patch_site);
__ jmp(&done); __ jmp(&done);
// Smi case.
__ bind(&smi_case); __ bind(&smi_case);
switch (op) { switch (op) {
case Token::SHL: case Token::SHL:
@ -1679,17 +1735,19 @@ void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr,
Token::Value op, Token::Value op,
OverwriteMode mode, OverwriteMode mode,
Smi* value) { Smi* value) {
Label smi_case, done; NearLabel smi_case, done;
__ test(eax, Immediate(kSmiTagMask));
__ j(zero, &smi_case); JumpPatchSite patch_site(masm_);
patch_site.EmitJumpIfSmi(eax, &smi_case);
// The order of the arguments does not matter for bit-ops with a // The order of the arguments does not matter for bit-ops with a
// constant operand. // constant operand.
__ mov(edx, Immediate(value)); __ mov(edx, Immediate(value));
TypeRecordingBinaryOpStub stub(op, mode); TypeRecordingBinaryOpStub stub(op, mode);
__ CallStub(&stub); EmitCallIC(stub.GetCode(), &patch_site);
__ jmp(&done); __ jmp(&done);
// Smi case.
__ bind(&smi_case); __ bind(&smi_case);
switch (op) { switch (op) {
case Token::BIT_OR: case Token::BIT_OR:
@ -1757,19 +1815,20 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
// Do combined smi check of the operands. Left operand is on the // Do combined smi check of the operands. Left operand is on the
// stack. Right operand is in eax. // stack. Right operand is in eax.
Label done, stub_call, smi_case; NearLabel done, smi_case, stub_call;
__ pop(edx); __ pop(edx);
__ mov(ecx, eax); __ mov(ecx, eax);
__ or_(eax, Operand(edx)); __ or_(eax, Operand(edx));
__ test(eax, Immediate(kSmiTagMask)); JumpPatchSite patch_site(masm_);
__ j(zero, &smi_case); patch_site.EmitJumpIfSmi(eax, &smi_case);
__ bind(&stub_call); __ bind(&stub_call);
__ mov(eax, ecx); __ mov(eax, ecx);
TypeRecordingBinaryOpStub stub(op, mode); TypeRecordingBinaryOpStub stub(op, mode);
__ CallStub(&stub); EmitCallIC(stub.GetCode(), &patch_site);
__ jmp(&done); __ jmp(&done);
// Smi case.
__ bind(&smi_case); __ bind(&smi_case);
__ mov(eax, edx); // Copy left operand in case of a stub call. __ mov(eax, edx); // Copy left operand in case of a stub call.
@ -1848,7 +1907,7 @@ void FullCodeGenerator::EmitBinaryOp(Token::Value op,
OverwriteMode mode) { OverwriteMode mode) {
__ pop(edx); __ pop(edx);
TypeRecordingBinaryOpStub stub(op, mode); TypeRecordingBinaryOpStub stub(op, mode);
__ CallStub(&stub); EmitCallIC(stub.GetCode(), NULL); // NULL signals no inlined smi code.
context()->Plug(eax); context()->Plug(eax);
} }
@ -3720,8 +3779,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
} }
// Inline smi case if we are in a loop. // Inline smi case if we are in a loop.
NearLabel stub_call; NearLabel stub_call, done;
Label done; JumpPatchSite patch_site(masm_);
if (ShouldInlineSmiCase(expr->op())) { if (ShouldInlineSmiCase(expr->op())) {
if (expr->op() == Token::INC) { if (expr->op() == Token::INC) {
__ add(Operand(eax), Immediate(Smi::FromInt(1))); __ add(Operand(eax), Immediate(Smi::FromInt(1)));
@ -3731,8 +3791,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ j(overflow, &stub_call); __ j(overflow, &stub_call);
// We could eliminate this smi check if we split the code at // We could eliminate this smi check if we split the code at
// the first smi check before calling ToNumber. // the first smi check before calling ToNumber.
__ test(eax, Immediate(kSmiTagMask)); patch_site.EmitJumpIfSmi(eax, &done);
__ j(zero, &done);
__ bind(&stub_call); __ bind(&stub_call);
// Call stub. Undo operation first. // Call stub. Undo operation first.
if (expr->op() == Token::INC) { if (expr->op() == Token::INC) {
@ -3750,7 +3810,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ mov(eax, Immediate(Smi::FromInt(1))); __ mov(eax, Immediate(Smi::FromInt(1)));
TypeRecordingBinaryOpStub stub(expr->binary_op(), TypeRecordingBinaryOpStub stub(expr->binary_op(),
NO_OVERWRITE); NO_OVERWRITE);
__ CallStub(&stub); EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done); __ bind(&done);
// Store the value returned in eax. // Store the value returned in eax.
@ -4023,21 +4083,22 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
} }
bool inline_smi_code = ShouldInlineSmiCase(op); bool inline_smi_code = ShouldInlineSmiCase(op);
JumpPatchSite patch_site(masm_);
if (inline_smi_code) { if (inline_smi_code) {
NearLabel slow_case; NearLabel slow_case;
__ mov(ecx, Operand(edx)); __ mov(ecx, Operand(edx));
__ or_(ecx, Operand(eax)); __ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask)); patch_site.EmitJumpIfNotSmi(ecx, &slow_case);
__ j(not_zero, &slow_case, not_taken);
__ cmp(edx, Operand(eax)); __ cmp(edx, Operand(eax));
Split(cc, if_true, if_false, NULL); Split(cc, if_true, if_false, NULL);
__ bind(&slow_case); __ bind(&slow_case);
} }
// Record position and call the compare IC. // Record position and call the compare IC.
Handle<Code> ic = CompareIC::GetUninitialized(op);
SetSourcePosition(expr->position()); SetSourcePosition(expr->position());
__ call(ic, RelocInfo::CODE_TARGET); Handle<Code> ic = CompareIC::GetUninitialized(op);
EmitCallIC(ic, &patch_site);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ test(eax, Operand(eax)); __ test(eax, Operand(eax));
Split(cc, if_true, if_false, fall_through); Split(cc, if_true, if_false, fall_through);
@ -4141,6 +4202,16 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
} }
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
__ call(ic, RelocInfo::CODE_TARGET);
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
} else {
__ nop(); // Signals no inlined code.
}
}
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
__ mov(Operand(ebp, frame_offset), value); __ mov(Operand(ebp, frame_offset), value);

View File

@ -2049,13 +2049,23 @@ Condition CompareIC::ComputeCondition(Token::Value op) {
} }
static bool HasInlinedSmiCode(Address address) {
// The address of the instruction following the call.
Address test_instruction_address =
address + Assembler::kCallTargetAddressOffset;
// If the instruction following the call is not a test al, nothing
// was inlined.
return *test_instruction_address == Assembler::kTestAlByte;
}
void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) { void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
HandleScope scope; HandleScope scope;
Handle<Code> rewritten; Handle<Code> rewritten;
#ifdef DEBUG
State previous_state = GetState(); State previous_state = GetState();
#endif
State state = TargetState(x, y); State state = TargetState(previous_state, HasInlinedSmiCode(address()), x, y);
if (state == GENERIC) { if (state == GENERIC) {
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS); CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
rewritten = stub.GetCode(); rewritten = stub.GetCode();
@ -2073,6 +2083,44 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
Token::Name(op_)); Token::Name(op_));
} }
#endif #endif
// Activate inlined smi code.
if (previous_state == UNINITIALIZED) {
PatchInlinedSmiCode(address());
}
}
void PatchInlinedSmiCode(Address address) {
// The address of the instruction following the call.
Address test_instruction_address =
address + Assembler::kCallTargetAddressOffset;
// If the instruction following the call is not a test al, nothing
// was inlined.
if (*test_instruction_address != Assembler::kTestAlByte) {
ASSERT(*test_instruction_address == Assembler::kNopByte);
return;
}
Address delta_address = test_instruction_address + 1;
// The delta to the start of the map check instruction and the
// condition code uses at the patched jump.
int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
if (FLAG_trace_ic) {
PrintF("[ patching ic at %p, test=%p, delta=%d\n",
address, test_instruction_address, delta);
}
// Patch with a short conditional jump. There must be a
// short jump-if-carry/not-carry at this position.
Address jmp_address = test_instruction_address - delta;
ASSERT(*jmp_address == Assembler::kJncShortOpcode ||
*jmp_address == Assembler::kJcShortOpcode);
Condition cc = *jmp_address == Assembler::kJncShortOpcode
? not_zero
: zero;
*jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
} }

View File

@ -315,6 +315,13 @@ void LCodeGen::CallCode(Handle<Code> code,
__ call(code, mode); __ call(code, mode);
RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex); RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex);
} }
// Signal that we don't inline smi code before these stubs in the
// optimizing code generator.
if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
code->kind() == Code::COMPARE_IC) {
__ nop();
}
} }

View File

@ -2041,6 +2041,11 @@ MaybeObject* TypeRecordingBinaryOp_Patch(Arguments args) {
TRBinaryOpIC::GetName(result_type), TRBinaryOpIC::GetName(result_type),
Token::Name(op)); Token::Name(op));
} }
// Activate inlined smi code.
if (previous_type == TRBinaryOpIC::UNINITIALIZED) {
PatchInlinedSmiCode(ic.address());
}
} }
Handle<JSBuiltinsObject> builtins = Top::builtins(); Handle<JSBuiltinsObject> builtins = Top::builtins();
@ -2127,13 +2132,17 @@ const char* CompareIC::GetStateName(State state) {
} }
CompareIC::State CompareIC::TargetState(Handle<Object> x, Handle<Object> y) { CompareIC::State CompareIC::TargetState(State state,
State state = GetState(); bool has_inlined_smi_code,
if (state != UNINITIALIZED) return GENERIC; Handle<Object> x,
if (x->IsSmi() && y->IsSmi()) return SMIS; Handle<Object> y) {
if (x->IsNumber() && y->IsNumber()) return HEAP_NUMBERS; if (!has_inlined_smi_code && state != UNINITIALIZED) return GENERIC;
if (state == UNINITIALIZED && x->IsSmi() && y->IsSmi()) return SMIS;
if ((state == UNINITIALIZED || (state == SMIS && has_inlined_smi_code)) &&
x->IsNumber() && y->IsNumber()) return HEAP_NUMBERS;
if (op_ != Token::EQ && op_ != Token::EQ_STRICT) return GENERIC; if (op_ != Token::EQ && op_ != Token::EQ_STRICT) return GENERIC;
if (x->IsJSObject() && y->IsJSObject()) return OBJECTS; if (state == UNINITIALIZED &&
x->IsJSObject() && y->IsJSObject()) return OBJECTS;
return GENERIC; return GENERIC;
} }

View File

@ -582,7 +582,8 @@ class CompareIC: public IC {
static const char* GetStateName(State state); static const char* GetStateName(State state);
private: private:
State TargetState(Handle<Object> x, Handle<Object> y); State TargetState(State state, bool has_inlined_smi_code,
Handle<Object> x, Handle<Object> y);
bool strict() const { return op_ == Token::EQ_STRICT; } bool strict() const { return op_ == Token::EQ_STRICT; }
Condition GetCondition() const { return ComputeCondition(op_); } Condition GetCondition() const { return ComputeCondition(op_); }
@ -591,6 +592,8 @@ class CompareIC: public IC {
Token::Value op_; Token::Value op_;
}; };
// Helper for TRBinaryOpIC and CompareIC.
void PatchInlinedSmiCode(Address address);
} } // namespace v8::internal } } // namespace v8::internal

View File

@ -142,6 +142,9 @@ TypeInfo TypeFeedbackOracle::CompareType(CompareOperation* expr, Side side) {
CompareIC::State state = static_cast<CompareIC::State>(code->compare_state()); CompareIC::State state = static_cast<CompareIC::State>(code->compare_state());
switch (state) { switch (state) {
case CompareIC::UNINITIALIZED: case CompareIC::UNINITIALIZED:
// Uninitialized means never executed.
// TODO(fschneider): Introduce a separate value for never-executed ICs.
return unknown;
case CompareIC::SMIS: case CompareIC::SMIS:
return TypeInfo::Smi(); return TypeInfo::Smi();
case CompareIC::HEAP_NUMBERS: case CompareIC::HEAP_NUMBERS:
@ -184,6 +187,9 @@ TypeInfo TypeFeedbackOracle::BinaryType(BinaryOperation* expr, Side side) {
switch (type) { switch (type) {
case TRBinaryOpIC::UNINITIALIZED: case TRBinaryOpIC::UNINITIALIZED:
// Uninitialized means never executed.
// TODO(fschneider): Introduce a separate value for never-executed ICs
return unknown;
case TRBinaryOpIC::SMI: case TRBinaryOpIC::SMI:
switch (result_type) { switch (result_type) {
case TRBinaryOpIC::UNINITIALIZED: case TRBinaryOpIC::UNINITIALIZED:
@ -224,6 +230,9 @@ TypeInfo TypeFeedbackOracle::SwitchType(CaseClause* clause) {
CompareIC::State state = static_cast<CompareIC::State>(code->compare_state()); CompareIC::State state = static_cast<CompareIC::State>(code->compare_state());
switch (state) { switch (state) {
case CompareIC::UNINITIALIZED: case CompareIC::UNINITIALIZED:
// Uninitialized means never executed.
// TODO(fschneider): Introduce a separate value for never-executed ICs.
return unknown;
case CompareIC::SMIS: case CompareIC::SMIS:
return TypeInfo::Smi(); return TypeInfo::Smi();
case CompareIC::HEAP_NUMBERS: case CompareIC::HEAP_NUMBERS:

View File

@ -1951,10 +1951,8 @@ Condition CompareIC::ComputeCondition(Token::Value op) {
void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) { void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
HandleScope scope; HandleScope scope;
Handle<Code> rewritten; Handle<Code> rewritten;
#ifdef DEBUG
State previous_state = GetState(); State previous_state = GetState();
#endif State state = TargetState(previous_state, false, x, y);
State state = TargetState(x, y);
if (state == GENERIC) { if (state == GENERIC) {
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS); CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
rewritten = stub.GetCode(); rewritten = stub.GetCode();
@ -1974,6 +1972,10 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
#endif #endif
} }
void PatchInlinedSmiCode(Address address) {
UNIMPLEMENTED();
}
} } // namespace v8::internal } } // namespace v8::internal
#endif // V8_TARGET_ARCH_X64 #endif // V8_TARGET_ARCH_X64