Revert r5970 and r5975.

Review URL: http://codereview.chromium.org/5717005

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5977 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
fschneider@chromium.org 2010-12-13 10:41:50 +00:00
parent 9977abb42a
commit b1a2cc1e48
11 changed files with 65 additions and 223 deletions

View File

@ -2360,8 +2360,10 @@ Condition CompareIC::ComputeCondition(Token::Value op) {
void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
HandleScope scope;
Handle<Code> rewritten;
#ifdef DEBUG
State previous_state = GetState();
State state = TargetState(previous_state, x, y);
#endif
State state = TargetState(x, y);
if (state == GENERIC) {
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0);
rewritten = stub.GetCode();
@ -2381,12 +2383,6 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
#endif
}
// ARM port: inlined smi-code patching is not implemented here. The shared
// IC code still references this symbol, so provide a stub that aborts
// (via UNIMPLEMENTED) if it is ever reached on this architecture.
void PatchInlinedSmiCode(Address address) {
UNIMPLEMENTED();
}
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_ARM

View File

@ -38,9 +38,6 @@
namespace v8 {
namespace internal {
// Forward declarations.
class JumpPatchSite;
// AST node visitor which can tell whether a given statement will be breakable
// when the code is compiled by the full compiler in the debugger. This means
// that there will be an IC (load/store/call) in the code generated for the
@ -536,10 +533,6 @@ class FullCodeGenerator: public AstVisitor {
// Helper for calling an IC stub.
void EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode);
// Calling an IC stub with a patch site. Passing NULL for patch_site
// indicates no inlined smi code and emits a nop after the IC call.
void EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site);
// Set fields in the stack frame. Offsets are the frame pointer relative
// offsets defined in, e.g., StandardFrameConstants.
void StoreToFrameField(int frame_offset, Register value);

View File

@ -571,14 +571,6 @@ class Assembler : public Malloced {
static const byte kTestEaxByte = 0xA9;
// One byte opcode for test al, 0xXX.
static const byte kTestAlByte = 0xA8;
// One byte opcode for nop.
static const byte kNopByte = 0x90;
// One byte opcode for a short unconditional jump.
static const byte kJmpShortOpcode = 0xEB;
// One byte prefix for a short conditional jump.
static const byte kJccShortPrefix = 0x70;
// ---------------------------------------------------------------------------
// Code generation

View File

@ -1366,8 +1366,8 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
if (op_ == Token::DIV || op_ == Token::MOD) {
left = eax;
right = ebx;
__ mov(ebx, eax);
__ mov(eax, edx);
__ mov(ebx, eax);
__ mov(eax, edx);
}

View File

@ -231,8 +231,7 @@ class TypeRecordingBinaryOpStub: public CodeStub {
ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
}
TypeRecordingBinaryOpStub(
int key,
TypeRecordingBinaryOpStub(int key,
TRBinaryOpIC::TypeInfo operands_type,
TRBinaryOpIC::TypeInfo result_type = TRBinaryOpIC::UNINITIALIZED)
: op_(OpBits::decode(key)),
@ -240,7 +239,8 @@ class TypeRecordingBinaryOpStub: public CodeStub {
use_sse3_(SSE3Bits::decode(key)),
operands_type_(operands_type),
result_type_(result_type),
name_(NULL) { }
name_(NULL) {
}
// Generate code to call the stub with the supplied arguments. This will add
// code at the call site to prepare arguments either in registers or on the

View File

@ -41,46 +41,6 @@
namespace v8 {
namespace internal {
// Records the location of an unconditional jump emitted at an IC call
// site so that PatchInlinedSmiCode can later rewrite it into a
// conditional jump, activating the inlined smi fast path.
class JumpPatchSite BASE_EMBEDDED {
public:
explicit JumpPatchSite(MacroAssembler* masm)
: masm_(masm) {
#ifdef DEBUG
info_emitted_ = false;  // Set once EmitPatchInfo() has run.
#endif
}
~JumpPatchSite() {
// If a patchable jump was emitted, patch info must also have been
// emitted before the site goes out of scope (and vice versa).
ASSERT(patch_site_.is_bound() == info_emitted_);
}
// Binds the patch-site label and emits the unconditional jump that the
// patcher will later turn into a conditional one.
void EmitJump(NearLabel* target) {
ASSERT(!patch_site_.is_bound() && !info_emitted_);
masm_->bind(&patch_site_);
masm_->jmp(target);
}
// Emits a test instruction whose immediate operand encodes the distance
// back to the patch site; the patcher reads that byte to locate the jump.
void EmitPatchInfo() {
int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
ASSERT(is_int8(delta_to_patch_site));  // Delta must fit in one byte.
masm_->test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
info_emitted_ = true;
#endif
}
// True once EmitJump has placed the patch-site label.
bool is_bound() const { return patch_site_.is_bound(); }
private:
MacroAssembler* masm_;
Label patch_site_;
#ifdef DEBUG
bool info_emitted_;  // Debug-only bookkeeping checked by the destructor.
#endif
};
#define __ ACCESS_MASM(masm_)
// Generate code for a JS function. On entry to the function the receiver
@ -755,13 +715,12 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Perform the comparison as if via '==='.
__ mov(edx, Operand(esp, 0)); // Switch value.
bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
JumpPatchSite patch_site(masm_);
if (inline_smi_code) {
NearLabel slow_case;
__ mov(ecx, edx);
__ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask));
patch_site.EmitJump(&slow_case);
__ j(not_zero, &slow_case, not_taken);
__ cmp(edx, Operand(eax));
__ j(not_equal, &next_test);
__ Drop(1); // Switch value is no longer needed.
@ -771,8 +730,9 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
EmitCallIC(ic, &patch_site);
__ call(ic, RelocInfo::CODE_TARGET);
__ test(eax, Operand(eax));
__ j(not_equal, &next_test);
@ -1592,13 +1552,12 @@ void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
OverwriteMode mode,
bool left_is_constant_smi,
Smi* value) {
NearLabel call_stub, done;
NearLabel call_stub;
Label done;
__ add(Operand(eax), Immediate(value));
__ j(overflow, &call_stub);
__ test(eax, Immediate(kSmiTagMask));
JumpPatchSite patch_site(masm_);
patch_site.EmitJump(&call_stub);
__ jmp(&done);
__ j(zero, &done);
// Undo the optimistic add operation and call the shared stub.
__ bind(&call_stub);
@ -1611,8 +1570,7 @@ void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
__ mov(edx, eax);
__ mov(eax, Immediate(value));
}
EmitCallIC(stub.GetCode(), &patch_site);
__ CallStub(&stub);
__ bind(&done);
context()->Plug(eax);
}
@ -1622,7 +1580,7 @@ void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
OverwriteMode mode,
bool left_is_constant_smi,
Smi* value) {
NearLabel call_stub, done;
Label call_stub, done;
if (left_is_constant_smi) {
__ mov(ecx, eax);
__ mov(eax, Immediate(value));
@ -1632,9 +1590,7 @@ void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
}
__ j(overflow, &call_stub);
__ test(eax, Immediate(kSmiTagMask));
JumpPatchSite patch_site(masm_);
patch_site.EmitJump(&call_stub);
__ jmp(&done);
__ j(zero, &done);
__ bind(&call_stub);
if (left_is_constant_smi) {
@ -1647,8 +1603,7 @@ void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
}
Token::Value op = Token::SUB;
TypeRecordingBinaryOpStub stub(op, mode);
EmitCallIC(stub.GetCode(), &patch_site);
__ CallStub(&stub);
__ bind(&done);
context()->Plug(eax);
}
@ -1658,15 +1613,20 @@ void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
Token::Value op,
OverwriteMode mode,
Smi* value) {
NearLabel call_stub, done;
Label call_stub, smi_case, done;
int shift_value = value->value() & 0x1f;
__ test(eax, Immediate(kSmiTagMask));
// Patch site.
JumpPatchSite patch_site(masm_);
patch_site.EmitJump(&call_stub);
__ j(zero, &smi_case);
// Smi case.
__ bind(&call_stub);
__ mov(edx, eax);
__ mov(eax, Immediate(value));
TypeRecordingBinaryOpStub stub(op, mode);
__ CallStub(&stub);
__ jmp(&done);
__ bind(&smi_case);
switch (op) {
case Token::SHL:
if (shift_value != 0) {
@ -1705,14 +1665,6 @@ void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
default:
UNREACHABLE();
}
__ jmp(&done);
// Call stub.
__ bind(&call_stub);
__ mov(edx, eax);
__ mov(eax, Immediate(value));
TypeRecordingBinaryOpStub stub(op, mode);
EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
context()->Plug(eax);
@ -1723,14 +1675,18 @@ void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr,
Token::Value op,
OverwriteMode mode,
Smi* value) {
NearLabel call_stub, done;
Label smi_case, done;
__ test(eax, Immediate(kSmiTagMask));
// Patch site. The first invocation of the stub will be patch the jmp with
// the required conditional jump.
JumpPatchSite patch_site(masm_);
patch_site.EmitJump(&call_stub);
__ j(zero, &smi_case);
// Smi case.
// The order of the arguments does not matter for bit-ops with a
// constant operand.
__ mov(edx, Immediate(value));
TypeRecordingBinaryOpStub stub(op, mode);
__ CallStub(&stub);
__ jmp(&done);
__ bind(&smi_case);
switch (op) {
case Token::BIT_OR:
__ or_(Operand(eax), Immediate(value));
@ -1744,14 +1700,6 @@ void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr,
default:
UNREACHABLE();
}
__ jmp(&done);
// The order of the arguments does not matter for bit-ops with a
// constant operand.
__ bind(&call_stub);
__ mov(edx, Immediate(value));
TypeRecordingBinaryOpStub stub(op, mode);
EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
context()->Plug(eax);
@ -1805,15 +1753,20 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
// Do combined smi check of the operands. Left operand is on the
// stack. Right operand is in eax.
NearLabel done, stub_call;
Label done, stub_call, smi_case;
__ pop(edx);
__ mov(ecx, eax);
__ or_(eax, Operand(edx));
__ test(eax, Immediate(kSmiTagMask));
JumpPatchSite patch_site(masm_);
patch_site.EmitJump(&stub_call);
__ j(zero, &smi_case);
// Smi case.
__ bind(&stub_call);
__ mov(eax, ecx);
TypeRecordingBinaryOpStub stub(op, mode);
__ CallStub(&stub);
__ jmp(&done);
__ bind(&smi_case);
__ mov(eax, edx); // Copy left operand in case of a stub call.
switch (op) {
@ -1881,12 +1834,6 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
default:
UNREACHABLE();
}
__ jmp(&done);
__ bind(&stub_call);
__ mov(eax, ecx);
TypeRecordingBinaryOpStub stub(op, mode);
EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
context()->Plug(eax);
@ -1897,7 +1844,7 @@ void FullCodeGenerator::EmitBinaryOp(Token::Value op,
OverwriteMode mode) {
__ pop(edx);
TypeRecordingBinaryOpStub stub(op, mode);
EmitCallIC(stub.GetCode(), NULL); // NULL signals no inlined smi code.
__ CallStub(&stub);
context()->Plug(eax);
}
@ -3762,7 +3709,6 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Inline smi case if we are in a loop.
NearLabel stub_call;
JumpPatchSite patch_site(masm_);
Label done;
if (ShouldInlineSmiCase(expr->op())) {
if (expr->op() == Token::INC) {
@ -3774,9 +3720,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// We could eliminate this smi check if we split the code at
// the first smi check before calling ToNumber.
__ test(eax, Immediate(kSmiTagMask));
patch_site.EmitJump(&stub_call);
__ jmp(&done);
__ j(zero, &done);
__ bind(&stub_call);
// Call stub. Undo operation first.
if (expr->op() == Token::INC) {
@ -3794,9 +3738,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ mov(eax, Immediate(Smi::FromInt(1)));
TypeRecordingBinaryOpStub stub(expr->binary_op(),
NO_OVERWRITE);
EmitCallIC(stub.GetCode(), &patch_site);
__ CallStub(&stub);
__ bind(&done);
// Store the value returned in eax.
switch (assign_type) {
case VARIABLE:
@ -4061,23 +4005,21 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
}
bool inline_smi_code = ShouldInlineSmiCase(op);
JumpPatchSite patch_site(masm_);
if (inline_smi_code) {
NearLabel slow_case;
__ mov(ecx, Operand(edx));
__ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask));
patch_site.EmitJump(&slow_case);
__ j(not_zero, &slow_case, not_taken);
__ cmp(edx, Operand(eax));
Split(cc, if_true, if_false, NULL);
__ bind(&slow_case);
}
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
EmitCallIC(ic, &patch_site);
SetSourcePosition(expr->position());
__ call(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ test(eax, Operand(eax));
Split(cc, if_true, if_false, fall_through);
@ -4181,16 +4123,6 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
}
// Calls an IC stub at a (possibly) patchable call site. If patch_site is
// non-NULL and its jump was emitted, follow the call with the patch info
// (a test instruction encoding the jump's offset); otherwise emit a nop,
// which tells the patcher that no smi code was inlined at this site.
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
__ call(ic, RelocInfo::CODE_TARGET);
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
} else {
__ nop(); // Signals no inlined code.
}
}
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
__ mov(Operand(ebp, frame_offset), value);

View File

@ -2049,23 +2049,13 @@ Condition CompareIC::ComputeCondition(Token::Value op) {
}
// Returns true if the IC call at 'address' was followed by inlined smi
// code: in that case the instruction after the call is a 'test al'
// (emitted by JumpPatchSite::EmitPatchInfo) rather than a nop.
static bool HasInlinedSmiCode(Address address) {
// The address of the instruction following the call.
Address test_instruction_address =
address + Assembler::kCallTargetAddressOffset;
// If the instruction following the call is not a test al, nothing
// was inlined.
return *test_instruction_address == Assembler::kTestAlByte;
}
void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
HandleScope scope;
Handle<Code> rewritten;
#ifdef DEBUG
State previous_state = GetState();
State state = TargetState(previous_state, HasInlinedSmiCode(address()), x, y);
#endif
State state = TargetState(x, y);
if (state == GENERIC) {
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
rewritten = stub.GetCode();
@ -2083,40 +2073,6 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
Token::Name(op_));
}
#endif
// Activate inlined smi code.
if (previous_state == UNINITIALIZED) {
PatchInlinedSmiCode(address());
}
}
// Activates the inlined smi code at an IC call site: locates the
// unconditional short jump recorded by the JumpPatchSite (via the delta
// byte embedded in the trailing 'test al' instruction) and rewrites it
// into a short 'jnz', so only non-smi operands take the slow path.
void PatchInlinedSmiCode(Address address) {
// The address of the instruction following the call.
Address test_instruction_address =
address + Assembler::kCallTargetAddressOffset;
// If the instruction following the call is not a test al, nothing
// was inlined.
if (*test_instruction_address != Assembler::kTestAlByte) {
ASSERT(*test_instruction_address == Assembler::kNopByte);
return;
}
// The delta byte is the immediate operand of the test instruction.
Address delta_address = test_instruction_address + 1;
// The delta to the start of the map check instruction and the
// condition code uses at the patched jump.
int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
if (FLAG_trace_ic) {
PrintF("[ patching ic at %p, test=%p, delta=%d\n",
address, test_instruction_address, delta);
}
// Patch with a short conditional jump. There must be an unconditional
// short jump at this position.
Address jmp_address = test_instruction_address - delta;
ASSERT(*jmp_address == Assembler::kJmpShortOpcode);
*jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | not_zero);
}

View File

@ -315,13 +315,6 @@ void LCodeGen::CallCode(Handle<Code> code,
__ call(code, mode);
RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex);
}
// Signal that we don't inline smi code before these stubs in the
// optimizing code generator.
if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
code->kind() == Code::COMPARE_IC) {
__ nop();
}
}

View File

@ -1951,7 +1951,7 @@ TRBinaryOpIC::State TRBinaryOpIC::ToState(TypeInfo type_info) {
TRBinaryOpIC::TypeInfo TRBinaryOpIC::JoinTypes(TRBinaryOpIC::TypeInfo x,
TRBinaryOpIC::TypeInfo y) {
TRBinaryOpIC::TypeInfo y) {
if (x == UNINITIALIZED) return y;
if (y == UNINITIALIZED) return x;
if (x == STRING && y == STRING) return STRING;
@ -2041,11 +2041,6 @@ MaybeObject* TypeRecordingBinaryOp_Patch(Arguments args) {
TRBinaryOpIC::GetName(result_type),
Token::Name(op));
}
// Activate inlined smi code.
if (previous_type == TRBinaryOpIC::UNINITIALIZED) {
PatchInlinedSmiCode(ic.address());
}
}
Handle<JSBuiltinsObject> builtins = Top::builtins();
@ -2132,17 +2127,13 @@ const char* CompareIC::GetStateName(State state) {
}
CompareIC::State CompareIC::TargetState(State state,
bool has_inlined_smi_code,
Handle<Object> x,
Handle<Object> y) {
if (!has_inlined_smi_code && state != UNINITIALIZED) return GENERIC;
if (state == UNINITIALIZED && x->IsSmi() && y->IsSmi()) return SMIS;
if ((state == UNINITIALIZED || (state == SMIS && has_inlined_smi_code)) &&
x->IsNumber() && y->IsNumber()) return HEAP_NUMBERS;
CompareIC::State CompareIC::TargetState(Handle<Object> x, Handle<Object> y) {
State state = GetState();
if (state != UNINITIALIZED) return GENERIC;
if (x->IsSmi() && y->IsSmi()) return SMIS;
if (x->IsNumber() && y->IsNumber()) return HEAP_NUMBERS;
if (op_ != Token::EQ && op_ != Token::EQ_STRICT) return GENERIC;
if (state == UNINITIALIZED &&
x->IsJSObject() && y->IsJSObject()) return OBJECTS;
if (x->IsJSObject() && y->IsJSObject()) return OBJECTS;
return GENERIC;
}

View File

@ -582,10 +582,7 @@ class CompareIC: public IC {
static const char* GetStateName(State state);
private:
State TargetState(State state,
bool has_inlined_smi_code,
Handle<Object> x,
Handle<Object> y);
State TargetState(Handle<Object> x, Handle<Object> y);
bool strict() const { return op_ == Token::EQ_STRICT; }
Condition GetCondition() const { return ComputeCondition(op_); }
@ -594,8 +591,6 @@ class CompareIC: public IC {
Token::Value op_;
};
// Helper for TRBinaryOpIC and CompareIC.
void PatchInlinedSmiCode(Address address);
} } // namespace v8::internal

View File

@ -142,8 +142,6 @@ TypeInfo TypeFeedbackOracle::CompareType(CompareOperation* expr, Side side) {
CompareIC::State state = static_cast<CompareIC::State>(code->compare_state());
switch (state) {
case CompareIC::UNINITIALIZED:
// Uninitialized state means never executed.
return unknown;
case CompareIC::SMIS:
return TypeInfo::Smi();
case CompareIC::HEAP_NUMBERS:
@ -186,8 +184,6 @@ TypeInfo TypeFeedbackOracle::BinaryType(BinaryOperation* expr, Side side) {
switch (type) {
case TRBinaryOpIC::UNINITIALIZED:
// Uninitialized state means never executed.
return unknown;
case TRBinaryOpIC::SMI:
switch (result_type) {
case TRBinaryOpIC::UNINITIALIZED:
@ -228,8 +224,6 @@ TypeInfo TypeFeedbackOracle::SwitchType(CaseClause* clause) {
CompareIC::State state = static_cast<CompareIC::State>(code->compare_state());
switch (state) {
case CompareIC::UNINITIALIZED:
// Uninitialized state means never executed.
return unknown;
case CompareIC::SMIS:
return TypeInfo::Smi();
case CompareIC::HEAP_NUMBERS: