MIPS: Implement clearing of CompareICs.

Port r11491 (705d40cc)

Original commit message:
Implement clearing of CompareICs.

This allows CompareICs to be cleared during garbage collection to avoid
cross-context garbage retention through maps stored in CompareIC stubs
for the KNOWN_OBJECTS state.

BUG=
TEST=

Review URL: https://chromiumcodereview.appspot.com/10342024
Patch from Akos Palfi <palfia@homejinni.com>.

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@11505 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
mstarzinger@chromium.org 2012-05-04 07:53:11 +00:00
parent a33c883008
commit 6531771c71
2 changed files with 22 additions and 25 deletions
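
The cross-context retention problem described in the commit message is fixed on the platform-independent side (r11491): during IC clearing in the GC, a CompareIC stub that has reached the KNOWN_OBJECTS state is reset to its uninitialized stub and the inlined smi check at the call site is switched back off, which is why PatchInlinedSmiCode now needs a disable mode. A rough sketch of that shared logic, reconstructed from memory of src/ic.cc at this revision (the helper names used here are assumptions, not part of this MIPS patch):

// Sketch only: approximate shape of the CompareIC clearing added in r11491.
// Helper names (ComputeOperation, GetRawUninitialized, SetTargetAtAddress)
// are reconstructed from memory and may not match the tree exactly.
void CompareIC::Clear(Address address, Code* target) {
  // Only stubs in the KNOWN_OBJECTS state embed maps, so only they can keep
  // another context's objects alive and need to be cleared.
  if (target->compare_state() != KNOWN_OBJECTS) return;
  Token::Value op = ComputeOperation(target);
  // Reset the call site to the uninitialized stub for this operation...
  SetTargetAtAddress(address, GetRawUninitialized(op));
  // ...and undo the inlined smi-check fast path, using the new mode that the
  // MIPS PatchInlinedSmiCode below has to support.
  PatchInlinedSmiCode(address, DISABLE_INLINED_SMI_CHECK);
}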

src/mips/ic-mips.cc

@@ -1688,12 +1688,12 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
   // Activate inlined smi code.
   if (previous_state == UNINITIALIZED) {
-    PatchInlinedSmiCode(address());
+    PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
   }
 }


-void PatchInlinedSmiCode(Address address) {
+void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
   Address andi_instruction_address =
       address + Assembler::kCallTargetAddressOffset;
@@ -1727,33 +1727,30 @@ void PatchInlinedSmiCode(Address address) {
   Instr instr_at_patch = Assembler::instr_at(patch_address);
   Instr branch_instr =
       Assembler::instr_at(patch_address + Instruction::kInstrSize);
-  ASSERT(Assembler::IsAndImmediate(instr_at_patch));
-  ASSERT_EQ(0, Assembler::GetImmediate16(instr_at_patch));
+  // This is patching a conditional "jump if not smi/jump if smi" site.
+  // Enabling by changing from
+  //   andi at, rx, 0
+  //   Branch <target>, eq, at, Operand(zero_reg)
+  // to:
+  //   andi at, rx, #kSmiTagMask
+  //   Branch <target>, ne, at, Operand(zero_reg)
+  // and vice-versa to be disabled again.
+  CodePatcher patcher(patch_address, 2);
+  Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
+  if (check == ENABLE_INLINED_SMI_CHECK) {
+    ASSERT(Assembler::IsAndImmediate(instr_at_patch));
+    ASSERT_EQ(0, Assembler::GetImmediate16(instr_at_patch));
+    patcher.masm()->andi(at, reg, kSmiTagMask);
+  } else {
+    ASSERT(check == DISABLE_INLINED_SMI_CHECK);
+    ASSERT(Assembler::IsAndImmediate(instr_at_patch));
+    patcher.masm()->andi(at, reg, 0);
+  }
   ASSERT(Assembler::IsBranch(branch_instr));
   if (Assembler::IsBeq(branch_instr)) {
-    // This is patching a "jump if not smi" site to be active.
-    // Changing:
-    //   andi at, rx, 0
-    //   Branch <target>, eq, at, Operand(zero_reg)
-    // to:
-    //   andi at, rx, #kSmiTagMask
-    //   Branch <target>, ne, at, Operand(zero_reg)
-    CodePatcher patcher(patch_address, 2);
-    Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
-    patcher.masm()->andi(at, reg, kSmiTagMask);
     patcher.ChangeBranchCondition(ne);
   } else {
     ASSERT(Assembler::IsBne(branch_instr));
-    // This is patching a "jump if smi" site to be active.
-    // Changing:
-    //   andi at, rx, 0
-    //   Branch <target>, ne, at, Operand(zero_reg)
-    // to:
-    //   andi at, rx, #kSmiTagMask
-    //   Branch <target>, eq, at, Operand(zero_reg)
-    CodePatcher patcher(patch_address, 2);
-    Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
-    patcher.masm()->andi(at, reg, kSmiTagMask);
     patcher.ChangeBranchCondition(eq);
   }
 }
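
The new check argument threaded through the hunks above is declared in the shared header, not in this file. For reference, roughly what src/ic.h exposes after r11491 (declaration reproduced from memory; treat the exact wording as an assumption):

// Shared declaration used by every architecture port (sketch of src/ic.h).
enum InlinedSmiCheck { ENABLE_INLINED_SMI_CHECK, DISABLE_INLINED_SMI_CHECK };

// ENABLE_INLINED_SMI_CHECK activates the inlined smi fast path once the IC
// has seen smis; DISABLE_INLINED_SMI_CHECK puts the call site back into its
// never-taken form when the IC is cleared during GC.
void PatchInlinedSmiCode(Address address, InlinedSmiCheck check);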

src/mips/macro-assembler-mips.cc

@@ -5378,7 +5378,7 @@ CodePatcher::CodePatcher(byte* address, int instructions)
     : address_(address),
       instructions_(instructions),
       size_(instructions * Assembler::kInstrSize),
-      masm_(Isolate::Current(), address, size_ + Assembler::kGap) {
+      masm_(NULL, address, size_ + Assembler::kGap) {
   // Create a new macro assembler pointing to the address of the code to patch.
   // The size is adjusted with kGap on order for the assembler to generate size
   // bytes of instructions without failing with buffer size constraints.
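
For readers unfamiliar with the MIPS CodePatcher: it wraps a MacroAssembler that emits directly over existing code, and its destructor flushes the instruction cache for the patched range, which is what lets PatchInlinedSmiCode rewrite two live instructions in place. A minimal usage sketch of that pattern, mirroring the call in ic-mips.cc above (not new code in this patch):

// Rewrite exactly two instructions starting at patch_address. When the
// patcher goes out of scope, its destructor flushes the icache for the
// patched range and asserts that exactly two instructions were emitted.
{
  CodePatcher patcher(patch_address, 2);       // room for 2 instructions
  patcher.masm()->andi(at, reg, kSmiTagMask);  // re-emit the test instruction
  patcher.ChangeBranchCondition(ne);           // flip the branch that follows
}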