From c690c2ba7764b18f3379c6b0650abd2fcc8b2dbd Mon Sep 17 00:00:00 2001
From: "erik.corry@gmail.com"
Date: Tue, 27 Apr 2010 19:24:36 +0000
Subject: [PATCH] Change the LoadIC calling convention so that the receiver is
 both on top of the stack and in r0.

This makes sense because the receiver is usually in r0 anyway.  We may
remove it from the stack later.  Also removes some spilled scopes from
the code generator, allowing it to keep expression temporaries in
registers more often.  Illustrative sketches of the new convention and
of the virtual-frame spilling follow the diff.

Review URL: http://codereview.chromium.org/1751019

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@4518 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/arm/codegen-arm.cc       | 66 ++++++++++++++++++++++--------------
 src/arm/codegen-arm.h        |  2 --
 src/arm/debug-arm.cc         |  2 +-
 src/arm/full-codegen-arm.cc  | 15 ++++----
 src/arm/ic-arm.cc            | 59 ++++++++++++++++----------------
 src/arm/stub-cache-arm.cc    | 15 ++++----
 src/arm/virtual-frame-arm.cc | 27 +++++++++++++++
 src/arm/virtual-frame-arm.h  |  4 +++
 8 files changed, 116 insertions(+), 74 deletions(-)

diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index 1c34f06a87..cc749c1ff6 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -570,9 +570,9 @@ void CodeGenerator::Load(Expression* expr) {
 
 
 void CodeGenerator::LoadGlobal() {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
-  __ ldr(r0, GlobalObject());
-  frame_->EmitPush(r0);
+  Register reg = frame_->GetTOSRegister();
+  __ ldr(reg, GlobalObject());
+  frame_->EmitPush(reg);
 }
 
 
@@ -687,7 +687,6 @@ Reference::~Reference() {
 
 
 void CodeGenerator::LoadReference(Reference* ref) {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ LoadReference");
   Expression* e = ref->expression();
   Property* property = e->AsProperty();
@@ -696,11 +695,11 @@ void CodeGenerator::LoadReference(Reference* ref) {
   if (property != NULL) {
     // The expression is either a property or a variable proxy that rewrites
     // to a property.
-    LoadAndSpill(property->obj());
+    Load(property->obj());
     if (property->key()->IsPropertyName()) {
       ref->set_type(Reference::NAMED);
     } else {
-      LoadAndSpill(property->key());
+      Load(property->key());
       ref->set_type(Reference::KEYED);
     }
   } else if (var != NULL) {
@@ -715,6 +714,7 @@ void CodeGenerator::LoadReference(Reference* ref) {
     }
   } else {
     // Anything else is a runtime error.
+    VirtualFrame::SpilledScope spilled_scope(frame_);
     LoadAndSpill(e);
     frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
   }
@@ -1527,6 +1527,7 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
   LoadAndSpill(applicand);
   Handle<String> name = Factory::LookupAsciiSymbol("apply");
   __ mov(r2, Operand(name));
+  __ ldr(r0, MemOperand(sp, 0));
   frame_->CallLoadIC(RelocInfo::CODE_TARGET);
   frame_->EmitPush(r0);
 
@@ -2948,9 +2949,10 @@ void CodeGenerator::VisitConditional(Conditional* node) {
 
 void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
   if (slot->type() == Slot::LOOKUP) {
-    VirtualFrame::SpilledScope spilled_scope(frame_);
     ASSERT(slot->var()->is_dynamic());
 
+    // JumpTargets do not yet support merging frames so the frame must be
+    // spilled when jumping to these targets.
     JumpTarget slow;
     JumpTarget done;
 
@@ -2960,16 +2962,18 @@ void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
     // perform a runtime call for all variables in the scope
     // containing the eval.
     if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
-      LoadFromGlobalSlotCheckExtensions(slot, typeof_state, r1, r2, &slow);
+      LoadFromGlobalSlotCheckExtensions(slot, typeof_state, &slow);
       // If there was no control flow to slow, we can exit early.
       if (!slow.is_linked()) {
         frame_->EmitPush(r0);
         return;
       }
+      frame_->SpillAll();
 
       done.Jump();
 
     } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
+      frame_->SpillAll();
       Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
       // Only generate the fast case for locals that rewrite to slots.
       // This rules out argument loads.
@@ -2992,6 +2996,7 @@ void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
     }
 
     slow.Bind();
+    VirtualFrame::SpilledScope spilled_scope(frame_);
     frame_->EmitPush(cp);
     __ mov(r0, Operand(slot->var()->name()));
     frame_->EmitPush(r0);
@@ -3143,16 +3148,17 @@ void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
 
 void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                                       TypeofState typeof_state,
-                                                      Register tmp,
-                                                      Register tmp2,
                                                       JumpTarget* slow) {
   // Check that no extension objects have been created by calls to
   // eval from the current scope to the global scope.
+  Register tmp = frame_->scratch0();
+  Register tmp2 = frame_->scratch1();
   Register context = cp;
   Scope* s = scope();
   while (s != NULL) {
     if (s->num_heap_slots() > 0) {
       if (s->calls_eval()) {
+        frame_->SpillAll();
         // Check that extension is NULL.
         __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
         __ tst(tmp2, tmp2);
@@ -3170,6 +3176,7 @@ void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
   }
 
   if (s->is_eval_scope()) {
+    frame_->SpillAll();
    Label next, fast;
    __ Move(tmp, context);
    __ bind(&next);
@@ -3192,6 +3199,7 @@ void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
   // Load the global object.
   LoadGlobal();
   // Setup the name register and call load IC.
+  frame_->SpillAllButCopyTOSToR0();
   __ mov(r2, Operand(slot->var()->name()));
   frame_->CallLoadIC(typeof_state == INSIDE_TYPEOF
                          ? RelocInfo::CODE_TARGET
@@ -3524,7 +3532,6 @@ void CodeGenerator::VisitProperty(Property* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ Property");
 
   { Reference property(this, node);
@@ -5241,7 +5248,9 @@ void DeferredReferenceGetNamedValue::Generate() {
   __ DecrementCounter(&Counters::named_load_inline, 1, r1, r2);
   __ IncrementCounter(&Counters::named_load_inline_miss, 1, r1, r2);
 
-  // Setup the name register and call load IC.
+  // Setup the registers and call load IC.
+  // On entry to this deferred code, r0 is assumed to already contain the
+  // receiver from the top of the stack.
   __ mov(r2, Operand(name_));
 
   // The rest of the instructions in the deferred code must be together.
@@ -5295,6 +5304,7 @@ void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
   if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
     Comment cmnt(masm(), "[ Load from named Property");
     // Setup the name register and call load IC.
+    frame_->SpillAllButCopyTOSToR0();
     __ mov(r2, Operand(name));
     frame_->CallLoadIC(is_contextual
                            ? RelocInfo::CODE_TARGET_CONTEXT
@@ -5303,9 +5313,6 @@ void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
     // Inline the in-object property case.
     Comment cmnt(masm(), "[ Inlined named property load");
 
-    DeferredReferenceGetNamedValue* deferred =
-        new DeferredReferenceGetNamedValue(name);
-
     // Counter will be decremented in the deferred code. Placed here to avoid
     // having it in the instruction stream below where patching will occur.
     __ IncrementCounter(&Counters::named_load_inline, 1,
@@ -5315,29 +5322,34 @@ void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
     // Parts of this code is patched, so the exact instructions generated needs
     // to be fixed. Therefore the instruction pool is blocked when generating
     // this code
+
+    // Load the receiver from the stack.
+    frame_->SpillAllButCopyTOSToR0();
+
+    DeferredReferenceGetNamedValue* deferred =
+        new DeferredReferenceGetNamedValue(name);
+
 #ifdef DEBUG
-    int kInlinedNamedLoadInstructions = 8;
+    int kInlinedNamedLoadInstructions = 7;
     Label check_inlined_codesize;
     masm_->bind(&check_inlined_codesize);
 #endif
-    { Assembler::BlockConstPoolScope block_const_pool(masm_);
-      // Load the receiver from the stack.
-      __ ldr(r1, MemOperand(sp, 0));
 
+    { Assembler::BlockConstPoolScope block_const_pool(masm_);
       // Check that the receiver is a heap object.
-      __ tst(r1, Operand(kSmiTagMask));
+      __ tst(r0, Operand(kSmiTagMask));
       deferred->Branch(eq);
 
       // Check the map. The null map used below is patched by the inline cache
       // code.
-      __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
+      __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
       __ mov(r3, Operand(Factory::null_value()));
       __ cmp(r2, r3);
       deferred->Branch(ne);
 
       // Initially use an invalid index. The index will be patched by the
       // inline cache code.
-      __ ldr(r0, MemOperand(r1, 0));
+      __ ldr(r0, MemOperand(r0, 0));
 
       // Make sure that the expected number of instructions are generated.
       ASSERT_EQ(kInlinedNamedLoadInstructions,
@@ -5351,22 +5363,24 @@ void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
 
 void CodeGenerator::EmitKeyedLoad() {
   if (loop_nesting() == 0) {
+    VirtualFrame::SpilledScope spilled(frame_);
     Comment cmnt(masm_, "[ Load from keyed property");
     frame_->CallKeyedLoadIC();
   } else {
     // Inline the keyed load.
     Comment cmnt(masm_, "[ Inlined load from keyed property");
 
-    DeferredReferenceGetKeyedValue* deferred =
-        new DeferredReferenceGetKeyedValue();
-
     // Counter will be decremented in the deferred code. Placed here to avoid
     // having it in the instruction stream below where patching will occur.
     __ IncrementCounter(&Counters::keyed_load_inline, 1,
                         frame_->scratch0(), frame_->scratch1());
 
     // Load the receiver from the stack.
-    __ ldr(r0, MemOperand(sp, kPointerSize));
+    frame_->SpillAllButCopyTOSToR0();
+    VirtualFrame::SpilledScope spilled(frame_);
+
+    DeferredReferenceGetKeyedValue* deferred =
+        new DeferredReferenceGetKeyedValue();
 
     // Check that the receiver is a heap object.
     __ tst(r0, Operand(kSmiTagMask));
diff --git a/src/arm/codegen-arm.h b/src/arm/codegen-arm.h
index ea990caeb9..5d5f50b99d 100644
--- a/src/arm/codegen-arm.h
+++ b/src/arm/codegen-arm.h
@@ -322,8 +322,6 @@ class CodeGenerator: public AstVisitor {
 
   void LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                          TypeofState typeof_state,
-                                         Register tmp,
-                                         Register tmp2,
                                          JumpTarget* slow);
 
   // Special code for typeof expressions: Unfortunately, we must
diff --git a/src/arm/debug-arm.cc b/src/arm/debug-arm.cc
index 1aca7cee44..d8149f0928 100644
--- a/src/arm/debug-arm.cc
+++ b/src/arm/debug-arm.cc
@@ -133,9 +133,9 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
 
 void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) {
   // Calling convention for IC load (from ic-arm.cc).
   // ----------- S t a t e -------------
-  //  -- r0    : receiver
   //  -- r2    : name
   //  -- lr    : return address
+  //  -- r0    : receiver
   //  -- [sp]  : receiver
   // -----------------------------------
   // Registers r0 and r2 contain objects that need to be pushed on the
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 10ccf9cf15..bed93640ff 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -125,7 +125,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info, Mode mode) {
       __ add(r2, fp,
              Operand(StandardFrameConstants::kCallerSPOffset + offset));
       __ mov(r1, Operand(Smi::FromInt(scope()->num_parameters())));
-      __ stm(db_w, sp, r3.bit() | r2.bit() | r1.bit());
+      __ Push(r3, r2, r1);
 
       // Arguments to ArgumentsAccessStub:
       //   function, receiver address, parameter count.
@@ -696,8 +696,8 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var,
     Comment cmnt(masm_, "Global variable");
     // Use inline caching. Variable name is passed in r2 and the global
     // object on the stack.
-    __ ldr(ip, CodeGenerator::GlobalObject());
-    __ push(ip);
+    __ ldr(r0, CodeGenerator::GlobalObject());
+    __ push(r0);
     __ mov(r2, Operand(var->name()));
     Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
     __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
@@ -739,7 +739,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var,
       __ mov(r1, Operand(key_literal->handle()));
 
       // Push both as arguments to ic.
-      __ stm(db_w, sp, r2.bit() | r1.bit());
+      __ Push(r2, r1);
 
       // Do a keyed property load.
       Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
@@ -771,7 +771,7 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
   __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
   __ mov(r2, Operand(expr->pattern()));
   __ mov(r1, Operand(expr->flags()));
-  __ stm(db_w, sp, r4.bit() | r3.bit() | r2.bit() | r1.bit());
+  __ Push(r4, r3, r2, r1);
   __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
   __ bind(&done);
   Apply(context_, r0);
@@ -785,7 +785,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
   __ mov(r1, Operand(expr->constant_properties()));
   __ mov(r0, Operand(Smi::FromInt(expr->fast_elements() ? 1 : 0)));
-  __ stm(db_w, sp, r3.bit() | r2.bit() | r1.bit() | r0.bit());
+  __ Push(r3, r2, r1, r0);
   if (expr->depth() > 1) {
     __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
   } else {
@@ -860,7 +860,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
   __ mov(r1, Operand(expr->constant_elements()));
-  __ stm(db_w, sp, r3.bit() | r2.bit() | r1.bit());
+  __ Push(r3, r2, r1);
   if (expr->depth() > 1) {
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else {
@@ -997,6 +997,7 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
   Literal* key = prop->key()->AsLiteral();
   __ mov(r2, Operand(key->handle()));
+  __ ldr(r0, MemOperand(sp, 0));
   Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
   __ Call(ic, RelocInfo::CODE_TARGET);
 }
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index 17584e0633..e1d36f02f1 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -61,6 +61,7 @@ static void GenerateDictionaryLoad(MacroAssembler* masm,
   //                   dictionary.
   //
   // r2 - holds the name of the property and is unchanged.
+  // r4 - used as temporary.
Label done; @@ -108,25 +109,25 @@ static void GenerateDictionaryLoad(MacroAssembler* masm, static const int kProbes = 4; for (int i = 0; i < kProbes; i++) { // Compute the masked index: (hash + i + i * i) & mask. - __ ldr(t1, FieldMemOperand(r2, String::kHashFieldOffset)); + __ ldr(r4, FieldMemOperand(r2, String::kHashFieldOffset)); if (i > 0) { // Add the probe offset (i + i * i) left shifted to avoid right shifting // the hash in a separate instruction. The value hash + i + i * i is right // shifted in the following and instruction. ASSERT(StringDictionary::GetProbeOffset(i) < 1 << (32 - String::kHashFieldOffset)); - __ add(t1, t1, Operand( + __ add(r4, r4, Operand( StringDictionary::GetProbeOffset(i) << String::kHashShift)); } - __ and_(t1, r3, Operand(t1, LSR, String::kHashShift)); + __ and_(r4, r3, Operand(r4, LSR, String::kHashShift)); // Scale the index by multiplying by the element size. ASSERT(StringDictionary::kEntrySize == 3); - __ add(t1, t1, Operand(t1, LSL, 1)); // t1 = t1 * 3 + __ add(r4, r4, Operand(r4, LSL, 1)); // r4 = r4 * 3 // Check if the key is identical to the name. - __ add(t1, t0, Operand(t1, LSL, 2)); - __ ldr(ip, FieldMemOperand(t1, kElementsStartOffset)); + __ add(r4, t0, Operand(r4, LSL, 2)); + __ ldr(ip, FieldMemOperand(r4, kElementsStartOffset)); __ cmp(r2, Operand(ip)); if (i != kProbes - 1) { __ b(eq, &done); @@ -136,13 +137,13 @@ static void GenerateDictionaryLoad(MacroAssembler* masm, } // Check that the value is a normal property. - __ bind(&done); // t1 == t0 + 4*index - __ ldr(r3, FieldMemOperand(t1, kElementsStartOffset + 2 * kPointerSize)); + __ bind(&done); // r4 == t0 + 4*index + __ ldr(r3, FieldMemOperand(r4, kElementsStartOffset + 2 * kPointerSize)); __ tst(r3, Operand(PropertyDetails::TypeField::mask() << kSmiTagSize)); __ b(ne, miss); // Get the value at the masked, scaled index and return. - __ ldr(t1, FieldMemOperand(t1, kElementsStartOffset + 1 * kPointerSize)); + __ ldr(t1, FieldMemOperand(r4, kElementsStartOffset + 1 * kPointerSize)); } @@ -239,12 +240,11 @@ void LoadIC::GenerateArrayLength(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- r2 : name // -- lr : return address - // -- [sp] : receiver + // -- r0 : receiver + // -- sp[0] : receiver // ----------------------------------- Label miss; - __ ldr(r0, MemOperand(sp, 0)); - StubCompiler::GenerateLoadArrayLength(masm, r0, r3, &miss); __ bind(&miss); StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC); @@ -255,12 +255,11 @@ void LoadIC::GenerateStringLength(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- r2 : name // -- lr : return address - // -- [sp] : receiver + // -- r0 : receiver + // -- sp[0] : receiver // ----------------------------------- Label miss; - __ ldr(r0, MemOperand(sp, 0)); - StubCompiler::GenerateLoadStringLength(masm, r0, r1, r3, &miss); // Cache miss: Jump to runtime. __ bind(&miss); @@ -272,13 +271,11 @@ void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- r2 : name // -- lr : return address - // -- [sp] : receiver + // -- r0 : receiver + // -- sp[0] : receiver // ----------------------------------- Label miss; - // Load receiver. 
- __ ldr(r0, MemOperand(sp, 0)); - StubCompiler::GenerateLoadFunctionPrototype(masm, r0, r1, r3, &miss); __ bind(&miss); StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC); @@ -351,7 +348,8 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) { static void GenerateNormalHelper(MacroAssembler* masm, int argc, bool is_global_object, - Label* miss) { + Label* miss, + Register scratch) { // Search dictionary - put result in register r1. GenerateDictionaryLoad(masm, miss, r0, r1); @@ -360,7 +358,7 @@ static void GenerateNormalHelper(MacroAssembler* masm, __ b(eq, miss); // Check that the value is a JSFunction. - __ CompareObjectType(r1, r0, r0, JS_FUNCTION_TYPE); + __ CompareObjectType(r1, scratch, scratch, JS_FUNCTION_TYPE); __ b(ne, miss); // Patch the receiver with the global proxy if necessary. @@ -409,7 +407,7 @@ void CallIC::GenerateNormal(MacroAssembler* masm, int argc) { __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset)); __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded)); __ b(ne, &miss); - GenerateNormalHelper(masm, argc, true, &miss); + GenerateNormalHelper(masm, argc, true, &miss, r4); // Accessing non-global object: Check for access to global proxy. Label global_proxy, invoke; @@ -422,7 +420,7 @@ void CallIC::GenerateNormal(MacroAssembler* masm, int argc) { __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded)); __ b(ne, &miss); __ bind(&invoke); - GenerateNormalHelper(masm, argc, false, &miss); + GenerateNormalHelper(masm, argc, false, &miss, r4); // Global object access: Check access rights. __ bind(&global_proxy); @@ -489,10 +487,10 @@ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- r2 : name // -- lr : return address - // -- [sp] : receiver + // -- r0 : receiver + // -- sp[0] : receiver // ----------------------------------- - __ ldr(r0, MemOperand(sp, 0)); // Probe the stub cache. Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, NOT_IN_LOOP, @@ -508,11 +506,11 @@ void LoadIC::GenerateNormal(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- r2 : name // -- lr : return address - // -- [sp] : receiver + // -- r0 : receiver + // -- sp[0] : receiver // ----------------------------------- Label miss, probe, global; - __ ldr(r0, MemOperand(sp, 0)); // Check that the receiver isn't a smi. __ tst(r0, Operand(kSmiTagMask)); __ b(eq, &miss); @@ -551,11 +549,12 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- r2 : name // -- lr : return address - // -- [sp] : receiver + // -- r0 : receiver + // -- sp[0] : receiver // ----------------------------------- - __ ldr(r3, MemOperand(sp, 0)); - __ stm(db_w, sp, r2.bit() | r3.bit()); + __ mov(r3, r0); + __ Push(r3, r2); // Perform tail call to the entry. ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss)); diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc index aec937d626..a770d160f0 100644 --- a/src/arm/stub-cache-arm.cc +++ b/src/arm/stub-cache-arm.cc @@ -1551,35 +1551,34 @@ Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object, // ----------- S t a t e ------------- // -- r2 : name // -- lr : return address - // -- [sp] : receiver + // -- r0 : receiver + // -- sp[0] : receiver // ----------------------------------- Label miss; - // Get the receiver from the stack. - __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); - // If the object is the holder then we know that it's a global // object which can only happen for contextual calls. 
In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
-    __ tst(r1, Operand(kSmiTagMask));
+    __ tst(r0, Operand(kSmiTagMask));
     __ b(eq, &miss);
   }
 
   // Check that the map of the global has not changed.
-  CheckPrototypes(object, r1, holder, r3, r0, name, &miss);
+  CheckPrototypes(object, r0, holder, r3, r4, name, &miss);
 
   // Get the value from the cell.
   __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
-  __ ldr(r0, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
+  __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
 
   // Check for deleted property if property can actually be deleted.
   if (!is_dont_delete) {
     __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-    __ cmp(r0, ip);
+    __ cmp(r4, ip);
     __ b(eq, &miss);
   }
 
+  __ mov(r0, r4);
   __ IncrementCounter(&Counters::named_load_global_inline, 1, r1, r3);
   __ Ret();
diff --git a/src/arm/virtual-frame-arm.cc b/src/arm/virtual-frame-arm.cc
index a847b4ab7c..e31f47fcfb 100644
--- a/src/arm/virtual-frame-arm.cc
+++ b/src/arm/virtual-frame-arm.cc
@@ -268,6 +268,7 @@ void VirtualFrame::CallJSFunction(int arg_count) {
 
 
 void VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) {
+  ASSERT(SpilledScope::is_spilled());
   Forget(arg_count);
   ASSERT(cgen()->HasValidEntryRegisters());
   __ CallRuntime(f, arg_count);
@@ -402,6 +403,32 @@ void VirtualFrame::EmitPop(Register reg) {
 }
 
 
+void VirtualFrame::SpillAllButCopyTOSToR0() {
+  switch (top_of_stack_state_) {
+    case NO_TOS_REGISTERS:
+      __ ldr(r0, MemOperand(sp, 0));
+      break;
+    case R0_TOS:
+      __ push(r0);
+      break;
+    case R1_TOS:
+      __ push(r1);
+      __ mov(r0, r1);
+      break;
+    case R0_R1_TOS:
+      __ Push(r1, r0);
+      break;
+    case R1_R0_TOS:
+      __ Push(r0, r1);
+      __ mov(r0, r1);
+      break;
+    default:
+      UNREACHABLE();
+  }
+  top_of_stack_state_ = NO_TOS_REGISTERS;
+}
+
+
 Register VirtualFrame::Peek() {
   AssertIsNotSpilled();
   if (top_of_stack_state_ == NO_TOS_REGISTERS) {
diff --git a/src/arm/virtual-frame-arm.h b/src/arm/virtual-frame-arm.h
index 798028244f..ae817565d3 100644
--- a/src/arm/virtual-frame-arm.h
+++ b/src/arm/virtual-frame-arm.h
@@ -344,6 +344,10 @@ class VirtualFrame : public ZoneObject {
   // must be copied to a scratch register before modification.
   Register Peek();
 
+  // A little specialized, this one.  It flushes all registers, but it puts a
+  // copy of the top-of-stack in r0.
+  void SpillAllButCopyTOSToR0();
+
   // Pop and save an element from the top of the expression stack and
   // emit a corresponding pop instruction.
   void EmitPop(Register reg);
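
A sketch of the invariant this patch establishes at every LoadIC call site,
as a standalone C++ model (illustrative only; ICState and
AssertLoadICConvention are made-up names, not V8 code).  Callers either
already have the receiver in r0 or copy it with ldr r0, [sp, #0], and the IC
stubs stop reloading it from memory.  This is also why
kInlinedNamedLoadInstructions drops from 8 to 7: the explicit load of the
receiver from the stack disappears from the patched inline sequence.

  #include <cassert>
  #include <cstdint>
  #include <string>
  #include <vector>

  // Hypothetical model of the machine state at a LoadIC call site.
  struct ICState {
    std::vector<uintptr_t> stack;  // expression stack, back() is [sp]
    uintptr_t r0;                  // receiver (new in this patch)
    std::string r2;                // property name
  };

  // The convention the IC stubs now assume on entry:
  //   r2    : name
  //   r0    : receiver
  //   sp[0] : receiver (kept on the stack for now; may be removed later)
  void AssertLoadICConvention(const ICState& s) {
    assert(!s.stack.empty());
    assert(s.r0 == s.stack.back());  // receiver duplicated in r0 and on TOS
    assert(!s.r2.empty());
  }

  int main() {
    ICState s;
    s.stack.push_back(0x1234);  // caller pushes the receiver...
    s.r0 = s.stack.back();      // ...and copies it to r0 (ldr r0, [sp, #0])
    s.r2 = "length";            // name register
    AssertLoadICConvention(s);
    return 0;
  }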
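
SpillAllButCopyTOSToR0 is easiest to check case by case.  Below is a minimal
model of the virtual frame's top-of-stack cache, assuming the enum names list
the TOS register first (R0_R1_TOS: TOS in r0, next element in r1), which is
what the Push ordering in the patch implies.  Again a standalone sketch, not
V8 code:

  #include <cassert>
  #include <cstdint>
  #include <vector>

  // Up to two stack slots live in r0/r1; the rest are already in memory.
  enum TosState { NO_TOS_REGISTERS, R0_TOS, R1_TOS, R0_R1_TOS, R1_R0_TOS };

  struct Frame {
    std::vector<uintptr_t> mem;  // slots in memory, back() == [sp]
    uintptr_t r0 = 0, r1 = 0;
    TosState state = NO_TOS_REGISTERS;

    void push(uintptr_t v) { mem.push_back(v); }  // models __ push / __ Push

    // Mirrors VirtualFrame::SpillAllButCopyTOSToR0 from the patch.
    void SpillAllButCopyTOSToR0() {
      switch (state) {
        case NO_TOS_REGISTERS: r0 = mem.back(); break;       // ldr r0, [sp]
        case R0_TOS:    push(r0); break;                     // push r0
        case R1_TOS:    push(r1); r0 = r1; break;            // push r1; mov
        case R0_R1_TOS: push(r1); push(r0); break;           // Push(r1, r0)
        case R1_R0_TOS: push(r0); push(r1); r0 = r1; break;  // Push(r0, r1)
      }
      state = NO_TOS_REGISTERS;
    }
  };

  int main() {
    // After spilling, memory holds the whole frame and r0 == logical TOS.
    Frame f;
    f.mem = {1, 2};
    f.r1 = 3; f.r0 = 4;   // logical stack bottom-to-top: 1 2 3 4 (TOS = 4)
    f.state = R0_R1_TOS;  // TOS in r0, next element in r1
    f.SpillAllButCopyTOSToR0();
    assert(f.mem.back() == 4 && f.r0 == 4);
    return 0;
  }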
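
The GenerateDictionaryLoad hunks above only swap the temporary (t1 becomes
r4), but the probe computation they implement is worth spelling out.  A
sketch in plain C++, assuming the quadratic probe offsets (i + i*i)/2 and
entry size 3 used by StringDictionary in this era of V8; treat the constants
and names as illustrative:

  #include <cstdint>
  #include <cstdio>

  const int kEntrySize = 3;  // key, value, details
  const int kProbes = 4;     // inline probes before jumping to the miss label

  // Quadratic probe offsets 0, 1, 3, 6, ...
  uint32_t GetProbeOffset(uint32_t i) { return (i + i * i) >> 1; }

  // Word index relative to the elements start, i.e. the value that ends up
  // scaled by 4 and added to t0 in the generated code.
  uint32_t ProbeIndex(uint32_t hash, uint32_t mask, uint32_t i) {
    uint32_t entry = (hash + GetProbeOffset(i)) & mask;  // masked bucket
    return entry * kEntrySize;  // add(r4, r4, Operand(r4, LSL, 1)): r4 *= 3
  }

  int main() {
    uint32_t hash = 0x2A5Cu, mask = 63;  // capacity 64 => mask = 63
    for (uint32_t i = 0; i < kProbes; i++)
      printf("probe %u -> element index %u\n", i, ProbeIndex(hash, mask, i));
    return 0;
  }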
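
Several call sites also switch from raw stm(db_w, sp, ...) to the Push macro.
On ARM, stm stores the lowest-numbered register at the lowest address, so
stm db_w {r1, r2, r3} leaves r1 at [sp]; Push takes its operands
deepest-first, so __ Push(r3, r2, r1) is the same store with the ordering
spelled out.  A throwaway model (illustrative only):

  #include <cassert>
  #include <initializer_list>
  #include <vector>

  // Push(a, b, c) pushes left to right: a ends up deepest, c on top,
  // matching stm db_w when register numbers descend left to right.
  void Push(std::vector<int>& stack, std::initializer_list<int> regs) {
    for (int r : regs) stack.push_back(r);
  }

  int main() {
    std::vector<int> stack;
    int r1 = 10, r2 = 20, r3 = 30;
    Push(stack, {r3, r2, r1});   // as in __ Push(r3, r2, r1)
    assert(stack.back() == r1);  // r1 is on top, i.e. at [sp]
    return 0;
  }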