diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc index ca8216b1f5..6c83dad69d 100644 --- a/src/x64/builtins-x64.cc +++ b/src/x64/builtins-x64.cc @@ -708,7 +708,7 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, } // Get the full codegen state from the stack and untag it. - __ SmiToInteger32(r10, Operand(rsp, 1 * kPointerSize)); + __ SmiToInteger32(r10, Operand(rsp, kPCOnStackSize)); // Switch on the state. Label not_no_registers, not_tos_rax; @@ -717,7 +717,7 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, __ ret(1 * kPointerSize); // Remove state. __ bind(&not_no_registers); - __ movq(rax, Operand(rsp, 2 * kPointerSize)); + __ movq(rax, Operand(rsp, kPCOnStackSize + kPointerSize)); __ cmpq(r10, Immediate(FullCodeGenerator::TOS_REG)); __ j(not_equal, &not_tos_rax, Label::kNear); __ ret(2 * kPointerSize); // Remove state, rax. @@ -782,8 +782,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { // 2. Get the function to call (passed as receiver) from the stack, check // if it is a function. Label slow, non_function; - // The function to call is at position n+1 on the stack. - __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); + StackArgumentsAccessor args(rsp, rax); + __ movq(rdi, args.GetReceiverOperand()); __ JumpIfSmi(rdi, &non_function); __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); __ j(not_equal, &slow); @@ -808,7 +808,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ j(not_zero, &shift_arguments); // Compute the receiver in non-strict mode. - __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0)); + __ movq(rbx, args.GetArgumentOperand(1)); __ JumpIfSmi(rbx, &convert_to_object, Label::kNear); __ CompareRoot(rbx, Heap::kNullValueRootIndex); @@ -837,7 +837,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { } // Restore the function to rdi. 
- __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); + __ movq(rdi, args.GetReceiverOperand()); __ jmp(&patch_receiver, Label::kNear); // Use the global receiver object from the called function as the @@ -851,7 +851,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); __ bind(&patch_receiver); - __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx); + __ movq(args.GetArgumentOperand(1), rbx); __ jmp(&shift_arguments); } @@ -868,7 +868,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { // CALL_NON_FUNCTION builtin expects the non-function callee as // receiver, so overwrite the first argument which will ultimately // become the receiver. - __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi); + __ movq(args.GetArgumentOperand(1), rdi); // 4. Shift arguments and return address one slot down on the stack // (overwriting the original receiver). Adjust argument count to make @@ -1178,10 +1178,11 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { // Load the first argument into rax and get rid of the rest // (including the receiver). + StackArgumentsAccessor args(rsp, rax); Label no_arguments; __ testq(rax, rax); __ j(zero, &no_arguments); - __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0)); + __ movq(rbx, args.GetArgumentOperand(1)); __ PopReturnAddressTo(rcx); __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize)); __ PushReturnAddressFrom(rcx); diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc index 41507d158d..c4d73639c9 100644 --- a/src/x64/code-stubs-x64.cc +++ b/src/x64/code-stubs-x64.cc @@ -306,7 +306,8 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) { __ IncrementCounter(counters->fast_new_closure_total(), 1); // Get the function info from the stack. 
- __ movq(rdx, Operand(rsp, 1 * kPointerSize)); + StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(rdx, args.GetArgumentOperand(0)); int map_index = Context::FunctionMapIndex(language_mode_, is_generator_); @@ -414,7 +415,7 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) { __ ret(1 * kPointerSize); __ bind(&restore); - __ movq(rdx, Operand(rsp, 1 * kPointerSize)); + __ movq(rdx, args.GetArgumentOperand(0)); __ jmp(&install_unoptimized); // Create a new closure through the slower runtime call. @@ -437,7 +438,8 @@ void FastNewContextStub::Generate(MacroAssembler* masm) { rax, rbx, rcx, &gc, TAG_OBJECT); // Get the function from the stack. - __ movq(rcx, Operand(rsp, 1 * kPointerSize)); + StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(rcx, args.GetArgumentOperand(0)); // Set up the object header. __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); @@ -483,10 +485,10 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) { rax, rbx, rcx, &gc, TAG_OBJECT); // Get the function from the stack. - __ movq(rcx, Operand(rsp, 1 * kPointerSize)); - + StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(rcx, args.GetArgumentOperand(1)); // Get the serialized scope info from the stack. - __ movq(rbx, Operand(rsp, 2 * kPointerSize)); + __ movq(rbx, args.GetArgumentOperand(0)); // Set up the object header. __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); @@ -1259,7 +1261,8 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) { if (tagged) { Label input_not_smi, loaded; // Test that rax is a number. - __ movq(rax, Operand(rsp, kPointerSize)); + StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(rax, args.GetArgumentOperand(0)); __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); // Input is a smi. Untag and load it onto the FPU stack. // Then load the bits of the double into rbx. 
@@ -1734,8 +1737,9 @@ void MathPowStub::Generate(MacroAssembler* masm) { // The exponent and base are supplied as arguments on the stack. // This can only happen if the stub is called from non-optimized code. // Load input parameters from stack. - __ movq(base, Operand(rsp, 2 * kPointerSize)); - __ movq(exponent, Operand(rsp, 1 * kPointerSize)); + StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(base, args.GetArgumentOperand(0)); + __ movq(exponent, args.GetArgumentOperand(1)); __ JumpIfSmi(base, &base_is_smi, Label::kNear); __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset), Heap::kHeapNumberMapRootIndex); @@ -2168,7 +2172,8 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { Factory* factory = masm->isolate()->factory(); - __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize)); + StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ SmiToInteger64(rbx, args.GetArgumentOperand(2)); // rbx = parameter count (untagged) // Check if the calling frame is an arguments adaptor frame. @@ -2190,7 +2195,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { ArgumentsAdaptorFrameConstants::kLengthOffset)); __ lea(rdx, Operand(rdx, rcx, times_pointer_size, StandardFrameConstants::kCallerSPOffset)); - __ movq(Operand(rsp, 2 * kPointerSize), rdx); + __ movq(args.GetArgumentOperand(1), rdx); // rbx = parameter count (untagged) // rcx = argument count (untagged) @@ -2251,7 +2256,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { // Set up the callee in-object property. STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); - __ movq(rdx, Operand(rsp, 3 * kPointerSize)); + __ movq(rdx, args.GetArgumentOperand(0)); __ movq(FieldOperand(rax, JSObject::kHeaderSize + Heap::kArgumentsCalleeIndex * kPointerSize), rdx); @@ -2302,7 +2307,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { // Load tagged parameter count into r9. 
__ Integer32ToSmi(r9, rbx); __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS)); - __ addq(r8, Operand(rsp, 1 * kPointerSize)); + __ addq(r8, args.GetArgumentOperand(2)); __ subq(r8, r9); __ Move(r11, factory->the_hole_value()); __ movq(rdx, rdi); @@ -2341,7 +2346,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { Label arguments_loop, arguments_test; __ movq(r8, rbx); - __ movq(rdx, Operand(rsp, 2 * kPointerSize)); + __ movq(rdx, args.GetArgumentOperand(1)); // Untag rcx for the loop below. __ SmiToInteger64(rcx, rcx); __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0)); @@ -2368,7 +2373,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { // rcx = argument count (untagged) __ bind(&runtime); __ Integer32ToSmi(rcx, rcx); - __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count. + __ movq(args.GetArgumentOperand(2), rcx); // Patch argument count. __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); } @@ -2387,12 +2392,13 @@ void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { __ j(not_equal, &runtime); // Patch the arguments.length and the parameters pointer. + StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); - __ movq(Operand(rsp, 1 * kPointerSize), rcx); + __ movq(args.GetArgumentOperand(2), rcx); __ SmiToInteger64(rcx, rcx); __ lea(rdx, Operand(rdx, rcx, times_pointer_size, StandardFrameConstants::kCallerSPOffset)); - __ movq(Operand(rsp, 2 * kPointerSize), rdx); + __ movq(args.GetArgumentOperand(1), rdx); __ bind(&runtime); __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); @@ -2413,18 +2419,19 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { __ j(equal, &adaptor_frame); // Get the length from the frame. 
- __ movq(rcx, Operand(rsp, 1 * kPointerSize)); + StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(rcx, args.GetArgumentOperand(2)); __ SmiToInteger64(rcx, rcx); __ jmp(&try_allocate); // Patch the arguments.length and the parameters pointer. __ bind(&adaptor_frame); __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); - __ movq(Operand(rsp, 1 * kPointerSize), rcx); + __ movq(args.GetArgumentOperand(2), rcx); __ SmiToInteger64(rcx, rcx); __ lea(rdx, Operand(rdx, rcx, times_pointer_size, StandardFrameConstants::kCallerSPOffset)); - __ movq(Operand(rsp, 2 * kPointerSize), rdx); + __ movq(args.GetArgumentOperand(1), rdx); // Try the new space allocation. Start out with computing the size of // the arguments object and the elements array. @@ -2454,7 +2461,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { // Get the length (smi tagged) and set that as an in-object property too. STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); - __ movq(rcx, Operand(rsp, 1 * kPointerSize)); + __ movq(rcx, args.GetArgumentOperand(2)); __ movq(FieldOperand(rax, JSObject::kHeaderSize + Heap::kArgumentsLengthIndex * kPointerSize), rcx); @@ -2465,7 +2472,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { __ j(zero, &done); // Get the parameters pointer from the stack. - __ movq(rdx, Operand(rsp, 2 * kPointerSize)); + __ movq(rdx, args.GetArgumentOperand(1)); // Set up the elements pointer in the allocated arguments object and // initialize the header in the elements fixed array. 
@@ -2948,7 +2955,8 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) { const int kMaxInlineLength = 100; Label slowcase; Label done; - __ movq(r8, Operand(rsp, kPointerSize * 3)); + StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(r8, args.GetArgumentOperand(0)); __ JumpIfNotSmi(r8, &slowcase); __ SmiToInteger32(rbx, r8); __ cmpl(rbx, Immediate(kMaxInlineLength)); @@ -2986,11 +2994,11 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) { __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx); // Set input, index and length fields from arguments. - __ movq(r8, Operand(rsp, kPointerSize * 1)); + __ movq(r8, args.GetArgumentOperand(2)); __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8); - __ movq(r8, Operand(rsp, kPointerSize * 2)); + __ movq(r8, args.GetArgumentOperand(1)); __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8); - __ movq(r8, Operand(rsp, kPointerSize * 3)); + __ movq(r8, args.GetArgumentOperand(0)); __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8); // Fill out the elements FixedArray. @@ -3121,7 +3129,8 @@ void NumberToStringStub::GenerateConvertHashCodeToIndex(MacroAssembler* masm, void NumberToStringStub::Generate(MacroAssembler* masm) { Label runtime; - __ movq(rbx, Operand(rsp, kPointerSize)); + StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(rbx, args.GetArgumentOperand(0)); // Generate code to lookup number in the number string cache. GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, &runtime); @@ -3532,6 +3541,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // rdi : the function to call Isolate* isolate = masm->isolate(); Label slow, non_function; + StackArgumentsAccessor args(rsp, argc_); // The receiver might implicitly be the global object. 
This is // indicated by passing the hole as the receiver to the call @@ -3539,15 +3549,14 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { if (ReceiverMightBeImplicit()) { Label call; // Get the receiver from the stack. - // +1 ~ return address - __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize)); + __ movq(rax, args.GetReceiverOperand()); // Call as function is indicated with the hole. __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); __ j(not_equal, &call, Label::kNear); // Patch the receiver on the stack with the global receiver object. __ movq(rcx, GlobalObjectOperand()); __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset)); - __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rcx); + __ movq(args.GetReceiverOperand(), rcx); __ bind(&call); } @@ -3609,7 +3618,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // CALL_NON_FUNCTION expects the non-function callee as receiver (instead // of the original receiver from the call site). __ bind(&non_function); - __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi); + __ movq(args.GetReceiverOperand(), rdi); __ Set(rax, argc_); __ Set(rbx, 0); __ SetCallKind(rcx, CALL_AS_METHOD); @@ -4144,12 +4153,13 @@ void InstanceofStub::Generate(MacroAssembler* masm) { static const unsigned int kWordBeforeResultValue = 0x458B4909; // Only the inline check flag is supported on X64. ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck()); - int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0; + int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0; // Get the object - go slow case if it's a smi. Label slow; - - __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space)); + StackArgumentsAccessor args(rsp, 2 + extra_argument_offset, + ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(rax, args.GetArgumentOperand(0)); __ JumpIfSmi(rax, &slow); // Check that the left hand is a JS object. Leave its map in rax. 
@@ -4159,7 +4169,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { __ j(above, &slow); // Get the prototype of the function. - __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space)); + __ movq(rdx, args.GetArgumentOperand(1)); // rdx is function, rax is map. // If there is a call site cache don't look in the global cache, but do the @@ -4194,8 +4204,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) { __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); } else { // Get return address and delta to inlined map check. - __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); - __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); + __ movq(kScratchRegister, StackOperandForReturnAddress(0)); + __ subq(kScratchRegister, args.GetArgumentOperand(2)); if (FLAG_debug_code) { __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); @@ -4235,8 +4245,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) { // Assert it is a 1-byte signed value. ASSERT(true_offset >= 0 && true_offset < 0x100); __ movl(rax, Immediate(true_offset)); - __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); - __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); + __ movq(kScratchRegister, StackOperandForReturnAddress(0)); + __ subq(kScratchRegister, args.GetArgumentOperand(2)); __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); if (FLAG_debug_code) { __ movl(rax, Immediate(kWordBeforeResultValue)); @@ -4245,7 +4255,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { } __ Set(rax, 0); } - __ ret(2 * kPointerSize + extra_stack_space); + __ ret((2 + extra_argument_offset) * kPointerSize); __ bind(&is_not_instance); if (!HasCallSiteInlineCheck()) { @@ -4258,8 +4268,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) { // Assert it is a 1-byte signed value. 
ASSERT(false_offset >= 0 && false_offset < 0x100); __ movl(rax, Immediate(false_offset)); - __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); - __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); + __ movq(kScratchRegister, StackOperandForReturnAddress(0)); + __ subq(kScratchRegister, args.GetArgumentOperand(2)); __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); if (FLAG_debug_code) { __ movl(rax, Immediate(kWordBeforeResultValue)); @@ -4267,7 +4277,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); } } - __ ret(2 * kPointerSize + extra_stack_space); + __ ret((2 + extra_argument_offset) * kPointerSize); // Slow-case: Go through the JavaScript implementation. __ bind(&slow); @@ -4425,8 +4435,9 @@ void StringAddStub::Generate(MacroAssembler* masm) { Builtins::JavaScript builtin_id = Builtins::ADD; // Load the two arguments. - __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left). - __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right). + StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(rax, args.GetArgumentOperand(0)); // First argument (left). + __ movq(rdx, args.GetArgumentOperand(1)); // Second argument (right). // Make sure that both arguments are strings if not known in advance. // Otherwise, at least one of the arguments is definitely a string, @@ -5433,8 +5444,9 @@ void StringCompareStub::Generate(MacroAssembler* masm) { // rsp[8] : right string // rsp[16] : left string - __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left - __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right + StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(rdx, args.GetArgumentOperand(0)); // left + __ movq(rax, args.GetArgumentOperand(1)); // right // Check for identity. 
Label not_same; @@ -5947,9 +5959,11 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { // undefined value), it guarantees the hash table doesn't contain the // property. It's true even if some slots represent deleted properties // (their names are the null value). + StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER, + kPointerSize); for (int i = kInlinedProbes; i < kTotalProbes; i++) { // Compute the masked index: (hash + i + i * i) & mask. - __ movq(scratch, Operand(rsp, 2 * kPointerSize)); + __ movq(scratch, args.GetArgumentOperand(1)); if (i > 0) { __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i))); } @@ -5969,7 +5983,7 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { __ j(equal, &not_in_dictionary); // Stop if found the property. - __ cmpq(scratch, Operand(rsp, 3 * kPointerSize)); + __ cmpq(scratch, args.GetArgumentOperand(0)); __ j(equal, &in_dictionary); if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { @@ -6321,8 +6335,9 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) { Label fast_elements; // Get array literal index, array literal and its map. 
- __ movq(rdx, Operand(rsp, 1 * kPointerSize)); - __ movq(rbx, Operand(rsp, 2 * kPointerSize)); + StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(rdx, args.GetArgumentOperand(1)); + __ movq(rbx, args.GetArgumentOperand(0)); __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset)); __ CheckFastElements(rdi, &double_elements); @@ -6489,7 +6504,8 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { __ j(not_zero, &normal_sequence); // look at the first argument - __ movq(rcx, Operand(rsp, kPointerSize)); + StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(rcx, args.GetArgumentOperand(0)); __ testq(rcx, rcx); __ j(zero, &normal_sequence); @@ -6668,7 +6684,8 @@ void InternalArrayConstructorStub::GenerateCase( if (IsFastPackedElementsKind(kind)) { // We might need to create a holey array // look at the first argument - __ movq(rcx, Operand(rsp, kPointerSize)); + StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); + __ movq(rcx, args.GetArgumentOperand(0)); __ testq(rcx, rcx); __ j(zero, &normal_sequence); diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc index a39f14b075..24773c2595 100644 --- a/src/x64/codegen-x64.cc +++ b/src/x64/codegen-x64.cc @@ -744,6 +744,28 @@ void Code::PatchPlatformCodeAge(byte* sequence, } +Operand StackArgumentsAccessor::GetArgumentOperand(int index) { + ASSERT(index >= 0); + ASSERT(base_reg_.is(rsp) || base_reg_.is(rbp)); + int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0; + int displacement_to_last_argument = base_reg_.is(rsp) ? + kPCOnStackSize : kFPOnStackSize + kPCOnStackSize; + displacement_to_last_argument += extra_displacement_to_last_argument_; + if (argument_count_reg_.is(no_reg)) { + // argument[0] is at base_reg_ + displacement_to_last_argument + + // (argument_count_immediate_ + receiver - 1) * kPointerSize. 
+ ASSERT(argument_count_immediate_ + receiver > 0); + return Operand(base_reg_, displacement_to_last_argument + + (argument_count_immediate_ + receiver - 1 - index) * kPointerSize); + } else { + // argument[0] is at base_reg_ + displacement_to_last_argument + + // argument_count_reg_ * times_pointer_size + (receiver - 1) * kPointerSize. + return Operand(base_reg_, argument_count_reg_, times_pointer_size, + displacement_to_last_argument + (receiver - 1 - index) * kPointerSize); + } +} + + } } // namespace v8::internal #endif // V8_TARGET_ARCH_X64 diff --git a/src/x64/codegen-x64.h b/src/x64/codegen-x64.h index 5747e0bc6f..9b8454a353 100644 --- a/src/x64/codegen-x64.h +++ b/src/x64/codegen-x64.h @@ -103,6 +103,73 @@ class MathExpGenerator : public AllStatic { DISALLOW_COPY_AND_ASSIGN(MathExpGenerator); }; + +enum StackArgumentsAccessorReceiverMode { + ARGUMENTS_CONTAIN_RECEIVER, + ARGUMENTS_DONT_CONTAIN_RECEIVER +}; + + +class StackArgumentsAccessor BASE_EMBEDDED { + public: + StackArgumentsAccessor( + Register base_reg, + int argument_count_immediate, + StackArgumentsAccessorReceiverMode receiver_mode = + ARGUMENTS_CONTAIN_RECEIVER, + int extra_displacement_to_last_argument = 0) + : base_reg_(base_reg), + argument_count_reg_(no_reg), + argument_count_immediate_(argument_count_immediate), + receiver_mode_(receiver_mode), + extra_displacement_to_last_argument_( + extra_displacement_to_last_argument) { } + + StackArgumentsAccessor( + Register base_reg, + Register argument_count_reg, + StackArgumentsAccessorReceiverMode receiver_mode = + ARGUMENTS_CONTAIN_RECEIVER, + int extra_displacement_to_last_argument = 0) + : base_reg_(base_reg), + argument_count_reg_(argument_count_reg), + argument_count_immediate_(0), + receiver_mode_(receiver_mode), + extra_displacement_to_last_argument_( + extra_displacement_to_last_argument) { } + + StackArgumentsAccessor( + Register base_reg, + const ParameterCount& parameter_count, + StackArgumentsAccessorReceiverMode receiver_mode = + 
ARGUMENTS_CONTAIN_RECEIVER, + int extra_displacement_to_last_argument = 0) + : base_reg_(base_reg), + argument_count_reg_(parameter_count.is_reg() ? + parameter_count.reg() : no_reg), + argument_count_immediate_(parameter_count.is_immediate() ? + parameter_count.immediate() : 0), + receiver_mode_(receiver_mode), + extra_displacement_to_last_argument_( + extra_displacement_to_last_argument) { } + + Operand GetArgumentOperand(int index); + Operand GetReceiverOperand() { + ASSERT(receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER); + return GetArgumentOperand(0); + } + + private: + const Register base_reg_; + const Register argument_count_reg_; + const int argument_count_immediate_; + const StackArgumentsAccessorReceiverMode receiver_mode_; + const int extra_displacement_to_last_argument_; + + DISALLOW_IMPLICIT_CONSTRUCTORS(StackArgumentsAccessor); +}; + + } } // namespace v8::internal #endif // V8_X64_CODEGEN_X64_H_ diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc index 4837b9aa9a..84a47d6ac4 100644 --- a/src/x64/ic-x64.cc +++ b/src/x64/ic-x64.cc @@ -904,8 +904,8 @@ void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) { // ----------------------------------- Label miss; - // Get the receiver of the function from the stack. - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + StackArgumentsAccessor args(rsp, argc); + __ movq(rdx, args.GetReceiverOperand()); GenerateNameDictionaryReceiverCheck(masm, rdx, rax, rbx, &miss); @@ -940,8 +940,8 @@ void CallICBase::GenerateMiss(MacroAssembler* masm, __ IncrementCounter(counters->keyed_call_miss(), 1); } - // Get the receiver of the function from the stack; 1 ~ return address. - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + StackArgumentsAccessor args(rsp, argc); + __ movq(rdx, args.GetReceiverOperand()); // Enter an internal frame. { @@ -965,7 +965,7 @@ void CallICBase::GenerateMiss(MacroAssembler* masm, // This can happen only for regular CallIC but not KeyedCallIC. 
if (id == IC::kCallIC_Miss) { Label invoke, global; - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); // receiver + __ movq(rdx, args.GetReceiverOperand()); __ JumpIfSmi(rdx, &invoke); __ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx); __ j(equal, &global); @@ -975,7 +975,7 @@ void CallICBase::GenerateMiss(MacroAssembler* masm, // Patch the receiver on the stack. __ bind(&global); __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); - __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx); + __ movq(args.GetReceiverOperand(), rdx); __ bind(&invoke); } @@ -1005,8 +1005,8 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, // rsp[(argc + 1) * 8] : argument 0 = receiver // ----------------------------------- - // Get the receiver of the function from the stack; 1 ~ return address. - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + StackArgumentsAccessor args(rsp, argc); + __ movq(rdx, args.GetReceiverOperand()); GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state); GenerateMiss(masm, argc, extra_ic_state); } @@ -1023,8 +1023,8 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) { // rsp[(argc + 1) * 8] : argument 0 = receiver // ----------------------------------- - // Get the receiver of the function from the stack; 1 ~ return address. 
- __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + StackArgumentsAccessor args(rsp, argc); + __ movq(rdx, args.GetReceiverOperand()); Label do_call, slow_call, slow_load; Label check_number_dictionary, check_name, lookup_monomorphic_cache; @@ -1302,7 +1302,8 @@ void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm, // rsp[(argc + 1) * 8] : argument 0 = receiver // ----------------------------------- Label slow, notin; - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + StackArgumentsAccessor args(rsp, argc); + __ movq(rdx, args.GetReceiverOperand()); Operand mapped_location = GenerateMappedArgumentsLookup( masm, rdx, rcx, rbx, rax, r8, &notin, &slow); __ movq(rdi, mapped_location); diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc index 6883d386f4..1a93dab1c5 100644 --- a/src/x64/stub-cache-x64.cc +++ b/src/x64/stub-cache-x64.cc @@ -414,8 +414,10 @@ static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); __ movq(StackOperandForReturnAddress(0), scratch); __ Move(scratch, Smi::FromInt(0)); - for (int i = 1; i <= kFastApiCallArguments; i++) { - __ movq(Operand(rsp, i * kPointerSize), scratch); + StackArgumentsAccessor args(rsp, kFastApiCallArguments, + ARGUMENTS_DONT_CONTAIN_RECEIVER); + for (int i = 0; i < kFastApiCallArguments; i++) { + __ movq(args.GetArgumentOperand(i), scratch); } } @@ -464,23 +466,26 @@ static void GenerateFastApiCall(MacroAssembler* masm, __ LoadHeapObject(rdi, function); __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); + int api_call_argc = argc + kFastApiCallArguments; + StackArgumentsAccessor args(rsp, api_call_argc); + // Pass the additional arguments. 
- __ movq(Operand(rsp, 2 * kPointerSize), rdi); + __ movq(args.GetArgumentOperand(api_call_argc - 1), rdi); Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); Handle<Object> call_data(api_call_info->data(), masm->isolate()); if (masm->isolate()->heap()->InNewSpace(*call_data)) { __ Move(rcx, api_call_info); __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset)); - __ movq(Operand(rsp, 3 * kPointerSize), rbx); + __ movq(args.GetArgumentOperand(api_call_argc - 2), rbx); } else { - __ Move(Operand(rsp, 3 * kPointerSize), call_data); + __ Move(args.GetArgumentOperand(api_call_argc - 2), call_data); } __ movq(kScratchRegister, ExternalReference::isolate_address(masm->isolate())); - __ movq(Operand(rsp, 4 * kPointerSize), kScratchRegister); + __ movq(args.GetArgumentOperand(api_call_argc - 3), kScratchRegister); __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); - __ movq(Operand(rsp, 5 * kPointerSize), kScratchRegister); - __ movq(Operand(rsp, 6 * kPointerSize), kScratchRegister); + __ movq(args.GetArgumentOperand(api_call_argc - 4), kScratchRegister); + __ movq(args.GetArgumentOperand(api_call_argc - 5), kScratchRegister); // Prepare arguments. STATIC_ASSERT(kFastApiCallArguments == 6); @@ -526,7 +531,7 @@ static void GenerateFastApiCall(MacroAssembler* masm, __ CallApiFunctionAndReturn(function_address, thunk_address, callback_arg, - argc + kFastApiCallArguments + 1, + api_call_argc + 1, returns_handle, kFastApiCallArguments + 1); } @@ -1075,7 +1080,7 @@ Register StubCompiler::CheckPrototypes(Handle<JSObject> object, int depth = 0; if (save_at_depth == depth) { - __ movq(Operand(rsp, kPointerSize), object_reg); + __ movq(Operand(rsp, kPCOnStackSize), object_reg); } // Check the maps in the prototype chain. @@ -1135,7 +1140,7 @@ Register StubCompiler::CheckPrototypes(Handle<JSObject> object, } if (save_at_depth == depth) { - __ movq(Operand(rsp, kPointerSize), reg); + __ movq(Operand(rsp, kPCOnStackSize), reg); } // Go to the next object in the prototype chain. 
@@ -1470,11 +1475,8 @@ void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object, Label* miss) { ASSERT(holder->IsGlobalObject()); - // Get the number of arguments. - const int argc = arguments().immediate(); - - // Get the receiver from the stack. - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + StackArgumentsAccessor args(rsp, arguments()); + __ movq(rdx, args.GetReceiverOperand()); // Check that the maps haven't changed. @@ -1538,9 +1540,8 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, GenerateNameCheck(name, &miss); - // Get the receiver from the stack. - const int argc = arguments().immediate(); - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + StackArgumentsAccessor args(rsp, arguments()); + __ movq(rdx, args.GetReceiverOperand()); // Check that the receiver isn't a smi. __ JumpIfSmi(rdx, &miss); @@ -1561,7 +1562,7 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, // necessary. if (object->IsGlobalObject()) { __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); - __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx); + __ movq(args.GetReceiverOperand(), rdx); } // Invoke the function. @@ -1591,11 +1592,11 @@ Handle<Code> CallStubCompiler::CompileArrayCodeCall( // Check that function is still array const int argc = arguments().immediate(); + StackArgumentsAccessor args(rsp, argc); GenerateNameCheck(name, &miss); if (cell.is_null()) { - // Get the receiver from the stack. - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + __ movq(rdx, args.GetReceiverOperand()); // Check that the receiver isn't a smi. __ JumpIfSmi(rdx, &miss); @@ -1647,9 +1648,9 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall( Label miss; GenerateNameCheck(name, &miss); - // Get the receiver from the stack. const int argc = arguments().immediate(); - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + StackArgumentsAccessor args(rsp, argc); + __ movq(rdx, args.GetReceiverOperand()); // Check that the receiver isn't a smi.
__ JumpIfSmi(rdx, &miss); @@ -1688,7 +1689,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall( __ j(greater, &attempt_to_grow_elements); // Check if value is a smi. - __ movq(rcx, Operand(rsp, argc * kPointerSize)); + __ movq(rcx, args.GetArgumentOperand(1)); __ JumpIfNotSmi(rcx, &with_write_barrier); // Save new length. @@ -1723,7 +1724,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall( __ cmpl(rax, rcx); __ j(greater, &call_builtin); - __ movq(rcx, Operand(rsp, argc * kPointerSize)); + __ movq(rcx, args.GetArgumentOperand(1)); __ StoreNumberToDoubleElements( rcx, rdi, rax, xmm0, &call_builtin, argc * kDoubleSize); @@ -1800,7 +1801,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall( __ jmp(&call_builtin); } - __ movq(rbx, Operand(rsp, argc * kPointerSize)); + __ movq(rbx, args.GetArgumentOperand(1)); // Growing elements that are SMI-only requires special handling in case // the new element is non-Smi. For now, delegate to the builtin. Label no_fast_elements_check; @@ -1849,7 +1850,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall( __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET); // Restore receiver to rdx as finish sequence assumes it's here. - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + __ movq(rdx, args.GetReceiverOperand()); // Increment element's and array's sizes. __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset), @@ -1898,9 +1899,9 @@ Handle<Code> CallStubCompiler::CompileArrayPopCall( Label miss, return_undefined, call_builtin; GenerateNameCheck(name, &miss); - // Get the receiver from the stack. const int argc = arguments().immediate(); - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + StackArgumentsAccessor args(rsp, argc); + __ movq(rdx, args.GetReceiverOperand()); // Check that the receiver isn't a smi.
__ JumpIfSmi(rdx, &miss); @@ -1978,6 +1979,7 @@ Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall( if (!object->IsString() || !cell.is_null()) return Handle<Code>::null(); const int argc = arguments().immediate(); + StackArgumentsAccessor args(rsp, argc); Label miss; Label name_miss; @@ -2003,9 +2005,9 @@ Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall( Register receiver = rbx; Register index = rdi; Register result = rax; - __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize)); + __ movq(receiver, args.GetReceiverOperand()); if (argc > 0) { - __ movq(index, Operand(rsp, (argc - 0) * kPointerSize)); + __ movq(index, args.GetArgumentOperand(1)); } else { __ LoadRoot(index, Heap::kUndefinedValueRootIndex); } @@ -2059,6 +2061,8 @@ Handle<Code> CallStubCompiler::CompileStringCharAtCall( if (!object->IsString() || !cell.is_null()) return Handle<Code>::null(); const int argc = arguments().immediate(); + StackArgumentsAccessor args(rsp, argc); + Label miss; Label name_miss; Label index_out_of_range; @@ -2084,9 +2088,9 @@ Handle<Code> CallStubCompiler::CompileStringCharAtCall( Register index = rdi; Register scratch = rdx; Register result = rax; - __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize)); + __ movq(receiver, args.GetReceiverOperand()); if (argc > 0) { - __ movq(index, Operand(rsp, (argc - 0) * kPointerSize)); + __ movq(index, args.GetArgumentOperand(1)); } else { __ LoadRoot(index, Heap::kUndefinedValueRootIndex); } @@ -2139,13 +2143,14 @@ Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall( // If the object is not a JSObject or we got an unexpected number of // arguments, bail out to the regular call.
const int argc = arguments().immediate(); + StackArgumentsAccessor args(rsp, argc); if (!object->IsJSObject() || argc != 1) return Handle<Code>::null(); Label miss; GenerateNameCheck(name, &miss); if (cell.is_null()) { - __ movq(rdx, Operand(rsp, 2 * kPointerSize)); + __ movq(rdx, args.GetArgumentOperand(argc - 1)); __ JumpIfSmi(rdx, &miss); CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi, name, &miss); @@ -2158,7 +2163,7 @@ Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall( // Load the char code argument. Register code = rbx; - __ movq(code, Operand(rsp, 1 * kPointerSize)); + __ movq(code, args.GetArgumentOperand(argc)); // Check the code is a smi. Label slow; @@ -2345,13 +2350,14 @@ Handle<Code> CallStubCompiler::CompileMathAbsCall( // If the object is not a JSObject or we got an unexpected number of // arguments, bail out to the regular call. const int argc = arguments().immediate(); + StackArgumentsAccessor args(rsp, argc); if (!object->IsJSObject() || argc != 1) return Handle<Code>::null(); Label miss; GenerateNameCheck(name, &miss); if (cell.is_null()) { - __ movq(rdx, Operand(rsp, 2 * kPointerSize)); + __ movq(rdx, args.GetArgumentOperand(argc - 1)); __ JumpIfSmi(rdx, &miss); CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi, name, &miss); @@ -2362,7 +2368,7 @@ Handle<Code> CallStubCompiler::CompileMathAbsCall( GenerateLoadFunctionFromCell(cell, function, &miss); } // Load the (only) argument into rax. - __ movq(rax, Operand(rsp, 1 * kPointerSize)); + __ movq(rax, args.GetArgumentOperand(argc)); // Check if the argument is a smi. Label not_smi; @@ -2452,9 +2458,9 @@ Handle<Code> CallStubCompiler::CompileFastApiCall( Label miss, miss_before_stack_reserved; GenerateNameCheck(name, &miss_before_stack_reserved); - // Get the receiver from the stack.
const int argc = arguments().immediate(); - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + StackArgumentsAccessor args(rsp, argc); + __ movq(rdx, args.GetReceiverOperand()); // Check that the receiver isn't a smi. __ JumpIfSmi(rdx, &miss_before_stack_reserved); @@ -2506,9 +2512,8 @@ void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object, Label miss; GenerateNameCheck(name, &miss); - // Get the receiver from the stack. - const int argc = arguments().immediate(); - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + StackArgumentsAccessor args(rsp, arguments()); + __ movq(rdx, args.GetReceiverOperand()); // Check that the receiver isn't a smi. if (check != NUMBER_CHECK) { @@ -2532,7 +2537,7 @@ void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object, // necessary. if (object->IsGlobalObject()) { __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); - __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx); + __ movq(args.GetReceiverOperand(), rdx); } break; @@ -2652,21 +2657,20 @@ Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object, Label miss; GenerateNameCheck(name, &miss); - // Get the number of arguments. - const int argc = arguments().immediate(); LookupResult lookup(isolate()); LookupPostInterceptor(holder, name, &lookup); // Get the receiver from the stack. - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + StackArgumentsAccessor args(rsp, arguments()); + __ movq(rdx, args.GetReceiverOperand()); CallInterceptorCompiler compiler(this, arguments(), rcx, extra_state_); compiler.Compile(masm(), object, holder, name, &lookup, rdx, rbx, rdi, rax, &miss); // Restore receiver. - __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); + __ movq(rdx, args.GetReceiverOperand()); // Check that the function really is a function. __ JumpIfSmi(rax, &miss); @@ -2677,7 +2681,7 @@ Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object, // necessary.
if (object->IsGlobalObject()) { __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); - __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx); + __ movq(args.GetReceiverOperand(), rdx); } // Invoke the function. @@ -2724,15 +2728,14 @@ Handle<Code> CallStubCompiler::CompileCallGlobal( Label miss; GenerateNameCheck(name, &miss); - // Get the number of arguments. - const int argc = arguments().immediate(); + StackArgumentsAccessor args(rsp, arguments()); GenerateGlobalReceiverCheck(object, holder, name, &miss); GenerateLoadFunctionFromCell(cell, function, &miss); // Patch the receiver on the stack with the global proxy. if (object->IsGlobalObject()) { __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); - __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx); + __ movq(args.GetReceiverOperand(), rdx); } // Set up the context (function already in rdi).