PPC: [es6] Correct Function.prototype.apply, Reflect.construct and Reflect.apply.
Port 5bd4832492
Original commit message:
Introduce a new Apply builtin that forms a correct and optimizable
foundation for the Function.prototype.apply, Reflect.construct and
Reflect.apply builtins (which properly does the PrepareForTailCall
as required by the ES2015 spec).
The new Apply builtin avoids going to the runtime if it is safe to
just access the backing store elements of the argArray, i.e. if you
pass a JSArray with no holes, or an unmapped, unmodified sloppy or
strict arguments object.
R=bmeurer@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=v8:4413, v8:4430
LOG=n
Review URL: https://codereview.chromium.org/1533073003
Cr-Commit-Position: refs/heads/master@{#32974}
parent 5c33ac6ccd
commit 36afb789e6
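
As a JS-level illustration of the fast path described in the ported commit message above, the sketch below (not part of this patch; TypeScript is used only for type annotations, and the names `sum`, `Point`, `dense`, `holey` are made up for illustration) shows the argument shapes involved: a dense array can be unpacked straight from its backing store, while a holey array or a mapped arguments object has to go through the generic runtime path. The results are observably identical either way; only the internal path differs.

// Hedged sketch: which path V8 actually takes is an internal detail of the
// new Apply builtin; this only demonstrates the JS-visible behaviour.
function sum(this: unknown, a: number, b: number, c: number): number {
  return a + b + c;
}

// A dense JSArray with no holes is the shape the builtin can unpack directly
// from the backing store without calling into the runtime.
const dense: [number, number, number] = [1, 2, 3];
console.log(Reflect.apply(sum, undefined, dense));  // 6
console.log(sum.apply(undefined, dense));           // 6

// A holey array (or a mapped/modified arguments object) falls back to the
// generic path that materializes the argument list element by element.
const holey = [1, , 3];
console.log(Reflect.apply(sum, undefined, holey));  // NaN (the hole reads as undefined)

// Reflect.construct routes through the same builtin, with new.target set.
class Point {
  constructor(public x: number, public y: number) {}
}
console.log(Reflect.construct(Point, [4, 5]));  // Point { x: 4, y: 5 }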
@@ -1345,184 +1345,216 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
}


static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                          const int vectorOffset,
                                          const int argumentsOffset,
                                          const int indexOffset,
                                          const int limitOffset) {
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ LoadP(key, MemOperand(fp, indexOffset));
  __ b(&entry);
  __ bind(&loop);
  __ LoadP(receiver, MemOperand(fp, argumentsOffset));

  // Use inline caching to speed up access to arguments.
  int slot_index = TypeFeedbackVector::PushAppliedArgumentsIndex();
  __ LoadSmiLiteral(slot, Smi::FromInt(slot_index));
  __ LoadP(vector, MemOperand(fp, vectorOffset));
  Handle<Code> ic =
      KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
  __ Call(ic, RelocInfo::CODE_TARGET);

  // Push the nth argument.
  __ push(r3);

  // Update the index on the stack and in register key.
  __ LoadP(key, MemOperand(fp, indexOffset));
  __ AddSmiLiteral(key, key, Smi::FromInt(1), r0);
  __ StoreP(key, MemOperand(fp, indexOffset));

  // Test if the copy loop has finished copying all the elements from the
  // arguments object.
  __ bind(&entry);
  __ LoadP(r0, MemOperand(fp, limitOffset));
  __ cmp(key, r0);
  __ bne(&loop);

  // On exit, the pushed arguments count is in r3, untagged
  __ SmiUntag(r3, key);
}


// Used by FunctionApply and ReflectApply
static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
  const int kFormalParameters = targetIsArgument ? 3 : 2;
  const int kStackSize = kFormalParameters + 1;

  {
    FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
    const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    const int kFunctionOffset = kReceiverOffset + kPointerSize;
    const int kVectorOffset =
        InternalFrameConstants::kCodeOffset - 1 * kPointerSize;

    // Push the vector.
    __ LoadP(r4, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r4,
             FieldMemOperand(r4, SharedFunctionInfo::kFeedbackVectorOffset));
    __ push(r4);

    __ LoadP(r3, MemOperand(fp, kFunctionOffset));   // get the function
    __ LoadP(r4, MemOperand(fp, kArgumentsOffset));  // get the args array
    __ Push(r3, r4);
    if (targetIsArgument) {
      __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX,
                       CALL_FUNCTION);
    } else {
      __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION);
    }

    Generate_CheckStackOverflow(masm, r3, kArgcIsSmiTagged);

    // Push current limit and index.
    const int kIndexOffset = kVectorOffset - (2 * kPointerSize);
    const int kLimitOffset = kVectorOffset - (1 * kPointerSize);
    __ li(r4, Operand::Zero());
    __ LoadP(r5, MemOperand(fp, kReceiverOffset));
    __ Push(r3, r4, r5);  // limit, initial index and receiver.

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset,
                                  kIndexOffset, kLimitOffset);

    // Call the callable.
    // TODO(bmeurer): This should be a tail call according to ES6.
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }
  __ addi(sp, sp, Operand(kStackSize * kPointerSize));
  __ blr();
}


static void Generate_ConstructHelper(MacroAssembler* masm) {
  const int kFormalParameters = 3;
  const int kStackSize = kFormalParameters + 1;

  {
    FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
    const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
    const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
    const int kFunctionOffset = kArgumentsOffset + kPointerSize;
    static const int kVectorOffset =
        InternalFrameConstants::kCodeOffset - 1 * kPointerSize;

    // Push the vector.
    __ LoadP(r4, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r4,
             FieldMemOperand(r4, SharedFunctionInfo::kFeedbackVectorOffset));
    __ push(r4);

    // If newTarget is not supplied, set it to constructor
    Label validate_arguments;
    __ LoadP(r3, MemOperand(fp, kNewTargetOffset));
    __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
    __ bne(&validate_arguments);
    __ LoadP(r3, MemOperand(fp, kFunctionOffset));
    __ StoreP(r3, MemOperand(fp, kNewTargetOffset));

    // Validate arguments
    __ bind(&validate_arguments);
    __ LoadP(r3, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r3);
    __ LoadP(r3, MemOperand(fp, kArgumentsOffset));  // get the args array
    __ push(r3);
    __ LoadP(r3, MemOperand(fp, kNewTargetOffset));  // get the new.target
    __ push(r3);
    __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX,
                     CALL_FUNCTION);

    Generate_CheckStackOverflow(masm, r3, kArgcIsSmiTagged);

    // Push current limit and index.
    const int kIndexOffset = kVectorOffset - (2 * kPointerSize);
    const int kLimitOffset = kVectorOffset - (1 * kPointerSize);
    __ li(r4, Operand::Zero());
    __ Push(r3, r4);  // limit and initial index.
    // Push the constructor function as callee
    __ LoadP(r3, MemOperand(fp, kFunctionOffset));
    __ push(r3);

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset,
                                  kIndexOffset, kLimitOffset);

    // Use undefined feedback vector
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ LoadP(r6, MemOperand(fp, kNewTargetOffset));

    // Call the function.
    __ Call(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ addi(sp, sp, Operand(kStackSize * kPointerSize));
  __ blr();
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, false);
  // ----------- S t a t e -------------
  //  -- r3    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into r4, argArray into r3 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r3);
    __ LoadP(r4, MemOperand(new_sp, 0));  // receiver
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r3    : argArray
  //  -- r4    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(r4, &receiver_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&receiver_not_callable, cr0);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r3, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ li(r3, Operand::Zero());
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1, 1);
  }
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, true);
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r4);
    __ mr(r3, r4);
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ beq(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r4    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r4, &target_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&target_not_callable, cr0);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1, 1);
  }
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  Generate_ConstructHelper(masm);
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // new.target into r6 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(r3, r4);
    __ mr(r6, r4);
    __ StoreP(r4, MemOperand(new_sp, 0));  // receiver (undefined)
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ mr(r6, r4);  // new.target defaults to target
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
    __ bind(&skip);
    __ mr(sp, new_sp);
  }

  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r6    : new.target
  //  -- r4    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r4, &target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&target_not_constructor, cr0);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r6, &new_target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&new_target_not_constructor, cr0);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1, 1);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ StoreP(r6, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1, 1);
  }
}


@@ -1578,6 +1610,131 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
}


// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r4    : target
  //  -- r6    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(r3, &create_runtime);

    // Load the map of argumentsList into r5.
    __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));

    // Load native context into r7.
    __ LoadP(r7, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ LoadP(ip, ContextMemOperand(r7, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);
    __ LoadP(ip, ContextMemOperand(r7, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r5, ip, JS_ARRAY_TYPE);
    __ beq(&create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r4, r6, r3);
      __ CallRuntime(Runtime::kCreateListFromArrayLike, 1);
      __ Pop(r4, r6);
      __ LoadP(r5, FieldMemOperand(r3, FixedArray::kLengthOffset));
      __ SmiUntag(r5);
    }
    __ b(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ LoadP(r5, FieldMemOperand(
                     r3, JSObject::kHeaderSize +
                             Heap::kArgumentsLengthIndex * kPointerSize));
    __ LoadP(r7, FieldMemOperand(r3, JSObject::kElementsOffset));
    __ LoadP(ip, FieldMemOperand(r7, FixedArray::kLengthOffset));
    __ cmp(r5, ip);
    __ bne(&create_runtime);
    __ SmiUntag(r5);
    __ mr(r3, r7);
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ lbz(r5, FieldMemOperand(r5, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r5);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ cmpi(r5, Operand(FAST_ELEMENTS));
    __ bgt(&create_runtime);
    __ cmpi(r5, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ beq(&create_runtime);
    __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset));
    __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
    __ SmiUntag(r5);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ sub(ip, sp, ip);
    // Check if the arguments will overflow the stack.
    __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
    __ cmp(ip, r0);  // Signed comparison.
    __ bgt(&done);
    __ TailCallRuntime(Runtime::kThrowStackOverflow, 1, 1);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- r4    : target
  //  -- r3    : args (a FixedArray built from argumentsList)
  //  -- r5    : len (number of elements to push from args)
  //  -- r6    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label loop, no_args;
    __ cmpi(r5, Operand::Zero());
    __ beq(&no_args);
    __ addi(r3, r3,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    __ mtctr(r5);
    __ bind(&loop);
    __ LoadPU(r0, MemOperand(r3, kPointerSize));
    __ push(r0);
    __ bdnz(&loop);
    __ bind(&no_args);
    __ mr(r3, r5);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(r6, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}


// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {