[builtins] Port parameter and argument-related code stubs to CSA

Includes the port of these three builtins: FastNewStrictArguments,
FastNewSloppyArguments and FastNewRestParameter. Also inlines
the implementation of these builtins into the corresponding interpreter
bytecodes.

BUG=v8:5269
LOG=N
R=ishell@chromium.org, rmcilroy@chromium.org

Review-Url: https://codereview.chromium.org/2645743002
Cr-Commit-Position: refs/heads/master@{#43002}
This commit is contained in:
danno 2017-02-07 08:02:44 -08:00 committed by Commit bot
parent 4d8bde0cd6
commit c205c9b7ea
32 changed files with 626 additions and 3415 deletions

View File

@ -972,6 +972,8 @@ v8_source_set("v8_base") {
"src/bootstrapper.cc",
"src/bootstrapper.h",
"src/builtins/builtins-api.cc",
"src/builtins/builtins-arguments.cc",
"src/builtins/builtins-arguments.h",
"src/builtins/builtins-array.cc",
"src/builtins/builtins-arraybuffer.cc",
"src/builtins/builtins-async-function.cc",

View File

@ -3104,495 +3104,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
// Builds the rest parameter JSArray for the JSFunction in r1 by inspecting
// the caller's frame: if an arguments adaptor frame sits below the function
// frame and supplies more actual arguments than the function's formal
// parameter count, the surplus arguments are copied into a freshly allocated
// JSArray; otherwise an empty array is returned. Falls back to the runtime
// when inline allocation fails or the object is too large for new space.
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r1 : function
// -- cp : context
// -- fp : frame pointer
// -- lr : return address
// -----------------------------------
__ AssertFunction(r1);
// Make r2 point to the JavaScript frame.
__ mov(r2, fp);
if (skip_stub_frame()) {
// For Ignition we need to skip the handler/stub frame to reach the
// JavaScript frame for the function.
__ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
}
if (FLAG_debug_code) {
// Sanity check: the frame we located must belong to the function in r1.
Label ok;
__ ldr(ip, MemOperand(r2, StandardFrameConstants::kFunctionOffset));
__ cmp(ip, r1);
__ b(eq, &ok);
__ Abort(kInvalidFrameForFastNewRestArgumentsStub);
__ bind(&ok);
}
// Check if we have rest parameters (only possible if we have an
// arguments adaptor frame below the function frame).
Label no_rest_parameters;
__ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
__ ldr(ip, MemOperand(r2, CommonFrameConstants::kContextOrFrameTypeOffset));
__ cmp(ip, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ b(ne, &no_rest_parameters);
// Check if the arguments adaptor frame contains more arguments than
// specified by the function's internal formal parameter count.
Label rest_parameters;
__ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r3,
FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
// r0 = actual argument count - formal parameter count (both tagged);
// condition flags are set for the signed comparison below.
__ sub(r0, r0, r3, SetCC);
__ b(gt, &rest_parameters);
// Return an empty rest parameter array.
__ bind(&no_rest_parameters);
{
// ----------- S t a t e -------------
// -- cp : context
// -- lr : return address
// -----------------------------------
// Allocate an empty rest parameter array.
Label allocate, done_allocate;
__ Allocate(JSArray::kSize, r0, r1, r2, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Setup the rest parameter array in r0.
__ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r1);
__ str(r1, FieldMemOperand(r0, JSArray::kMapOffset));
__ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
__ str(r1, FieldMemOperand(r0, JSArray::kPropertiesOffset));
__ str(r1, FieldMemOperand(r0, JSArray::kElementsOffset));
__ mov(r1, Operand(0));
__ str(r1, FieldMemOperand(r0, JSArray::kLengthOffset));
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
__ Ret();
// Fall back to %AllocateInNewSpace.
__ bind(&allocate);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(Smi::FromInt(JSArray::kSize));
__ CallRuntime(Runtime::kAllocateInNewSpace);
}
__ jmp(&done_allocate);
}
__ bind(&rest_parameters);
{
// Compute the pointer to the first rest parameter (skipping the receiver).
// r0 is tagged, so LSL #(kPointerSizeLog2 - 1) scales the count to bytes.
__ add(r2, r2, Operand(r0, LSL, kPointerSizeLog2 - 1));
__ add(r2, r2,
Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
// ----------- S t a t e -------------
// -- cp : context
// -- r0 : number of rest parameters (tagged)
// -- r1 : function
// -- r2 : pointer to first rest parameters
// -- lr : return address
// -----------------------------------
// Allocate space for the rest parameter array plus the backing store.
Label allocate, done_allocate;
__ mov(r6, Operand(JSArray::kSize + FixedArray::kHeaderSize));
__ add(r6, r6, Operand(r0, LSL, kPointerSizeLog2 - 1));
__ Allocate(r6, r3, r4, r5, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Setup the elements array in r3.
__ LoadRoot(r1, Heap::kFixedArrayMapRootIndex);
__ str(r1, FieldMemOperand(r3, FixedArray::kMapOffset));
__ str(r0, FieldMemOperand(r3, FixedArray::kLengthOffset));
__ add(r4, r3, Operand(FixedArray::kHeaderSize));
{
// Copy the rest parameters into the elements array. The source pointer
// (r2) walks backwards via post-decrement; the destination (r4) forwards.
Label loop, done_loop;
__ add(r1, r4, Operand(r0, LSL, kPointerSizeLog2 - 1));
__ bind(&loop);
__ cmp(r4, r1);
__ b(eq, &done_loop);
__ ldr(ip, MemOperand(r2, 1 * kPointerSize, NegPostIndex));
__ str(ip, FieldMemOperand(r4, 0 * kPointerSize));
__ add(r4, r4, Operand(1 * kPointerSize));
__ b(&loop);
__ bind(&done_loop);
}
// Setup the rest parameter array in r4.
__ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r1);
__ str(r1, FieldMemOperand(r4, JSArray::kMapOffset));
__ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
__ str(r1, FieldMemOperand(r4, JSArray::kPropertiesOffset));
__ str(r3, FieldMemOperand(r4, JSArray::kElementsOffset));
__ str(r0, FieldMemOperand(r4, JSArray::kLengthOffset));
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
__ mov(r0, r4);
__ Ret();
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate);
__ cmp(r6, Operand(kMaxRegularHeapObjectSize));
__ b(gt, &too_big_for_new_space);
{
// Preserve the argument count (r0) and source pointer (r2) across the
// runtime call; the allocation result comes back in r0 and moves to r3.
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r6);
__ Push(r0, r2, r6);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ mov(r3, r0);
__ Pop(r0, r2);
}
__ jmp(&done_allocate);
// Fall back to %NewRestParameter.
__ bind(&too_big_for_new_space);
__ push(r1);
__ TailCallRuntime(Runtime::kNewRestParameter);
}
}
// Builds a sloppy-mode arguments object for the JSFunction in r1. When the
// function has mapped (aliased) parameters, a single allocation covers three
// objects laid out back to back: the parameter map, the backing store, and
// the JSSloppyArgumentsObject itself. With no mapped parameters only the
// backing store and the arguments object are allocated. Falls back to
// %NewSloppyArguments when inline allocation fails.
void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r1 : function
// -- cp : context
// -- fp : frame pointer
// -- lr : return address
// -----------------------------------
__ AssertFunction(r1);
// Make r9 point to the JavaScript frame.
__ mov(r9, fp);
if (skip_stub_frame()) {
// For Ignition we need to skip the handler/stub frame to reach the
// JavaScript frame for the function.
__ ldr(r9, MemOperand(r9, StandardFrameConstants::kCallerFPOffset));
}
if (FLAG_debug_code) {
// Sanity check: the frame we located must belong to the function in r1.
Label ok;
__ ldr(ip, MemOperand(r9, StandardFrameConstants::kFunctionOffset));
__ cmp(ip, r1);
__ b(eq, &ok);
__ Abort(kInvalidFrameForFastNewRestArgumentsStub);
__ bind(&ok);
}
// TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
// Load the formal parameter count (tagged) into r2 and derive the
// parameters pointer r3 from it.
__ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r2,
FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
__ add(r3, r9, Operand(r2, LSL, kPointerSizeLog2 - 1));
__ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
// r1 : function
// r2 : number of parameters (tagged)
// r3 : parameters pointer
// r9 : JavaScript frame pointer
// Registers used over whole function:
// r5 : arguments count (tagged)
// r6 : mapped parameter count (tagged)
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, try_allocate, runtime;
__ ldr(r4, MemOperand(r9, StandardFrameConstants::kCallerFPOffset));
__ ldr(r0, MemOperand(r4, CommonFrameConstants::kContextOrFrameTypeOffset));
__ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ b(eq, &adaptor_frame);
// No adaptor, parameter count = argument count.
__ mov(r5, r2);
__ mov(r6, r2);
__ b(&try_allocate);
// We have an adaptor frame. Patch the parameters pointer.
__ bind(&adaptor_frame);
__ ldr(r5, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ add(r4, r4, Operand(r5, LSL, 1));
__ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
// r5 = argument count (tagged)
// r6 = parameter count (tagged)
// Compute the mapped parameter count = min(r6, r5) in r6.
__ mov(r6, r2);
__ cmp(r6, Operand(r5));
__ mov(r6, Operand(r5), LeaveCC, gt);
__ bind(&try_allocate);
// Compute the sizes of backing store, parameter map, and arguments object.
// 1. Parameter map, has 2 extra words containing context and backing store.
const int kParameterMapHeaderSize =
FixedArray::kHeaderSize + 2 * kPointerSize;
// If there are no mapped parameters, we do not need the parameter_map.
__ cmp(r6, Operand(Smi::kZero));
__ mov(r9, Operand::Zero(), LeaveCC, eq);
__ mov(r9, Operand(r6, LSL, 1), LeaveCC, ne);
__ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne);
// 2. Backing store.
__ add(r9, r9, Operand(r5, LSL, 1));
__ add(r9, r9, Operand(FixedArray::kHeaderSize));
// 3. Arguments object.
__ add(r9, r9, Operand(JSSloppyArgumentsObject::kSize));
// Do the allocation of all three objects in one go.
__ Allocate(r9, r0, r9, r4, &runtime, NO_ALLOCATION_FLAGS);
// r0 = address of new object(s) (tagged)
// r2 = formal parameter count (tagged)
// Get the arguments boilerplate from the current native context into r4.
// FAST_ALIASED is used when at least one parameter is mapped.
const int kNormalOffset =
Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
const int kAliasedOffset =
Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
__ ldr(r4, NativeContextMemOperand());
__ cmp(r6, Operand::Zero());
__ ldr(r4, MemOperand(r4, kNormalOffset), eq);
__ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
// r0 = address of new object (tagged)
// r2 = formal parameter count (tagged)
// r4 = address of arguments map (tagged)
// r6 = mapped parameter count (tagged)
__ str(r4, FieldMemOperand(r0, JSObject::kMapOffset));
__ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
__ str(r9, FieldMemOperand(r0, JSObject::kPropertiesOffset));
__ str(r9, FieldMemOperand(r0, JSObject::kElementsOffset));
// Set up the callee in-object property.
__ AssertNotSmi(r1);
__ str(r1, FieldMemOperand(r0, JSSloppyArgumentsObject::kCalleeOffset));
// Use the length (smi tagged) and set that as an in-object property too.
__ AssertSmi(r5);
__ str(r5, FieldMemOperand(r0, JSSloppyArgumentsObject::kLengthOffset));
// Set up the elements pointer in the allocated arguments object.
// If we allocated a parameter map, r4 will point there, otherwise
// it will point to the backing store.
__ add(r4, r0, Operand(JSSloppyArgumentsObject::kSize));
__ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
// r0 = address of new object (tagged)
// r2 = formal parameter count (tagged)
// r4 = address of parameter map or backing store (tagged)
// r6 = mapped parameter count (tagged)
// Initialize parameter map. If there are no mapped arguments, we're done.
Label skip_parameter_map;
__ cmp(r6, Operand(Smi::kZero));
// Move backing store address to r1, because it is
// expected there when filling in the unmapped arguments.
__ mov(r1, r4, LeaveCC, eq);
__ b(eq, &skip_parameter_map);
__ LoadRoot(r5, Heap::kSloppyArgumentsElementsMapRootIndex);
__ str(r5, FieldMemOperand(r4, FixedArray::kMapOffset));
// Parameter map length = mapped count + 2 (context and backing store slots).
__ add(r5, r6, Operand(Smi::FromInt(2)));
__ str(r5, FieldMemOperand(r4, FixedArray::kLengthOffset));
__ str(cp, FieldMemOperand(r4, FixedArray::kHeaderSize + 0 * kPointerSize));
__ add(r5, r4, Operand(r6, LSL, 1));
__ add(r5, r5, Operand(kParameterMapHeaderSize));
__ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + 1 * kPointerSize));
// Copy the parameter slots and the holes in the arguments.
// We need to fill in mapped_parameter_count slots. They index the context,
// where parameters are stored in reverse order, at
// MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
// The mapped parameter thus need to get indices
// MIN_CONTEXT_SLOTS+parameter_count-1 ..
// MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
// We loop from right to left.
Label parameters_loop, parameters_test;
__ mov(r5, r6);
__ add(r9, r2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
__ sub(r9, r9, Operand(r6));
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ add(r1, r4, Operand(r5, LSL, 1));
__ add(r1, r1, Operand(kParameterMapHeaderSize));
// r1 = address of backing store (tagged)
// r4 = address of parameter map (tagged), which is also the address of new
// object + Heap::kSloppyArgumentsObjectSize (tagged)
// r0 = temporary scratch (a.o., for address calculation)
// r5 = loop variable (tagged)
// ip = the hole value
__ jmp(&parameters_test);
__ bind(&parameters_loop);
__ sub(r5, r5, Operand(Smi::FromInt(1)));
__ mov(r0, Operand(r5, LSL, 1));
__ add(r0, r0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
// Write the context slot index (tagged, in r9) into the parameter map and
// the hole into the corresponding backing store slot.
__ str(r9, MemOperand(r4, r0));
__ sub(r0, r0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
__ str(ip, MemOperand(r1, r0));
__ add(r9, r9, Operand(Smi::FromInt(1)));
__ bind(&parameters_test);
__ cmp(r5, Operand(Smi::kZero));
__ b(ne, &parameters_loop);
// Restore r0 = new object (tagged) and r5 = argument count (tagged).
__ sub(r0, r4, Operand(JSSloppyArgumentsObject::kSize));
__ ldr(r5, FieldMemOperand(r0, JSSloppyArgumentsObject::kLengthOffset));
__ bind(&skip_parameter_map);
// r0 = address of new object (tagged)
// r1 = address of backing store (tagged)
// r5 = argument count (tagged)
// r6 = mapped parameter count (tagged)
// r9 = scratch
// Copy arguments header and remaining slots (if there are any).
__ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
__ str(r9, FieldMemOperand(r1, FixedArray::kMapOffset));
__ str(r5, FieldMemOperand(r1, FixedArray::kLengthOffset));
Label arguments_loop, arguments_test;
// Step the parameters pointer back past the already-mapped parameters.
__ sub(r3, r3, Operand(r6, LSL, 1));
__ jmp(&arguments_test);
__ bind(&arguments_loop);
__ sub(r3, r3, Operand(kPointerSize));
__ ldr(r4, MemOperand(r3, 0));
__ add(r9, r1, Operand(r6, LSL, 1));
__ str(r4, FieldMemOperand(r9, FixedArray::kHeaderSize));
__ add(r6, r6, Operand(Smi::FromInt(1)));
__ bind(&arguments_test);
__ cmp(r6, Operand(r5));
__ b(lt, &arguments_loop);
// Return.
__ Ret();
// Do the runtime call to allocate the arguments object.
// r0 = address of new object (tagged)
// r5 = argument count (tagged)
__ bind(&runtime);
__ Push(r1, r3, r5);
__ TailCallRuntime(Runtime::kNewSloppyArguments);
}
// Builds a strict-mode arguments object for the JSFunction in r1. The number
// of arguments is taken either from the arguments adaptor frame (if present)
// or from the function's formal parameter count, the arguments are copied
// into a FixedArray backing store, and a JSStrictArgumentsObject is set up
// around it. Falls back to %NewStrictArguments when allocation fails.
void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r1 : function
// -- cp : context
// -- fp : frame pointer
// -- lr : return address
// -----------------------------------
__ AssertFunction(r1);
// Make r2 point to the JavaScript frame.
__ mov(r2, fp);
if (skip_stub_frame()) {
// For Ignition we need to skip the handler/stub frame to reach the
// JavaScript frame for the function.
__ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
}
if (FLAG_debug_code) {
// Sanity check: the frame we located must belong to the function in r1.
Label ok;
__ ldr(ip, MemOperand(r2, StandardFrameConstants::kFunctionOffset));
__ cmp(ip, r1);
__ b(eq, &ok);
__ Abort(kInvalidFrameForFastNewRestArgumentsStub);
__ bind(&ok);
}
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
__ ldr(r3, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
__ ldr(ip, MemOperand(r3, CommonFrameConstants::kContextOrFrameTypeOffset));
__ cmp(ip, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ b(eq, &arguments_adaptor);
{
// No adaptor: use the formal parameter count and the function's own frame.
__ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r0, FieldMemOperand(
r4, SharedFunctionInfo::kFormalParameterCountOffset));
__ add(r2, r2, Operand(r0, LSL, kPointerSizeLog2 - 1));
__ add(r2, r2,
Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
}
__ b(&arguments_done);
__ bind(&arguments_adaptor);
{
// Adaptor frame: use its actual argument count and frame pointer.
__ ldr(r0, MemOperand(r3, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ add(r2, r3, Operand(r0, LSL, kPointerSizeLog2 - 1));
__ add(r2, r2,
Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
}
__ bind(&arguments_done);
// ----------- S t a t e -------------
// -- cp : context
// -- r0 : number of arguments (tagged)
// -- r1 : function
// -- r2 : pointer to the first argument
// -- lr : return address
// -----------------------------------
// Allocate space for the strict arguments object plus the backing store.
Label allocate, done_allocate;
__ mov(r6, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
__ add(r6, r6, Operand(r0, LSL, kPointerSizeLog2 - 1));
__ Allocate(r6, r3, r4, r5, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Setup the elements array in r3.
__ LoadRoot(r1, Heap::kFixedArrayMapRootIndex);
__ str(r1, FieldMemOperand(r3, FixedArray::kMapOffset));
__ str(r0, FieldMemOperand(r3, FixedArray::kLengthOffset));
__ add(r4, r3, Operand(FixedArray::kHeaderSize));
{
// Copy the arguments into the elements array. The source pointer (r2)
// walks backwards via post-decrement; the destination (r4) forwards.
Label loop, done_loop;
__ add(r1, r4, Operand(r0, LSL, kPointerSizeLog2 - 1));
__ bind(&loop);
__ cmp(r4, r1);
__ b(eq, &done_loop);
__ ldr(ip, MemOperand(r2, 1 * kPointerSize, NegPostIndex));
__ str(ip, FieldMemOperand(r4, 0 * kPointerSize));
__ add(r4, r4, Operand(1 * kPointerSize));
__ b(&loop);
__ bind(&done_loop);
}
// Setup the strict arguments object in r4.
__ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, r1);
__ str(r1, FieldMemOperand(r4, JSStrictArgumentsObject::kMapOffset));
__ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
__ str(r1, FieldMemOperand(r4, JSStrictArgumentsObject::kPropertiesOffset));
__ str(r3, FieldMemOperand(r4, JSStrictArgumentsObject::kElementsOffset));
__ str(r0, FieldMemOperand(r4, JSStrictArgumentsObject::kLengthOffset));
STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
__ mov(r0, r4);
__ Ret();
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate);
__ cmp(r6, Operand(kMaxRegularHeapObjectSize));
__ b(gt, &too_big_for_new_space);
{
// Preserve the argument count (r0) and source pointer (r2) across the
// runtime call; the allocation result comes back in r0 and moves to r3.
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r6);
__ Push(r0, r2, r6);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ mov(r3, r0);
__ Pop(r0, r2);
}
__ b(&done_allocate);
// Fall back to %NewStrictArguments.
__ bind(&too_big_for_new_space);
__ push(r1);
__ TailCallRuntime(Runtime::kNewStrictArguments);
}
// Returns the distance in bytes between two external reference addresses.
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
  const int delta = ref0.address() - ref1.address();
  return delta;
}

View File

@ -70,27 +70,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // Single register parameter: r1 carries the target function.
  Register regs[] = {r1};
  data->InitializePlatformSpecific(arraysize(regs), regs);
}
void FastNewSloppyArgumentsDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // Single register parameter: r1 carries the target function.
  Register regs[] = {r1};
  data->InitializePlatformSpecific(arraysize(regs), regs);
}
void FastNewStrictArgumentsDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // Single register parameter: r1 carries the target function.
  Register regs[] = {r1};
  data->InitializePlatformSpecific(arraysize(regs), regs);
}
// static
const Register TypeConversionDescriptor::ArgumentRegister() { return r0; }

View File

@ -3350,581 +3350,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
// ARM64 variant: builds the rest parameter JSArray for the JSFunction in x1.
// If an arguments adaptor frame below the function frame supplies more actual
// arguments than the formal parameter count, the surplus arguments are copied
// into a freshly allocated JSArray; otherwise an empty array is returned.
// Falls back to the runtime when inline allocation fails or the object is too
// large for new space.
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x1 : function
// -- cp : context
// -- fp : frame pointer
// -- lr : return address
// -----------------------------------
__ AssertFunction(x1);
// Make x2 point to the JavaScript frame.
__ Mov(x2, fp);
if (skip_stub_frame()) {
// For Ignition we need to skip the handler/stub frame to reach the
// JavaScript frame for the function.
__ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
}
if (FLAG_debug_code) {
// Sanity check: the frame we located must belong to the function in x1.
Label ok;
__ Ldr(x3, MemOperand(x2, StandardFrameConstants::kFunctionOffset));
__ Cmp(x3, x1);
__ B(eq, &ok);
__ Abort(kInvalidFrameForFastNewRestArgumentsStub);
__ Bind(&ok);
}
// Check if we have rest parameters (only possible if we have an
// arguments adaptor frame below the function frame).
Label no_rest_parameters;
__ Ldr(x2, MemOperand(x2, CommonFrameConstants::kCallerFPOffset));
__ Ldr(x3, MemOperand(x2, CommonFrameConstants::kContextOrFrameTypeOffset));
__ Cmp(x3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ B(ne, &no_rest_parameters);
// Check if the arguments adaptor frame contains more arguments than
// specified by the function's internal formal parameter count.
// Both counts are loaded untagged (Ldrsw / UntagSmiMemOperand).
Label rest_parameters;
__ Ldrsw(x0, UntagSmiMemOperand(
x2, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldrsw(
x3, FieldMemOperand(x3, SharedFunctionInfo::kFormalParameterCountOffset));
// x0 = actual argument count - formal parameter count (untagged).
__ Subs(x0, x0, x3);
__ B(gt, &rest_parameters);
// Return an empty rest parameter array.
__ Bind(&no_rest_parameters);
{
// ----------- S t a t e -------------
// -- cp : context
// -- lr : return address
// -----------------------------------
// Allocate an empty rest parameter array.
Label allocate, done_allocate;
__ Allocate(JSArray::kSize, x0, x1, x2, &allocate, NO_ALLOCATION_FLAGS);
__ Bind(&done_allocate);
// Setup the rest parameter array in x0.
__ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, x1);
__ Str(x1, FieldMemOperand(x0, JSArray::kMapOffset));
__ LoadRoot(x1, Heap::kEmptyFixedArrayRootIndex);
__ Str(x1, FieldMemOperand(x0, JSArray::kPropertiesOffset));
__ Str(x1, FieldMemOperand(x0, JSArray::kElementsOffset));
__ Mov(x1, Smi::kZero);
__ Str(x1, FieldMemOperand(x0, JSArray::kLengthOffset));
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
__ Ret();
// Fall back to %AllocateInNewSpace.
__ Bind(&allocate);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(Smi::FromInt(JSArray::kSize));
__ CallRuntime(Runtime::kAllocateInNewSpace);
}
__ B(&done_allocate);
}
__ Bind(&rest_parameters);
{
// Compute the pointer to the first rest parameter (skipping the receiver).
__ Add(x2, x2, Operand(x0, LSL, kPointerSizeLog2));
__ Add(x2, x2, StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize);
// ----------- S t a t e -------------
// -- cp : context
// -- x0 : number of rest parameters
// -- x1 : function
// -- x2 : pointer to first rest parameters
// -- lr : return address
// -----------------------------------
// Allocate space for the rest parameter array plus the backing store.
Label allocate, done_allocate;
__ Mov(x6, JSArray::kSize + FixedArray::kHeaderSize);
__ Add(x6, x6, Operand(x0, LSL, kPointerSizeLog2));
__ Allocate(x6, x3, x4, x5, &allocate, NO_ALLOCATION_FLAGS);
__ Bind(&done_allocate);
// Compute arguments.length in x6.
__ SmiTag(x6, x0);
// Setup the elements array in x3.
__ LoadRoot(x1, Heap::kFixedArrayMapRootIndex);
__ Str(x1, FieldMemOperand(x3, FixedArray::kMapOffset));
__ Str(x6, FieldMemOperand(x3, FixedArray::kLengthOffset));
__ Add(x4, x3, FixedArray::kHeaderSize);
{
// Copy the rest parameters into the elements array. The source pointer
// (x2) steps backwards; the destination (x4) steps forwards.
Label loop, done_loop;
__ Add(x0, x4, Operand(x0, LSL, kPointerSizeLog2));
__ Bind(&loop);
__ Cmp(x4, x0);
__ B(eq, &done_loop);
__ Ldr(x5, MemOperand(x2, 0 * kPointerSize));
__ Str(x5, FieldMemOperand(x4, 0 * kPointerSize));
__ Sub(x2, x2, Operand(1 * kPointerSize));
__ Add(x4, x4, Operand(1 * kPointerSize));
__ B(&loop);
__ Bind(&done_loop);
}
// Setup the rest parameter array in x0.
__ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, x1);
__ Str(x1, FieldMemOperand(x0, JSArray::kMapOffset));
__ LoadRoot(x1, Heap::kEmptyFixedArrayRootIndex);
__ Str(x1, FieldMemOperand(x0, JSArray::kPropertiesOffset));
__ Str(x3, FieldMemOperand(x0, JSArray::kElementsOffset));
__ Str(x6, FieldMemOperand(x0, JSArray::kLengthOffset));
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
__ Ret();
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ Bind(&allocate);
__ Cmp(x6, Operand(kMaxRegularHeapObjectSize));
__ B(gt, &too_big_for_new_space);
{
// Preserve the argument count (x0) and source pointer (x2) across the
// runtime call; the allocation result comes back in x0 and moves to x3.
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(x0);
__ SmiTag(x6);
__ Push(x0, x2, x6);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ Mov(x3, x0);
__ Pop(x2, x0);
__ SmiUntag(x0);
}
__ B(&done_allocate);
// Fall back to %NewRestParameter.
__ Bind(&too_big_for_new_space);
__ Push(x1);
__ TailCallRuntime(Runtime::kNewRestParameter);
}
}
void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x1 : function
// -- cp : context
// -- fp : frame pointer
// -- lr : return address
// -----------------------------------
__ AssertFunction(x1);
// Make x6 point to the JavaScript frame.
__ Mov(x6, fp);
if (skip_stub_frame()) {
// For Ignition we need to skip the handler/stub frame to reach the
// JavaScript frame for the function.
__ Ldr(x6, MemOperand(x6, StandardFrameConstants::kCallerFPOffset));
}
if (FLAG_debug_code) {
Label ok;
__ Ldr(x3, MemOperand(x6, StandardFrameConstants::kFunctionOffset));
__ Cmp(x3, x1);
__ B(eq, &ok);
__ Abort(kInvalidFrameForFastNewRestArgumentsStub);
__ Bind(&ok);
}
// TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
__ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldrsw(
x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
__ Add(x3, x6, Operand(x2, LSL, kPointerSizeLog2));
__ Add(x3, x3, Operand(StandardFrameConstants::kCallerSPOffset));
__ SmiTag(x2);
// x1 : function
// x2 : number of parameters (tagged)
// x3 : parameters pointer
// x6 : JavaScript frame pointer
//
// Returns pointer to result object in x0.
// Make an untagged copy of the parameter count.
// Note: arg_count_smi is an alias of param_count_smi.
Register function = x1;
Register arg_count_smi = x2;
Register param_count_smi = x2;
Register recv_arg = x3;
Register param_count = x7;
__ SmiUntag(param_count, param_count_smi);
// Check if the calling frame is an arguments adaptor frame.
Register caller_fp = x11;
Register caller_ctx = x12;
Label runtime;
Label adaptor_frame, try_allocate;
__ Ldr(caller_fp, MemOperand(x6, StandardFrameConstants::kCallerFPOffset));
__ Ldr(
caller_ctx,
MemOperand(caller_fp, CommonFrameConstants::kContextOrFrameTypeOffset));
__ Cmp(caller_ctx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ B(eq, &adaptor_frame);
// No adaptor, parameter count = argument count.
// x1 function function pointer
// x2 arg_count_smi number of function arguments (smi)
// x3 recv_arg pointer to receiver arguments
// x4 mapped_params number of mapped params, min(params, args) (uninit)
// x7 param_count number of function parameters
// x11 caller_fp caller's frame pointer
// x14 arg_count number of function arguments (uninit)
Register arg_count = x14;
Register mapped_params = x4;
__ Mov(arg_count, param_count);
__ Mov(mapped_params, param_count);
__ B(&try_allocate);
// We have an adaptor frame. Patch the parameters pointer.
__ Bind(&adaptor_frame);
__ Ldr(arg_count_smi,
MemOperand(caller_fp,
ArgumentsAdaptorFrameConstants::kLengthOffset));
__ SmiUntag(arg_count, arg_count_smi);
__ Add(x10, caller_fp, Operand(arg_count, LSL, kPointerSizeLog2));
__ Add(recv_arg, x10, StandardFrameConstants::kCallerSPOffset);
// Compute the mapped parameter count = min(param_count, arg_count)
__ Cmp(param_count, arg_count);
__ Csel(mapped_params, param_count, arg_count, lt);
__ Bind(&try_allocate);
// x0 alloc_obj pointer to allocated objects: param map, backing
// store, arguments (uninit)
// x1 function function pointer
// x2 arg_count_smi number of function arguments (smi)
// x3 recv_arg pointer to receiver arguments
// x4 mapped_params number of mapped parameters, min(params, args)
// x7 param_count number of function parameters
// x10 size size of objects to allocate (uninit)
// x14 arg_count number of function arguments
// Compute the size of backing store, parameter map, and arguments object.
// 1. Parameter map, has two extra words containing context and backing
// store.
const int kParameterMapHeaderSize =
FixedArray::kHeaderSize + 2 * kPointerSize;
// Calculate the parameter map size, assuming it exists.
Register size = x10;
__ Mov(size, Operand(mapped_params, LSL, kPointerSizeLog2));
__ Add(size, size, kParameterMapHeaderSize);
// If there are no mapped parameters, set the running size total to zero.
// Otherwise, use the parameter map size calculated earlier.
__ Cmp(mapped_params, 0);
__ CzeroX(size, eq);
// 2. Add the size of the backing store and arguments object.
__ Add(size, size, Operand(arg_count, LSL, kPointerSizeLog2));
__ Add(size, size, FixedArray::kHeaderSize + JSSloppyArgumentsObject::kSize);
// Do the allocation of all three objects in one go. Assign this to x0, as it
// will be returned to the caller.
Register alloc_obj = x0;
__ Allocate(size, alloc_obj, x11, x12, &runtime, NO_ALLOCATION_FLAGS);
// Get the arguments boilerplate from the current (global) context.
// x0 alloc_obj pointer to allocated objects (param map, backing
// store, arguments)
// x1 function function pointer
// x2 arg_count_smi number of function arguments (smi)
// x3 recv_arg pointer to receiver arguments
// x4 mapped_params number of mapped parameters, min(params, args)
// x7 param_count number of function parameters
// x11 sloppy_args_map offset to args (or aliased args) map (uninit)
// x14 arg_count number of function arguments
Register global_ctx = x10;
Register sloppy_args_map = x11;
Register aliased_args_map = x10;
__ Ldr(global_ctx, NativeContextMemOperand());
__ Ldr(sloppy_args_map,
ContextMemOperand(global_ctx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
__ Ldr(
aliased_args_map,
ContextMemOperand(global_ctx, Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX));
__ Cmp(mapped_params, 0);
__ CmovX(sloppy_args_map, aliased_args_map, ne);
// Copy the JS object part.
__ Str(sloppy_args_map, FieldMemOperand(alloc_obj, JSObject::kMapOffset));
__ LoadRoot(x10, Heap::kEmptyFixedArrayRootIndex);
__ Str(x10, FieldMemOperand(alloc_obj, JSObject::kPropertiesOffset));
__ Str(x10, FieldMemOperand(alloc_obj, JSObject::kElementsOffset));
// Set up the callee in-object property.
__ AssertNotSmi(function);
__ Str(function,
FieldMemOperand(alloc_obj, JSSloppyArgumentsObject::kCalleeOffset));
// Use the length and set that as an in-object property.
__ Str(arg_count_smi,
FieldMemOperand(alloc_obj, JSSloppyArgumentsObject::kLengthOffset));
// Set up the elements pointer in the allocated arguments object.
// If we allocated a parameter map, "elements" will point there, otherwise
// it will point to the backing store.
// x0 alloc_obj pointer to allocated objects (param map, backing
// store, arguments)
// x1 function function pointer
// x2 arg_count_smi number of function arguments (smi)
// x3 recv_arg pointer to receiver arguments
// x4 mapped_params number of mapped parameters, min(params, args)
// x5 elements pointer to parameter map or backing store (uninit)
// x6 backing_store pointer to backing store (uninit)
// x7 param_count number of function parameters
// x14 arg_count number of function arguments
Register elements = x5;
__ Add(elements, alloc_obj, JSSloppyArgumentsObject::kSize);
__ Str(elements, FieldMemOperand(alloc_obj, JSObject::kElementsOffset));
// Initialize parameter map. If there are no mapped arguments, we're done.
Label skip_parameter_map;
__ Cmp(mapped_params, 0);
// Set up backing store address, because it is needed later for filling in
// the unmapped arguments.
Register backing_store = x6;
__ CmovX(backing_store, elements, eq);
__ B(eq, &skip_parameter_map);
__ LoadRoot(x10, Heap::kSloppyArgumentsElementsMapRootIndex);
__ Str(x10, FieldMemOperand(elements, FixedArray::kMapOffset));
__ Add(x10, mapped_params, 2);
__ SmiTag(x10);
__ Str(x10, FieldMemOperand(elements, FixedArray::kLengthOffset));
__ Str(cp, FieldMemOperand(elements,
FixedArray::kHeaderSize + 0 * kPointerSize));
__ Add(x10, elements, Operand(mapped_params, LSL, kPointerSizeLog2));
__ Add(x10, x10, kParameterMapHeaderSize);
__ Str(x10, FieldMemOperand(elements,
FixedArray::kHeaderSize + 1 * kPointerSize));
// Copy the parameter slots and the holes in the arguments.
// We need to fill in mapped_parameter_count slots. Then index the context,
// where parameters are stored in reverse order, at:
//
// MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS + parameter_count - 1
//
// The mapped parameter thus needs to get indices:
//
// MIN_CONTEXT_SLOTS + parameter_count - 1 ..
// MIN_CONTEXT_SLOTS + parameter_count - mapped_parameter_count
//
// We loop from right to left.
// x0 alloc_obj pointer to allocated objects (param map, backing
// store, arguments)
// x1 function function pointer
// x2 arg_count_smi number of function arguments (smi)
// x3 recv_arg pointer to receiver arguments
// x4 mapped_params number of mapped parameters, min(params, args)
// x5 elements pointer to parameter map or backing store (uninit)
// x6 backing_store pointer to backing store (uninit)
// x7 param_count number of function parameters
// x11 loop_count parameter loop counter (uninit)
// x12 index parameter index (smi, uninit)
// x13 the_hole hole value (uninit)
// x14 arg_count number of function arguments
Register loop_count = x11;
Register index = x12;
Register the_hole = x13;
Label parameters_loop, parameters_test;
__ Mov(loop_count, mapped_params);
__ Add(index, param_count, static_cast<int>(Context::MIN_CONTEXT_SLOTS));
__ Sub(index, index, mapped_params);
__ SmiTag(index);
__ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
__ Add(backing_store, elements, Operand(loop_count, LSL, kPointerSizeLog2));
__ Add(backing_store, backing_store, kParameterMapHeaderSize);
__ B(&parameters_test);
__ Bind(&parameters_loop);
__ Sub(loop_count, loop_count, 1);
__ Mov(x10, Operand(loop_count, LSL, kPointerSizeLog2));
__ Add(x10, x10, kParameterMapHeaderSize - kHeapObjectTag);
__ Str(index, MemOperand(elements, x10));
__ Sub(x10, x10, kParameterMapHeaderSize - FixedArray::kHeaderSize);
__ Str(the_hole, MemOperand(backing_store, x10));
__ Add(index, index, Smi::FromInt(1));
__ Bind(&parameters_test);
__ Cbnz(loop_count, &parameters_loop);
__ Bind(&skip_parameter_map);
// Copy arguments header and remaining slots (if there are any.)
__ LoadRoot(x10, Heap::kFixedArrayMapRootIndex);
__ Str(x10, FieldMemOperand(backing_store, FixedArray::kMapOffset));
__ Str(arg_count_smi, FieldMemOperand(backing_store,
FixedArray::kLengthOffset));
// x0 alloc_obj pointer to allocated objects (param map, backing
// store, arguments)
// x1 function function pointer
// x2 arg_count_smi number of function arguments (smi)
// x3 recv_arg pointer to receiver arguments
// x4 mapped_params number of mapped parameters, min(params, args)
// x6 backing_store pointer to backing store (uninit)
// x14 arg_count number of function arguments
Label arguments_loop, arguments_test;
__ Mov(x10, mapped_params);
__ Sub(recv_arg, recv_arg, Operand(x10, LSL, kPointerSizeLog2));
__ B(&arguments_test);
__ Bind(&arguments_loop);
__ Sub(recv_arg, recv_arg, kPointerSize);
__ Ldr(x11, MemOperand(recv_arg));
__ Add(x12, backing_store, Operand(x10, LSL, kPointerSizeLog2));
__ Str(x11, FieldMemOperand(x12, FixedArray::kHeaderSize));
__ Add(x10, x10, 1);
__ Bind(&arguments_test);
__ Cmp(x10, arg_count);
__ B(lt, &arguments_loop);
__ Ret();
// Do the runtime call to allocate the arguments object.
__ Bind(&runtime);
__ Push(function, recv_arg, arg_count_smi);
__ TailCallRuntime(Runtime::kNewSloppyArguments);
}
void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
  // Materializes a JSStrictArgumentsObject for the current function without
  // going to the runtime on the fast path. The object and its FixedArray
  // backing store are allocated together in one chunk.
  // ----------- S t a t e -------------
  //  -- x1 : function
  //  -- cp : context
  //  -- fp : frame pointer
  //  -- lr : return address
  // -----------------------------------
  __ AssertFunction(x1);

  // Make x2 point to the JavaScript frame.
  __ Mov(x2, fp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    // Sanity check: the frame we landed on must belong to |function| (x1).
    Label ok;
    __ Ldr(x3, MemOperand(x2, StandardFrameConstants::kFunctionOffset));
    __ Cmp(x3, x1);
    __ B(eq, &ok);
    // NOTE: bailout reason is shared with the rest-parameter stub.
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ Bind(&ok);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ Ldr(x3, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
  __ Ldr(x4, MemOperand(x3, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Cmp(x4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(eq, &arguments_adaptor);
  {
    // No adaptor frame: the argument count is the formal parameter count
    // from the SharedFunctionInfo.
    __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
    __ Ldrsw(x0, FieldMemOperand(
                     x4, SharedFunctionInfo::kFormalParameterCountOffset));
    // x2 = address of the last (first-pushed) argument in this frame.
    __ Add(x2, x2, Operand(x0, LSL, kPointerSizeLog2));
    __ Add(x2, x2, StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize);
  }
  __ B(&arguments_done);
  __ Bind(&arguments_adaptor);
  {
    // Adaptor frame present: read the actually-passed count from it and
    // address the arguments relative to the adaptor frame (x3).
    __ Ldrsw(x0, UntagSmiMemOperand(
                     x3, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ Add(x2, x3, Operand(x0, LSL, kPointerSizeLog2));
    __ Add(x2, x2, StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize);
  }
  __ Bind(&arguments_done);

  // ----------- S t a t e -------------
  //  -- cp : context
  //  -- x0 : number of arguments
  //  -- x1 : function
  //  -- x2 : pointer to the first argument
  //  -- lr : return address
  // -----------------------------------

  // Allocate space for the strict arguments object plus the backing store.
  Label allocate, done_allocate;
  __ Mov(x6, JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize);
  __ Add(x6, x6, Operand(x0, LSL, kPointerSizeLog2));
  __ Allocate(x6, x3, x4, x5, &allocate, NO_ALLOCATION_FLAGS);
  __ Bind(&done_allocate);

  // Compute arguments.length in x6.
  __ SmiTag(x6, x0);

  // Setup the elements array in x3 (start of the allocation).
  __ LoadRoot(x1, Heap::kFixedArrayMapRootIndex);
  __ Str(x1, FieldMemOperand(x3, FixedArray::kMapOffset));
  __ Str(x6, FieldMemOperand(x3, FixedArray::kLengthOffset));
  __ Add(x4, x3, FixedArray::kHeaderSize);
  {
    // Copy arguments: x2 walks down the stack, x4 walks up the elements.
    // x0 becomes the end address of the elements, so when the loop exits it
    // points directly at the arguments object (allocated after the array).
    Label loop, done_loop;
    __ Add(x0, x4, Operand(x0, LSL, kPointerSizeLog2));
    __ Bind(&loop);
    __ Cmp(x4, x0);
    __ B(eq, &done_loop);
    __ Ldr(x5, MemOperand(x2, 0 * kPointerSize));
    __ Str(x5, FieldMemOperand(x4, 0 * kPointerSize));
    __ Sub(x2, x2, Operand(1 * kPointerSize));
    __ Add(x4, x4, Operand(1 * kPointerSize));
    __ B(&loop);
    __ Bind(&done_loop);
  }

  // Setup the strict arguments object in x0.
  __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, x1);
  __ Str(x1, FieldMemOperand(x0, JSStrictArgumentsObject::kMapOffset));
  __ LoadRoot(x1, Heap::kEmptyFixedArrayRootIndex);
  __ Str(x1, FieldMemOperand(x0, JSStrictArgumentsObject::kPropertiesOffset));
  __ Str(x3, FieldMemOperand(x0, JSStrictArgumentsObject::kElementsOffset));
  __ Str(x6, FieldMemOperand(x0, JSStrictArgumentsObject::kLengthOffset));
  STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
  __ Ret();

  // Fall back to %AllocateInNewSpace (if not too big).
  Label too_big_for_new_space;
  __ Bind(&allocate);
  __ Cmp(x6, Operand(kMaxRegularHeapObjectSize));
  __ B(gt, &too_big_for_new_space);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Smi-tag the count (x0) and size (x6) before pushing; the size on top
    // is the argument to the runtime call. x2 is preserved across the call.
    __ SmiTag(x0);
    __ SmiTag(x6);
    __ Push(x0, x2, x6);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    // Runtime result (new allocation) goes to x3; restore x2/x0.
    __ Mov(x3, x0);
    __ Pop(x2, x0);
    __ SmiUntag(x0);
  }
  __ B(&done_allocate);

  // Fall back to %NewStrictArguments.
  __ Bind(&too_big_for_new_space);
  __ Push(x1);
  __ TailCallRuntime(Runtime::kNewStrictArguments);
}
// The number of register that CallApiFunctionAndReturn will need to save on
// the stack. The space for these registers need to be allocated in the
// ExitFrame before calling CallApiFunctionAndReturn.

View File

@ -71,30 +71,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // The stub receives the target function in x1.
  Register param_registers[] = {x1};
  data->InitializePlatformSpecific(arraysize(param_registers),
                                   param_registers);
}
void FastNewSloppyArgumentsDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // The stub receives the target function in x1.
  Register param_registers[] = {x1};
  data->InitializePlatformSpecific(arraysize(param_registers),
                                   param_registers);
}
void FastNewStrictArgumentsDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // The stub receives the target function in x1.
  Register param_registers[] = {x1};
  data->InitializePlatformSpecific(arraysize(param_registers),
                                   param_registers);
}
// static
const Register TypeConversionDescriptor::ArgumentRegister() { return x0; }

View File

@ -0,0 +1,424 @@
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/builtins/builtins-arguments.h"
#include "src/builtins/builtins-utils.h"
#include "src/builtins/builtins.h"
#include "src/code-factory.h"
#include "src/code-stub-assembler.h"
#include "src/interface-descriptors.h"
namespace v8 {
namespace internal {
typedef compiler::Node Node;
std::tuple<Node*, Node*, Node*>
ArgumentsBuiltinsAssembler::GetArgumentsFrameAndCount(Node* function,
                                                      ParameterMode mode) {
  // Returns <frame pointer, actually-passed argument count, formal parameter
  // count> for |function|'s invocation, looking through an arguments adaptor
  // frame below the function's frame when one is present.
  CSA_ASSERT(this, HasInstanceType(function, JS_FUNCTION_TYPE));

  // Start from the parent frame; it must be |function|'s own frame.
  Variable frame_ptr(this, MachineType::PointerRepresentation());
  frame_ptr.Bind(LoadParentFramePointer());
  CSA_ASSERT(this,
             WordEqual(function,
                       LoadBufferObject(frame_ptr.value(),
                                        StandardFrameConstants::kFunctionOffset,
                                        MachineType::Pointer())));
  Variable argument_count(this, ParameterRepresentation(mode));
  VariableList list({&frame_ptr, &argument_count}, zone());
  Label done_argument_count(this, list);

  // Determine the number of passed parameters, which is either the count
  // stored in an arguments adapter frame or fetched from the shared function
  // info.
  Node* frame_ptr_above = LoadBufferObject(
      frame_ptr.value(), StandardFrameConstants::kCallerFPOffset,
      MachineType::Pointer());
  Node* shared =
      LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset);
  Node* formal_parameter_count = LoadSharedFunctionInfoSpecialField(
      shared, SharedFunctionInfo::kFormalParameterCountOffset, mode);
  // Default: no adaptor, so the passed count equals the formal count.
  argument_count.Bind(formal_parameter_count);
  Node* marker_or_function = LoadBufferObject(
      frame_ptr_above, CommonFrameConstants::kContextOrFrameTypeOffset);
  GotoIf(SmiNotEqual(marker_or_function,
                     SmiConstant(StackFrame::ARGUMENTS_ADAPTOR)),
         &done_argument_count);
  // Adaptor frame found: use its frame pointer and recorded length instead.
  Node* adapted_parameter_count = LoadBufferObject(
      frame_ptr_above, ArgumentsAdaptorFrameConstants::kLengthOffset);
  frame_ptr.Bind(frame_ptr_above);
  argument_count.Bind(TaggedToParameter(adapted_parameter_count, mode));
  Goto(&done_argument_count);

  Bind(&done_argument_count);
  return std::tuple<Node*, Node*, Node*>(
      frame_ptr.value(), argument_count.value(), formal_parameter_count);
}
std::tuple<Node*, Node*, Node*>
ArgumentsBuiltinsAssembler::AllocateArgumentsObject(Node* map,
                                                    Node* arguments_count,
                                                    Node* parameter_map_count,
                                                    ParameterMode mode,
                                                    int base_size) {
  // Allocate the parameter object (either a Rest parameter object, a strict
  // argument object or a sloppy arguments object) and the elements/mapped
  // arguments together. Layout of the single allocation:
  //   [object of base_size] [elements FixedArray] [parameter map (optional)]
  // Returns <object, elements FixedArray or nullptr, parameter map or nullptr>.
  int elements_offset = base_size;
  Node* element_count = arguments_count;
  if (parameter_map_count != nullptr) {
    // Reserve header + slots for the additional parameter-map FixedArray.
    base_size += FixedArray::kHeaderSize;
    element_count = IntPtrOrSmiAdd(element_count, parameter_map_count, mode);
  }
  // A compile-time-zero argument count means only the bare object is
  // allocated; a parameter map would be meaningless in that case.
  bool empty = IsIntPtrOrSmiConstantZero(arguments_count);
  DCHECK_IMPLIES(empty, parameter_map_count == nullptr);
  Node* size =
      empty ? IntPtrConstant(base_size)
            : ElementOffsetFromIndex(element_count, FAST_ELEMENTS, mode,
                                     base_size + FixedArray::kHeaderSize);
  Node* result = Allocate(size);
  Comment("Initialize arguments object");
  StoreMapNoWriteBarrier(result, map);
  Node* empty_fixed_array = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
  StoreObjectField(result, JSArray::kPropertiesOffset, empty_fixed_array);
  Node* smi_arguments_count = ParameterToTagged(arguments_count, mode);
  StoreObjectFieldNoWriteBarrier(result, JSArray::kLengthOffset,
                                 smi_arguments_count);
  Node* arguments = nullptr;
  if (!empty) {
    // Carve the elements FixedArray out of the same allocation, directly
    // after the object itself (at |elements_offset|).
    arguments = InnerAllocate(result, elements_offset);
    StoreObjectFieldNoWriteBarrier(arguments, FixedArray::kLengthOffset,
                                   smi_arguments_count);
    Node* fixed_array_map = LoadRoot(Heap::kFixedArrayMapRootIndex);
    StoreMapNoWriteBarrier(arguments, fixed_array_map);
  }
  Node* parameter_map = nullptr;
  if (parameter_map_count != nullptr) {
    // The parameter map follows the elements array and becomes the object's
    // elements; its extra slots are filled in by the caller.
    Node* parameter_map_offset = ElementOffsetFromIndex(
        arguments_count, FAST_ELEMENTS, mode, FixedArray::kHeaderSize);
    parameter_map = InnerAllocate(arguments, parameter_map_offset);
    StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset,
                                   parameter_map);
    Node* sloppy_elements_map =
        LoadRoot(Heap::kSloppyArgumentsElementsMapRootIndex);
    StoreMapNoWriteBarrier(parameter_map, sloppy_elements_map);
    parameter_map_count = ParameterToTagged(parameter_map_count, mode);
    StoreObjectFieldNoWriteBarrier(parameter_map, FixedArray::kLengthOffset,
                                   parameter_map_count);
  } else {
    if (empty) {
      StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset,
                                     empty_fixed_array);
    } else {
      StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset,
                                     arguments);
    }
  }
  return std::tuple<Node*, Node*, Node*>(result, arguments, parameter_map);
}
Node* ArgumentsBuiltinsAssembler::ConstructParametersObjectFromArgs(
    Node* map, Node* frame_ptr, Node* arg_count, Node* first_arg,
    Node* rest_count, ParameterMode param_mode, int base_size) {
  // Allocate the parameter object (either a Rest parameter object, a strict
  // argument object or a sloppy arguments object) and the elements together and
  // fill in the contents with the arguments above |formal_parameter_count|.
  Node* result;
  Node* elements;
  Node* unused;
  std::tie(result, elements, unused) =
      AllocateArgumentsObject(map, rest_count, nullptr, param_mode, base_size);
  DCHECK(unused == nullptr);  // No parameter map was requested.
  CodeStubArguments arguments(this, arg_count, frame_ptr, param_mode);
  // Copy the stack arguments starting at |first_arg| into consecutive slots
  // of the elements array, advancing the raw store offset one pointer at a
  // time (offset starts at the first element, untagged).
  Variable offset(this, MachineType::PointerRepresentation());
  offset.Bind(IntPtrConstant(FixedArrayBase::kHeaderSize - kHeapObjectTag));
  VariableList list({&offset}, zone());
  arguments.ForEach(list,
                    [this, elements, &offset](Node* arg) {
                      StoreNoWriteBarrier(MachineRepresentation::kTagged,
                                          elements, offset.value(), arg);
                      Increment(offset, kPointerSize);
                    },
                    first_arg, nullptr, param_mode);
  return result;
}
Node* ArgumentsBuiltinsAssembler::EmitFastNewRestParameter(Node* context,
                                                           Node* function) {
  // Builds the rest-parameter JSArray containing the arguments passed beyond
  // |function|'s formal parameter count. Falls back to the runtime when the
  // backing store would not fit in new space.
  Node* frame_ptr;
  Node* argument_count;
  Node* formal_parameter_count;

  ParameterMode mode = OptimalParameterMode();
  Node* zero = IntPtrOrSmiConstant(0, mode);

  std::tie(frame_ptr, argument_count, formal_parameter_count) =
      GetArgumentsFrameAndCount(function, mode);

  Variable result(this, MachineRepresentation::kTagged);
  Label no_rest_parameters(this), runtime(this, Label::kDeferred),
      done(this, &result);

  // Only arguments in excess of the formal count become rest elements.
  Node* rest_count =
      IntPtrOrSmiSub(argument_count, formal_parameter_count, mode);
  Node* const native_context = LoadNativeContext(context);
  Node* const array_map = LoadJSArrayElementsMap(FAST_ELEMENTS, native_context);
  GotoIf(IntPtrOrSmiLessThanOrEqual(rest_count, zero, mode),
         &no_rest_parameters);

  GotoIfFixedArraySizeDoesntFitInNewSpace(
      rest_count, &runtime, JSArray::kSize + FixedArray::kHeaderSize, mode);

  // Allocate the Rest JSArray and the elements together and fill in the
  // contents with the arguments above |formal_parameter_count|.
  result.Bind(ConstructParametersObjectFromArgs(
      array_map, frame_ptr, argument_count, formal_parameter_count, rest_count,
      mode, JSArray::kSize));
  Goto(&done);

  Bind(&no_rest_parameters);
  {
    // No excess arguments: produce an empty JSArray.
    Node* arguments;
    Node* elements;
    Node* unused;
    std::tie(arguments, elements, unused) =
        AllocateArgumentsObject(array_map, zero, nullptr, mode, JSArray::kSize);
    result.Bind(arguments);
    Goto(&done);
  }

  Bind(&runtime);
  {
    // Slow path: the runtime materializes the rest parameter array.
    result.Bind(CallRuntime(Runtime::kNewRestParameter, context, function));
    Goto(&done);
  }

  Bind(&done);
  return result.value();
}
TF_BUILTIN(FastNewRestParameter, ArgumentsBuiltinsAssembler) {
  // Thin builtin entry point: fetch descriptor parameters and delegate to
  // the shared CSA implementation.
  Node* const context = Parameter(FastNewArgumentsDescriptor::kContext);
  Node* const target = Parameter(FastNewArgumentsDescriptor::kFunction);
  Return(EmitFastNewRestParameter(context, target));
}
Node* ArgumentsBuiltinsAssembler::EmitFastNewStrictArguments(Node* context,
                                                             Node* function) {
  // Builds a JSStrictArgumentsObject holding every actually-passed argument.
  // Falls back to the runtime when the allocation doesn't fit in new space.
  Variable result(this, MachineRepresentation::kTagged);
  Label done(this, &result), empty(this), runtime(this, Label::kDeferred);

  Node* frame_ptr;
  Node* argument_count;
  Node* formal_parameter_count;

  ParameterMode mode = OptimalParameterMode();
  Node* zero = IntPtrOrSmiConstant(0, mode);

  std::tie(frame_ptr, argument_count, formal_parameter_count) =
      GetArgumentsFrameAndCount(function, mode);

  GotoIfFixedArraySizeDoesntFitInNewSpace(
      argument_count, &runtime,
      JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize, mode);

  Node* const native_context = LoadNativeContext(context);
  Node* const map =
      LoadContextElement(native_context, Context::STRICT_ARGUMENTS_MAP_INDEX);
  GotoIf(WordEqual(argument_count, zero), &empty);

  // Copy all arguments, starting at stack index zero.
  result.Bind(ConstructParametersObjectFromArgs(
      map, frame_ptr, argument_count, zero, argument_count, mode,
      JSStrictArgumentsObject::kSize));
  Goto(&done);

  Bind(&empty);
  {
    // No arguments passed: allocate just the object with empty elements.
    Node* arguments;
    Node* elements;
    Node* unused;
    std::tie(arguments, elements, unused) = AllocateArgumentsObject(
        map, zero, nullptr, mode, JSStrictArgumentsObject::kSize);
    result.Bind(arguments);
    Goto(&done);
  }

  Bind(&runtime);
  {
    result.Bind(CallRuntime(Runtime::kNewStrictArguments, context, function));
    Goto(&done);
  }

  Bind(&done);
  return result.value();
}
TF_BUILTIN(FastNewStrictArguments, ArgumentsBuiltinsAssembler) {
  // Thin builtin entry point: fetch descriptor parameters and delegate to
  // the shared CSA implementation.
  Node* const context = Parameter(FastNewArgumentsDescriptor::kContext);
  Node* const target = Parameter(FastNewArgumentsDescriptor::kFunction);
  Return(EmitFastNewStrictArguments(context, target));
}
Node* ArgumentsBuiltinsAssembler::EmitFastNewSloppyArguments(Node* context,
                                                             Node* function) {
  // Builds a JSSloppyArgumentsObject for |function|. Three fast cases are
  // handled inline:
  //   1. mapped:        both arguments and formal parameters exist; elements
  //      are a sloppy-arguments parameter map referring to context slots.
  //   2. no_parameters: no formal parameters; plain FixedArray elements.
  //   3. empty:         no arguments passed at all.
  // Allocations that don't fit in new space fall back to the runtime.
  Node* frame_ptr;
  Node* argument_count;
  Node* formal_parameter_count;

  Variable result(this, MachineRepresentation::kTagged);
  ParameterMode mode = OptimalParameterMode();
  Node* zero = IntPtrOrSmiConstant(0, mode);

  Label done(this, &result), empty(this), no_parameters(this),
      runtime(this, Label::kDeferred);

  std::tie(frame_ptr, argument_count, formal_parameter_count) =
      GetArgumentsFrameAndCount(function, mode);

  GotoIf(WordEqual(argument_count, zero), &empty);

  GotoIf(WordEqual(formal_parameter_count, zero), &no_parameters);

  {
    Comment("Mapped parameter JSSloppyArgumentsObject");
    Node* mapped_count =
        IntPtrOrSmiMin(argument_count, formal_parameter_count, mode);

    // The parameter map has two extra slots (the context and the backing
    // store) in front of the mapped-parameter entries.
    Node* parameter_map_size =
        IntPtrOrSmiAdd(mapped_count, IntPtrOrSmiConstant(2, mode), mode);

    // Verify that the overall allocation will fit in new space.
    Node* elements_allocated =
        IntPtrOrSmiAdd(argument_count, parameter_map_size, mode);
    GotoIfFixedArraySizeDoesntFitInNewSpace(
        elements_allocated, &runtime,
        JSSloppyArgumentsObject::kSize + FixedArray::kHeaderSize * 2, mode);

    Node* const native_context = LoadNativeContext(context);
    Node* const map = LoadContextElement(
        native_context, Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
    Node* argument_object;
    Node* elements;
    Node* map_array;
    std::tie(argument_object, elements, map_array) =
        AllocateArgumentsObject(map, argument_count, parameter_map_size, mode,
                                JSSloppyArgumentsObject::kSize);
    StoreObjectFieldNoWriteBarrier(
        argument_object, JSSloppyArgumentsObject::kCalleeOffset, function);
    // Slot 0 of the parameter map is the context; slot 1 the backing store.
    StoreFixedArrayElement(map_array, 0, context, SKIP_WRITE_BARRIER);
    StoreFixedArrayElement(map_array, 1, elements, SKIP_WRITE_BARRIER);

    Comment("Fill in non-mapped parameters");
    // Copy the arguments above |mapped_count| from the stack into the
    // backing store, iterating element offsets from high to low.
    Node* argument_offset =
        ElementOffsetFromIndex(argument_count, FAST_ELEMENTS, mode,
                               FixedArray::kHeaderSize - kHeapObjectTag);
    Node* mapped_offset =
        ElementOffsetFromIndex(mapped_count, FAST_ELEMENTS, mode,
                               FixedArray::kHeaderSize - kHeapObjectTag);
    CodeStubArguments arguments(this, argument_count, frame_ptr, mode);
    Variable current_argument(this, MachineType::PointerRepresentation());
    current_argument.Bind(arguments.AtIndexPtr(argument_count, mode));
    VariableList var_list1({&current_argument}, zone());
    mapped_offset = BuildFastLoop(
        var_list1, argument_offset, mapped_offset,
        [this, elements, &current_argument](Node* offset) {
          Increment(current_argument, kPointerSize);
          Node* arg = LoadBufferObject(current_argument.value(), 0);
          StoreNoWriteBarrier(MachineRepresentation::kTagged, elements, offset,
                              arg);
        },
        -kPointerSize, INTPTR_PARAMETERS);

    // Copy the parameter slots and the holes in the arguments.
    // We need to fill in mapped_count slots. They index the context,
    // where parameters are stored in reverse order, at
    //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+argument_count-1
    // The mapped parameter thus need to get indices
    //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
    //       MIN_CONTEXT_SLOTS+argument_count-mapped_count
    // We loop from right to left.
    Comment("Fill in mapped parameters");
    Variable context_index(this, OptimalParameterRepresentation());
    context_index.Bind(IntPtrOrSmiSub(
        IntPtrOrSmiAdd(IntPtrOrSmiConstant(Context::MIN_CONTEXT_SLOTS, mode),
                       formal_parameter_count, mode),
        mapped_count, mode));
    Node* the_hole = TheHoleConstant();
    VariableList var_list2({&context_index}, zone());
    const int kParameterMapHeaderSize =
        FixedArray::kHeaderSize + 2 * kPointerSize;
    // Bias the map-array base so that the same element offset addresses the
    // slot past the two header entries (context and backing store).
    Node* adjusted_map_array = IntPtrAdd(
        BitcastTaggedToWord(map_array),
        IntPtrConstant(kParameterMapHeaderSize - FixedArray::kHeaderSize));
    Node* zero_offset = ElementOffsetFromIndex(
        zero, FAST_ELEMENTS, mode, FixedArray::kHeaderSize - kHeapObjectTag);
    BuildFastLoop(var_list2, mapped_offset, zero_offset,
                  [this, the_hole, elements, adjusted_map_array, &context_index,
                   mode](Node* offset) {
                    // Hole in the backing store, context index in the map.
                    StoreNoWriteBarrier(MachineRepresentation::kTagged,
                                        elements, offset, the_hole);
                    StoreNoWriteBarrier(
                        MachineRepresentation::kTagged, adjusted_map_array,
                        offset, ParameterToTagged(context_index.value(), mode));
                    Increment(context_index, 1, mode);
                  },
                  -kPointerSize, INTPTR_PARAMETERS);

    result.Bind(argument_object);
    Goto(&done);
  }

  Bind(&no_parameters);
  {
    Comment("No parameters JSSloppyArgumentsObject");
    GotoIfFixedArraySizeDoesntFitInNewSpace(
        argument_count, &runtime,
        JSSloppyArgumentsObject::kSize + FixedArray::kHeaderSize, mode);
    Node* const native_context = LoadNativeContext(context);
    Node* const map =
        LoadContextElement(native_context, Context::SLOPPY_ARGUMENTS_MAP_INDEX);
    result.Bind(ConstructParametersObjectFromArgs(
        map, frame_ptr, argument_count, zero, argument_count, mode,
        JSSloppyArgumentsObject::kSize));
    StoreObjectFieldNoWriteBarrier(
        result.value(), JSSloppyArgumentsObject::kCalleeOffset, function);
    Goto(&done);
  }

  Bind(&empty);
  {
    Comment("Empty JSSloppyArgumentsObject");
    Node* const native_context = LoadNativeContext(context);
    Node* const map =
        LoadContextElement(native_context, Context::SLOPPY_ARGUMENTS_MAP_INDEX);
    Node* arguments;
    Node* elements;
    Node* unused;
    std::tie(arguments, elements, unused) = AllocateArgumentsObject(
        map, zero, nullptr, mode, JSSloppyArgumentsObject::kSize);
    result.Bind(arguments);
    StoreObjectFieldNoWriteBarrier(
        result.value(), JSSloppyArgumentsObject::kCalleeOffset, function);
    Goto(&done);
  }

  Bind(&runtime);
  {
    result.Bind(CallRuntime(Runtime::kNewSloppyArguments, context, function));
    Goto(&done);
  }

  Bind(&done);
  return result.value();
}
TF_BUILTIN(FastNewSloppyArguments, ArgumentsBuiltinsAssembler) {
  // Thin builtin entry point: fetch descriptor parameters and delegate to
  // the shared CSA implementation.
  Node* const context = Parameter(FastNewArgumentsDescriptor::kContext);
  Node* const target = Parameter(FastNewArgumentsDescriptor::kFunction);
  Return(EmitFastNewSloppyArguments(context, target));
}
} // namespace internal
} // namespace v8

View File

@ -0,0 +1,55 @@
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/code-stub-assembler.h"
namespace v8 {
namespace internal {
typedef compiler::Node Node;
typedef compiler::CodeAssemblerState CodeAssemblerState;
typedef compiler::CodeAssemblerLabel CodeAssemblerLabel;
// CSA helper class implementing the FastNewStrictArguments,
// FastNewSloppyArguments and FastNewRestParameter builtins.
class ArgumentsBuiltinsAssembler : public CodeStubAssembler {
 public:
  explicit ArgumentsBuiltinsAssembler(CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  Node* EmitFastNewStrictArguments(Node* context, Node* function);
  Node* EmitFastNewSloppyArguments(Node* context, Node* function);
  Node* EmitFastNewRestParameter(Node* context, Node* function);

 private:
  // Calculates and returns the frame pointer, argument count and formal
  // parameter count to be used to access a function's parameters, taking
  // argument adapter frames into account. The tuple is of the form:
  // <frame_ptr, # parameters actually passed, formal parameter count>
  std::tuple<Node*, Node*, Node*> GetArgumentsFrameAndCount(Node* function,
                                                            ParameterMode mode);

  // Allocates an arguments object (either rest, strict or sloppy) together
  // with the FixedArray elements for the arguments and a parameter map (for
  // sloppy arguments only). A tuple is returned with pointers to the
  // arguments object, the elements and parameter map in the form:
  // <argument object, arguments FixedArray, parameter map or nullptr>
  std::tuple<Node*, Node*, Node*> AllocateArgumentsObject(
      Node* map, Node* arguments, Node* mapped_arguments,
      ParameterMode param_mode, int base_size);

  // For Rest parameters and Strict arguments, the copying of parameters from
  // the stack into the arguments object is straight-forward and shares much of
  // the same underlying logic, which is encapsulated by this function. It
  // allocates an arguments-like object of size |base_size| with the map |map|,
  // and then copies |rest_count| arguments from the stack frame pointed to by
  // |frame_ptr| starting from |first_arg|. |arg_count| == |first_arg| +
  // |rest_count|.
  Node* ConstructParametersObjectFromArgs(Node* map, Node* frame_ptr,
                                          Node* arg_count, Node* first_arg,
                                          Node* rest_count,
                                          ParameterMode param_mode,
                                          int base_size);
};
} // namespace internal
} // namespace v8

View File

@ -103,6 +103,9 @@ class Isolate;
FastNewFunctionContext) \
TFS(FastNewFunctionContextFunction, BUILTIN, kNoExtraICState, \
FastNewFunctionContext) \
TFS(FastNewStrictArguments, BUILTIN, kNoExtraICState, FastNewArguments) \
TFS(FastNewSloppyArguments, BUILTIN, kNoExtraICState, FastNewArguments) \
TFS(FastNewRestParameter, BUILTIN, kNoExtraICState, FastNewArguments) \
TFS(FastCloneRegExp, BUILTIN, kNoExtraICState, FastCloneRegExp) \
TFS(FastCloneShallowArrayTrack, BUILTIN, kNoExtraICState, \
FastCloneShallowArray) \

View File

@ -346,24 +346,21 @@ Callable CodeFactory::FastNewFunctionContext(Isolate* isolate,
}
// static
Callable CodeFactory::FastNewRestParameter(Isolate* isolate,
bool skip_stub_frame) {
FastNewRestParameterStub stub(isolate, skip_stub_frame);
return make_callable(stub);
Callable CodeFactory::FastNewRestParameter(Isolate* isolate) {
return Callable(isolate->builtins()->FastNewRestParameter(),
FastNewRestParameterDescriptor(isolate));
}
// static
Callable CodeFactory::FastNewSloppyArguments(Isolate* isolate,
bool skip_stub_frame) {
FastNewSloppyArgumentsStub stub(isolate, skip_stub_frame);
return make_callable(stub);
Callable CodeFactory::FastNewSloppyArguments(Isolate* isolate) {
return Callable(isolate->builtins()->FastNewSloppyArguments(),
FastNewRestParameterDescriptor(isolate));
}
// static
Callable CodeFactory::FastNewStrictArguments(Isolate* isolate,
bool skip_stub_frame) {
FastNewStrictArgumentsStub stub(isolate, skip_stub_frame);
return make_callable(stub);
Callable CodeFactory::FastNewStrictArguments(Isolate* isolate) {
return Callable(isolate->builtins()->FastNewStrictArguments(),
FastNewRestParameterDescriptor(isolate));
}
// static

View File

@ -148,12 +148,9 @@ class V8_EXPORT_PRIVATE CodeFactory final {
ScopeType scope_type);
static Callable FastNewClosure(Isolate* isolate);
static Callable FastNewObject(Isolate* isolate);
static Callable FastNewRestParameter(Isolate* isolate,
bool skip_stub_frame = false);
static Callable FastNewSloppyArguments(Isolate* isolate,
bool skip_stub_frame = false);
static Callable FastNewStrictArguments(Isolate* isolate,
bool skip_stub_frame = false);
static Callable FastNewRestParameter(Isolate* isolate);
static Callable FastNewSloppyArguments(Isolate* isolate);
static Callable FastNewStrictArguments(Isolate* isolate);
static Callable CopyFastSmiOrObjectElements(Isolate* isolate);
static Callable GrowFastDoubleElements(Isolate* isolate);

View File

@ -1201,6 +1201,25 @@ Node* CodeStubAssembler::LoadMapConstructor(Node* map) {
return result.value();
}
Node* CodeStubAssembler::LoadSharedFunctionInfoSpecialField(
    Node* shared, int offset, ParameterMode mode) {
  // Loads one of the specially encoded integer fields of a
  // SharedFunctionInfo and converts it to the representation |mode| asks
  // for. On 64-bit targets the field is read as a raw int32; elsewhere it
  // is stored as a Smi.
  if (Is64()) {
    Node* raw = LoadObjectField(shared, offset, MachineType::Int32());
    return mode == SMI_PARAMETERS ? SmiTag(raw) : ChangeUint32ToWord(raw);
  }
  Node* smi = LoadObjectField(shared, offset);
  return mode == SMI_PARAMETERS ? smi : SmiUntag(smi);
}
Node* CodeStubAssembler::LoadNameHashField(Node* name) {
CSA_ASSERT(this, IsName(name));
return LoadObjectField(name, Name::kHashFieldOffset, MachineType::Uint32());
@ -6287,6 +6306,16 @@ void CodeStubAssembler::BuildFastFixedArrayForEach(
: IndexAdvanceMode::kPost);
}
void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
    Node* element_count, Label* doesnt_fit, int base_size, ParameterMode mode) {
  // Branches to |doesnt_fit| when a FixedArray with |element_count| elements
  // plus |base_size| bytes of overhead would exceed the largest regular
  // new-space object.
  const int max_newspace_elements =
      (kMaxRegularHeapObjectSize - base_size) / kPointerSize;
  Node* const limit = IntPtrOrSmiConstant(max_newspace_elements, mode);
  GotoIf(IntPtrOrSmiGreaterThan(element_count, limit, mode), doesnt_fit);
}
void CodeStubAssembler::InitializeFieldsWithRoot(
Node* object, Node* start_offset, Node* end_offset,
Heap::RootListIndex root_index) {

View File

@ -77,10 +77,13 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
return Is64() ? INTPTR_PARAMETERS : SMI_PARAMETERS;
}
MachineRepresentation ParameterRepresentation(ParameterMode mode) const {
return mode == INTPTR_PARAMETERS ? MachineType::PointerRepresentation()
: MachineRepresentation::kTaggedSigned;
}
MachineRepresentation OptimalParameterRepresentation() const {
return OptimalParameterMode() == INTPTR_PARAMETERS
? MachineType::PointerRepresentation()
: MachineRepresentation::kTaggedSigned;
return ParameterRepresentation(OptimalParameterMode());
}
Node* ParameterToWord(Node* value, ParameterMode mode) {
@ -384,6 +387,14 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* LoadMapConstructorFunctionIndex(Node* map);
// Load the constructor of a Map (equivalent to Map::GetConstructor()).
Node* LoadMapConstructor(Node* map);
// Loads a value from the specially encoded integer fields in the
// SharedFunctionInfo object.
// TODO(danno): This currently only works for the integer fields that are
// mapped to the upper part of 64-bit words. We should customize
// SFI::BodyDescriptor and store int32 values directly.
Node* LoadSharedFunctionInfoSpecialField(Node* shared, int offset,
ParameterMode param_mode);
// Check if the map is set for slow properties.
Node* IsDictionaryMap(Node* map);
@ -1114,6 +1125,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
FixedArray::kHeaderSize);
}
void GotoIfFixedArraySizeDoesntFitInNewSpace(Node* element_count,
Label* doesnt_fit, int base_size,
ParameterMode mode);
void InitializeFieldsWithRoot(Node* object, Node* start_offset,
Node* end_offset, Heap::RootListIndex root);

View File

@ -49,9 +49,6 @@ class Node;
V(StoreBufferOverflow) \
V(StoreSlowElement) \
V(SubString) \
V(FastNewRestParameter) \
V(FastNewSloppyArguments) \
V(FastNewStrictArguments) \
V(NameDictionaryLookup) \
/* This can be removed once there are no */ \
/* more deopting Hydrogen stubs. */ \
@ -748,69 +745,6 @@ class NumberToStringStub final : public TurboFanCodeStub {
DEFINE_TURBOFAN_CODE_STUB(NumberToString, TurboFanCodeStub);
};
// TODO(turbofan): This stub should be possible to write in TurboFan
// using the CodeStubAssembler very soon in a way that is as efficient
// and easy as the current handwritten version, which is partly a copy
// of the strict arguments object materialization code.
// Platform stub that materializes a rest-parameter array on the fast path.
class FastNewRestParameterStub final : public PlatformCodeStub {
 public:
  // |skip_stub_frame| records that the stub is entered with an extra
  // handler/stub frame on the stack (Ignition) that must be skipped to
  // reach the JavaScript frame of the target function.
  explicit FastNewRestParameterStub(Isolate* isolate,
                                    bool skip_stub_frame = false)
      : PlatformCodeStub(isolate) {
    minor_key_ = SkipStubFrameBits::encode(skip_stub_frame);
  }

  DEFINE_CALL_INTERFACE_DESCRIPTOR(FastNewRestParameter);
  DEFINE_PLATFORM_CODE_STUB(FastNewRestParameter, PlatformCodeStub);

  int skip_stub_frame() const { return SkipStubFrameBits::decode(minor_key_); }

 private:
  // Single bit of the minor key holding |skip_stub_frame|.
  class SkipStubFrameBits : public BitField<bool, 0, 1> {};
};
// TODO(turbofan): This stub should be possible to write in TurboFan
// using the CodeStubAssembler very soon in a way that is as efficient
// and easy as the current handwritten version.
//
// Hand-written (platform assembly) stub that materializes a sloppy-mode
// arguments object for the function passed in the descriptor's single
// register parameter.
class FastNewSloppyArgumentsStub final : public PlatformCodeStub {
 public:
  // |skip_stub_frame|: when true, the generated code skips one frame
  // (the handler/stub frame Ignition pushes) to reach the JavaScript
  // frame of the function — see the stub's Generate() implementation.
  explicit FastNewSloppyArgumentsStub(Isolate* isolate,
                                      bool skip_stub_frame = false)
      : PlatformCodeStub(isolate) {
    // Pack the flag into the minor key so the two variants are cached
    // as distinct code objects.
    minor_key_ = SkipStubFrameBits::encode(skip_stub_frame);
  }

  // Decodes the skip-stub-frame flag back out of the minor key.
  int skip_stub_frame() const { return SkipStubFrameBits::decode(minor_key_); }

  DEFINE_CALL_INTERFACE_DESCRIPTOR(FastNewSloppyArguments);
  DEFINE_PLATFORM_CODE_STUB(FastNewSloppyArguments, PlatformCodeStub);

 private:
  // Single boolean bit at position 0 of the minor key.
  class SkipStubFrameBits : public BitField<bool, 0, 1> {};
};
// TODO(turbofan): This stub should be possible to write in TurboFan
// using the CodeStubAssembler very soon in a way that is as efficient
// and easy as the current handwritten version.
//
// Hand-written (platform assembly) stub that materializes a strict-mode
// arguments object for the function passed in the descriptor's single
// register parameter.
class FastNewStrictArgumentsStub final : public PlatformCodeStub {
 public:
  // |skip_stub_frame|: when true, the generated code skips one frame
  // (the handler/stub frame Ignition pushes) to reach the JavaScript
  // frame of the function — see the stub's Generate() implementation.
  explicit FastNewStrictArgumentsStub(Isolate* isolate,
                                      bool skip_stub_frame = false)
      : PlatformCodeStub(isolate) {
    // Pack the flag into the minor key so the two variants are cached
    // as distinct code objects.
    minor_key_ = SkipStubFrameBits::encode(skip_stub_frame);
  }

  DEFINE_CALL_INTERFACE_DESCRIPTOR(FastNewStrictArguments);
  DEFINE_PLATFORM_CODE_STUB(FastNewStrictArguments, PlatformCodeStub);

  // Decodes the skip-stub-frame flag back out of the minor key.
  int skip_stub_frame() const { return SkipStubFrameBits::decode(minor_key_); }

 private:
  // Single boolean bit at position 0 of the minor key.
  class SkipStubFrameBits : public BitField<bool, 0, 1> {};
};
class CreateAllocationSiteStub : public TurboFanCodeStub {
public:
explicit CreateAllocationSiteStub(Isolate* isolate)

View File

@ -271,14 +271,16 @@ void FullCodeGenerator::Generate() {
__ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
}
if (is_strict(language_mode()) || !has_simple_parameters()) {
FastNewStrictArgumentsStub stub(isolate());
__ CallStub(&stub);
Callable callable = CodeFactory::FastNewStrictArguments(isolate());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
} else if (literal()->has_duplicate_parameters()) {
__ Push(r1);
__ CallRuntime(Runtime::kNewSloppyArguments_Generic);
} else {
FastNewSloppyArgumentsStub stub(isolate());
__ CallStub(&stub);
Callable callable = CodeFactory::FastNewSloppyArguments(isolate());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
SetVar(arguments, r0, r1, r2);

View File

@ -273,14 +273,16 @@ void FullCodeGenerator::Generate() {
__ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
}
if (is_strict(language_mode()) || !has_simple_parameters()) {
FastNewStrictArgumentsStub stub(isolate());
__ CallStub(&stub);
Callable callable = CodeFactory::FastNewStrictArguments(isolate());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
} else if (literal()->has_duplicate_parameters()) {
__ Push(x1);
__ CallRuntime(Runtime::kNewSloppyArguments_Generic);
} else {
FastNewSloppyArgumentsStub stub(isolate());
__ CallStub(&stub);
Callable callable = CodeFactory::FastNewSloppyArguments(isolate());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
SetVar(arguments, x0, x1, x2);

View File

@ -263,14 +263,16 @@ void FullCodeGenerator::Generate() {
__ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
}
if (is_strict(language_mode()) || !has_simple_parameters()) {
FastNewStrictArgumentsStub stub(isolate());
__ CallStub(&stub);
__ call(isolate()->builtins()->FastNewStrictArguments(),
RelocInfo::CODE_TARGET);
RestoreContext();
} else if (literal()->has_duplicate_parameters()) {
__ Push(edi);
__ CallRuntime(Runtime::kNewSloppyArguments_Generic);
} else {
FastNewSloppyArgumentsStub stub(isolate());
__ CallStub(&stub);
__ call(isolate()->builtins()->FastNewSloppyArguments(),
RelocInfo::CODE_TARGET);
RestoreContext();
}
SetVar(arguments, eax, ebx, edx);

View File

@ -281,14 +281,16 @@ void FullCodeGenerator::Generate() {
__ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
}
if (is_strict(language_mode()) || !has_simple_parameters()) {
FastNewStrictArgumentsStub stub(isolate());
__ CallStub(&stub);
Callable callable = CodeFactory::FastNewStrictArguments(isolate());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
} else if (literal()->has_duplicate_parameters()) {
__ Push(a1);
__ CallRuntime(Runtime::kNewSloppyArguments_Generic);
} else {
FastNewSloppyArgumentsStub stub(isolate());
__ CallStub(&stub);
Callable callable = CodeFactory::FastNewSloppyArguments(isolate());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
SetVar(arguments, v0, a1, a2);

View File

@ -280,14 +280,16 @@ void FullCodeGenerator::Generate() {
__ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
}
if (is_strict(language_mode()) || !has_simple_parameters()) {
FastNewStrictArgumentsStub stub(isolate());
__ CallStub(&stub);
Callable callable = CodeFactory::FastNewStrictArguments(isolate());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
} else if (literal()->has_duplicate_parameters()) {
__ Push(a1);
__ CallRuntime(Runtime::kNewSloppyArguments_Generic);
} else {
FastNewSloppyArgumentsStub stub(isolate());
__ CallStub(&stub);
Callable callable = CodeFactory::FastNewSloppyArguments(isolate());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
SetVar(arguments, v0, a1, a2);

View File

@ -260,14 +260,16 @@ void FullCodeGenerator::Generate() {
__ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
}
if (is_strict(language_mode()) || !has_simple_parameters()) {
FastNewStrictArgumentsStub stub(isolate());
__ CallStub(&stub);
__ call(isolate()->builtins()->FastNewStrictArguments(),
RelocInfo::CODE_TARGET);
RestoreContext();
} else if (literal()->has_duplicate_parameters()) {
__ Push(rdi);
__ CallRuntime(Runtime::kNewSloppyArguments_Generic);
} else {
FastNewSloppyArgumentsStub stub(isolate());
__ CallStub(&stub);
__ call(isolate()->builtins()->FastNewSloppyArguments(),
RelocInfo::CODE_TARGET);
RestoreContext();
}
SetVar(arguments, rax, rbx, rdx);

View File

@ -3064,558 +3064,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
// ia32 implementation: builds the rest parameter array (a JSArray of
// fast elements) for the function in edi. If the caller pushed no extra
// arguments beyond the formal parameter count, an empty array is
// returned; otherwise the extra arguments are copied off the stack.
// Falls back to the runtime when new-space allocation fails or the
// array would be too large for new space.
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edi    : function
  //  -- esi    : context
  //  -- ebp    : frame pointer
  //  -- esp[0] : return address
  // -----------------------------------
  __ AssertFunction(edi);

  // Make edx point to the JavaScript frame.
  __ mov(edx, ebp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    // Sanity check: the frame we found must belong to the function in edi.
    Label ok;
    __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have rest parameters (only possible if we have an
  // arguments adaptor frame below the function frame).
  Label no_rest_parameters;
  __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &no_rest_parameters, Label::kNear);

  // Check if the arguments adaptor frame contains more arguments than
  // specified by the function's internal formal parameter count.
  // eax = actual argument count - formal parameter count (both tagged).
  Label rest_parameters;
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ sub(eax,
         FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ j(greater, &rest_parameters);

  // Return an empty rest parameter array.
  __ bind(&no_rest_parameters);
  {
    // ----------- S t a t e -------------
    //  -- esi    : context
    //  -- esp[0] : return address
    // -----------------------------------

    // Allocate an empty rest parameter array.
    Label allocate, done_allocate;
    __ Allocate(JSArray::kSize, eax, edx, ecx, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Setup the rest parameter array in rax: map, empty properties and
    // elements, length 0.
    __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
    __ mov(FieldOperand(eax, JSArray::kMapOffset), ecx);
    __ mov(ecx, isolate()->factory()->empty_fixed_array());
    __ mov(FieldOperand(eax, JSArray::kPropertiesOffset), ecx);
    __ mov(FieldOperand(eax, JSArray::kElementsOffset), ecx);
    __ mov(FieldOperand(eax, JSArray::kLengthOffset), Immediate(Smi::kZero));
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(Smi::FromInt(JSArray::kSize));
      __ CallRuntime(Runtime::kAllocateInNewSpace);
    }
    __ jmp(&done_allocate);
  }

  __ bind(&rest_parameters);
  {
    // Compute the pointer to the first rest parameter (skippping the receiver).
    __ lea(ebx,
           Operand(ebx, eax, times_half_pointer_size,
                   StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));

    // ----------- S t a t e -------------
    //  -- esi    : context
    //  -- eax    : number of rest parameters (tagged)
    //  -- ebx    : pointer to first rest parameters
    //  -- esp[0] : return address
    // -----------------------------------

    // Allocate space for the rest parameter array plus the backing store
    // in one contiguous chunk (FixedArray first, JSArray right after it).
    Label allocate, done_allocate;
    __ lea(ecx, Operand(eax, times_half_pointer_size,
                        JSArray::kSize + FixedArray::kHeaderSize));
    __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Setup the elements array in edx.
    __ mov(FieldOperand(edx, FixedArray::kMapOffset),
           isolate()->factory()->fixed_array_map());
    __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
    {
      // Copy the rest parameters from the stack (ebx walks downwards)
      // into the elements array (ecx is the tagged loop index).
      Label loop, done_loop;
      __ Move(ecx, Smi::kZero);
      __ bind(&loop);
      __ cmp(ecx, eax);
      __ j(equal, &done_loop, Label::kNear);
      __ mov(edi, Operand(ebx, 0 * kPointerSize));
      __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
                          FixedArray::kHeaderSize),
             edi);
      __ sub(ebx, Immediate(1 * kPointerSize));
      __ add(ecx, Immediate(Smi::FromInt(1)));
      __ jmp(&loop);
      __ bind(&done_loop);
    }

    // Setup the rest parameter array in edi (placed directly after the
    // backing store).
    __ lea(edi,
           Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
    __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
    __ mov(FieldOperand(edi, JSArray::kMapOffset), ecx);
    __ mov(FieldOperand(edi, JSArray::kPropertiesOffset),
           isolate()->factory()->empty_fixed_array());
    __ mov(FieldOperand(edi, JSArray::kElementsOffset), edx);
    __ mov(FieldOperand(edi, JSArray::kLengthOffset), eax);
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ mov(eax, edi);
    __ Ret();

    // Fall back to %AllocateInNewSpace (if not too big).
    Label too_big_for_new_space;
    __ bind(&allocate);
    __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize));
    __ j(greater, &too_big_for_new_space);
    {
      // Preserve eax/ebx across the runtime allocation call.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(ecx);
      __ Push(eax);
      __ Push(ebx);
      __ Push(ecx);
      __ CallRuntime(Runtime::kAllocateInNewSpace);
      __ mov(edx, eax);
      __ Pop(ebx);
      __ Pop(eax);
    }
    __ jmp(&done_allocate);

    // Fall back to %NewRestParameter.
    __ bind(&too_big_for_new_space);
    __ PopReturnAddressTo(ecx);
    // We reload the function from the caller frame due to register pressure
    // within this stub. This is the slow path, hence reloading is preferable.
    if (skip_stub_frame()) {
      // For Ignition we need to skip the handler/stub frame to reach the
      // JavaScript frame for the function.
      __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
      __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
    } else {
      __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
    }
    __ PushReturnAddressFrom(ecx);
    __ TailCallRuntime(Runtime::kNewRestParameter);
  }
}
// ia32 implementation: builds a sloppy-mode arguments object for the
// function in edi, including (when there are mapped parameters) the
// parameter map that aliases stack slots with context slots
// (FAST_ALIASED_ARGUMENTS). Allocates the parameter map, backing store
// and the JSSloppyArgumentsObject in one chunk; falls back to
// %NewSloppyArguments on allocation failure.
void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edi    : function
  //  -- esi    : context
  //  -- ebp    : frame pointer
  //  -- esp[0] : return address
  // -----------------------------------
  __ AssertFunction(edi);

  // Make ecx point to the JavaScript frame.
  __ mov(ecx, ebp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ mov(ecx, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    // Sanity check: the frame we found must belong to the function in edi.
    Label ok;
    __ cmp(edi, Operand(ecx, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewSloppyArgumentsStub);
    __ bind(&ok);
  }

  // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
  // Load the formal parameter count and compute the parameters pointer
  // from the frame.
  __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ebx,
         FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ lea(edx, Operand(ecx, ebx, times_half_pointer_size,
                      StandardFrameConstants::kCallerSPOffset));

  // ebx : number of parameters (tagged)
  // edx : parameters pointer
  // edi : function
  // ecx : JavaScript frame pointer.
  // esp[0] : return address

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(eax, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
  __ mov(eax, Operand(eax, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // No adaptor, parameter count = argument count.
  __ mov(ecx, ebx);
  __ push(ebx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ push(ebx);
  __ mov(edx, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));

  // ebx = parameter count (tagged)
  // ecx = argument count (smi-tagged)
  // Compute the mapped parameter count = min(ebx, ecx) in ebx.
  __ cmp(ebx, ecx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ mov(ebx, ecx);

  // Save mapped parameter count and function.
  __ bind(&try_allocate);
  __ push(edi);
  __ push(ebx);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ add(ebx, Immediate(JSSloppyArgumentsObject::kSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(ebx, eax, edi, no_reg, &runtime, NO_ALLOCATION_FLAGS);

  // eax = address of new object(s) (tagged)
  // ecx = argument count (smi-tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[4] = function
  // esp[8] = parameter count (tagged)
  // Get the arguments map from the current native context into edi:
  // SLOPPY_ARGUMENTS_MAP when nothing is mapped, otherwise the
  // FAST_ALIASED_ARGUMENTS map.
  Label has_mapped_parameters, instantiate;
  __ mov(edi, NativeContextOperand());
  __ mov(ebx, Operand(esp, 0 * kPointerSize));
  __ test(ebx, ebx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);
  __ mov(
      edi,
      Operand(edi, Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX)));
  __ jmp(&instantiate, Label::kNear);

  __ bind(&has_mapped_parameters);
  __ mov(edi, Operand(edi, Context::SlotOffset(
                               Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX)));
  __ bind(&instantiate);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (smi-tagged)
  // edi = address of arguments map (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[4] = function
  // esp[8] = parameter count (tagged)
  // Copy the JS object part.
  __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         masm->isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         masm->isolate()->factory()->empty_fixed_array());

  // Set up the callee in-object property.
  STATIC_ASSERT(JSSloppyArgumentsObject::kCalleeIndex == 1);
  __ mov(edi, Operand(esp, 1 * kPointerSize));
  __ AssertNotSmi(edi);
  __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kCalleeOffset), edi);

  // Use the length (smi tagged) and set that as an in-object property too.
  __ AssertSmi(ecx);
  __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kLengthOffset), ecx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, edi will point there, otherwise to the
  // backing store.
  __ lea(edi, Operand(eax, JSSloppyArgumentsObject::kSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (tagged)
  // edx = address of receiver argument
  // edi = address of parameter map or backing store (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[4] = function
  // esp[8] = parameter count (tagged)
  // Free two registers.
  __ push(edx);
  __ push(eax);

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &skip_parameter_map);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
  // Parameter map length = mapped count + 2 (context and backing store).
  __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
  __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;
  __ push(ecx);
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
  __ add(ebx, Operand(esp, 5 * kPointerSize));
  __ sub(ebx, eax);
  __ mov(ecx, isolate()->factory()->the_hole_value());
  __ mov(edx, edi);
  __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
  // eax = loop variable (tagged)
  // ebx = mapping index (tagged)
  // ecx = the hole value
  // edx = address of parameter map (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = argument count (tagged)
  // esp[4] = address of new object (tagged)
  // esp[8] = address of receiver argument
  // esp[12] = mapped parameter count (tagged)
  // esp[16] = function
  // esp[20] = parameter count (tagged)
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ sub(eax, Immediate(Smi::FromInt(1)));
  __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
  __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
  __ add(ebx, Immediate(Smi::FromInt(1)));
  __ bind(&parameters_test);
  __ test(eax, eax);
  __ j(not_zero, &parameters_loop, Label::kNear);
  __ pop(ecx);

  __ bind(&skip_parameter_map);

  // ecx = argument count (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = address of new object (tagged)
  // esp[4] = address of receiver argument
  // esp[8] = mapped parameter count (tagged)
  // esp[12] = function
  // esp[16] = parameter count (tagged)
  // Copy arguments header and remaining slots (if there are any).
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  Label arguments_loop, arguments_test;
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ sub(edx, ebx);  // Is there a smarter way to do negative scaling?
  __ sub(edx, ebx);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ sub(edx, Immediate(kPointerSize));
  __ mov(eax, Operand(edx, 0));
  __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
  __ add(ebx, Immediate(Smi::FromInt(1)));

  __ bind(&arguments_test);
  __ cmp(ebx, ecx);
  __ j(less, &arguments_loop, Label::kNear);

  // Restore.
  __ pop(eax);  // Address of arguments object.
  __ Drop(4);

  // Return.
  __ ret(0);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ pop(eax);  // Remove saved mapped parameter count.
  __ pop(edi);  // Pop saved function.
  __ pop(eax);  // Remove saved parameter count.
  __ pop(eax);  // Pop return address.
  __ push(edi);  // Push function.
  __ push(edx);  // Push parameters pointer.
  __ push(ecx);  // Push parameter count.
  __ push(eax);  // Push return address.
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
// ia32 implementation: builds a strict-mode arguments object
// (JSStrictArgumentsObject, no callee/mapped parameters) for the
// function in edi, copying the actual arguments off the stack into a
// freshly allocated backing store. Falls back to %NewStrictArguments
// when the allocation does not fit in new space.
void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edi    : function
  //  -- esi    : context
  //  -- ebp    : frame pointer
  //  -- esp[0] : return address
  // -----------------------------------
  __ AssertFunction(edi);

  // Make edx point to the JavaScript frame.
  __ mov(edx, ebp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    // Sanity check: the frame we found must belong to the function in edi.
    Label ok;
    __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewStrictArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  // Either way, end up with the (tagged) argument count in eax and a
  // pointer to the first argument in ebx.
  Label arguments_adaptor, arguments_done;
  __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    // No adaptor frame: the count is the formal parameter count.
    __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ mov(eax,
           FieldOperand(eax, SharedFunctionInfo::kFormalParameterCountOffset));
    __ lea(ebx,
           Operand(edx, eax, times_half_pointer_size,
                   StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    // Adaptor frame: the count is the adaptor frame's length slot.
    __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ lea(ebx,
           Operand(ebx, eax, times_half_pointer_size,
                   StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
  }
  __ bind(&arguments_done);

  // ----------- S t a t e -------------
  //  -- eax    : number of arguments (tagged)
  //  -- ebx    : pointer to the first argument
  //  -- esi    : context
  //  -- esp[0] : return address
  // -----------------------------------

  // Allocate space for the strict arguments object plus the backing store
  // in one contiguous chunk (FixedArray first, arguments object after it).
  Label allocate, done_allocate;
  __ lea(ecx,
         Operand(eax, times_half_pointer_size,
                 JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
  __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Setup the elements array in edx.
  __ mov(FieldOperand(edx, FixedArray::kMapOffset),
         isolate()->factory()->fixed_array_map());
  __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
  {
    // Copy the arguments from the stack (ebx walks downwards) into the
    // elements array (ecx is the tagged loop index).
    Label loop, done_loop;
    __ Move(ecx, Smi::kZero);
    __ bind(&loop);
    __ cmp(ecx, eax);
    __ j(equal, &done_loop, Label::kNear);
    __ mov(edi, Operand(ebx, 0 * kPointerSize));
    __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
                        FixedArray::kHeaderSize),
           edi);
    __ sub(ebx, Immediate(1 * kPointerSize));
    __ add(ecx, Immediate(Smi::FromInt(1)));
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Setup the rest parameter array in edi.
  __ lea(edi,
         Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
  __ LoadGlobalFunction(Context::STRICT_ARGUMENTS_MAP_INDEX, ecx);
  __ mov(FieldOperand(edi, JSStrictArgumentsObject::kMapOffset), ecx);
  __ mov(FieldOperand(edi, JSStrictArgumentsObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(edi, JSStrictArgumentsObject::kElementsOffset), edx);
  __ mov(FieldOperand(edi, JSStrictArgumentsObject::kLengthOffset), eax);
  STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
  __ mov(eax, edi);
  __ Ret();

  // Fall back to %AllocateInNewSpace (if not too big).
  Label too_big_for_new_space;
  __ bind(&allocate);
  __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize));
  __ j(greater, &too_big_for_new_space);
  {
    // Preserve eax/ebx across the runtime allocation call.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ SmiTag(ecx);
    __ Push(eax);
    __ Push(ebx);
    __ Push(ecx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ mov(edx, eax);
    __ Pop(ebx);
    __ Pop(eax);
  }
  __ jmp(&done_allocate);

  // Fall back to %NewStrictArguments.
  __ bind(&too_big_for_new_space);
  __ PopReturnAddressTo(ecx);
  // We reload the function from the caller frame due to register pressure
  // within this stub. This is the slow path, hence reloading is preferable.
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
    __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
  } else {
    __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
  }
  __ PushReturnAddressFrom(ecx);
  __ TailCallRuntime(Runtime::kNewStrictArguments);
}
// Generates an Operand for saving parameters after PrepareCallApiFunction.
static Operand ApiParameterOperand(int index) {
return Operand(esp, index * kPointerSize);

View File

@ -69,27 +69,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers, NULL);
}
// ia32 call interface for FastNewRestParameterStub: the single register
// parameter (the function, per the stub's state comments) is passed in edi.
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register registers[] = {edi};
  // No platform-specific descriptor; prefer nullptr over NULL in C++11 code.
  data->InitializePlatformSpecific(arraysize(registers), registers, nullptr);
}
// ia32 call interface for FastNewSloppyArgumentsStub: the single register
// parameter (the function, per the stub's state comments) is passed in edi.
void FastNewSloppyArgumentsDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register registers[] = {edi};
  // No platform-specific descriptor; prefer nullptr over NULL in C++11 code.
  data->InitializePlatformSpecific(arraysize(registers), registers, nullptr);
}
// ia32 call interface for FastNewStrictArgumentsStub: the single register
// parameter (the function, per the stub's state comments) is passed in edi.
void FastNewStrictArgumentsDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register registers[] = {edi};
  // No platform-specific descriptor; prefer nullptr over NULL in C++11 code.
  data->InitializePlatformSpecific(arraysize(registers), registers, nullptr);
}
// static
const Register TypeConversionDescriptor::ArgumentRegister() { return eax; }

View File

@ -88,6 +88,16 @@ const Register FastNewObjectDescriptor::NewTargetRegister() {
return kJavaScriptCallNewTargetRegister;
}
// Call interface for the CSA-based FastNewArguments builtins: a single
// register parameter, the target function (TargetRegister() below).
void FastNewArgumentsDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register registers[] = {TargetRegister()};
  data->InitializePlatformSpecific(arraysize(registers), registers);
}
// The register holding the function for which arguments are created:
// reuse the platform's JS-call function register.
const Register FastNewArgumentsDescriptor::TargetRegister() {
  return kJSFunctionRegister;
}
void LoadDescriptor::InitializePlatformIndependent(
CallInterfaceDescriptorData* data) {
// kReceiver, kName, kSlot

View File

@ -33,9 +33,7 @@ class PlatformInterfaceDescriptor;
V(FastNewClosure) \
V(FastNewFunctionContext) \
V(FastNewObject) \
V(FastNewRestParameter) \
V(FastNewSloppyArguments) \
V(FastNewStrictArguments) \
V(FastNewArguments) \
V(TypeConversion) \
V(Typeof) \
V(FastCloneRegExp) \
@ -497,21 +495,11 @@ class FastNewObjectDescriptor : public CallInterfaceDescriptor {
static const Register NewTargetRegister();
};
class FastNewRestParameterDescriptor : public CallInterfaceDescriptor {
class FastNewArgumentsDescriptor : public CallInterfaceDescriptor {
public:
DECLARE_DESCRIPTOR(FastNewRestParameterDescriptor, CallInterfaceDescriptor)
};
class FastNewSloppyArgumentsDescriptor : public CallInterfaceDescriptor {
public:
DECLARE_DESCRIPTOR(FastNewSloppyArgumentsDescriptor,
CallInterfaceDescriptor)
};
class FastNewStrictArgumentsDescriptor : public CallInterfaceDescriptor {
public:
DECLARE_DESCRIPTOR(FastNewStrictArgumentsDescriptor,
CallInterfaceDescriptor)
DEFINE_PARAMETERS(kFunction)
DECLARE_DESCRIPTOR(FastNewArgumentsDescriptor, CallInterfaceDescriptor)
static const Register TargetRegister();
};
class TypeConversionDescriptor final : public CallInterfaceDescriptor {

View File

@ -8,6 +8,7 @@
#include <memory>
#include "src/ast/prettyprinter.h"
#include "src/builtins/builtins-arguments.h"
#include "src/builtins/builtins-constructor.h"
#include "src/code-factory.h"
#include "src/compilation-info.h"
@ -2935,10 +2936,9 @@ void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) {
__ Bind(&if_not_duplicate_parameters);
{
// TODO(rmcilroy): Inline FastNewSloppyArguments when it is a TurboFan stub.
Callable callable = CodeFactory::FastNewSloppyArguments(isolate_, true);
Node* target = __ HeapConstant(callable.code());
Node* result = __ CallStub(callable.descriptor(), target, context, closure);
ArgumentsBuiltinsAssembler constructor_assembler(assembler->state());
Node* result =
constructor_assembler.EmitFastNewSloppyArguments(context, closure);
__ SetAccumulator(result);
__ Dispatch();
}
@ -2956,12 +2956,11 @@ void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) {
//
// Creates a new unmapped arguments object.
void Interpreter::DoCreateUnmappedArguments(InterpreterAssembler* assembler) {
// TODO(rmcilroy): Inline FastNewStrictArguments when it is a TurboFan stub.
Callable callable = CodeFactory::FastNewStrictArguments(isolate_, true);
Node* target = __ HeapConstant(callable.code());
Node* context = __ GetContext();
Node* closure = __ LoadRegister(Register::function_closure());
Node* result = __ CallStub(callable.descriptor(), target, context, closure);
ArgumentsBuiltinsAssembler builtins_assembler(assembler->state());
Node* result =
builtins_assembler.EmitFastNewStrictArguments(context, closure);
__ SetAccumulator(result);
__ Dispatch();
}
@ -2970,12 +2969,10 @@ void Interpreter::DoCreateUnmappedArguments(InterpreterAssembler* assembler) {
//
// Creates a new rest parameter array.
void Interpreter::DoCreateRestParameter(InterpreterAssembler* assembler) {
// TODO(rmcilroy): Inline FastNewRestArguments when it is a TurboFan stub.
Callable callable = CodeFactory::FastNewRestParameter(isolate_, true);
Node* target = __ HeapConstant(callable.code());
Node* closure = __ LoadRegister(Register::function_closure());
Node* context = __ GetContext();
Node* result = __ CallStub(callable.descriptor(), target, context, closure);
ArgumentsBuiltinsAssembler builtins_assembler(assembler->state());
Node* result = builtins_assembler.EmitFastNewRestParameter(context, closure);
__ SetAccumulator(result);
__ Dispatch();
}

View File

@ -3307,499 +3307,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
// Builds the rest parameter array for the topmost JavaScript frame.
// Fast path: if there is no arguments adaptor frame below the function
// frame, or the adaptor supplies no more arguments than the formal
// parameter count, an empty JSArray is returned.  Otherwise a JSArray
// plus a FixedArray backing store for the excess arguments is allocated
// in new space, falling back to the runtime when allocation fails or
// the object is too large for new space.
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a1 : function
  //  -- cp : context
  //  -- fp : frame pointer
  //  -- ra : return address
  // -----------------------------------
  __ AssertFunction(a1);

  // Make a2 point to the JavaScript frame.
  __ mov(a2, fp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ lw(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset));
    __ Branch(&ok, eq, a1, Operand(a3));
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have rest parameters (only possible if we have an
  // arguments adaptor frame below the function frame).
  Label no_rest_parameters;
  __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
  __ lw(a3, MemOperand(a2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Branch(&no_rest_parameters, ne, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Check if the arguments adaptor frame contains more arguments than
  // specified by the function's internal formal parameter count.
  Label rest_parameters;
  __ lw(a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  // a0 = actual count - formal count; both values are Smis here, so the
  // difference is a (possibly non-positive) Smi as well.
  __ Subu(a0, a0, Operand(a3));
  __ Branch(&rest_parameters, gt, a0, Operand(zero_reg));

  // Return an empty rest parameter array.
  __ bind(&no_rest_parameters);
  {
    // ----------- S t a t e -------------
    //  -- cp : context
    //  -- ra : return address
    // -----------------------------------

    // Allocate an empty rest parameter array.
    Label allocate, done_allocate;
    __ Allocate(JSArray::kSize, v0, a0, a1, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Setup the rest parameter array in v0.
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, a1);
    __ sw(a1, FieldMemOperand(v0, JSArray::kMapOffset));
    __ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex);
    __ sw(a1, FieldMemOperand(v0, JSArray::kPropertiesOffset));
    __ sw(a1, FieldMemOperand(v0, JSArray::kElementsOffset));
    __ Move(a1, Smi::kZero);
    __ Ret(USE_DELAY_SLOT);
    __ sw(a1, FieldMemOperand(v0, JSArray::kLengthOffset));  // In delay slot
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(Smi::FromInt(JSArray::kSize));
      __ CallRuntime(Runtime::kAllocateInNewSpace);
    }
    __ jmp(&done_allocate);
  }

  __ bind(&rest_parameters);
  {
    // Compute the pointer to the first rest parameter (skippping the receiver).
    // Shift is kPointerSizeLog2 - 1 because a0 holds a Smi (value * 2).
    __ Lsa(a2, a2, a0, kPointerSizeLog2 - 1);
    __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
                            1 * kPointerSize));

    // ----------- S t a t e -------------
    //  -- cp : context
    //  -- a0 : number of rest parameters (tagged)
    //  -- a1 : function
    //  -- a2 : pointer to first rest parameters
    //  -- ra : return address
    // -----------------------------------

    // Allocate space for the rest parameter array plus the backing store.
    Label allocate, done_allocate;
    __ li(t0, Operand(JSArray::kSize + FixedArray::kHeaderSize));
    __ Lsa(t0, t0, a0, kPointerSizeLog2 - 1);
    __ Allocate(t0, v0, a3, t1, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Setup the elements array in v0.
    __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
    __ sw(at, FieldMemOperand(v0, FixedArray::kMapOffset));
    __ sw(a0, FieldMemOperand(v0, FixedArray::kLengthOffset));
    __ Addu(a3, v0, Operand(FixedArray::kHeaderSize));
    {
      // Copy the arguments: a2 walks down the stack (toward lower
      // addresses) while a3 walks up the FixedArray.
      Label loop, done_loop;
      __ sll(at, a0, kPointerSizeLog2 - 1);
      __ Addu(a1, a3, at);
      __ bind(&loop);
      __ Branch(&done_loop, eq, a1, Operand(a3));
      __ lw(at, MemOperand(a2, 0 * kPointerSize));
      __ sw(at, FieldMemOperand(a3, 0 * kPointerSize));
      __ Subu(a2, a2, Operand(1 * kPointerSize));
      __ Addu(a3, a3, Operand(1 * kPointerSize));
      __ jmp(&loop);
      __ bind(&done_loop);
    }

    // Setup the rest parameter array in a3.
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, at);
    __ sw(at, FieldMemOperand(a3, JSArray::kMapOffset));
    __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
    __ sw(at, FieldMemOperand(a3, JSArray::kPropertiesOffset));
    __ sw(v0, FieldMemOperand(a3, JSArray::kElementsOffset));
    __ sw(a0, FieldMemOperand(a3, JSArray::kLengthOffset));
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a3);  // In delay slot

    // Fall back to %AllocateInNewSpace (if not too big).
    Label too_big_for_new_space;
    __ bind(&allocate);
    __ Branch(&too_big_for_new_space, gt, t0,
              Operand(kMaxRegularHeapObjectSize));
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(t0);
      __ Push(a0, a2, t0);
      __ CallRuntime(Runtime::kAllocateInNewSpace);
      __ Pop(a0, a2);
    }
    __ jmp(&done_allocate);

    // Fall back to %NewStrictArguments.
    __ bind(&too_big_for_new_space);
    __ Push(a1);
    __ TailCallRuntime(Runtime::kNewStrictArguments);
  }
}
// Builds a sloppy-mode arguments object for the topmost JavaScript frame.
// Allocates, in one contiguous chunk: the JSSloppyArgumentsObject, an
// optional parameter map that aliases mapped parameters into the context,
// and the FixedArray backing store.  Falls back to %NewSloppyArguments
// when the new-space allocation fails.
void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a1 : function
  //  -- cp : context
  //  -- fp : frame pointer
  //  -- ra : return address
  // -----------------------------------
  __ AssertFunction(a1);

  // Make t0 point to the JavaScript frame.
  __ mov(t0, fp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ lw(t0, MemOperand(t0, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ lw(a3, MemOperand(t0, StandardFrameConstants::kFunctionOffset));
    __ Branch(&ok, eq, a1, Operand(a3));
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2,
        FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  // a3 = pointer just past the formal parameters on the caller's stack;
  // shift by kPointerSizeLog2 - 1 because a2 is a Smi.
  __ Lsa(a3, t0, a2, kPointerSizeLog2 - 1);
  __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));

  // a1 : function
  // a2 : number of parameters (tagged)
  // a3 : parameters pointer
  // t0 : Javascript frame pointer
  // Registers used over whole function:
  //  t1 : arguments count (tagged)
  //  t2 : mapped parameter count (tagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ lw(t0, MemOperand(t0, StandardFrameConstants::kCallerFPOffset));
  __ lw(a0, MemOperand(t0, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Branch(&adaptor_frame, eq, a0,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // No adaptor, parameter count = argument count.
  __ mov(t1, a2);
  __ Branch(USE_DELAY_SLOT, &try_allocate);
  __ mov(t2, a2);  // In delay slot.

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ lw(t1, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ Lsa(t0, t0, t1, 1);
  __ Addu(a3, t0, Operand(StandardFrameConstants::kCallerSPOffset));

  // t1 = argument count (tagged)
  // t2 = parameter count (tagged)
  // Compute the mapped parameter count = min(t2, t1) in t2.
  __ mov(t2, a2);
  __ Branch(&try_allocate, le, t2, Operand(t1));
  __ mov(t2, t1);

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  // If there are no mapped parameters, we do not need the parameter_map.
  Label param_map_size;
  DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
  __ Branch(USE_DELAY_SLOT, &param_map_size, eq, t2, Operand(zero_reg));
  __ mov(t5, zero_reg);  // In delay slot: param map size = 0 when t2 == 0.
  __ sll(t5, t2, 1);
  __ addiu(t5, t5, kParameterMapHeaderSize);
  __ bind(&param_map_size);

  // 2. Backing store.
  __ Lsa(t5, t5, t1, 1);
  __ Addu(t5, t5, Operand(FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ Addu(t5, t5, Operand(JSSloppyArgumentsObject::kSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(t5, v0, t5, t0, &runtime, NO_ALLOCATION_FLAGS);

  // v0 = address of new object(s) (tagged)
  // a2 = argument count (smi-tagged)
  // Get the arguments boilerplate from the current native context into t0.
  // The aliased map is only needed when there are mapped parameters.
  const int kNormalOffset =
      Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
  const int kAliasedOffset =
      Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);

  __ lw(t0, NativeContextMemOperand());
  Label skip2_ne, skip2_eq;
  __ Branch(&skip2_ne, ne, t2, Operand(zero_reg));
  __ lw(t0, MemOperand(t0, kNormalOffset));
  __ bind(&skip2_ne);

  __ Branch(&skip2_eq, eq, t2, Operand(zero_reg));
  __ lw(t0, MemOperand(t0, kAliasedOffset));
  __ bind(&skip2_eq);

  // v0 = address of new object (tagged)
  // a2 = argument count (smi-tagged)
  // t0 = address of arguments map (tagged)
  // t2 = mapped parameter count (tagged)
  __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset));
  __ LoadRoot(t5, Heap::kEmptyFixedArrayRootIndex);
  __ sw(t5, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(t5, FieldMemOperand(v0, JSObject::kElementsOffset));

  // Set up the callee in-object property.
  __ AssertNotSmi(a1);
  __ sw(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset));

  // Use the length (smi tagged) and set that as an in-object property too.
  __ AssertSmi(t1);
  __ sw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, t0 will point there, otherwise
  // it will point to the backing store.
  __ Addu(t0, v0, Operand(JSSloppyArgumentsObject::kSize));
  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));

  // v0 = address of new object (tagged)
  // a2 = argument count (tagged)
  // t0 = address of parameter map or backing store (tagged)
  // t2 = mapped parameter count (tagged)
  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  Label skip3;
  __ Branch(&skip3, ne, t2, Operand(Smi::kZero));
  // Move backing store address to a1, because it is
  // expected there when filling in the unmapped arguments.
  __ mov(a1, t0);
  __ bind(&skip3);

  __ Branch(&skip_parameter_map, eq, t2, Operand(Smi::kZero));

  __ LoadRoot(t1, Heap::kSloppyArgumentsElementsMapRootIndex);
  __ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset));
  // Parameter map length = mapped count + 2 (context and backing store).
  __ Addu(t1, t2, Operand(Smi::FromInt(2)));
  __ sw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset));
  __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize));
  __ Lsa(t1, t0, t2, 1);
  __ Addu(t1, t1, Operand(kParameterMapHeaderSize));
  __ sw(t1, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize));

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;
  __ mov(t1, t2);
  __ Addu(t5, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
  __ Subu(t5, t5, Operand(t2));
  __ LoadRoot(t3, Heap::kTheHoleValueRootIndex);
  __ Lsa(a1, t0, t1, 1);
  __ Addu(a1, a1, Operand(kParameterMapHeaderSize));

  // a1 = address of backing store (tagged)
  // t0 = address of parameter map (tagged)
  // a0 = temporary scratch (a.o., for address calculation)
  // t1 = loop variable (tagged)
  // t3 = the hole value
  __ jmp(&parameters_test);

  __ bind(&parameters_loop);
  __ Subu(t1, t1, Operand(Smi::FromInt(1)));
  __ sll(a0, t1, 1);
  __ Addu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
  // Store the context index into the parameter map slot...
  __ Addu(t6, t0, a0);
  __ sw(t5, MemOperand(t6));
  // ...and a hole into the corresponding backing store slot.
  __ Subu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
  __ Addu(t6, a1, a0);
  __ sw(t3, MemOperand(t6));
  __ Addu(t5, t5, Operand(Smi::FromInt(1)));
  __ bind(&parameters_test);
  __ Branch(&parameters_loop, ne, t1, Operand(Smi::kZero));

  // Restore t1 = argument count (tagged).
  __ lw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));

  __ bind(&skip_parameter_map);
  // v0 = address of new object (tagged)
  // a1 = address of backing store (tagged)
  // t1 = argument count (tagged)
  // t2 = mapped parameter count (tagged)
  // t5 = scratch
  // Copy arguments header and remaining slots (if there are any).
  __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex);
  __ sw(t5, FieldMemOperand(a1, FixedArray::kMapOffset));
  __ sw(t1, FieldMemOperand(a1, FixedArray::kLengthOffset));

  // Copy the remaining (unmapped) arguments from the stack into the
  // backing store, starting at index t2 (the mapped parameter count).
  Label arguments_loop, arguments_test;
  __ sll(t6, t2, 1);
  __ Subu(a3, a3, Operand(t6));
  __ jmp(&arguments_test);

  __ bind(&arguments_loop);
  __ Subu(a3, a3, Operand(kPointerSize));
  __ lw(t0, MemOperand(a3, 0));
  __ Lsa(t5, a1, t2, 1);
  __ sw(t0, FieldMemOperand(t5, FixedArray::kHeaderSize));
  __ Addu(t2, t2, Operand(Smi::FromInt(1)));

  __ bind(&arguments_test);
  __ Branch(&arguments_loop, lt, t2, Operand(t1));

  // Return.
  __ Ret();

  // Do the runtime call to allocate the arguments object.
  // t1 = argument count (tagged)
  __ bind(&runtime);
  __ Push(a1, a3, t1);
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
// Builds a strict-mode arguments object (no callee property, no parameter
// map) for the topmost JavaScript frame.  The JSStrictArgumentsObject and
// its FixedArray backing store are allocated in one chunk; allocation
// failures fall back to the runtime.
void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a1 : function
  //  -- cp : context
  //  -- fp : frame pointer
  //  -- ra : return address
  // -----------------------------------
  __ AssertFunction(a1);

  // Make a2 point to the JavaScript frame.
  __ mov(a2, fp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ lw(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset));
    __ Branch(&ok, eq, a1, Operand(a3));
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ lw(a3, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
  __ lw(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Branch(&arguments_adaptor, eq, a0,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  {
    // No adaptor frame: the argument count is the formal parameter count.
    __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a0,
          FieldMemOperand(t0, SharedFunctionInfo::kFormalParameterCountOffset));
    __ Lsa(a2, a2, a0, kPointerSizeLog2 - 1);
    __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
                            1 * kPointerSize));
  }
  __ Branch(&arguments_done);
  __ bind(&arguments_adaptor);
  {
    // Adaptor frame: use the actual argument count recorded there.
    __ lw(a0, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ Lsa(a2, a3, a0, kPointerSizeLog2 - 1);
    __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
                            1 * kPointerSize));
  }
  __ bind(&arguments_done);

  // ----------- S t a t e -------------
  //  -- cp : context
  //  -- a0 : number of rest parameters (tagged)
  //  -- a1 : function
  //  -- a2 : pointer to first rest parameters
  //  -- ra : return address
  // -----------------------------------

  // Allocate space for the strict arguments object plus the backing store.
  Label allocate, done_allocate;
  __ li(t0, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
  __ Lsa(t0, t0, a0, kPointerSizeLog2 - 1);
  __ Allocate(t0, v0, a3, t1, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Setup the elements array in v0.
  __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
  __ sw(at, FieldMemOperand(v0, FixedArray::kMapOffset));
  __ sw(a0, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ Addu(a3, v0, Operand(FixedArray::kHeaderSize));
  {
    // Copy the arguments: a2 walks down the stack while a3 walks up the
    // FixedArray.
    Label loop, done_loop;
    __ sll(at, a0, kPointerSizeLog2 - 1);
    __ Addu(a1, a3, at);
    __ bind(&loop);
    __ Branch(&done_loop, eq, a1, Operand(a3));
    __ lw(at, MemOperand(a2, 0 * kPointerSize));
    __ sw(at, FieldMemOperand(a3, 0 * kPointerSize));
    __ Subu(a2, a2, Operand(1 * kPointerSize));
    __ Addu(a3, a3, Operand(1 * kPointerSize));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Setup the strict arguments object in a3.
  __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, at);
  __ sw(at, FieldMemOperand(a3, JSStrictArgumentsObject::kMapOffset));
  __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
  __ sw(at, FieldMemOperand(a3, JSStrictArgumentsObject::kPropertiesOffset));
  __ sw(v0, FieldMemOperand(a3, JSStrictArgumentsObject::kElementsOffset));
  __ sw(a0, FieldMemOperand(a3, JSStrictArgumentsObject::kLengthOffset));
  STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a3);  // In delay slot

  // Fall back to %AllocateInNewSpace (if not too big).
  Label too_big_for_new_space;
  __ bind(&allocate);
  __ Branch(&too_big_for_new_space, gt, t0, Operand(kMaxRegularHeapObjectSize));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ SmiTag(t0);
    __ Push(a0, a2, t0);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ Pop(a0, a2);
  }
  __ jmp(&done_allocate);

  // Fall back to %NewStrictArguments.
  __ bind(&too_big_for_new_space);
  __ Push(a1);
  __ TailCallRuntime(Runtime::kNewStrictArguments);
}
// Byte distance between two external reference addresses (ref0 - ref1).
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
  const int delta = ref0.address() - ref1.address();
  return delta;
}

View File

@ -68,27 +68,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers, NULL);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // Single register parameter: the target function arrives in a1.
  Register regs[] = {a1};
  data->InitializePlatformSpecific(arraysize(regs), regs, NULL);
}
void FastNewSloppyArgumentsDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // Single register parameter: the target function arrives in a1.
  Register regs[] = {a1};
  data->InitializePlatformSpecific(arraysize(regs), regs, NULL);
}
void FastNewStrictArgumentsDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // Single register parameter: the target function arrives in a1.
  Register regs[] = {a1};
  data->InitializePlatformSpecific(arraysize(regs), regs, NULL);
}
// static
const Register TypeConversionDescriptor::ArgumentRegister() {
  // Type conversion stubs take their sole argument in a0.
  return a0;
}

View File

@ -3309,515 +3309,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
// MIPS64 variant: builds the rest parameter array for the topmost
// JavaScript frame.  Unlike the 32-bit version, the argument count is
// kept untagged (SmiLoadUntag) in a0 and re-tagged only when stored.
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a1 : function
  //  -- cp : context
  //  -- fp : frame pointer
  //  -- ra : return address
  // -----------------------------------
  __ AssertFunction(a1);

  // Make a2 point to the JavaScript frame.
  __ mov(a2, fp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ ld(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset));
    __ Branch(&ok, eq, a1, Operand(a3));
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have rest parameters (only possible if we have an
  // arguments adaptor frame below the function frame).
  Label no_rest_parameters;
  __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
  __ ld(a3, MemOperand(a2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Branch(&no_rest_parameters, ne, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Check if the arguments adaptor frame contains more arguments than
  // specified by the function's internal formal parameter count.
  Label rest_parameters;
  __ SmiLoadUntag(
      a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ ld(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  // a0 = actual count - formal count (both untagged here).
  __ Dsubu(a0, a0, Operand(a3));
  __ Branch(&rest_parameters, gt, a0, Operand(zero_reg));

  // Return an empty rest parameter array.
  __ bind(&no_rest_parameters);
  {
    // ----------- S t a t e -------------
    //  -- cp : context
    //  -- ra : return address
    // -----------------------------------

    // Allocate an empty rest parameter array.
    Label allocate, done_allocate;
    __ Allocate(JSArray::kSize, v0, a0, a1, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Setup the rest parameter array in v0.
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, a1);
    __ sd(a1, FieldMemOperand(v0, JSArray::kMapOffset));
    __ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex);
    __ sd(a1, FieldMemOperand(v0, JSArray::kPropertiesOffset));
    __ sd(a1, FieldMemOperand(v0, JSArray::kElementsOffset));
    __ Move(a1, Smi::kZero);
    __ Ret(USE_DELAY_SLOT);
    __ sd(a1, FieldMemOperand(v0, JSArray::kLengthOffset));  // In delay slot
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(Smi::FromInt(JSArray::kSize));
      __ CallRuntime(Runtime::kAllocateInNewSpace);
    }
    __ jmp(&done_allocate);
  }

  __ bind(&rest_parameters);
  {
    // Compute the pointer to the first rest parameter (skippping the receiver).
    __ Dlsa(a2, a2, a0, kPointerSizeLog2);
    __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));

    // ----------- S t a t e -------------
    //  -- cp : context
    //  -- a0 : number of rest parameters
    //  -- a1 : function
    //  -- a2 : pointer to first rest parameters
    //  -- ra : return address
    // -----------------------------------

    // Allocate space for the rest parameter array plus the backing store.
    Label allocate, done_allocate;
    __ li(a5, Operand(JSArray::kSize + FixedArray::kHeaderSize));
    __ Dlsa(a5, a5, a0, kPointerSizeLog2);
    __ Allocate(a5, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Compute arguments.length in a4.
    __ SmiTag(a4, a0);

    // Setup the elements array in v0.
    __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
    __ sd(at, FieldMemOperand(v0, FixedArray::kMapOffset));
    __ sd(a4, FieldMemOperand(v0, FixedArray::kLengthOffset));
    __ Daddu(a3, v0, Operand(FixedArray::kHeaderSize));
    {
      // Copy the arguments: a2 walks down the stack while a3 walks up the
      // FixedArray.
      Label loop, done_loop;
      __ Dlsa(a1, a3, a0, kPointerSizeLog2);
      __ bind(&loop);
      __ Branch(&done_loop, eq, a1, Operand(a3));
      __ ld(at, MemOperand(a2, 0 * kPointerSize));
      __ sd(at, FieldMemOperand(a3, 0 * kPointerSize));
      __ Dsubu(a2, a2, Operand(1 * kPointerSize));
      __ Daddu(a3, a3, Operand(1 * kPointerSize));
      __ Branch(&loop);
      __ bind(&done_loop);
    }

    // Setup the rest parameter array in a3.
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, at);
    __ sd(at, FieldMemOperand(a3, JSArray::kMapOffset));
    __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
    __ sd(at, FieldMemOperand(a3, JSArray::kPropertiesOffset));
    __ sd(v0, FieldMemOperand(a3, JSArray::kElementsOffset));
    __ sd(a4, FieldMemOperand(a3, JSArray::kLengthOffset));
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a3);  // In delay slot

    // Fall back to %AllocateInNewSpace (if not too big).
    Label too_big_for_new_space;
    __ bind(&allocate);
    __ Branch(&too_big_for_new_space, gt, a5,
              Operand(kMaxRegularHeapObjectSize));
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      // The runtime expects Smi arguments; a0 is untagged across the call.
      __ SmiTag(a0);
      __ SmiTag(a5);
      __ Push(a0, a2, a5);
      __ CallRuntime(Runtime::kAllocateInNewSpace);
      __ Pop(a0, a2);
      __ SmiUntag(a0);
    }
    __ jmp(&done_allocate);

    // Fall back to %NewStrictArguments.
    __ bind(&too_big_for_new_space);
    __ Push(a1);
    __ TailCallRuntime(Runtime::kNewStrictArguments);
  }
}
// MIPS64 variant of the sloppy arguments object builder: allocates the
// JSSloppyArgumentsObject, an optional parameter map aliasing mapped
// parameters into the context, and the FixedArray backing store as one
// chunk; falls back to %NewSloppyArguments on allocation failure.
void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a1 : function
  //  -- cp : context
  //  -- fp : frame pointer
  //  -- ra : return address
  // -----------------------------------
  __ AssertFunction(a1);

  // Make t0 point to the JavaScript frame.
  __ mov(t0, fp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ ld(t0, MemOperand(t0, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ ld(a3, MemOperand(t0, StandardFrameConstants::kFunctionOffset));
    __ Branch(&ok, eq, a1, Operand(a3));
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
  __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2,
        FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  // a2 is untagged at this point; tag it after computing a3.
  __ Lsa(a3, t0, a2, kPointerSizeLog2);
  __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
  __ SmiTag(a2);

  // a1 : function
  // a2 : number of parameters (tagged)
  // a3 : parameters pointer
  // t0 : Javascript frame pointer
  // Registers used over whole function:
  //  a5 : arguments count (tagged)
  //  a6 : mapped parameter count (tagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ ld(a4, MemOperand(t0, StandardFrameConstants::kCallerFPOffset));
  __ ld(a0, MemOperand(a4, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Branch(&adaptor_frame, eq, a0,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // No adaptor, parameter count = argument count.
  __ mov(a5, a2);
  __ Branch(USE_DELAY_SLOT, &try_allocate);
  __ mov(a6, a2);  // In delay slot.

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ ld(a5, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiScale(t2, a5, kPointerSizeLog2);
  __ Daddu(a4, a4, Operand(t2));
  __ Daddu(a3, a4, Operand(StandardFrameConstants::kCallerSPOffset));

  // a5 = argument count (tagged)
  // a6 = parameter count (tagged)
  // Compute the mapped parameter count = min(a6, a5) in a6.
  __ mov(a6, a2);
  __ Branch(&try_allocate, le, a6, Operand(a5));
  __ mov(a6, a5);

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  // If there are no mapped parameters, we do not need the parameter_map.
  Label param_map_size;
  DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
  __ Branch(USE_DELAY_SLOT, &param_map_size, eq, a6, Operand(zero_reg));
  __ mov(t1, zero_reg);  // In delay slot: param map size = 0 when a6 == 0.
  __ SmiScale(t1, a6, kPointerSizeLog2);
  __ daddiu(t1, t1, kParameterMapHeaderSize);
  __ bind(&param_map_size);

  // 2. Backing store.
  __ SmiScale(t2, a5, kPointerSizeLog2);
  __ Daddu(t1, t1, Operand(t2));
  __ Daddu(t1, t1, Operand(FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ Daddu(t1, t1, Operand(JSSloppyArgumentsObject::kSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(t1, v0, t1, a4, &runtime, NO_ALLOCATION_FLAGS);

  // v0 = address of new object(s) (tagged)
  // a2 = argument count (smi-tagged)
  // Get the arguments boilerplate from the current native context into a4.
  // The aliased map is only needed when there are mapped parameters.
  const int kNormalOffset =
      Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
  const int kAliasedOffset =
      Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);

  __ ld(a4, NativeContextMemOperand());
  Label skip2_ne, skip2_eq;
  __ Branch(&skip2_ne, ne, a6, Operand(zero_reg));
  __ ld(a4, MemOperand(a4, kNormalOffset));
  __ bind(&skip2_ne);

  __ Branch(&skip2_eq, eq, a6, Operand(zero_reg));
  __ ld(a4, MemOperand(a4, kAliasedOffset));
  __ bind(&skip2_eq);

  // v0 = address of new object (tagged)
  // a2 = argument count (smi-tagged)
  // a4 = address of arguments map (tagged)
  // a6 = mapped parameter count (tagged)
  __ sd(a4, FieldMemOperand(v0, JSObject::kMapOffset));
  __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex);
  __ sd(t1, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sd(t1, FieldMemOperand(v0, JSObject::kElementsOffset));

  // Set up the callee in-object property.
  __ AssertNotSmi(a1);
  __ sd(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset));

  // Use the length (smi tagged) and set that as an in-object property too.
  __ AssertSmi(a5);
  __ sd(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, a4 will point there, otherwise
  // it will point to the backing store.
  __ Daddu(a4, v0, Operand(JSSloppyArgumentsObject::kSize));
  __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));

  // v0 = address of new object (tagged)
  // a2 = argument count (tagged)
  // a4 = address of parameter map or backing store (tagged)
  // a6 = mapped parameter count (tagged)
  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  Label skip3;
  __ Branch(&skip3, ne, a6, Operand(Smi::kZero));
  // Move backing store address to a1, because it is
  // expected there when filling in the unmapped arguments.
  __ mov(a1, a4);
  __ bind(&skip3);

  __ Branch(&skip_parameter_map, eq, a6, Operand(Smi::kZero));

  __ LoadRoot(a5, Heap::kSloppyArgumentsElementsMapRootIndex);
  __ sd(a5, FieldMemOperand(a4, FixedArray::kMapOffset));
  // Parameter map length = mapped count + 2 (context and backing store).
  __ Daddu(a5, a6, Operand(Smi::FromInt(2)));
  __ sd(a5, FieldMemOperand(a4, FixedArray::kLengthOffset));
  __ sd(cp, FieldMemOperand(a4, FixedArray::kHeaderSize + 0 * kPointerSize));
  __ SmiScale(t2, a6, kPointerSizeLog2);
  __ Daddu(a5, a4, Operand(t2));
  __ Daddu(a5, a5, Operand(kParameterMapHeaderSize));
  __ sd(a5, FieldMemOperand(a4, FixedArray::kHeaderSize + 1 * kPointerSize));

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;
  __ mov(a5, a6);
  __ Daddu(t1, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
  __ Dsubu(t1, t1, Operand(a6));
  __ LoadRoot(a7, Heap::kTheHoleValueRootIndex);
  __ SmiScale(t2, a5, kPointerSizeLog2);
  __ Daddu(a1, a4, Operand(t2));
  __ Daddu(a1, a1, Operand(kParameterMapHeaderSize));

  // a1 = address of backing store (tagged)
  // a4 = address of parameter map (tagged)
  // a0 = temporary scratch (a.o., for address calculation)
  // t1 = loop variable (tagged)
  // a7 = the hole value
  __ jmp(&parameters_test);

  __ bind(&parameters_loop);
  __ Dsubu(a5, a5, Operand(Smi::FromInt(1)));
  __ SmiScale(a0, a5, kPointerSizeLog2);
  __ Daddu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
  // Store the context index into the parameter map slot...
  __ Daddu(t2, a4, a0);
  __ sd(t1, MemOperand(t2));
  // ...and a hole into the corresponding backing store slot.
  __ Dsubu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
  __ Daddu(t2, a1, a0);
  __ sd(a7, MemOperand(t2));
  __ Daddu(t1, t1, Operand(Smi::FromInt(1)));
  __ bind(&parameters_test);
  __ Branch(&parameters_loop, ne, a5, Operand(Smi::kZero));

  // Restore a5 = argument count (tagged).
  __ ld(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));

  __ bind(&skip_parameter_map);
  // v0 = address of new object (tagged)
  // a1 = address of backing store (tagged)
  // a5 = argument count (tagged)
  // a6 = mapped parameter count (tagged)
  // t1 = scratch
  // Copy arguments header and remaining slots (if there are any).
  __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
  __ sd(t1, FieldMemOperand(a1, FixedArray::kMapOffset));
  __ sd(a5, FieldMemOperand(a1, FixedArray::kLengthOffset));

  // Copy the remaining (unmapped) arguments from the stack into the
  // backing store, starting at index a6 (the mapped parameter count).
  Label arguments_loop, arguments_test;
  __ SmiScale(t2, a6, kPointerSizeLog2);
  __ Dsubu(a3, a3, Operand(t2));
  __ jmp(&arguments_test);

  __ bind(&arguments_loop);
  __ Dsubu(a3, a3, Operand(kPointerSize));
  __ ld(a4, MemOperand(a3, 0));
  __ SmiScale(t2, a6, kPointerSizeLog2);
  __ Daddu(t1, a1, Operand(t2));
  __ sd(a4, FieldMemOperand(t1, FixedArray::kHeaderSize));
  __ Daddu(a6, a6, Operand(Smi::FromInt(1)));

  __ bind(&arguments_test);
  __ Branch(&arguments_loop, lt, a6, Operand(a5));

  // Return.
  __ Ret();

  // Do the runtime call to allocate the arguments object.
  // a5 = argument count (tagged)
  __ bind(&runtime);
  __ Push(a1, a3, a5);
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
// Fast path for allocating a strict-mode (unmapped) arguments object for the
// current JavaScript frame. Falls back to the runtime when the allocation
// does not fit in new space. Result is returned in v0.
void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a1 : function
  //  -- cp : context
  //  -- fp : frame pointer
  //  -- ra : return address
  // -----------------------------------
  __ AssertFunction(a1);

  // Make a2 point to the JavaScript frame.
  __ mov(a2, fp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ ld(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset));
    __ Branch(&ok, eq, a1, Operand(a3));
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ ld(a3, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
  __ ld(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Branch(&arguments_adaptor, eq, a0,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  {
    // No adaptor frame: use the function's formal parameter count as the
    // argument count and address the arguments in the function's own frame.
    __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a0,
          FieldMemOperand(a4, SharedFunctionInfo::kFormalParameterCountOffset));
    __ Dlsa(a2, a2, a0, kPointerSizeLog2);
    __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ Branch(&arguments_done);
  __ bind(&arguments_adaptor);
  {
    // Adaptor frame: the actual argument count and the arguments themselves
    // live in the adaptor frame below.
    __ SmiLoadUntag(
        a0, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ Dlsa(a2, a3, a0, kPointerSizeLog2);
    __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ bind(&arguments_done);

  // ----------- S t a t e -------------
  //  -- cp : context
  //  -- a0 : number of arguments
  //  -- a1 : function
  //  -- a2 : pointer to first argument
  //  -- ra : return address
  // -----------------------------------

  // Allocate space for the strict arguments object plus the backing store,
  // in one contiguous chunk.
  Label allocate, done_allocate;
  __ li(a5, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
  __ Dlsa(a5, a5, a0, kPointerSizeLog2);
  __ Allocate(a5, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Compute arguments.length in a4.
  __ SmiTag(a4, a0);

  // Setup the elements array in v0.
  __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
  __ sd(t1, FieldMemOperand(v0, FixedArray::kMapOffset));
  __ sd(a4, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ Daddu(a3, v0, Operand(FixedArray::kHeaderSize));
  {
    // Copy the arguments: walk the stack source downwards (a2) while the
    // FixedArray destination (a3) moves upwards, until a3 reaches a1.
    Label loop, done_loop;
    __ Dlsa(a1, a3, a0, kPointerSizeLog2);
    __ bind(&loop);
    __ Branch(&done_loop, eq, a1, Operand(a3));
    __ ld(at, MemOperand(a2, 0 * kPointerSize));
    __ sd(at, FieldMemOperand(a3, 0 * kPointerSize));
    __ Dsubu(a2, a2, Operand(1 * kPointerSize));
    __ Daddu(a3, a3, Operand(1 * kPointerSize));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Setup the strict arguments object in a3 (placed directly after the
  // elements array in the allocated chunk).
  __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, at);
  __ sd(at, FieldMemOperand(a3, JSStrictArgumentsObject::kMapOffset));
  __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
  __ sd(at, FieldMemOperand(a3, JSStrictArgumentsObject::kPropertiesOffset));
  __ sd(v0, FieldMemOperand(a3, JSStrictArgumentsObject::kElementsOffset));
  __ sd(a4, FieldMemOperand(a3, JSStrictArgumentsObject::kLengthOffset));
  STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a3);  // In delay slot

  // Fall back to %AllocateInNewSpace (if not too big).
  Label too_big_for_new_space;
  __ bind(&allocate);
  __ Branch(&too_big_for_new_space, gt, a5, Operand(kMaxRegularHeapObjectSize));
  {
    // Preserve the argument count (a0) and the pointer to the first argument
    // (a2) across the runtime call; a5 carries the allocation size.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ SmiTag(a0);
    __ SmiTag(a5);
    __ Push(a0, a2, a5);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ Pop(a0, a2);
    __ SmiUntag(a0);
  }
  __ jmp(&done_allocate);

  // Fall back to %NewStrictArguments.
  __ bind(&too_big_for_new_space);
  __ Push(a1);
  __ TailCallRuntime(Runtime::kNewStrictArguments);
}
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
int64_t offset = (ref0.address() - ref1.address());
DCHECK(static_cast<int>(offset) == offset);

View File

@ -68,27 +68,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers, NULL);
}
// Registers the platform-specific argument register for the
// FastNewRestParameter builtin: the target function is passed in a1.
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register registers[] = {a1};
  // nullptr: no platform-specific interface descriptor.
  data->InitializePlatformSpecific(arraysize(registers), registers, nullptr);
}
// Registers the platform-specific argument register for the
// FastNewSloppyArguments builtin: the target function is passed in a1.
void FastNewSloppyArgumentsDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register registers[] = {a1};
  // nullptr: no platform-specific interface descriptor.
  data->InitializePlatformSpecific(arraysize(registers), registers, nullptr);
}
// Registers the platform-specific argument register for the
// FastNewStrictArguments builtin: the target function is passed in a1.
void FastNewStrictArgumentsDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register registers[] = {a1};
  // nullptr: no platform-specific interface descriptor.
  data->InitializePlatformSpecific(arraysize(registers), registers, nullptr);
}
// static
// Register in which type-conversion builtins receive their argument (a0).
const Register TypeConversionDescriptor::ArgumentRegister() { return a0; }

View File

@ -589,12 +589,29 @@ RUNTIME_FUNCTION(Runtime_NewRestParameter) {
// Slow path for allocating a sloppy-mode arguments object. Takes only the
// callee function; the actual arguments are recovered by walking the stack
// from the calling stub frame, so no parameter pointer or count is passed in.
//
// NOTE(review): the flattened diff contained both the old 3-argument version
// and the new 1-argument version interleaved (two DCHECKs on args.length(),
// two parameter computations, two returns); this is the coherent
// stack-walking version indicated by the added lines.
RUNTIME_FUNCTION(Runtime_NewSloppyArguments) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, callee, 0);
  StackFrameIterator iterator(isolate);

  // Stub/interpreter handler frame
  iterator.Advance();
  DCHECK(iterator.frame()->type() == StackFrame::STUB);

  // Function frame
  iterator.Advance();
  JavaScriptFrame* function_frame = JavaScriptFrame::cast(iterator.frame());
  DCHECK(function_frame->is_java_script());
  int argc = function_frame->GetArgumentsLength();
  Address fp = function_frame->fp();
  if (function_frame->has_adapted_arguments()) {
    // The actual arguments live in the arguments adaptor frame below the
    // function frame; read them from there instead.
    iterator.Advance();
    fp = iterator.frame()->fp();
  }
  // Arguments are pushed below the caller SP, highest-indexed first.
  Object** parameters = reinterpret_cast<Object**>(
      fp + argc * kPointerSize + StandardFrameConstants::kCallerSPOffset);
  ParameterArguments argument_getter(parameters);
  return *NewSloppyArguments(isolate, callee, argument_getter, argc);
}
RUNTIME_FUNCTION(Runtime_NewArgumentsElements) {

View File

@ -476,6 +476,8 @@
'bootstrapper.cc',
'bootstrapper.h',
'builtins/builtins-api.cc',
'builtins/builtins-arguments.cc',
'builtins/builtins-arguments.h',
'builtins/builtins-arraybuffer.cc',
'builtins/builtins-array.cc',
'builtins/builtins-async-function.cc',

View File

@ -3021,531 +3021,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
// Fast path for allocating the rest parameter array of the current
// JavaScript frame. Rest parameters exist only when an arguments adaptor
// frame supplied more actual arguments than the formal parameter count;
// otherwise an empty JSArray is returned. Result is returned in rax.
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // Make rdx point to the JavaScript frame.
  __ movp(rdx, rbp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have rest parameters (only possible if we have an
  // arguments adaptor frame below the function frame).
  Label no_rest_parameters;
  __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &no_rest_parameters, Label::kNear);

  // Check if the arguments adaptor frame contains more arguments than
  // specified by the function's internal formal parameter count.
  Label rest_parameters;
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  __ SmiToInteger32(
      rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  // rax = actual argument count - formal parameter count (= rest count).
  __ subl(rax, rcx);
  __ j(greater, &rest_parameters);

  // Return an empty rest parameter array.
  __ bind(&no_rest_parameters);
  {
    // ----------- S t a t e -------------
    //  -- rsi    : context
    //  -- rsp[0] : return address
    // -----------------------------------

    // Allocate an empty rest parameter array.
    Label allocate, done_allocate;
    __ Allocate(JSArray::kSize, rax, rdx, rcx, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Setup the rest parameter array in rax.
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
    __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kElementsOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kLengthOffset), Immediate(0));
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(Smi::FromInt(JSArray::kSize));
      __ CallRuntime(Runtime::kAllocateInNewSpace);
    }
    __ jmp(&done_allocate);
  }

  __ bind(&rest_parameters);
  {
    // Compute the pointer to the first rest parameter (skippping the receiver).
    __ leap(rbx, Operand(rbx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));

    // ----------- S t a t e -------------
    //  -- rdi    : function
    //  -- rsi    : context
    //  -- rax    : number of rest parameters
    //  -- rbx    : pointer to first rest parameters
    //  -- rsp[0] : return address
    // -----------------------------------

    // Allocate space for the rest parameter array plus the backing store,
    // in one contiguous chunk.
    Label allocate, done_allocate;
    __ leal(rcx, Operand(rax, times_pointer_size,
                         JSArray::kSize + FixedArray::kHeaderSize));
    __ Allocate(rcx, rdx, r8, no_reg, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Compute the arguments.length in rdi.
    __ Integer32ToSmi(rdi, rax);

    // Setup the elements array in rdx.
    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
    __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
    __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
    {
      // Copy the rest parameters: the stack source (rbx) moves downwards
      // while the FixedArray index (rcx) counts upwards.
      Label loop, done_loop;
      __ Set(rcx, 0);
      __ bind(&loop);
      __ cmpl(rcx, rax);
      __ j(equal, &done_loop, Label::kNear);
      __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
      __ movp(
          FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
          kScratchRegister);
      __ subp(rbx, Immediate(1 * kPointerSize));
      __ addl(rcx, Immediate(1));
      __ jmp(&loop);
      __ bind(&done_loop);
    }

    // Setup the rest parameter array in rax (placed directly after the
    // elements array in the allocated chunk).
    __ leap(rax,
            Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
    __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kElementsOffset), rdx);
    __ movp(FieldOperand(rax, JSArray::kLengthOffset), rdi);
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace (if not too big).
    Label too_big_for_new_space;
    __ bind(&allocate);
    __ cmpl(rcx, Immediate(kMaxRegularHeapObjectSize));
    __ j(greater, &too_big_for_new_space);
    {
      // Preserve the rest parameter count (rax) and the pointer to the first
      // rest parameter (rbx) across the runtime call.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ Integer32ToSmi(rcx, rcx);
      __ Push(rax);
      __ Push(rbx);
      __ Push(rcx);
      __ CallRuntime(Runtime::kAllocateInNewSpace);
      __ movp(rdx, rax);
      __ Pop(rbx);
      __ Pop(rax);
      __ SmiToInteger32(rax, rax);
    }
    __ jmp(&done_allocate);

    // Fall back to %NewRestParameter.
    __ bind(&too_big_for_new_space);
    __ PopReturnAddressTo(kScratchRegister);
    __ Push(rdi);
    __ PushReturnAddressFrom(kScratchRegister);
    __ TailCallRuntime(Runtime::kNewRestParameter);
  }
}
// Fast path for allocating a sloppy-mode arguments object for the current
// JavaScript frame, including the parameter map that aliases the first
// min(formal, actual) parameters into the context. Falls back to
// %NewSloppyArguments when allocation fails. Result is returned in rax.
void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // Make r9 point to the JavaScript frame.
  __ movp(r9, rbp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ movp(r9, Operand(r9, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ cmpp(rdi, Operand(r9, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  __ leap(rdx, Operand(r9, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ Integer32ToSmi(rcx, rcx);

  // rcx : number of parameters (tagged)
  // rdx : parameters pointer
  // rdi : function
  // rsp[0] : return address
  // r9  : JavaScript frame pointer.
  // Registers used over the whole function:
  //  rbx: the mapped parameter count (untagged)
  //  rax: the allocated object (tagged).
  Factory* factory = isolate()->factory();

  __ SmiToInteger64(rbx, rcx);
  // rbx = parameter count (untagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movp(rax, Operand(r9, StandardFrameConstants::kCallerFPOffset));
  __ movp(r8, Operand(rax, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Cmp(r8, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // No adaptor, parameter count = argument count.
  __ movp(r11, rbx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger64(
      r11, Operand(rax, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ leap(rdx, Operand(rax, r11, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));

  // rbx = parameter count (untagged)
  // r11 = argument count (untagged)
  // Compute the mapped parameter count = min(rbx, r11) in rbx.
  __ cmpp(rbx, r11);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ movp(rbx, r11);

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ xorp(r8, r8);
  __ testp(rbx, rbx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ leap(r8, Operand(r8, r11, times_pointer_size, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ addp(r8, Immediate(JSSloppyArgumentsObject::kSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(r8, rax, r9, no_reg, &runtime, NO_ALLOCATION_FLAGS);

  // rax = address of new object(s) (tagged)
  // r11 = argument count (untagged)
  // Get the arguments map from the current native context into r9: the
  // fast-aliased map is only needed when there are mapped parameters.
  Label has_mapped_parameters, instantiate;
  __ movp(r9, NativeContextOperand());
  __ testp(rbx, rbx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);

  const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX;
  __ movp(r9, Operand(r9, Context::SlotOffset(kIndex)));
  __ jmp(&instantiate, Label::kNear);

  const int kAliasedIndex = Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX;
  __ bind(&has_mapped_parameters);
  __ movp(r9, Operand(r9, Context::SlotOffset(kAliasedIndex)));
  __ bind(&instantiate);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // r11 = argument count (untagged)
  // r9 = address of arguments map (tagged)
  __ movp(FieldOperand(rax, JSObject::kMapOffset), r9);
  __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister);

  // Set up the callee in-object property.
  __ AssertNotSmi(rdi);
  __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kCalleeOffset), rdi);

  // Use the length (smi tagged) and set that as an in-object property too.
  // Note: r11 is tagged from here on.
  __ Integer32ToSmi(r11, r11);
  __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kLengthOffset), r11);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, rdi will point there, otherwise to the
  // backing store.
  __ leap(rdi, Operand(rax, JSSloppyArgumentsObject::kSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // r11 = argument count (tagged)
  // rdi = address of parameter map or backing store (tagged)

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ testp(rbx, rbx);
  __ j(zero, &skip_parameter_map);

  __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex);
  // rbx contains the untagged argument count. Add 2 and tag to write.
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ Integer64PlusConstantToSmi(r9, rbx, 2);
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
  // First two parameter-map slots hold the context and the backing store.
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
  __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;

  // Load tagged parameter count into r9.
  __ Integer32ToSmi(r9, rbx);
  __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
  __ addp(r8, rcx);
  __ subp(r8, r9);
  __ movp(rcx, rdi);
  __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ SmiToInteger64(r9, r9);
  // r9 = loop variable (untagged)
  // r8 = mapping index (tagged)
  // rcx = address of parameter map (tagged)
  // rdi = address of backing store (tagged)
  __ jmp(&parameters_test, Label::kNear);

  // Each mapped parameter gets a context index in the parameter map and a
  // hole in the backing store (the hole marks it as aliased).
  __ bind(&parameters_loop);
  __ subp(r9, Immediate(1));
  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
  __ movp(FieldOperand(rcx, r9, times_pointer_size, kParameterMapHeaderSize),
          r8);
  __ movp(FieldOperand(rdi, r9, times_pointer_size, FixedArray::kHeaderSize),
          kScratchRegister);
  __ SmiAddConstant(r8, r8, Smi::FromInt(1));
  __ bind(&parameters_test);
  __ testp(r9, r9);
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);

  // r11 = argument count (tagged)
  // rdi = address of backing store (tagged)
  // Copy arguments header and remaining slots (if there are any).
  __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
          factory->fixed_array_map());
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r11);

  Label arguments_loop, arguments_test;
  __ movp(r8, rbx);
  // Untag r11 for the loop below.
  __ SmiToInteger64(r11, r11);
  __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0));
  __ subp(rdx, kScratchRegister);
  __ jmp(&arguments_test, Label::kNear);

  // Copy the unmapped arguments (indices rbx..r11-1) from the stack into
  // the backing store, walking the parameters pointer downwards.
  __ bind(&arguments_loop);
  __ subp(rdx, Immediate(kPointerSize));
  __ movp(r9, Operand(rdx, 0));
  __ movp(FieldOperand(rdi, r8,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          r9);
  __ addp(r8, Immediate(1));

  __ bind(&arguments_test);
  __ cmpp(r8, r11);
  __ j(less, &arguments_loop, Label::kNear);

  // Return.
  __ ret(0);

  // Do the runtime call to allocate the arguments object.
  // r11 = argument count (untagged)
  __ bind(&runtime);
  __ Integer32ToSmi(r11, r11);
  __ PopReturnAddressTo(rax);
  __ Push(rdi);  // Push function.
  __ Push(rdx);  // Push parameters pointer.
  __ Push(r11);  // Push parameter count.
  __ PushReturnAddressFrom(rax);
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
// Fast path for allocating a strict-mode (unmapped) arguments object for the
// current JavaScript frame. Falls back to %NewStrictArguments when the
// allocation does not fit in new space. Result is returned in rax.
void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // Make rdx point to the JavaScript frame.
  __ movp(rdx, rbp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    // No adaptor frame: use the function's formal parameter count as the
    // argument count and address the arguments in the function's own frame.
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ LoadSharedFunctionInfoSpecialField(
        rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
    __ leap(rbx, Operand(rdx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    // Adaptor frame: the actual argument count and the arguments themselves
    // live in the adaptor frame below.
    __ SmiToInteger32(
        rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ leap(rbx, Operand(rbx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ bind(&arguments_done);

  // ----------- S t a t e -------------
  //  -- rax    : number of arguments
  //  -- rbx    : pointer to the first argument
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rsp[0] : return address
  // -----------------------------------

  // Allocate space for the strict arguments object plus the backing store,
  // in one contiguous chunk.
  Label allocate, done_allocate;
  __ leal(rcx, Operand(rax, times_pointer_size, JSStrictArgumentsObject::kSize +
                                                    FixedArray::kHeaderSize));
  __ Allocate(rcx, rdx, r8, no_reg, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Compute the arguments.length in rdi.
  __ Integer32ToSmi(rdi, rax);

  // Setup the elements array in rdx.
  __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
  __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
  __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
  {
    // Copy the arguments: the stack source (rbx) moves downwards while the
    // FixedArray index (rcx) counts upwards.
    Label loop, done_loop;
    __ Set(rcx, 0);
    __ bind(&loop);
    __ cmpl(rcx, rax);
    __ j(equal, &done_loop, Label::kNear);
    __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
    __ movp(
        FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
        kScratchRegister);
    __ subp(rbx, Immediate(1 * kPointerSize));
    __ addl(rcx, Immediate(1));
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Setup the strict arguments object in rax (placed directly after the
  // elements array in the allocated chunk).
  __ leap(rax,
          Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
  __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, rcx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kMapOffset), rcx);
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kPropertiesOffset), rcx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kElementsOffset), rdx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kLengthOffset), rdi);
  STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
  __ Ret();

  // Fall back to %AllocateInNewSpace (if not too big).
  Label too_big_for_new_space;
  __ bind(&allocate);
  __ cmpl(rcx, Immediate(kMaxRegularHeapObjectSize));
  __ j(greater, &too_big_for_new_space);
  {
    // Preserve the argument count (rax) and the pointer to the first
    // argument (rbx) across the runtime call.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Integer32ToSmi(rax, rax);
    __ Integer32ToSmi(rcx, rcx);
    __ Push(rax);
    __ Push(rbx);
    __ Push(rcx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ movp(rdx, rax);
    __ Pop(rbx);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  __ jmp(&done_allocate);

  // Fall back to %NewStrictArguments.
  __ bind(&too_big_for_new_space);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(rdi);
  __ PushReturnAddressFrom(kScratchRegister);
  __ TailCallRuntime(Runtime::kNewStrictArguments);
}
// Returns the distance between two external references as an int, asserting
// that the 64-bit difference fits.
//
// NOTE(review): a diff hunk header was embedded mid-function in the flattened
// source; removed here. The DCHECK mirrors the parallel mips64 AddressOffset
// helper visible earlier in this change.
static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that fits into int.
  DCHECK(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}
// Prepares stack to put arguments (aligns and so on). WIN64 calling
// convention requires to put the pointer to the return value slot into
// rcx (rcx must be preserverd until CallApiFunctionAndReturn). Saves

View File

@ -69,25 +69,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
// Declares the argument register for the FastNewRestParameter builtin:
// the target function arrives in rdi.
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register param_registers[] = {rdi};
  data->InitializePlatformSpecific(arraysize(param_registers),
                                   param_registers);
}
// Declares the argument register for the FastNewSloppyArguments builtin:
// the target function arrives in rdi.
void FastNewSloppyArgumentsDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register param_registers[] = {rdi};
  data->InitializePlatformSpecific(arraysize(param_registers),
                                   param_registers);
}
// Declares the argument register for the FastNewStrictArguments builtin:
// the target function arrives in rdi.
void FastNewStrictArgumentsDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register param_registers[] = {rdi};
  data->InitializePlatformSpecific(arraysize(param_registers),
                                   param_registers);
}
void TypeofDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {rbx};