[stubs] Port FastNewObjectStub to TF

In the process, convert from a code stub into a builtin.

Review-Url: https://codereview.chromium.org/2606733002
Cr-Commit-Position: refs/heads/master@{#41986}
This commit is contained in:
danno 2016-12-29 03:11:14 -08:00 committed by Commit bot
parent 21ebbd3ec2
commit 5b02a98bfa
26 changed files with 216 additions and 821 deletions

View File

@ -3365,123 +3365,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
// ARM fast path for object allocation with `new`: allocates a JSObject from
// the new target's initial map, runs in-object slack tracking while the map's
// construction counter is active, and falls back to the %NewObject runtime
// function whenever a fast-path precondition fails.
void FastNewObjectStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r1 : target
// -- r3 : new target
// -- cp : context
// -- lr : return address
// -----------------------------------
__ AssertFunction(r1);
__ AssertReceiver(r3);
// Verify that the new target is a JSFunction.
Label new_object;
__ CompareObjectType(r3, r2, r2, JS_FUNCTION_TYPE);
__ b(ne, &new_object);
// Load the initial map and verify that it's in fact a map.
__ ldr(r2, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
__ JumpIfSmi(r2, &new_object);
__ CompareObjectType(r2, r0, r0, MAP_TYPE);
__ b(ne, &new_object);
// Fall back to runtime if the target differs from the new target's
// initial map constructor.
__ ldr(r0, FieldMemOperand(r2, Map::kConstructorOrBackPointerOffset));
__ cmp(r0, r1);
__ b(ne, &new_object);
// Allocate the JSObject on the heap.
// r4 holds the instance size in words (SIZE_IN_WORDS below).
Label allocate, done_allocate;
__ ldrb(r4, FieldMemOperand(r2, Map::kInstanceSizeOffset));
__ Allocate(r4, r0, r5, r6, &allocate, SIZE_IN_WORDS);
__ bind(&done_allocate);
// Initialize the JSObject fields.
__ str(r2, FieldMemOperand(r0, JSObject::kMapOffset));
__ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
__ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
__ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
__ add(r1, r0, Operand(JSObject::kHeaderSize - kHeapObjectTag));
// ----------- S t a t e -------------
// -- r0 : result (tagged)
// -- r1 : result fields (untagged)
// -- r5 : result end (untagged)
// -- r2 : initial map
// -- cp : context
// -- lr : return address
// -----------------------------------
// Perform in-object slack tracking if requested.
Label slack_tracking;
STATIC_ASSERT(Map::kNoSlackTracking == 0);
__ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
__ ldr(r3, FieldMemOperand(r2, Map::kBitField3Offset));
__ tst(r3, Operand(Map::ConstructionCounter::kMask));
__ b(ne, &slack_tracking);
{
// Initialize all in-object fields with undefined.
__ InitializeFieldsWithFiller(r1, r5, r6);
__ Ret();
}
__ bind(&slack_tracking);
{
// Decrease generous allocation count.
STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
__ sub(r3, r3, Operand(1 << Map::ConstructionCounter::kShift));
__ str(r3, FieldMemOperand(r2, Map::kBitField3Offset));
// Initialize the in-object fields with undefined.
// r4 = result end minus the unused (slack) fields, i.e. the end of the
// fields that are actually used so far.
__ ldrb(r4, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
__ sub(r4, r5, Operand(r4, LSL, kPointerSizeLog2));
__ InitializeFieldsWithFiller(r1, r4, r6);
// Initialize the remaining (reserved) fields with one pointer filler map.
__ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
__ InitializeFieldsWithFiller(r1, r5, r6);
// Check if we can finalize the instance size.
STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
__ tst(r3, Operand(Map::ConstructionCounter::kMask));
__ Ret(ne);
// Finalize the instance size.
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(r0, r2);
__ CallRuntime(Runtime::kFinalizeInstanceSize);
__ Pop(r0);
}
__ Ret();
}
// Fall back to %AllocateInNewSpace.
__ bind(&allocate);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize == 1);
// Convert the word count in r4 to a smi-tagged byte count:
// kPointerSizeLog2 turns words into bytes, the extra +1 is the smi tag
// shift (valid because kSmiTagSize == 1, asserted above).
__ mov(r4, Operand(r4, LSL, kPointerSizeLog2 + 1));
__ Push(r2, r4);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(r2);
}
// Recompute the untagged result end in r5, as expected at done_allocate.
__ ldrb(r5, FieldMemOperand(r2, Map::kInstanceSizeOffset));
__ add(r5, r0, Operand(r5, LSL, kPointerSizeLog2));
STATIC_ASSERT(kHeapObjectTag == 1);
__ sub(r5, r5, Operand(kHeapObjectTag));
__ b(&done_allocate);
// Fall back to %NewObject.
__ bind(&new_object);
__ Push(r1, r3);
__ TailCallRuntime(Runtime::kNewObject);
}
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r1 : function

View File

@ -70,12 +70,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewObjectDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // ARM register assignment: kTarget lives in r1, kNewTarget in r3.
  Register descriptor_registers[] = {r1, r3};
  data->InitializePlatformSpecific(arraysize(descriptor_registers),
                                   descriptor_registers);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r1};

View File

@ -3636,127 +3636,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
// ARM64 fast path for object allocation with `new`: allocates a JSObject from
// the new target's initial map, runs in-object slack tracking while the map's
// construction counter is active, and falls back to the %NewObject runtime
// function whenever a fast-path precondition fails.
void FastNewObjectStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x1 : target
// -- x3 : new target
// -- cp : context
// -- lr : return address
// -----------------------------------
__ AssertFunction(x1);
__ AssertReceiver(x3);
// Verify that the new target is a JSFunction.
Label new_object;
__ JumpIfNotObjectType(x3, x2, x2, JS_FUNCTION_TYPE, &new_object);
// Load the initial map and verify that it's in fact a map.
__ Ldr(x2, FieldMemOperand(x3, JSFunction::kPrototypeOrInitialMapOffset));
__ JumpIfSmi(x2, &new_object);
__ JumpIfNotObjectType(x2, x0, x0, MAP_TYPE, &new_object);
// Fall back to runtime if the target differs from the new target's
// initial map constructor.
__ Ldr(x0, FieldMemOperand(x2, Map::kConstructorOrBackPointerOffset));
__ CompareAndBranch(x0, x1, ne, &new_object);
// Allocate the JSObject on the heap.
// x4 holds the instance size in words (SIZE_IN_WORDS below).
Label allocate, done_allocate;
__ Ldrb(x4, FieldMemOperand(x2, Map::kInstanceSizeOffset));
__ Allocate(x4, x0, x5, x6, &allocate, SIZE_IN_WORDS);
__ Bind(&done_allocate);
// Initialize the JSObject fields.
STATIC_ASSERT(JSObject::kMapOffset == 0 * kPointerSize);
__ Str(x2, FieldMemOperand(x0, JSObject::kMapOffset));
__ LoadRoot(x3, Heap::kEmptyFixedArrayRootIndex);
STATIC_ASSERT(JSObject::kPropertiesOffset == 1 * kPointerSize);
STATIC_ASSERT(JSObject::kElementsOffset == 2 * kPointerSize);
__ Str(x3, FieldMemOperand(x0, JSObject::kPropertiesOffset));
__ Str(x3, FieldMemOperand(x0, JSObject::kElementsOffset));
STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
__ Add(x1, x0, Operand(JSObject::kHeaderSize - kHeapObjectTag));
// ----------- S t a t e -------------
// -- x0 : result (tagged)
// -- x1 : result fields (untagged)
// -- x5 : result end (untagged)
// -- x2 : initial map
// -- cp : context
// -- lr : return address
// -----------------------------------
// Perform in-object slack tracking if requested.
Label slack_tracking;
STATIC_ASSERT(Map::kNoSlackTracking == 0);
__ LoadRoot(x6, Heap::kUndefinedValueRootIndex);
__ Ldr(w3, FieldMemOperand(x2, Map::kBitField3Offset));
__ TestAndBranchIfAnySet(w3, Map::ConstructionCounter::kMask,
&slack_tracking);
{
// Initialize all in-object fields with undefined.
__ InitializeFieldsWithFiller(x1, x5, x6);
__ Ret();
}
__ Bind(&slack_tracking);
{
// Decrease generous allocation count.
STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
__ Sub(w3, w3, 1 << Map::ConstructionCounter::kShift);
__ Str(w3, FieldMemOperand(x2, Map::kBitField3Offset));
// Initialize the in-object fields with undefined.
// x4 = result end minus the unused (slack) fields, i.e. the end of the
// fields that are actually used so far.
__ Ldrb(x4, FieldMemOperand(x2, Map::kUnusedPropertyFieldsOffset));
__ Sub(x4, x5, Operand(x4, LSL, kPointerSizeLog2));
__ InitializeFieldsWithFiller(x1, x4, x6);
// Initialize the remaining (reserved) fields with one pointer filler map.
__ LoadRoot(x6, Heap::kOnePointerFillerMapRootIndex);
__ InitializeFieldsWithFiller(x1, x5, x6);
// Check if we can finalize the instance size.
Label finalize;
STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
__ TestAndBranchIfAllClear(w3, Map::ConstructionCounter::kMask, &finalize);
__ Ret();
// Finalize the instance size.
__ Bind(&finalize);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(x0, x2);
__ CallRuntime(Runtime::kFinalizeInstanceSize);
__ Pop(x0);
}
__ Ret();
}
// Fall back to %AllocateInNewSpace.
__ Bind(&allocate);
{
FrameScope scope(masm, StackFrame::INTERNAL);
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize == 1);
// Convert the word count in x4 to a smi-tagged byte count:
// kPointerSizeLog2 turns words into bytes, kSmiTagSize + kSmiShiftSize
// applies the (64-bit) smi encoding.
__ Mov(x4,
Operand(x4, LSL, kPointerSizeLog2 + kSmiTagSize + kSmiShiftSize));
__ Push(x2, x4);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(x2);
}
// Recompute the untagged result end in x5, as expected at done_allocate.
__ Ldrb(x5, FieldMemOperand(x2, Map::kInstanceSizeOffset));
__ Add(x5, x0, Operand(x5, LSL, kPointerSizeLog2));
STATIC_ASSERT(kHeapObjectTag == 1);
__ Sub(x5, x5, kHeapObjectTag); // Subtract the tag from end.
__ B(&done_allocate);
// Fall back to %NewObject.
__ Bind(&new_object);
__ Push(x1, x3);
__ TailCallRuntime(Runtime::kNewObject);
}
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x1 : function

View File

@ -69,12 +69,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewObjectDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // ARM64 register assignment: kTarget lives in x1, kNewTarget in x3.
  Register descriptor_registers[] = {x1, x3};
  data->InitializePlatformSpecific(arraysize(descriptor_registers),
                                   descriptor_registers);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// x1: function

View File

@ -326,11 +326,11 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ bind(&new_object);
{
FrameScope scope(masm, StackFrame::MANUAL);
FastNewObjectStub stub(masm->isolate());
__ SmiTag(r6);
__ EnterBuiltinFrame(cp, r1, r6);
__ Push(r2); // first argument
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ Pop(r2);
__ LeaveBuiltinFrame(cp, r1, r6);
__ SmiUntag(r6);
@ -474,11 +474,11 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
__ bind(&new_object);
{
FrameScope scope(masm, StackFrame::MANUAL);
FastNewObjectStub stub(masm->isolate());
__ SmiTag(r6);
__ EnterBuiltinFrame(cp, r1, r6);
__ Push(r2); // first argument
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ Pop(r2);
__ LeaveBuiltinFrame(cp, r1, r6);
__ SmiUntag(r6);
@ -574,8 +574,8 @@ void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
if (create_implicit_receiver) {
// Allocate the new receiver object.
__ Push(r1, r3);
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ mov(r4, r0);
__ Pop(r1, r3);

View File

@ -315,11 +315,11 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ bind(&new_object);
{
FrameScope scope(masm, StackFrame::MANUAL);
FastNewObjectStub stub(masm->isolate());
__ SmiTag(x6);
__ EnterBuiltinFrame(cp, x1, x6);
__ Push(x2); // first argument
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ Pop(x2);
__ LeaveBuiltinFrame(cp, x1, x6);
__ SmiUntag(x6);
@ -467,11 +467,11 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
__ bind(&new_object);
{
FrameScope scope(masm, StackFrame::MANUAL);
FastNewObjectStub stub(masm->isolate());
__ SmiTag(x6);
__ EnterBuiltinFrame(cp, x1, x6);
__ Push(x2); // first argument
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ Pop(x2);
__ LeaveBuiltinFrame(cp, x1, x6);
__ SmiUntag(x6);
@ -569,8 +569,8 @@ void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
if (create_implicit_receiver) {
// Allocate the new receiver object.
__ Push(constructor, new_target);
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ Mov(x4, x0);
__ Pop(new_target, constructor);

View File

@ -154,5 +154,119 @@ TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
Return(EmitFastNewClosure(shared, context));
}
// TurboFan builtin replacing the platform FastNewObjectStub: allocates a
// JSObject from the new target's initial map, handling in-object slack
// tracking, and tail-calls the %NewObject runtime function whenever a
// fast-path precondition fails.
//
// Fixes relative to the previous version: the instance size (in words and
// bytes) was reloaded and recomputed a second time right after the
// allocation even though the map cannot change in between, and an unused
// `done` label was declared — both removed.
TF_BUILTIN(FastNewObject, ConstructorBuiltinsAssembler) {
  typedef FastNewObjectDescriptor Descriptor;
  Node* context = Parameter(Descriptor::kContext);
  Node* target = Parameter(Descriptor::kTarget);
  Node* new_target = Parameter(Descriptor::kNewTarget);

  CSA_ASSERT(this, HasInstanceType(target, JS_FUNCTION_TYPE));
  CSA_ASSERT(this, IsJSReceiver(new_target));

  // Verify that the new target is a JSFunction.
  Label runtime(this), fast(this);
  GotoIf(HasInstanceType(new_target, JS_FUNCTION_TYPE), &fast);
  Goto(&runtime);

  // Slow path: let %NewObject handle everything the fast path rejects.
  // (Bound early; later bailouts branch back to it.)
  Bind(&runtime);
  TailCallRuntime(Runtime::kNewObject, context, target, new_target);

  Bind(&fast);
  // Load the initial map and verify that it's in fact a map.
  Node* initial_map =
      LoadObjectField(new_target, JSFunction::kPrototypeOrInitialMapOffset);
  GotoIf(TaggedIsSmi(initial_map), &runtime);
  GotoIf(DoesntHaveInstanceType(initial_map, MAP_TYPE), &runtime);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  Node* new_target_constructor =
      LoadObjectField(initial_map, Map::kConstructorOrBackPointerOffset);
  GotoIf(WordNotEqual(target, new_target_constructor), &runtime);

  // The map stores the instance size in words; convert to bytes once and
  // reuse the value for both the allocation and the field initialization.
  Node* instance_size_words = ChangeUint32ToWord(LoadObjectField(
      initial_map, Map::kInstanceSizeOffset, MachineType::Uint8()));
  Node* instance_size =
      WordShl(instance_size_words, IntPtrConstant(kPointerSizeLog2));

  // Allocate and initialize the header fields. The object is freshly
  // allocated, so no write barriers are needed.
  Node* object = Allocate(instance_size);
  StoreMapNoWriteBarrier(object, initial_map);
  Node* empty_array = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset,
                                 empty_array);
  StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset,
                                 empty_array);

  // Perform in-object slack tracking if requested.
  Node* bit_field3 = LoadMapBitField3(initial_map);
  Label slack_tracking(this), finalize(this, Label::kDeferred);
  GotoIf(IsSetWord32<Map::ConstructionCounter>(bit_field3), &slack_tracking);

  // Initialize remaining fields.
  {
    Comment("no slack tracking");
    InitializeFieldsWithRoot(object, IntPtrConstant(JSObject::kHeaderSize),
                             instance_size, Heap::kUndefinedValueRootIndex);
    Return(object);
  }

  {
    Bind(&slack_tracking);

    // Decrease generous allocation count.
    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
    Comment("update allocation count");
    Node* new_bit_field3 = Int32Sub(
        bit_field3, Int32Constant(1 << Map::ConstructionCounter::kShift));
    StoreObjectFieldNoWriteBarrier(initial_map, Map::kBitField3Offset,
                                   new_bit_field3,
                                   MachineRepresentation::kWord32);
    GotoIf(IsClearWord32<Map::ConstructionCounter>(new_bit_field3), &finalize);

    // used_size = instance size minus the unused (slack) in-object fields.
    Node* unused_fields = LoadObjectField(
        initial_map, Map::kUnusedPropertyFieldsOffset, MachineType::Uint8());
    Node* used_size =
        IntPtrSub(instance_size, WordShl(ChangeUint32ToWord(unused_fields),
                                         IntPtrConstant(kPointerSizeLog2)));

    Comment("initialize filler fields (no finalize)");
    InitializeFieldsWithRoot(object, used_size, instance_size,
                             Heap::kOnePointerFillerMapRootIndex);

    Comment("initialize undefined fields (no finalize)");
    InitializeFieldsWithRoot(object, IntPtrConstant(JSObject::kHeaderSize),
                             used_size, Heap::kUndefinedValueRootIndex);
    Return(object);
  }

  {
    // Finalize the instance size: the construction counter just reached
    // zero, so shrink the map to the observed used size via the runtime.
    Bind(&finalize);

    Node* unused_fields = LoadObjectField(
        initial_map, Map::kUnusedPropertyFieldsOffset, MachineType::Uint8());
    Node* used_size =
        IntPtrSub(instance_size, WordShl(ChangeUint32ToWord(unused_fields),
                                         IntPtrConstant(kPointerSizeLog2)));

    Comment("initialize filler fields (finalize)");
    InitializeFieldsWithRoot(object, used_size, instance_size,
                             Heap::kOnePointerFillerMapRootIndex);

    Comment("initialize undefined fields (finalize)");
    InitializeFieldsWithRoot(object, IntPtrConstant(JSObject::kHeaderSize),
                             used_size, Heap::kUndefinedValueRootIndex);

    CallRuntime(Runtime::kFinalizeInstanceSize, context, initial_map);
    Return(object);
  }
}
} // namespace internal
} // namespace v8

View File

@ -53,6 +53,7 @@ namespace internal {
/* Declared first for dependency reasons */ \
ASM(CompileLazy) \
TFS(ToObject, BUILTIN, kNoExtraICState, TypeConversion) \
TFS(FastNewObject, BUILTIN, kNoExtraICState, FastNewObject) \
\
/* Calls */ \
ASM(ArgumentsAdaptorTrampoline) \

View File

@ -135,8 +135,8 @@ void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
// Allocate the new receiver object.
__ Push(edi);
__ Push(edx);
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ mov(ebx, eax);
__ Pop(edx);
__ Pop(edi);
@ -1921,8 +1921,8 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterBuiltinFrame(esi, edi, ecx);
__ Push(ebx); // the first argument
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ Pop(FieldOperand(eax, JSValue::kValueOffset));
__ LeaveBuiltinFrame(esi, edi, ecx);
}
@ -2084,8 +2084,8 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
__ SmiTag(ebx);
__ EnterBuiltinFrame(esi, edi, ebx);
__ Push(eax); // the first argument
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ Pop(FieldOperand(eax, JSValue::kValueOffset));
__ LeaveBuiltinFrame(esi, edi, ebx);
__ SmiUntag(ebx);

View File

@ -341,11 +341,11 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ bind(&new_object);
{
FrameScope scope(masm, StackFrame::MANUAL);
FastNewObjectStub stub(masm->isolate());
__ SmiTag(t0);
__ EnterBuiltinFrame(cp, a1, t0);
__ Push(a0); // first argument
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ Pop(a0);
__ LeaveBuiltinFrame(cp, a1, t0);
__ SmiUntag(t0);
@ -492,11 +492,11 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
__ bind(&new_object);
{
FrameScope scope(masm, StackFrame::MANUAL);
FastNewObjectStub stub(masm->isolate());
__ SmiTag(t0);
__ EnterBuiltinFrame(cp, a1, t0);
__ Push(a0); // first argument
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ Pop(a0);
__ LeaveBuiltinFrame(cp, a1, t0);
__ SmiUntag(t0);
@ -585,8 +585,8 @@ void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
if (create_implicit_receiver) {
// Allocate the new receiver object.
__ Push(a1, a3);
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ mov(t4, v0);
__ Pop(a1, a3);

View File

@ -339,11 +339,11 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ bind(&new_object);
{
FrameScope scope(masm, StackFrame::MANUAL);
FastNewObjectStub stub(masm->isolate());
__ SmiTag(t0);
__ EnterBuiltinFrame(cp, a1, t0);
__ Push(a0);
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ Pop(a0);
__ LeaveBuiltinFrame(cp, a1, t0);
__ SmiUntag(t0);
@ -491,11 +491,11 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
__ bind(&new_object);
{
FrameScope scope(masm, StackFrame::MANUAL);
FastNewObjectStub stub(masm->isolate());
__ SmiTag(t0);
__ EnterBuiltinFrame(cp, a1, t0);
__ Push(a0);
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ Pop(a0);
__ LeaveBuiltinFrame(cp, a1, t0);
__ SmiUntag(t0);
@ -582,8 +582,8 @@ void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
if (create_implicit_receiver) {
__ Push(a1, a3);
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ mov(t0, v0);
__ Pop(a1, a3);

View File

@ -137,8 +137,8 @@ void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
// Allocate the new receiver object.
__ Push(rdi);
__ Push(rdx);
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ movp(rbx, rax);
__ Pop(rdx);
__ Pop(rdi);
@ -1878,8 +1878,8 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterBuiltinFrame(rsi, rdi, r8);
__ Push(rbx); // the first argument
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ Pop(FieldOperand(rax, JSValue::kValueOffset));
__ LeaveBuiltinFrame(rsi, rdi, r8);
}
@ -2033,8 +2033,8 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterBuiltinFrame(rsi, rdi, r8);
__ Push(rbx); // the first argument
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
RelocInfo::CODE_TARGET);
__ Pop(FieldOperand(rax, JSValue::kValueOffset));
__ LeaveBuiltinFrame(rsi, rdi, r8);
}

View File

@ -371,8 +371,8 @@ Callable CodeFactory::FastNewClosure(Isolate* isolate) {
// static
// Returns the Callable for object allocation with `new`. FastNewObject is a
// TurboFan builtin, so dispatch goes through the builtins table rather than
// a platform code stub.
//
// Fix: the rendered diff left the deleted stub-based body in place, which
// constructed the removed FastNewObjectStub and made the real return
// statement unreachable; only the builtin-based implementation remains.
Callable CodeFactory::FastNewObject(Isolate* isolate) {
  return Callable(isolate->builtins()->FastNewObject(),
                  FastNewObjectDescriptor(isolate));
}
// static

View File

@ -1065,6 +1065,11 @@ Node* CodeStubAssembler::HasInstanceType(Node* object,
return Word32Equal(LoadInstanceType(object), Int32Constant(instance_type));
}
// Returns a word32 boolean that is true iff |object|'s instance type differs
// from |instance_type|. Negated counterpart of HasInstanceType; |object| must
// be a heap object, since LoadInstanceType reads through the object's map.
Node* CodeStubAssembler::DoesntHaveInstanceType(Node* object,
InstanceType instance_type) {
return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
}
Node* CodeStubAssembler::LoadProperties(Node* object) {
return LoadObjectField(object, JSObject::kPropertiesOffset);
}
@ -6353,6 +6358,20 @@ void CodeStubAssembler::BuildFastFixedArrayForEach(
: IndexAdvanceMode::kPost);
}
// Stores the root value |root_index| into every tagged field of |object| in
// the half-open offset range [start_offset, end_offset). The offsets are
// tagged field offsets (they include kHeapObjectTag), hence the
// -kHeapObjectTag adjustment before the raw stores. Stores skip the write
// barrier, so this is only safe for root values on a freshly allocated
// object.
void CodeStubAssembler::InitializeFieldsWithRoot(
Node* object, Node* start_offset, Node* end_offset,
Heap::RootListIndex root_index) {
start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
Node* root_value = LoadRoot(root_index);
// Fill back-to-front: the loop runs from end_offset down to start_offset in
// -kPointerSize steps, and kPre decrements the offset before each store, so
// the first write lands at end_offset - kPointerSize (end is exclusive).
BuildFastLoop(MachineType::PointerRepresentation(), end_offset, start_offset,
[this, object, root_value](Node* current) {
StoreNoWriteBarrier(MachineRepresentation::kTagged, object,
current, root_value);
},
-kPointerSize, CodeStubAssembler::IndexAdvanceMode::kPre);
}
void CodeStubAssembler::BranchIfNumericRelationalComparison(
RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true,
Label* if_false) {

View File

@ -312,6 +312,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* LoadInstanceType(Node* object);
// Compare the instance the type of the object against the provided one.
Node* HasInstanceType(Node* object, InstanceType type);
Node* DoesntHaveInstanceType(Node* object, InstanceType type);
// Load the properties backing store of a JSObject.
Node* LoadProperties(Node* object);
// Load the elements backing store of a JSObject.
@ -779,6 +780,29 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
return WordNotEqual(WordAnd(word, IntPtrConstant(mask)), IntPtrConstant(0));
}
// Returns true if all of the |T|'s bits in given |word32| are clear.
// |T| must be a BitField-style class exposing a static kMask constant.
template <typename T>
Node* IsClearWord32(Node* word32) {
return IsClearWord32(word32, T::kMask);
}
// Returns true if all of the mask's bits in given |word32| are clear.
// (word32 & mask) == 0, computed as a word32 boolean node.
Node* IsClearWord32(Node* word32, uint32_t mask) {
return Word32Equal(Word32And(word32, Int32Constant(mask)),
Int32Constant(0));
}
// Returns true if all of the |T|'s bits in given |word| are clear.
// |T| must be a BitField-style class exposing a static kMask constant.
template <typename T>
Node* IsClearWord(Node* word) {
return IsClearWord(word, T::kMask);
}
// Returns true if all of the mask's bits in given |word| are clear.
// Pointer-width variant: (word & mask) == 0 using IntPtr-sized operations.
Node* IsClearWord(Node* word, uint32_t mask) {
return WordEqual(WordAnd(word, IntPtrConstant(mask)), IntPtrConstant(0));
}
void SetCounter(StatsCounter* counter, int value);
void IncrementCounter(StatsCounter* counter, int delta);
void DecrementCounter(StatsCounter* counter, int delta);
@ -1045,6 +1069,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
FixedArray::kHeaderSize);
}
void InitializeFieldsWithRoot(Node* object, Node* start_offset,
Node* end_offset, Heap::RootListIndex root);
enum RelationalComparisonMode {
kLessThan,
kLessThanOrEqual,

View File

@ -50,7 +50,6 @@ class Node;
V(StoreElement) \
V(SubString) \
V(LoadGlobalIC) \
V(FastNewObject) \
V(FastNewRestParameter) \
V(FastNewSloppyArguments) \
V(FastNewStrictArguments) \
@ -803,15 +802,6 @@ class FastNewFunctionContextStub final : public TurboFanCodeStub {
DEFINE_TURBOFAN_CODE_STUB(FastNewFunctionContext, TurboFanCodeStub);
};
class FastNewObjectStub final : public PlatformCodeStub {
public:
explicit FastNewObjectStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastNewObject);
DEFINE_PLATFORM_CODE_STUB(FastNewObject, PlatformCodeStub);
};
// TODO(turbofan): This stub should be possible to write in TurboFan
// using the CodeStubAssembler very soon in a way that is as efficient
// and easy as the current handwritten version, which is partly a copy

View File

@ -3324,136 +3324,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
// ia32 fast path for object allocation with `new`: allocates a JSObject from
// the new target's initial map, runs in-object slack tracking while the map's
// construction counter is active, and falls back to the %NewObject runtime
// function whenever a fast-path precondition fails.
void FastNewObjectStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- edi : target
// -- edx : new target
// -- esi : context
// -- esp[0] : return address
// -----------------------------------
__ AssertFunction(edi);
__ AssertReceiver(edx);
// Verify that the new target is a JSFunction.
Label new_object;
__ CmpObjectType(edx, JS_FUNCTION_TYPE, ebx);
__ j(not_equal, &new_object);
// Load the initial map and verify that it's in fact a map.
__ mov(ecx, FieldOperand(edx, JSFunction::kPrototypeOrInitialMapOffset));
__ JumpIfSmi(ecx, &new_object);
__ CmpObjectType(ecx, MAP_TYPE, ebx);
__ j(not_equal, &new_object);
// Fall back to runtime if the target differs from the new target's
// initial map constructor.
__ cmp(edi, FieldOperand(ecx, Map::kConstructorOrBackPointerOffset));
__ j(not_equal, &new_object);
// Allocate the JSObject on the heap.
Label allocate, done_allocate;
// The map stores the instance size in words; the lea scales it to bytes
// (times_pointer_size) since Allocate below takes a byte count.
__ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset));
__ lea(ebx, Operand(ebx, times_pointer_size, 0));
__ Allocate(ebx, eax, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Initialize the JSObject fields.
__ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
__ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
masm->isolate()->factory()->empty_fixed_array());
__ mov(FieldOperand(eax, JSObject::kElementsOffset),
masm->isolate()->factory()->empty_fixed_array());
STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
__ lea(ebx, FieldOperand(eax, JSObject::kHeaderSize));
// ----------- S t a t e -------------
// -- eax : result (tagged)
// -- ebx : result fields (untagged)
// -- edi : result end (untagged)
// -- ecx : initial map
// -- esi : context
// -- esp[0] : return address
// -----------------------------------
// Perform in-object slack tracking if requested.
Label slack_tracking;
STATIC_ASSERT(Map::kNoSlackTracking == 0);
__ test(FieldOperand(ecx, Map::kBitField3Offset),
Immediate(Map::ConstructionCounter::kMask));
__ j(not_zero, &slack_tracking, Label::kNear);
{
// Initialize all in-object fields with undefined.
__ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
__ InitializeFieldsWithFiller(ebx, edi, edx);
__ Ret();
}
__ bind(&slack_tracking);
{
// Decrease generous allocation count.
STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
__ sub(FieldOperand(ecx, Map::kBitField3Offset),
Immediate(1 << Map::ConstructionCounter::kShift));
// Initialize the in-object fields with undefined.
// edx = result end minus the unused (slack) fields: negate the unused
// field count, then index off the result end in edi.
__ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset));
__ neg(edx);
__ lea(edx, Operand(edi, edx, times_pointer_size, 0));
__ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
__ InitializeFieldsWithFiller(ebx, edx, edi);
// Initialize the remaining (reserved) fields with one pointer filler map.
__ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset));
__ lea(edx, Operand(ebx, edx, times_pointer_size, 0));
__ LoadRoot(edi, Heap::kOnePointerFillerMapRootIndex);
__ InitializeFieldsWithFiller(ebx, edx, edi);
// Check if we can finalize the instance size.
Label finalize;
STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
__ test(FieldOperand(ecx, Map::kBitField3Offset),
Immediate(Map::ConstructionCounter::kMask));
__ j(zero, &finalize, Label::kNear);
__ Ret();
// Finalize the instance size.
__ bind(&finalize);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(eax);
__ Push(ecx);
__ CallRuntime(Runtime::kFinalizeInstanceSize);
__ Pop(eax);
}
__ Ret();
}
// Fall back to %AllocateInNewSpace.
__ bind(&allocate);
{
FrameScope scope(masm, StackFrame::INTERNAL);
// ebx already holds the size in bytes; smi-tag it for the runtime call.
__ SmiTag(ebx);
__ Push(ecx);
__ Push(ebx);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(ecx);
}
// Recompute the untagged result end in edi, as expected at done_allocate;
// the dec strips the heap object tag (asserted to be 1).
__ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset));
__ lea(edi, Operand(eax, ebx, times_pointer_size, 0));
STATIC_ASSERT(kHeapObjectTag == 1);
__ dec(edi);
__ jmp(&done_allocate);
// Fall back to %NewObject.
__ bind(&new_object);
__ PopReturnAddressTo(ecx);
__ Push(edi);
__ Push(edx);
__ PushReturnAddressFrom(ecx);
__ TailCallRuntime(Runtime::kNewObject);
}
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- edi : function

View File

@ -68,12 +68,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers, NULL);
}
void FastNewObjectDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // ia32 register assignment: kTarget lives in edi, kNewTarget in edx.
  Register descriptor_registers[] = {edi, edx};
  data->InitializePlatformSpecific(arraysize(descriptor_registers),
                                   descriptor_registers);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {edi};

View File

@ -74,6 +74,20 @@ void FastNewFunctionContextDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewObjectDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // Platform-independent variant: the concrete registers come from the
  // TargetRegister()/NewTargetRegister() accessors.
  Register descriptor_registers[] = {TargetRegister(), NewTargetRegister()};
  data->InitializePlatformSpecific(arraysize(descriptor_registers),
                                   descriptor_registers);
}
// Register holding the target (constructor) function for FastNewObject;
// aliases the calling convention's JS function register.
const Register FastNewObjectDescriptor::TargetRegister() {
return kJSFunctionRegister;
}
// Register holding the new target for FastNewObject; aliases the calling
// convention's JavaScript-call new-target register.
const Register FastNewObjectDescriptor::NewTargetRegister() {
return kJavaScriptCallNewTargetRegister;
}
void LoadDescriptor::InitializePlatformIndependent(
CallInterfaceDescriptorData* data) {
// kReceiver, kName, kSlot

View File

@ -448,7 +448,10 @@ class FastNewFunctionContextDescriptor : public CallInterfaceDescriptor {
// Call interface descriptor for FastNewObject: two parameters, the target
// (constructor) function and the new target. TargetRegister() and
// NewTargetRegister() expose the per-platform register assignment used by
// InitializePlatformSpecific.
class FastNewObjectDescriptor : public CallInterfaceDescriptor {
public:
DEFINE_PARAMETERS(kTarget, kNewTarget)
DECLARE_DESCRIPTOR(FastNewObjectDescriptor, CallInterfaceDescriptor)
static const Register TargetRegister();
static const Register NewTargetRegister();
};
class FastNewRestParameterDescriptor : public CallInterfaceDescriptor {

View File

@ -3564,127 +3564,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
// MIPS32 fast path for JSObject allocation via `new`: verifies that the new
// target is a JSFunction whose initial map belongs to the target constructor,
// then allocates and initializes the object inline (cooperating with
// in-object slack tracking), falling back to %AllocateInNewSpace when the
// young generation is full and to %NewObject when any precondition fails.
void FastNewObjectStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a1 : target
// -- a3 : new target
// -- cp : context
// -- ra : return address
// -----------------------------------
__ AssertFunction(a1);
__ AssertReceiver(a3);
// Verify that the new target is a JSFunction.
Label new_object;
__ GetObjectType(a3, a2, a2);
__ Branch(&new_object, ne, a2, Operand(JS_FUNCTION_TYPE));
// Load the initial map and verify that it's in fact a map.
__ lw(a2, FieldMemOperand(a3, JSFunction::kPrototypeOrInitialMapOffset));
__ JumpIfSmi(a2, &new_object);
__ GetObjectType(a2, a0, a0);
__ Branch(&new_object, ne, a0, Operand(MAP_TYPE));
// Fall back to runtime if the target differs from the new target's
// initial map constructor.
__ lw(a0, FieldMemOperand(a2, Map::kConstructorOrBackPointerOffset));
__ Branch(&new_object, ne, a0, Operand(a1));
// Allocate the JSObject on the heap.
// Instance size is read in words (SIZE_IN_WORDS); t1 receives the untagged
// end-of-object address used below for field initialization.
Label allocate, done_allocate;
__ lbu(t0, FieldMemOperand(a2, Map::kInstanceSizeOffset));
__ Allocate(t0, v0, t1, a0, &allocate, SIZE_IN_WORDS);
__ bind(&done_allocate);
// Initialize the JSObject fields.
__ sw(a2, FieldMemOperand(v0, JSObject::kMapOffset));
__ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
__ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
__ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
__ Addu(a1, v0, Operand(JSObject::kHeaderSize - kHeapObjectTag));
// ----------- S t a t e -------------
// -- v0 : result (tagged)
// -- a1 : result fields (untagged)
// -- t1 : result end (untagged)
// -- a2 : initial map
// -- cp : context
// -- ra : return address
// -----------------------------------
// Perform in-object slack tracking if requested.
Label slack_tracking;
STATIC_ASSERT(Map::kNoSlackTracking == 0);
__ lw(a3, FieldMemOperand(a2, Map::kBitField3Offset));
__ And(at, a3, Operand(Map::ConstructionCounter::kMask));
// The branch's delay slot loads the undefined filler unconditionally: both
// the fast path and the slack-tracking path need it in a0.
__ Branch(USE_DELAY_SLOT, &slack_tracking, ne, at, Operand(0));
__ LoadRoot(a0, Heap::kUndefinedValueRootIndex); // In delay slot.
{
// Initialize all in-object fields with undefined.
__ InitializeFieldsWithFiller(a1, t1, a0);
__ Ret();
}
__ bind(&slack_tracking);
{
// Decrease generous allocation count.
STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
__ Subu(a3, a3, Operand(1 << Map::ConstructionCounter::kShift));
__ sw(a3, FieldMemOperand(a2, Map::kBitField3Offset));
// Initialize the in-object fields with undefined.
// t0 = end of the used (non-slack) fields: end - unused * kPointerSize.
__ lbu(t0, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
__ sll(t0, t0, kPointerSizeLog2);
__ subu(t0, t1, t0);
__ InitializeFieldsWithFiller(a1, t0, a0);
// Initialize the remaining (reserved) fields with one pointer filler map.
__ LoadRoot(a0, Heap::kOnePointerFillerMapRootIndex);
__ InitializeFieldsWithFiller(a1, t0, a0);
// Check if we can finalize the instance size.
Label finalize;
STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1)
__ And(a3, a3, Operand(Map::ConstructionCounter::kMask));
__ Branch(&finalize, eq, a3, Operand(zero_reg));
__ Ret();
// Finalize the instance size.
__ bind(&finalize);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(v0, a2);
__ CallRuntime(Runtime::kFinalizeInstanceSize);
__ Pop(v0);
}
__ Ret();
}
// Fall back to %AllocateInNewSpace.
__ bind(&allocate);
{
FrameScope scope(masm, StackFrame::INTERNAL);
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize == 1);
// Convert the word count in t0 to a Smi-tagged byte count in one shift.
__ sll(t0, t0, kPointerSizeLog2 + kSmiTagSize);
__ Push(a2, t0);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(a2);
}
// Recompute the untagged result end from the tagged object in v0 and the
// instance size; then strip the heap-object tag from the end address.
__ lbu(t1, FieldMemOperand(a2, Map::kInstanceSizeOffset));
__ Lsa(t1, v0, t1, kPointerSizeLog2);
STATIC_ASSERT(kHeapObjectTag == 1);
__ Subu(t1, t1, Operand(kHeapObjectTag));
__ jmp(&done_allocate);
// Fall back to %NewObject.
__ bind(&new_object);
__ Push(a1, a3);
__ TailCallRuntime(Runtime::kNewObject);
}
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a1 : function

View File

@@ -68,12 +68,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers, NULL);
}
void FastNewObjectDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // Parameter registers for this descriptor: kTarget -> a1, kNewTarget -> a3.
  Register regs[] = {a1, a3};
  data->InitializePlatformSpecific(arraysize(regs), regs, NULL);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {a1};

View File

@@ -3567,128 +3567,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
// MIPS64 fast path for JSObject allocation via `new`: verifies that the new
// target is a JSFunction whose initial map belongs to the target constructor,
// then allocates and initializes the object inline (cooperating with
// in-object slack tracking), falling back to %AllocateInNewSpace when the
// young generation is full and to %NewObject when any precondition fails.
void FastNewObjectStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a1 : target
// -- a3 : new target
// -- cp : context
// -- ra : return address
// -----------------------------------
__ AssertFunction(a1);
__ AssertReceiver(a3);
// Verify that the new target is a JSFunction.
Label new_object;
__ GetObjectType(a3, a2, a2);
__ Branch(&new_object, ne, a2, Operand(JS_FUNCTION_TYPE));
// Load the initial map and verify that it's in fact a map.
__ ld(a2, FieldMemOperand(a3, JSFunction::kPrototypeOrInitialMapOffset));
__ JumpIfSmi(a2, &new_object);
__ GetObjectType(a2, a0, a0);
__ Branch(&new_object, ne, a0, Operand(MAP_TYPE));
// Fall back to runtime if the target differs from the new target's
// initial map constructor.
__ ld(a0, FieldMemOperand(a2, Map::kConstructorOrBackPointerOffset));
__ Branch(&new_object, ne, a0, Operand(a1));
// Allocate the JSObject on the heap.
// Instance size is read in words (SIZE_IN_WORDS); a5 receives the untagged
// end-of-object address used below for field initialization.
Label allocate, done_allocate;
__ lbu(a4, FieldMemOperand(a2, Map::kInstanceSizeOffset));
__ Allocate(a4, v0, a5, a0, &allocate, SIZE_IN_WORDS);
__ bind(&done_allocate);
// Initialize the JSObject fields.
__ sd(a2, FieldMemOperand(v0, JSObject::kMapOffset));
__ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
__ sd(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
__ sd(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
__ Daddu(a1, v0, Operand(JSObject::kHeaderSize - kHeapObjectTag));
// ----------- S t a t e -------------
// -- v0 : result (tagged)
// -- a1 : result fields (untagged)
// -- a5 : result end (untagged)
// -- a2 : initial map
// -- cp : context
// -- ra : return address
// -----------------------------------
// Perform in-object slack tracking if requested.
Label slack_tracking;
STATIC_ASSERT(Map::kNoSlackTracking == 0);
__ lwu(a3, FieldMemOperand(a2, Map::kBitField3Offset));
__ And(at, a3, Operand(Map::ConstructionCounter::kMask));
// The branch's delay slot loads the undefined filler unconditionally: both
// the fast path and the slack-tracking path need it in a0.
__ Branch(USE_DELAY_SLOT, &slack_tracking, ne, at, Operand(zero_reg));
__ LoadRoot(a0, Heap::kUndefinedValueRootIndex); // In delay slot.
{
// Initialize all in-object fields with undefined.
__ InitializeFieldsWithFiller(a1, a5, a0);
__ Ret();
}
__ bind(&slack_tracking);
{
// Decrease generous allocation count.
STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
__ Subu(a3, a3, Operand(1 << Map::ConstructionCounter::kShift));
__ sw(a3, FieldMemOperand(a2, Map::kBitField3Offset));
// Initialize the in-object fields with undefined.
// a4 = end of the used (non-slack) fields: end - unused * kPointerSize.
__ lbu(a4, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
__ dsll(a4, a4, kPointerSizeLog2);
__ Dsubu(a4, a5, a4);
__ InitializeFieldsWithFiller(a1, a4, a0);
// Initialize the remaining (reserved) fields with one pointer filler map.
__ LoadRoot(a0, Heap::kOnePointerFillerMapRootIndex);
__ InitializeFieldsWithFiller(a1, a5, a0);
// Check if we can finalize the instance size.
Label finalize;
STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
__ And(a3, a3, Operand(Map::ConstructionCounter::kMask));
__ Branch(&finalize, eq, a3, Operand(zero_reg));
__ Ret();
// Finalize the instance size.
__ bind(&finalize);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(v0, a2);
__ CallRuntime(Runtime::kFinalizeInstanceSize);
__ Pop(v0);
}
__ Ret();
}
// Fall back to %AllocateInNewSpace.
__ bind(&allocate);
{
FrameScope scope(masm, StackFrame::INTERNAL);
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize == 1);
// NOTE(review): this shift already includes the Smi shift/tag bits, yet the
// value is SmiTag'ed again on the next line — unlike the 32-bit variant,
// which tags via the shift alone. Verify this does not double-tag a4.
__ dsll(a4, a4, kPointerSizeLog2 + kSmiShiftSize + kSmiTagSize);
__ SmiTag(a4);
__ Push(a2, a4);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(a2);
}
// Recompute the untagged result end from the tagged object in v0 and the
// instance size; then strip the heap-object tag from the end address.
__ lbu(a5, FieldMemOperand(a2, Map::kInstanceSizeOffset));
__ Dlsa(a5, v0, a5, kPointerSizeLog2);
STATIC_ASSERT(kHeapObjectTag == 1);
__ Dsubu(a5, a5, Operand(kHeapObjectTag));
__ jmp(&done_allocate);
// Fall back to %NewObject.
__ bind(&new_object);
__ Push(a1, a3);
__ TailCallRuntime(Runtime::kNewObject);
}
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a1 : function

View File

@@ -68,12 +68,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers, NULL);
}
void FastNewObjectDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // Parameter registers for this descriptor: kTarget -> a1, kNewTarget -> a3.
  Register regs[] = {a1, a3};
  data->InitializePlatformSpecific(arraysize(regs), regs, NULL);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {a1};

View File

@@ -3268,132 +3268,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
// x64 fast path for JSObject allocation via `new`: verifies that the new
// target is a JSFunction whose initial map belongs to the target constructor,
// then allocates and initializes the object inline (cooperating with
// in-object slack tracking), falling back to %AllocateInNewSpace when the
// young generation is full and to %NewObject when any precondition fails.
void FastNewObjectStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rdi : target
// -- rdx : new target
// -- rsi : context
// -- rsp[0] : return address
// -----------------------------------
__ AssertFunction(rdi);
__ AssertReceiver(rdx);
// Verify that the new target is a JSFunction.
Label new_object;
__ CmpObjectType(rdx, JS_FUNCTION_TYPE, rbx);
__ j(not_equal, &new_object);
// Load the initial map and verify that it's in fact a map.
__ movp(rcx, FieldOperand(rdx, JSFunction::kPrototypeOrInitialMapOffset));
__ JumpIfSmi(rcx, &new_object);
__ CmpObjectType(rcx, MAP_TYPE, rbx);
__ j(not_equal, &new_object);
// Fall back to runtime if the target differs from the new target's
// initial map constructor.
__ cmpp(rdi, FieldOperand(rcx, Map::kConstructorOrBackPointerOffset));
__ j(not_equal, &new_object);
// Allocate the JSObject on the heap.
Label allocate, done_allocate;
__ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
// Convert the instance size from words to bytes (times_pointer_size scale),
// since Allocate is invoked with NO_ALLOCATION_FLAGS (byte count) here.
__ leal(rbx, Operand(rbx, times_pointer_size, 0));
__ Allocate(rbx, rax, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Initialize the JSObject fields.
__ movp(FieldOperand(rax, JSObject::kMapOffset), rcx);
__ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
__ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
__ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
__ leap(rbx, FieldOperand(rax, JSObject::kHeaderSize));
// ----------- S t a t e -------------
// -- rax : result (tagged)
// -- rbx : result fields (untagged)
// -- rdi : result end (untagged)
// -- rcx : initial map
// -- rsi : context
// -- rsp[0] : return address
// -----------------------------------
// Perform in-object slack tracking if requested.
Label slack_tracking;
STATIC_ASSERT(Map::kNoSlackTracking == 0);
// r11 holds the undefined filler for both initialization paths below.
__ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
__ testl(FieldOperand(rcx, Map::kBitField3Offset),
Immediate(Map::ConstructionCounter::kMask));
__ j(not_zero, &slack_tracking, Label::kNear);
{
// Initialize all in-object fields with undefined.
__ InitializeFieldsWithFiller(rbx, rdi, r11);
__ Ret();
}
__ bind(&slack_tracking);
{
// Decrease generous allocation count.
STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
__ subl(FieldOperand(rcx, Map::kBitField3Offset),
Immediate(1 << Map::ConstructionCounter::kShift));
// Initialize the in-object fields with undefined.
// rdx = end of the used (non-slack) fields: end - unused * kPointerSize.
__ movzxbl(rdx, FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset));
__ negp(rdx);
__ leap(rdx, Operand(rdi, rdx, times_pointer_size, 0));
__ InitializeFieldsWithFiller(rbx, rdx, r11);
// Initialize the remaining (reserved) fields with one pointer filler map.
__ LoadRoot(r11, Heap::kOnePointerFillerMapRootIndex);
__ InitializeFieldsWithFiller(rdx, rdi, r11);
// Check if we can finalize the instance size.
Label finalize;
STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
__ testl(FieldOperand(rcx, Map::kBitField3Offset),
Immediate(Map::ConstructionCounter::kMask));
__ j(zero, &finalize, Label::kNear);
__ Ret();
// Finalize the instance size.
__ bind(&finalize);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(rax);
__ Push(rcx);
__ CallRuntime(Runtime::kFinalizeInstanceSize);
__ Pop(rax);
}
__ Ret();
}
// Fall back to %AllocateInNewSpace.
__ bind(&allocate);
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Pass the byte size as a Smi; the map survives the call via Push/Pop.
__ Integer32ToSmi(rbx, rbx);
__ Push(rcx);
__ Push(rbx);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(rcx);
}
// Recompute the untagged result end from the tagged object in rax and the
// instance size.
__ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
__ leap(rdi, Operand(rax, rbx, times_pointer_size, 0));
STATIC_ASSERT(kHeapObjectTag == 1);
__ decp(rdi); // Remove the tag from the end address.
__ jmp(&done_allocate);
// Fall back to %NewObject.
// The return address is popped so the arguments can be pushed beneath it
// before the tail call.
__ bind(&new_object);
__ PopReturnAddressTo(rcx);
__ Push(rdi);
__ Push(rdx);
__ PushReturnAddressFrom(rcx);
__ TailCallRuntime(Runtime::kNewObject);
}
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rdi : function

View File

@@ -68,12 +68,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewObjectDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // Parameter registers for this descriptor: kTarget -> rdi, kNewTarget -> rdx.
  Register regs[] = {rdi, rdx};
  data->InitializePlatformSpecific(arraysize(regs), regs);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {rdi};