diff --git a/src/a64/builtins-a64.cc b/src/a64/builtins-a64.cc index f4996d77c6..59d4dd2cb6 100644 --- a/src/a64/builtins-a64.cc +++ b/src/a64/builtins-a64.cc @@ -327,10 +327,12 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { static void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function, - bool count_constructions) { + bool count_constructions, + bool create_memento) { // ----------- S t a t e ------------- // -- x0 : number of arguments // -- x1 : constructor function + // -- x2 : allocation site or undefined // -- lr : return address // -- sp[...]: constructor arguments // ----------------------------------- @@ -338,6 +340,10 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, ASM_LOCATION("Builtins::Generate_JSConstructStubHelper"); // Should never count constructions for api objects. ASSERT(!is_api_function || !count_constructions); + // Should never create mementos for api functions. + ASSERT(!is_api_function || !create_memento); + // Should never create mementos before slack tracking is finished. + ASSERT(!count_constructions || !create_memento); Isolate* isolate = masm->isolate(); @@ -345,7 +351,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, { FrameScope scope(masm, StackFrame::CONSTRUCT); - // Preserve the two incoming parameters on the stack. + // Preserve the three incoming parameters on the stack. + if (create_memento) { + __ AssertUndefinedOrAllocationSite(x2, x10); + __ Push(x2); + } + Register argc = x0; Register constructor = x1; // x1: constructor function @@ -405,7 +416,13 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, Register obj_size = x3; Register new_obj = x4; __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset)); - __ Allocate(obj_size, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS); + if (create_memento) { + __ Add(x7, obj_size, + Operand(AllocationMemento::kSize / kPointerSize)); + __ Allocate(x7, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS); + } else { + __ Allocate(obj_size, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS); + } // Allocated the JSObject, now initialize the fields. Map is set to // initial map and properties and elements are set to empty fixed array. @@ -467,6 +484,18 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, __ FillFields(first_non_prealloc, non_prealloc_fields, one_pointer_filler); prop_fields = NoReg; + } else if (create_memento) { + // Fill the pre-allocated fields with undef. + __ FillFields(first_prop, prop_fields, undef); + __ Add(first_prop, new_obj, Operand(obj_size, LSL, kPointerSizeLog2)); + __ LoadRoot(x14, Heap::kAllocationMementoMapRootIndex); + ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset); + __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex)); + // Load the AllocationSite + __ Peek(x14, 2 * kXRegSize); + ASSERT_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset); + __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex)); + first_prop = NoReg; } else { // Fill all of the property fields with undef. __ FillFields(first_prop, prop_fields, undef); @@ -531,13 +560,41 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // Allocate the new receiver object using the runtime call. __ Bind(&rt_call); - __ Push(constructor); // Argument for Runtime_NewObject. - __ CallRuntime(Runtime::kNewObject, 1); - __ Mov(x4, x0); + Label count_incremented; + if (create_memento) { + // Get the cell or allocation site. 
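The Peek that follows works because of the construct-frame layout: the allocation site was pushed on frame entry, followed by the smi-tagged argument count and the constructor, so the site sits two register-sized (8-byte) slots below the stack top. A minimal sketch of that bookkeeping, with hypothetical values rather than V8 code:

#include <cstdint>
#include <vector>

int main() {
  std::vector<uint64_t> stack;   // grows toward higher indices
  const uint64_t site = 0x1000;  // allocation site (tagged pointer)
  const uint64_t argc = 2 << 1;  // smi-tagged argument count
  const uint64_t ctor = 0x2000;  // constructor function
  stack.push_back(site);         // pushed first on frame entry
  stack.push_back(argc);
  stack.push_back(ctor);         // current top of stack
  // Peek(x4, 2 * kXRegSize) skips two 8-byte slots down from the top:
  uint64_t peeked = stack[stack.size() - 1 - 2];
  return peeked == site ? 0 : 1;
}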
+ __ Peek(x4, 2 * kXRegSize); + __ Push(x4); + __ Push(constructor); // Argument for Runtime_NewObject. + __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2); + __ Mov(x4, x0); + // If we ended up using the runtime, and we want a memento, then the + // runtime call made it for us, and we shouldn't do create count + // increment. + __ jmp(&count_incremented); + } else { + __ Push(constructor); // Argument for Runtime_NewObject. + __ CallRuntime(Runtime::kNewObject, 1); + __ Mov(x4, x0); + } // Receiver for constructor call allocated. // x4: JSObject __ Bind(&allocated); + + if (create_memento) { + __ Peek(x10, 2 * kXRegSize); + __ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented); + // r2 is an AllocationSite. We are creating a memento from it, so we + // need to increment the memento create count. + __ Ldr(x5, FieldMemOperand(x10, + AllocationSite::kPretenureCreateCountOffset)); + __ Add(x5, x5, Operand(Smi::FromInt(1))); + __ Str(x5, FieldMemOperand(x10, + AllocationSite::kPretenureCreateCountOffset)); + __ bind(&count_incremented); + } + __ Push(x4, x4); // Reload the number of arguments from the stack. @@ -644,17 +701,17 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { - Generate_JSConstructStubHelper(masm, false, true); + Generate_JSConstructStubHelper(masm, false, true, false); } void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { - Generate_JSConstructStubHelper(masm, false, false); + Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new); } void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { - Generate_JSConstructStubHelper(masm, true, false); + Generate_JSConstructStubHelper(masm, true, false, false); } diff --git a/src/a64/code-stubs-a64.cc b/src/a64/code-stubs-a64.cc index c8399c9004..5cd39747fe 100644 --- a/src/a64/code-stubs-a64.cc +++ b/src/a64/code-stubs-a64.cc @@ -3207,18 +3207,20 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, __ Cmp(scratch1, function); __ B(eq, &done); - // If we came here, we need to see if we are the array function. - // If we didn't have a matching function, and we didn't find the megamorph - // sentinel, then we have in the slot either some other function or an - // AllocationSite. Do a map check on the object in scratch1 register. - __ Ldr(scratch2, FieldMemOperand(scratch1, AllocationSite::kMapOffset)); - __ JumpIfNotRoot(scratch2, Heap::kAllocationSiteMapRootIndex, &miss); + if (!FLAG_pretenuring_call_new) { + // If we came here, we need to see if we are the array function. + // If we didn't have a matching function, and we didn't find the megamorph + // sentinel, then we have in the slot either some other function or an + // AllocationSite. Do a map check on the object in scratch1 register. 
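GenerateRecordCallTarget is essentially a small state machine over the feedback slot. A hedged C++ model of the states it distinguishes (names are illustrative, not V8's):

#include <cassert>

enum class SlotState { kUninitialized, kMonomorphic, kArraySite, kMegamorphic };

// An uninitialized slot is patched with the callee (or an AllocationSite when
// the callee is the Array function); any later mismatch degrades the slot to
// the megamorphic sentinel, after which no more feedback is recorded.
SlotState Record(SlotState state, bool same_callee) {
  switch (state) {
    case SlotState::kUninitialized:
      return SlotState::kMonomorphic;
    case SlotState::kMonomorphic:
    case SlotState::kArraySite:
      return same_callee ? state : SlotState::kMegamorphic;
    case SlotState::kMegamorphic:
      break;
  }
  return SlotState::kMegamorphic;
}

int main() {
  SlotState s = Record(SlotState::kUninitialized, true);  // first call: patch
  s = Record(s, true);                                    // same target: keep
  s = Record(s, false);                                   // mismatch: degrade
  assert(s == SlotState::kMegamorphic);
  return 0;
}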
+ __ Ldr(scratch2, FieldMemOperand(scratch1, AllocationSite::kMapOffset)); + __ JumpIfNotRoot(scratch2, Heap::kAllocationSiteMapRootIndex, &miss); - // Make sure the function is the Array() function - __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); - __ Cmp(function, scratch1); - __ B(ne, &megamorphic); - __ B(&done); + // Make sure the function is the Array() function + __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); + __ Cmp(function, scratch1); + __ B(ne, &megamorphic); + __ B(&done); + } __ Bind(&miss); @@ -3237,32 +3239,37 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, // An uninitialized cache is patched with the function or sentinel to // indicate the ElementsKind if function is the Array constructor. __ Bind(&initialize); - // Make sure the function is the Array() function - __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); - __ Cmp(function, scratch1); - __ B(ne, ¬_array_function); - // The target function is the Array constructor, - // Create an AllocationSite if we don't already have it, store it in the slot. - { - FrameScope scope(masm, StackFrame::INTERNAL); - CreateAllocationSiteStub create_stub; + if (!FLAG_pretenuring_call_new) { + // Make sure the function is the Array() function + __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); + __ Cmp(function, scratch1); + __ B(ne, ¬_array_function); - // Arguments register must be smi-tagged to call out. - __ SmiTag(argc); - __ Push(argc, function, feedback_vector, index); + // The target function is the Array constructor, + // Create an AllocationSite if we don't already have it, store it in the + // slot. + { + FrameScope scope(masm, StackFrame::INTERNAL); + CreateAllocationSiteStub create_stub; - // CreateAllocationSiteStub expect the feedback vector in x2 and the slot - // index in x3. - ASSERT(feedback_vector.Is(x2) && index.Is(x3)); - __ CallStub(&create_stub); + // Arguments register must be smi-tagged to call out. + __ SmiTag(argc); + __ Push(argc, function, feedback_vector, index); - __ Pop(index, feedback_vector, function, argc); - __ SmiUntag(argc); + // CreateAllocationSiteStub expect the feedback vector in x2 and the slot + // index in x3. + ASSERT(feedback_vector.Is(x2) && index.Is(x3)); + __ CallStub(&create_stub); + + __ Pop(index, feedback_vector, function, argc); + __ SmiUntag(argc); + } + __ B(&done); + + __ Bind(¬_array_function); } - __ B(&done); - __ Bind(¬_array_function); // An uninitialized cache is patched with the function. __ Add(scratch1, feedback_vector, @@ -3410,17 +3417,25 @@ void CallConstructStub::Generate(MacroAssembler* masm) { &slow); if (RecordCallTarget()) { - Label feedback_register_initialized; GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5); - // Put the AllocationSite from the feedback vector into x2, or undefined. __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); - __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize)); - __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset)); - __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex, - &feedback_register_initialized); - __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); - __ bind(&feedback_register_initialized); + if (FLAG_pretenuring_call_new) { + // Put the AllocationSite from the feedback vector into x2. + // By adding kPointerSize we encode that we know the AllocationSite + // entry is at the feedback vector slot given by x3 + 1. 
+ __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize + kPointerSize)); + } else { + Label feedback_register_initialized; + // Put the AllocationSite from the feedback vector into x2, or undefined. + __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize)); + __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset)); + __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex, + &feedback_register_initialized); + __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); + __ bind(&feedback_register_initialized); + } + __ AssertUndefinedOrAllocationSite(x2, x5); } diff --git a/src/a64/full-codegen-a64.cc b/src/a64/full-codegen-a64.cc index 8c3c88ddce..4f907a5bf7 100644 --- a/src/a64/full-codegen-a64.cc +++ b/src/a64/full-codegen-a64.cc @@ -2608,6 +2608,13 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { Handle uninitialized = TypeFeedbackInfo::UninitializedSentinel(isolate()); StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); + if (FLAG_pretenuring_call_new) { + StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(), + isolate()->factory()->NewAllocationSite()); + ASSERT(expr->AllocationSiteFeedbackSlot() == + expr->CallNewFeedbackSlot() + 1); + } + __ LoadObject(x2, FeedbackVector()); __ Mov(x3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc index 3ad64acc3d..6c8f39246b 100644 --- a/src/arm/builtins-arm.cc +++ b/src/arm/builtins-arm.cc @@ -336,10 +336,12 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { static void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function, - bool count_constructions) { + bool count_constructions, + bool create_memento) { // ----------- S t a t e ------------- // -- r0 : number of arguments // -- r1 : constructor function + // -- r2 : allocation site or undefined // -- lr : return address // -- sp[...]: constructor arguments // ----------------------------------- @@ -347,12 +349,23 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // Should never count constructions for api objects. ASSERT(!is_api_function || !count_constructions); + // Should never create mementos for api functions. + ASSERT(!is_api_function || !create_memento); + + // Should never create mementos before slack tracking is finished. + ASSERT(!count_constructions || !create_memento); + Isolate* isolate = masm->isolate(); // Enter a construct frame. { FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT); + if (create_memento) { + __ AssertUndefinedOrAllocationSite(r2, r3); + __ push(r2); + } + // Preserve the two incoming parameters on the stack. __ SmiTag(r0); __ push(r0); // Smi-tagged arguments count. @@ -414,13 +427,17 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // r1: constructor function // r2: initial map __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset)); + if (create_memento) { + __ add(r3, r3, Operand(AllocationMemento::kSize / kPointerSize)); + } + __ Allocate(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS); // Allocated the JSObject, now initialize the fields. Map is set to // initial map and properties and elements are set to empty fixed array. 
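The initialization order mirrors the JSObject header: the map word first, then the properties and elements backing-store pointers, then the in-object fields. A rough self-contained model (the field layout is assumed for illustration, not V8's real object header):

#include <cassert>
#include <cstdint>

// Assumed layout: [map][properties][elements][field0][field1]
struct FakeJSObject {
  uint64_t map;
  uint64_t properties;
  uint64_t elements;
  uint64_t fields[2];
};

int main() {
  const uint64_t kEmptyFixedArray = 0xE;  // stand-in for the root constant
  const uint64_t kUndefined = 0xD;        // stand-in for undefined_value
  FakeJSObject obj = {};
  obj.map = 0xA;                          // initial map from the constructor
  obj.properties = kEmptyFixedArray;      // no out-of-object properties yet
  obj.elements = kEmptyFixedArray;        // no indexed elements yet
  for (uint64_t& field : obj.fields) {
    field = kUndefined;                   // fill in-object fields
  }
  assert(obj.fields[1] == kUndefined);
  return 0;
}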
// r1: constructor function // r2: initial map - // r3: object size + // r3: object size (not including memento if create_memento) // r4: JSObject (not tagged) __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); __ mov(r5, r4); @@ -434,12 +451,13 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // Fill all the in-object properties with the appropriate filler. // r1: constructor function // r2: initial map - // r3: object size (in words) + // r3: object size (in words, including memento if create_memento) // r4: JSObject (not tagged) // r5: First in-object property of JSObject (not tagged) ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize); - __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); + if (count_constructions) { + __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset)); __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte, kBitsPerByte); @@ -453,9 +471,28 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, __ InitializeFieldsWithFiller(r5, r0, r6); // To allow for truncation. __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex); + __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object. + __ InitializeFieldsWithFiller(r5, r0, r6); + } else if (create_memento) { + __ sub(r6, r3, Operand(AllocationMemento::kSize / kPointerSize)); + __ add(r0, r4, Operand(r6, LSL, kPointerSizeLog2)); // End of object. + __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); + __ InitializeFieldsWithFiller(r5, r0, r6); + + // Fill in memento fields. + // r5: points to the allocated but uninitialized memento. + __ LoadRoot(r6, Heap::kAllocationMementoMapRootIndex); + ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset); + __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); + // Load the AllocationSite + __ ldr(r6, MemOperand(sp, 2 * kPointerSize)); + ASSERT_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset); + __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); + } else { + __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); + __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object. + __ InitializeFieldsWithFiller(r5, r0, r6); } - __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object. - __ InitializeFieldsWithFiller(r5, r0, r6); // Add the object tag to make the JSObject real, so that we can continue // and jump into the continuation code at any time from now on. Any @@ -553,13 +590,47 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // Allocate the new receiver object using the runtime call. // r1: constructor function __ bind(&rt_call); + if (create_memento) { + // Get the cell or allocation site. + __ ldr(r2, MemOperand(sp, 2 * kPointerSize)); + __ push(r2); + } + __ push(r1); // argument for Runtime_NewObject - __ CallRuntime(Runtime::kNewObject, 1); + if (create_memento) { + __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2); + } else { + __ CallRuntime(Runtime::kNewObject, 1); + } __ mov(r4, r0); + // If we ended up using the runtime, and we want a memento, then the + // runtime call made it for us, and we shouldn't do create count + // increment. + Label count_incremented; + if (create_memento) { + __ jmp(&count_incremented); + } + // Receiver for constructor call allocated. // r4: JSObject __ bind(&allocated); + + if (create_memento) { + __ ldr(r2, MemOperand(sp, kPointerSize * 2)); + __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); + __ cmp(r2, r5); + __ b(eq, &count_incremented); + // r2 is an AllocationSite. 
We are creating a memento from it, so we + // need to increment the memento create count. + __ ldr(r3, FieldMemOperand(r2, + AllocationSite::kPretenureCreateCountOffset)); + __ add(r3, r3, Operand(Smi::FromInt(1))); + __ str(r3, FieldMemOperand(r2, + AllocationSite::kPretenureCreateCountOffset)); + __ bind(&count_incremented); + } + __ push(r4); __ push(r4); @@ -662,17 +733,17 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { - Generate_JSConstructStubHelper(masm, false, true); + Generate_JSConstructStubHelper(masm, false, true, false); } void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { - Generate_JSConstructStubHelper(masm, false, false); + Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new); } void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { - Generate_JSConstructStubHelper(masm, true, false); + Generate_JSConstructStubHelper(masm, true, false, false); } diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc index 748b531385..cad641738c 100644 --- a/src/arm/code-stubs-arm.cc +++ b/src/arm/code-stubs-arm.cc @@ -2946,19 +2946,21 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ cmp(r4, r1); __ b(eq, &done); - // If we came here, we need to see if we are the array function. - // If we didn't have a matching function, and we didn't find the megamorph - // sentinel, then we have in the slot either some other function or an - // AllocationSite. Do a map check on the object in ecx. - __ ldr(r5, FieldMemOperand(r4, 0)); - __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); - __ b(ne, &miss); + if (!FLAG_pretenuring_call_new) { + // If we came here, we need to see if we are the array function. + // If we didn't have a matching function, and we didn't find the megamorph + // sentinel, then we have in the slot either some other function or an + // AllocationSite. Do a map check on the object in ecx. + __ ldr(r5, FieldMemOperand(r4, 0)); + __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); + __ b(ne, &miss); - // Make sure the function is the Array() function - __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4); - __ cmp(r1, r4); - __ b(ne, &megamorphic); - __ jmp(&done); + // Make sure the function is the Array() function + __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4); + __ cmp(r1, r4); + __ b(ne, &megamorphic); + __ jmp(&done); + } __ bind(&miss); @@ -2974,32 +2976,35 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); __ jmp(&done); - // An uninitialized cache is patched with the function or sentinel to - // indicate the ElementsKind if function is the Array constructor. + // An uninitialized cache is patched with the function __ bind(&initialize); - // Make sure the function is the Array() function - __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4); - __ cmp(r1, r4); - __ b(ne, ¬_array_function); - // The target function is the Array constructor, - // Create an AllocationSite if we don't already have it, store it in the slot. - { - FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); + if (!FLAG_pretenuring_call_new) { + // Make sure the function is the Array() function + __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4); + __ cmp(r1, r4); + __ b(ne, ¬_array_function); - // Arguments register must be smi-tagged to call out. 
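argc has to be smi-tagged before calling out because the GC may scan the stack during the stub call, and a raw integer could be misread as a heap pointer. A sketch of the classic 32-bit smi scheme this relies on (payload shifted left one, low bit clear):

#include <cassert>
#include <cstdint>

// Smis keep the payload shifted left by one; a clear low bit distinguishes
// them from heap-object pointers, whose low tag bit is set.
int32_t SmiTag(int32_t value) { return value << 1; }
int32_t SmiUntag(int32_t smi) { return smi >> 1; }

int main() {
  int32_t argc = 3;
  int32_t tagged = SmiTag(argc);
  assert((tagged & 1) == 0);         // never mistaken for a pointer
  assert(SmiUntag(tagged) == argc);  // round-trips losslessly
  return 0;
}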
- __ SmiTag(r0); - __ Push(r3, r2, r1, r0); + // The target function is the Array constructor, + // Create an AllocationSite if we don't already have it, store it in the + // slot. + { + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); - CreateAllocationSiteStub create_stub; - __ CallStub(&create_stub); + // Arguments register must be smi-tagged to call out. + __ SmiTag(r0); + __ Push(r3, r2, r1, r0); - __ Pop(r3, r2, r1, r0); - __ SmiUntag(r0); + CreateAllocationSiteStub create_stub; + __ CallStub(&create_stub); + + __ Pop(r3, r2, r1, r0); + __ SmiUntag(r0); + } + __ b(&done); + + __ bind(¬_array_function); } - __ b(&done); - - __ bind(¬_array_function); __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); @@ -3138,17 +3143,25 @@ void CallConstructStub::Generate(MacroAssembler* masm) { __ b(ne, &slow); if (RecordCallTarget()) { - Label feedback_register_initialized; GenerateRecordCallTarget(masm); - // Put the AllocationSite from the feedback vector into r2, or undefined. __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3)); - __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize)); - __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset)); - __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); - __ b(eq, &feedback_register_initialized); - __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); - __ bind(&feedback_register_initialized); + if (FLAG_pretenuring_call_new) { + // Put the AllocationSite from the feedback vector into r2. + // By adding kPointerSize we encode that we know the AllocationSite + // entry is at the feedback vector slot given by r3 + 1. + __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize + kPointerSize)); + } else { + Label feedback_register_initialized; + // Put the AllocationSite from the feedback vector into r2, or undefined. + __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize)); + __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset)); + __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); + __ b(eq, &feedback_register_initialized); + __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); + __ bind(&feedback_register_initialized); + } + __ AssertUndefinedOrAllocationSite(r2, r5); } diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc index 691039507f..eec910a095 100644 --- a/src/arm/full-codegen-arm.cc +++ b/src/arm/full-codegen-arm.cc @@ -2903,6 +2903,13 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { Handle uninitialized = TypeFeedbackInfo::UninitializedSentinel(isolate()); StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); + if (FLAG_pretenuring_call_new) { + StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(), + isolate()->factory()->NewAllocationSite()); + ASSERT(expr->AllocationSiteFeedbackSlot() == + expr->CallNewFeedbackSlot() + 1); + } + __ Move(r2, FeedbackVector()); __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); diff --git a/src/ast.cc b/src/ast.cc index 997d3e0787..086d015362 100644 --- a/src/ast.cc +++ b/src/ast.cc @@ -643,8 +643,11 @@ bool Call::ComputeGlobalTarget(Handle global, void CallNew::RecordTypeFeedback(TypeFeedbackOracle* oracle) { + int allocation_site_feedback_slot = FLAG_pretenuring_call_new + ? 
AllocationSiteFeedbackSlot()
+      : CallNewFeedbackSlot();
   allocation_site_ =
-      oracle->GetCallNewAllocationSite(CallNewFeedbackSlot());
+      oracle->GetCallNewAllocationSite(allocation_site_feedback_slot);
   is_monomorphic_ = oracle->CallNewIsMonomorphic(CallNewFeedbackSlot());
   if (is_monomorphic_) {
     target_ = oracle->GetCallNewTarget(CallNewFeedbackSlot());
diff --git a/src/ast.h b/src/ast.h
index 262b41a1db..c6ee71ed83 100644
--- a/src/ast.h
+++ b/src/ast.h
@@ -1840,7 +1840,9 @@ class CallNew V8_FINAL : public Expression, public FeedbackSlotInterface {
   // Type feedback information.
   virtual ComputablePhase GetComputablePhase() { return DURING_PARSE; }
-  virtual int ComputeFeedbackSlotCount(Isolate* isolate) { return 1; }
+  virtual int ComputeFeedbackSlotCount(Isolate* isolate) {
+    return FLAG_pretenuring_call_new ? 2 : 1;
+  }
   virtual void SetFirstFeedbackSlot(int slot) {
     callnew_feedback_slot_ = slot;
   }
@@ -1849,8 +1851,12 @@ class CallNew V8_FINAL : public Expression, public FeedbackSlotInterface {
     ASSERT(callnew_feedback_slot_ != kInvalidFeedbackSlot);
     return callnew_feedback_slot_;
   }
+  int AllocationSiteFeedbackSlot() {
+    ASSERT(callnew_feedback_slot_ != kInvalidFeedbackSlot);
+    ASSERT(FLAG_pretenuring_call_new);
+    return callnew_feedback_slot_ + 1;
+  }
 
-  TypeFeedbackId CallNewFeedbackId() const { return reuse(id()); }
   void RecordTypeFeedback(TypeFeedbackOracle* oracle);
   virtual bool IsMonomorphic() V8_OVERRIDE { return is_monomorphic_; }
   Handle<JSFunction> target() const { return target_; }
diff --git a/src/factory.cc b/src/factory.cc
index 64567a8214..57574d1685 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -1327,6 +1327,17 @@ Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor,
 }
 
 
+Handle<JSObject> Factory::NewJSObjectWithMemento(
+    Handle<JSFunction> constructor,
+    Handle<AllocationSite> site) {
+  JSFunction::EnsureHasInitialMap(constructor);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateJSObject(*constructor, NOT_TENURED, *site),
+      JSObject);
+}
+
+
 Handle<JSModule> Factory::NewJSModule(Handle<Context> context,
                                       Handle<ScopeInfo> scope_info) {
   CALL_HEAP_FUNCTION(
diff --git a/src/factory.h b/src/factory.h
index 3f77d45a67..f712880a02 100644
--- a/src/factory.h
+++ b/src/factory.h
@@ -328,6 +328,9 @@ class Factory {
   // runtime.
   Handle<JSObject> NewJSObject(Handle<JSFunction> constructor,
                                PretenureFlag pretenure = NOT_TENURED);
+  // JSObject that should have a memento pointing to the allocation site.
+  Handle<JSObject> NewJSObjectWithMemento(Handle<JSFunction> constructor,
+                                          Handle<AllocationSite> site);
 
   // Global objects are pretenured and initialized based on a constructor.
   Handle<GlobalObject> NewGlobalObject(Handle<JSFunction> constructor);
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index cbd53eeaee..d214fcb898 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -8278,12 +8278,25 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
 
     // Allocate an instance of the implicit receiver object.
     HValue* size_in_bytes = Add<HConstant>(instance_size);
-    PretenureFlag pretenure_flag =
-        (FLAG_pretenuring_call_new && !FLAG_allocation_site_pretenuring) ?
-        isolate()->heap()->GetPretenureMode() : NOT_TENURED;
+    HAllocationMode allocation_mode;
+    if (FLAG_pretenuring_call_new) {
+      if (FLAG_allocation_site_pretenuring) {
+        // Try to use pretenuring feedback.
+        Handle<AllocationSite> allocation_site = expr->allocation_site();
+        allocation_mode = HAllocationMode(allocation_site);
+        // Take a dependency on allocation site.
+ AllocationSite::AddDependentCompilationInfo(allocation_site, + AllocationSite::TENURING, + top_info()); + } else { + allocation_mode = HAllocationMode( + isolate()->heap()->GetPretenureMode()); + } + } + HAllocate* receiver = - Add(size_in_bytes, HType::JSObject(), pretenure_flag, - JS_OBJECT_TYPE); + BuildAllocate(size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE, + allocation_mode); receiver->set_known_initial_map(initial_map); // Load the initial map from the constructor. diff --git a/src/hydrogen.h b/src/hydrogen.h index 4d538f4339..e91199e2fd 100644 --- a/src/hydrogen.h +++ b/src/hydrogen.h @@ -1031,6 +1031,8 @@ class HAllocationMode V8_FINAL BASE_EMBEDDED { : current_site_(current_site), pretenure_flag_(NOT_TENURED) {} explicit HAllocationMode(PretenureFlag pretenure_flag) : current_site_(NULL), pretenure_flag_(pretenure_flag) {} + HAllocationMode() + : current_site_(NULL), pretenure_flag_(NOT_TENURED) {} HValue* current_site() const { return current_site_; } Handle feedback_site() const { return feedback_site_; } diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc index c3ac0fe080..d4c656f0e1 100644 --- a/src/ia32/builtins-ia32.cc +++ b/src/ia32/builtins-ia32.cc @@ -125,19 +125,32 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { static void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function, - bool count_constructions) { + bool count_constructions, + bool create_memento) { // ----------- S t a t e ------------- // -- eax: number of arguments // -- edi: constructor function + // -- ebx: allocation site or undefined // ----------------------------------- // Should never count constructions for api objects. ASSERT(!is_api_function || !count_constructions); + // Should never create mementos for api functions. + ASSERT(!is_api_function || !create_memento); + + // Should never create mementos before slack tracking is finished. + ASSERT(!count_constructions || !create_memento); + // Enter a construct frame. { FrameScope scope(masm, StackFrame::CONSTRUCT); + if (create_memento) { + __ AssertUndefinedOrAllocationSite(ebx); + __ push(ebx); + } + // Store a smi-tagged arguments count on the stack. __ SmiTag(eax); __ push(eax); @@ -202,20 +215,26 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // eax: initial map __ movzx_b(edi, FieldOperand(eax, Map::kInstanceSizeOffset)); __ shl(edi, kPointerSizeLog2); + if (create_memento) { + __ add(edi, Immediate(AllocationMemento::kSize)); + } + __ Allocate(edi, ebx, edi, no_reg, &rt_call, NO_ALLOCATION_FLAGS); + + Factory* factory = masm->isolate()->factory(); + // Allocated the JSObject, now initialize the fields. // eax: initial map // ebx: JSObject - // edi: start of next object + // edi: start of next object (including memento if create_memento) __ mov(Operand(ebx, JSObject::kMapOffset), eax); - Factory* factory = masm->isolate()->factory(); __ mov(ecx, factory->empty_fixed_array()); __ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx); __ mov(Operand(ebx, JSObject::kElementsOffset), ecx); // Set extra fields in the newly allocated object. 
// eax: initial map // ebx: JSObject - // edi: start of next object + // edi: start of next object (including memento if create_memento) __ lea(ecx, Operand(ebx, JSObject::kHeaderSize)); __ mov(edx, factory->undefined_value()); if (count_constructions) { @@ -231,8 +250,23 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, } __ InitializeFieldsWithFiller(ecx, esi, edx); __ mov(edx, factory->one_pointer_filler_map()); + __ InitializeFieldsWithFiller(ecx, edi, edx); + } else if (create_memento) { + __ lea(esi, Operand(edi, -AllocationMemento::kSize)); + __ InitializeFieldsWithFiller(ecx, esi, edx); + + // Fill in memento fields if necessary. + // esi: points to the allocated but uninitialized memento. + Handle allocation_memento_map = factory->allocation_memento_map(); + __ mov(Operand(esi, AllocationMemento::kMapOffset), + allocation_memento_map); + // Get the cell or undefined. + __ mov(edx, Operand(esp, kPointerSize*2)); + __ mov(Operand(esi, AllocationMemento::kAllocationSiteOffset), + edx); + } else { + __ InitializeFieldsWithFiller(ecx, edi, edx); } - __ InitializeFieldsWithFiller(ecx, edi, edx); // Add the object tag to make the JSObject real, so that we can continue // and jump into the continuation code at any time from now on. Any @@ -323,16 +357,48 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // Allocate the new receiver object using the runtime call. __ bind(&rt_call); + int offset = 0; + if (create_memento) { + // Get the cell or allocation site. + __ mov(edi, Operand(esp, kPointerSize * 2)); + __ push(edi); + offset = kPointerSize; + } + // Must restore edi (constructor) before calling runtime. - __ mov(edi, Operand(esp, 0)); + __ mov(edi, Operand(esp, offset)); // edi: function (constructor) __ push(edi); - __ CallRuntime(Runtime::kNewObject, 1); + if (create_memento) { + __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2); + } else { + __ CallRuntime(Runtime::kNewObject, 1); + } __ mov(ebx, eax); // store result in ebx + // If we ended up using the runtime, and we want a memento, then the + // runtime call made it for us, and we shouldn't do create count + // increment. + Label count_incremented; + if (create_memento) { + __ jmp(&count_incremented); + } + // New object allocated. // ebx: newly allocated object __ bind(&allocated); + + if (create_memento) { + __ mov(ecx, Operand(esp, kPointerSize * 2)); + __ cmp(ecx, masm->isolate()->factory()->undefined_value()); + __ j(equal, &count_incremented); + // ecx is an AllocationSite. We are creating a memento from it, so we + // need to increment the memento create count. + __ add(FieldOperand(ecx, AllocationSite::kPretenureCreateCountOffset), + Immediate(Smi::FromInt(1))); + __ bind(&count_incremented); + } + // Retrieve the function from the stack. 
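The increment just above works directly on the tagged word: because smis are just the payload shifted left, SmiTag(a) + SmiTag(b) == SmiTag(a + b), so adding Smi::FromInt(1) to the field needs no untag/retag pair. Sketch:

#include <cassert>
#include <cstdint>

int32_t SmiTag(int32_t value) { return value << 1; }

int main() {
  int32_t field = SmiTag(41);   // pretenure_create_count stored as a smi
  field += SmiTag(1);           // what adding Smi::FromInt(1) does in place
  assert(field == SmiTag(42));  // still a valid smi, payload incremented
  return 0;
}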
__ pop(edi); @@ -415,17 +481,17 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { - Generate_JSConstructStubHelper(masm, false, true); + Generate_JSConstructStubHelper(masm, false, true, false); } void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { - Generate_JSConstructStubHelper(masm, false, false); + Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new); } void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { - Generate_JSConstructStubHelper(masm, true, false); + Generate_JSConstructStubHelper(masm, true, false, false); } diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc index 9ac0551ef8..fa67502ad0 100644 --- a/src/ia32/code-stubs-ia32.cc +++ b/src/ia32/code-stubs-ia32.cc @@ -2278,20 +2278,22 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate))); __ j(equal, &done, Label::kFar); - // If we came here, we need to see if we are the array function. - // If we didn't have a matching function, and we didn't find the megamorph - // sentinel, then we have in the slot either some other function or an - // AllocationSite. Do a map check on the object in ecx. - Handle allocation_site_map = - masm->isolate()->factory()->allocation_site_map(); - __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map)); - __ j(not_equal, &miss); + if (!FLAG_pretenuring_call_new) { + // If we came here, we need to see if we are the array function. + // If we didn't have a matching function, and we didn't find the megamorph + // sentinel, then we have in the slot either some other function or an + // AllocationSite. Do a map check on the object in ecx. + Handle allocation_site_map = + masm->isolate()->factory()->allocation_site_map(); + __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map)); + __ j(not_equal, &miss); - // Make sure the function is the Array() function - __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); - __ cmp(edi, ecx); - __ j(not_equal, &megamorphic); - __ jmp(&done, Label::kFar); + // Make sure the function is the Array() function + __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); + __ cmp(edi, ecx); + __ j(not_equal, &megamorphic); + __ jmp(&done, Label::kFar); + } __ bind(&miss); @@ -2310,35 +2312,39 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { // An uninitialized cache is patched with the function or sentinel to // indicate the ElementsKind if function is the Array constructor. __ bind(&initialize); - // Make sure the function is the Array() function - __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); - __ cmp(edi, ecx); - __ j(not_equal, ¬_array_function); + if (!FLAG_pretenuring_call_new) { + // Make sure the function is the Array() function + __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); + __ cmp(edi, ecx); + __ j(not_equal, ¬_array_function); - // The target function is the Array constructor, - // Create an AllocationSite if we don't already have it, store it in the slot. - { - FrameScope scope(masm, StackFrame::INTERNAL); + // The target function is the Array constructor, + // Create an AllocationSite if we don't already have it, store it in the + // slot. + { + FrameScope scope(masm, StackFrame::INTERNAL); - // Arguments register must be smi-tagged to call out. 
- __ SmiTag(eax); - __ push(eax); - __ push(edi); - __ push(edx); - __ push(ebx); + // Arguments register must be smi-tagged to call out. + __ SmiTag(eax); + __ push(eax); + __ push(edi); + __ push(edx); + __ push(ebx); - CreateAllocationSiteStub create_stub; - __ CallStub(&create_stub); + CreateAllocationSiteStub create_stub; + __ CallStub(&create_stub); - __ pop(ebx); - __ pop(edx); - __ pop(edi); - __ pop(eax); - __ SmiUntag(eax); + __ pop(ebx); + __ pop(edx); + __ pop(edi); + __ pop(eax); + __ SmiUntag(eax); + } + __ jmp(&done); + + __ bind(¬_array_function); } - __ jmp(&done); - __ bind(¬_array_function); __ mov(FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize), edi); @@ -2481,18 +2487,27 @@ void CallConstructStub::Generate(MacroAssembler* masm) { __ j(not_equal, &slow); if (RecordCallTarget()) { - Label feedback_register_initialized; GenerateRecordCallTarget(masm); - // Put the AllocationSite from the feedback vector into ebx, or undefined. - __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size, - FixedArray::kHeaderSize)); - Handle allocation_site_map = - masm->isolate()->factory()->allocation_site_map(); - __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map)); - __ j(equal, &feedback_register_initialized); - __ mov(ebx, masm->isolate()->factory()->undefined_value()); - __ bind(&feedback_register_initialized); + if (FLAG_pretenuring_call_new) { + // Put the AllocationSite from the feedback vector into ebx. + // By adding kPointerSize we encode that we know the AllocationSite + // entry is at the feedback vector slot given by edx + 1. + __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size, + FixedArray::kHeaderSize + kPointerSize)); + } else { + Label feedback_register_initialized; + // Put the AllocationSite from the feedback vector into ebx, or undefined. + __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size, + FixedArray::kHeaderSize)); + Handle allocation_site_map = + masm->isolate()->factory()->allocation_site_map(); + __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map)); + __ j(equal, &feedback_register_initialized); + __ mov(ebx, masm->isolate()->factory()->undefined_value()); + __ bind(&feedback_register_initialized); + } + __ AssertUndefinedOrAllocationSite(ebx); } diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc index 82824dd78a..16c3294914 100644 --- a/src/ia32/full-codegen-ia32.cc +++ b/src/ia32/full-codegen-ia32.cc @@ -2839,6 +2839,13 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { Handle uninitialized = TypeFeedbackInfo::UninitializedSentinel(isolate()); StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); + if (FLAG_pretenuring_call_new) { + StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(), + isolate()->factory()->NewAllocationSite()); + ASSERT(expr->AllocationSiteFeedbackSlot() == + expr->CallNewFeedbackSlot() + 1); + } + __ LoadHeapObject(ebx, FeedbackVector()); __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot()))); diff --git a/src/objects-inl.h b/src/objects-inl.h index f831c11d49..618cf30f76 100644 --- a/src/objects-inl.h +++ b/src/objects-inl.h @@ -1474,7 +1474,8 @@ void AllocationSite::MarkZombie() { // elements kind is the initial elements kind. 
 AllocationSiteMode AllocationSite::GetMode(
     ElementsKind boilerplate_elements_kind) {
-  if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
+  if (FLAG_pretenuring_call_new ||
+      IsFastSmiElementsKind(boilerplate_elements_kind)) {
     return TRACK_ALLOCATION_SITE;
   }
 
@@ -1484,8 +1485,9 @@
 
 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
                                            ElementsKind to) {
-  if (IsFastSmiElementsKind(from) &&
-      IsMoreGeneralElementsKindTransition(from, to)) {
+  if (FLAG_pretenuring_call_new ||
+      (IsFastSmiElementsKind(from) &&
+       IsMoreGeneralElementsKindTransition(from, to))) {
     return TRACK_ALLOCATION_SITE;
   }
 
diff --git a/src/runtime.cc b/src/runtime.cc
index 79d29a02d3..6403858a95 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -8264,12 +8264,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObjectFromBound) {
 }
 
 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObject) {
-  HandleScope scope(isolate);
-  ASSERT(args.length() == 1);
-
-  Handle<Object> constructor = args.at<Object>(0);
-
+static MaybeObject* Runtime_NewObjectHelper(Isolate* isolate,
+                                            Handle<Object> constructor,
+                                            Handle<AllocationSite> site) {
   // If the constructor isn't a proper function we throw a type error.
   if (!constructor->IsJSFunction()) {
     Vector< Handle<Object> > arguments = HandleVector<Object>(&constructor, 1);
@@ -8327,7 +8324,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObject) {
     shared->CompleteInobjectSlackTracking();
   }
 
-  Handle<JSObject> result = isolate->factory()->NewJSObject(function);
+  Handle<JSObject> result;
+  if (site.is_null()) {
+    result = isolate->factory()->NewJSObject(function);
+  } else {
+    result = isolate->factory()->NewJSObjectWithMemento(function, site);
+  }
   RETURN_IF_EMPTY_HANDLE(isolate, result);
 
   isolate->counters()->constructed_objects()->Increment();
@@ -8337,6 +8339,34 @@
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObject) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 1);
+
+  Handle<Object> constructor = args.at<Object>(0);
+  return Runtime_NewObjectHelper(isolate,
+                                 constructor,
+                                 Handle<AllocationSite>::null());
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObjectWithAllocationSite) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 2);
+
+  Handle<Object> constructor = args.at<Object>(1);
+  Handle<Object> feedback = args.at<Object>(0);
+  Handle<AllocationSite> site;
+  if (feedback->IsAllocationSite()) {
+    // The feedback can be an AllocationSite or undefined.
+    site = Handle<AllocationSite>::cast(feedback);
+  }
+  return Runtime_NewObjectHelper(isolate,
+                                 constructor,
+                                 site);
+}
+
+
 RUNTIME_FUNCTION(MaybeObject*, Runtime_FinalizeInstanceSize) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);
diff --git a/src/runtime.h b/src/runtime.h
index 23d6d95fb5..0060639cd4 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -401,6 +401,7 @@ namespace internal {
   F(NewClosure, 3, 1) \
   F(NewClosureFromStubFailure, 1, 1) \
   F(NewObject, 1, 1) \
+  F(NewObjectWithAllocationSite, 2, 1) \
   F(NewObjectFromBound, 1, 1) \
   F(FinalizeInstanceSize, 1, 1) \
   F(Throw, 1, 1) \
diff --git a/src/type-info.cc b/src/type-info.cc
index 35beb1808a..99b1b3d899 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -120,13 +120,17 @@ bool TypeFeedbackOracle::StoreIsKeyedPolymorphic(TypeFeedbackId ast_id) {
 
 bool TypeFeedbackOracle::CallIsMonomorphic(int slot) {
   Handle<Object> value = GetInfo(slot);
-  return value->IsAllocationSite() || value->IsJSFunction();
+  return FLAG_pretenuring_call_new
+      ? value->IsJSFunction()
+      : value->IsAllocationSite() || value->IsJSFunction();
 }
 
 
 bool TypeFeedbackOracle::CallNewIsMonomorphic(int slot) {
   Handle<Object> info = GetInfo(slot);
-  return info->IsAllocationSite() || info->IsJSFunction();
+  return FLAG_pretenuring_call_new
+      ? info->IsJSFunction()
+      : info->IsAllocationSite() || info->IsJSFunction();
 }
 
 
@@ -153,27 +157,29 @@ KeyedAccessStoreMode TypeFeedbackOracle::GetStoreMode(
 
 Handle<JSFunction> TypeFeedbackOracle::GetCallTarget(int slot) {
   Handle<Object> info = GetInfo(slot);
-  if (info->IsAllocationSite()) {
-    return Handle<JSFunction>(isolate()->native_context()->array_function());
-  } else {
+  if (FLAG_pretenuring_call_new || info->IsJSFunction()) {
     return Handle<JSFunction>::cast(info);
   }
+
+  ASSERT(info->IsAllocationSite());
+  return Handle<JSFunction>(isolate()->native_context()->array_function());
 }
 
 
 Handle<JSFunction> TypeFeedbackOracle::GetCallNewTarget(int slot) {
   Handle<Object> info = GetInfo(slot);
-  if (info->IsAllocationSite()) {
-    return Handle<JSFunction>(isolate()->native_context()->array_function());
-  } else {
+  if (FLAG_pretenuring_call_new || info->IsJSFunction()) {
     return Handle<JSFunction>::cast(info);
   }
+
+  ASSERT(info->IsAllocationSite());
+  return Handle<JSFunction>(isolate()->native_context()->array_function());
 }
 
 
 Handle<AllocationSite> TypeFeedbackOracle::GetCallNewAllocationSite(int slot) {
   Handle<Object> info = GetInfo(slot);
-  if (info->IsAllocationSite()) {
+  if (FLAG_pretenuring_call_new || info->IsAllocationSite()) {
     return Handle<AllocationSite>::cast(info);
   }
   return Handle<AllocationSite>::null();
 }
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index 05987d55a3..a816030219 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -124,19 +124,32 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
 
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
-                                           bool count_constructions) {
+                                           bool count_constructions,
+                                           bool create_memento) {
   // ----------- S t a t e -------------
   //  -- rax: number of arguments
   //  -- rdi: constructor function
+  //  -- rbx: allocation site or undefined
   // -----------------------------------
 
   // Should never count constructions for api objects.
   ASSERT(!is_api_function || !count_constructions);
+
+  // Should never create mementos for api functions.
+  ASSERT(!is_api_function || !create_memento);
+
+  // Should never create mementos before slack tracking is finished.
+  ASSERT(!count_constructions || !create_memento);
 
   // Enter a construct frame.
   {
     FrameScope scope(masm, StackFrame::CONSTRUCT);
 
+    if (create_memento) {
+      __ AssertUndefinedOrAllocationSite(rbx);
+      __ Push(rbx);
+    }
+
     // Store a smi-tagged arguments count on the stack.
     __ Integer32ToSmi(rax, rax);
     __ Push(rax);
@@ -202,6 +215,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       // Now allocate the JSObject on the heap.
       __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
       __ shl(rdi, Immediate(kPointerSizeLog2));
+      if (create_memento) {
+        __ addq(rdi, Immediate(AllocationMemento::kSize));
+      }
       // rdi: size of new object
       __ Allocate(rdi,
                   rbx,
                   rdi,
                   no_reg,
                   &rt_call,
                   NO_ALLOCATION_FLAGS);
+      Factory* factory = masm->isolate()->factory();
       // Allocated the JSObject, now initialize the fields.
       // rax: initial map
       // rbx: JSObject (not HeapObject tagged - the actual address).
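Untagged here means rbx holds the raw allocation address; heap references elsewhere carry a low tag bit so the GC can tell pointers from smis, and the tag is added once enough of the object is initialized. A sketch of the convention (kHeapObjectTag as in V8):

#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t kHeapObjectTag = 1;         // low bit set on object pointers
  uintptr_t raw = 0x10000;                    // address returned by Allocate
  uintptr_t tagged = raw + kHeapObjectTag;    // "add the object tag"
  assert((tagged & kHeapObjectTag) != 0);     // recognizably a heap object
  assert((tagged & ~kHeapObjectTag) == raw);  // field accesses subtract it
  return 0;
}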
- // rdi: start of next object + // rdi: start of next object (including memento if create_memento) __ movp(Operand(rbx, JSObject::kMapOffset), rax); __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx); @@ -220,7 +237,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // Set extra fields in the newly allocated object. // rax: initial map // rbx: JSObject - // rdi: start of next object + // rdi: start of next object (including memento if create_memento) __ lea(rcx, Operand(rbx, JSObject::kHeaderSize)); __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); if (count_constructions) { @@ -236,8 +253,23 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, } __ InitializeFieldsWithFiller(rcx, rsi, rdx); __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex); + __ InitializeFieldsWithFiller(rcx, rdi, rdx); + } else if (create_memento) { + __ lea(rsi, Operand(rdi, -AllocationMemento::kSize)); + __ InitializeFieldsWithFiller(rcx, rsi, rdx); + + // Fill in memento fields if necessary. + // rsi: points to the allocated but uninitialized memento. + Handle allocation_memento_map = factory->allocation_memento_map(); + __ Move(Operand(rsi, AllocationMemento::kMapOffset), + allocation_memento_map); + // Get the cell or undefined. + __ movp(rdx, Operand(rsp, kPointerSize*2)); + __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), + rdx); + } else { + __ InitializeFieldsWithFiller(rcx, rdi, rdx); } - __ InitializeFieldsWithFiller(rcx, rdi, rdx); // Add the object tag to make the JSObject real, so that we can continue // and jump into the continuation code at any time from now on. Any @@ -329,15 +361,48 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // Allocate the new receiver object using the runtime call. // rdi: function (constructor) __ bind(&rt_call); + int offset = 0; + if (create_memento) { + // Get the cell or allocation site. + __ movp(rdi, Operand(rsp, kPointerSize*2)); + __ Push(rdi); + offset = kPointerSize; + } + // Must restore rdi (constructor) before calling runtime. - __ movp(rdi, Operand(rsp, 0)); + __ movp(rdi, Operand(rsp, offset)); __ Push(rdi); - __ CallRuntime(Runtime::kNewObject, 1); + if (create_memento) { + __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2); + } else { + __ CallRuntime(Runtime::kNewObject, 1); + } __ movp(rbx, rax); // store result in rbx + // If we ended up using the runtime, and we want a memento, then the + // runtime call made it for us, and we shouldn't do create count + // increment. + Label count_incremented; + if (create_memento) { + __ jmp(&count_incremented); + } + // New object allocated. // rbx: newly allocated object __ bind(&allocated); + + if (create_memento) { + __ movp(rcx, Operand(rsp, kPointerSize*2)); + __ Cmp(rcx, masm->isolate()->factory()->undefined_value()); + __ j(equal, &count_incremented); + // rcx is an AllocationSite. We are creating a memento from it, so we + // need to increment the memento create count. + __ SmiAddConstant( + FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset), + Smi::FromInt(1)); + __ bind(&count_incremented); + } + // Retrieve the function from the stack. 
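SmiAddConstant can likewise poke the counter field in place, except that on x64 the smi payload sits in the upper 32 bits of the word. A sketch of that encoding (assuming V8's 64-bit smi layout):

#include <cassert>
#include <cstdint>

// Assumed 64-bit smi encoding: 32-bit payload in the upper half, low half 0.
int64_t SmiTag64(int32_t value) { return static_cast<int64_t>(value) << 32; }

int main() {
  int64_t count = SmiTag64(7);   // pretenure create count as a smi
  count += SmiTag64(1);          // SmiAddConstant(field, Smi::FromInt(1))
  assert(count == SmiTag64(8));
  return 0;
}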
__ Pop(rdi); @@ -420,17 +485,17 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { - Generate_JSConstructStubHelper(masm, false, true); + Generate_JSConstructStubHelper(masm, false, true, false); } void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { - Generate_JSConstructStubHelper(masm, false, false); + Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new); } void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { - Generate_JSConstructStubHelper(masm, true, false); + Generate_JSConstructStubHelper(masm, true, false, false); } diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc index a481960206..c34d1c84ba 100644 --- a/src/x64/code-stubs-x64.cc +++ b/src/x64/code-stubs-x64.cc @@ -2111,20 +2111,22 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate)); __ j(equal, &done); - // If we came here, we need to see if we are the array function. - // If we didn't have a matching function, and we didn't find the megamorph - // sentinel, then we have in the slot either some other function or an - // AllocationSite. Do a map check on the object in rcx. - Handle allocation_site_map = - masm->isolate()->factory()->allocation_site_map(); - __ Cmp(FieldOperand(rcx, 0), allocation_site_map); - __ j(not_equal, &miss); + if (!FLAG_pretenuring_call_new) { + // If we came here, we need to see if we are the array function. + // If we didn't have a matching function, and we didn't find the megamorph + // sentinel, then we have in the slot either some other function or an + // AllocationSite. Do a map check on the object in rcx. + Handle allocation_site_map = + masm->isolate()->factory()->allocation_site_map(); + __ Cmp(FieldOperand(rcx, 0), allocation_site_map); + __ j(not_equal, &miss); - // Make sure the function is the Array() function - __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); - __ cmpq(rdi, rcx); - __ j(not_equal, &megamorphic); - __ jmp(&done); + // Make sure the function is the Array() function + __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); + __ cmpq(rdi, rcx); + __ j(not_equal, &megamorphic); + __ jmp(&done); + } __ bind(&miss); @@ -2142,36 +2144,38 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { // An uninitialized cache is patched with the function or sentinel to // indicate the ElementsKind if function is the Array constructor. __ bind(&initialize); - // Make sure the function is the Array() function - __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); - __ cmpq(rdi, rcx); - __ j(not_equal, ¬_array_function); - // The target function is the Array constructor, - // Create an AllocationSite if we don't already have it, store it in the slot. - { - FrameScope scope(masm, StackFrame::INTERNAL); + if (!FLAG_pretenuring_call_new) { + // Make sure the function is the Array() function + __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); + __ cmpq(rdi, rcx); + __ j(not_equal, ¬_array_function); - // Arguments register must be smi-tagged to call out. - __ Integer32ToSmi(rax, rax); - __ Push(rax); - __ Push(rdi); - __ Integer32ToSmi(rdx, rdx); - __ Push(rdx); - __ Push(rbx); + { + FrameScope scope(masm, StackFrame::INTERNAL); - CreateAllocationSiteStub create_stub; - __ CallStub(&create_stub); + // Arguments register must be smi-tagged to call out. 
+      __ Integer32ToSmi(rax, rax);
+      __ Push(rax);
+      __ Push(rdi);
+      __ Integer32ToSmi(rdx, rdx);
+      __ Push(rdx);
+      __ Push(rbx);
 
-    __ Pop(rbx);
-    __ Pop(rdx);
-    __ Pop(rdi);
-    __ Pop(rax);
-    __ SmiToInteger32(rax, rax);
+      CreateAllocationSiteStub create_stub;
+      __ CallStub(&create_stub);
+
+      __ Pop(rbx);
+      __ Pop(rdx);
+      __ Pop(rdi);
+      __ Pop(rax);
+      __ SmiToInteger32(rax, rax);
+    }
+    __ jmp(&done_no_smi_convert);
+
+    __ bind(&not_array_function);
   }
-  __ jmp(&done_no_smi_convert);
 
-  __ bind(&not_array_function);
   __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
           rdi);
@@ -2323,16 +2327,26 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   __ j(not_equal, &slow);
 
   if (RecordCallTarget()) {
-    Label feedback_register_initialized;
     GenerateRecordCallTarget(masm);
 
-    // Put the AllocationSite from the feedback vector into rbx, or undefined.
+    __ SmiToInteger32(rdx, rdx);
-    __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
-                              FixedArray::kHeaderSize));
-    __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
-    __ j(equal, &feedback_register_initialized);
-    __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
-    __ bind(&feedback_register_initialized);
+    if (FLAG_pretenuring_call_new) {
+      // Put the AllocationSite from the feedback vector into rbx.
+      // By adding kPointerSize we encode that we know the AllocationSite
+      // entry is at the feedback vector slot given by rdx + 1.
+      __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
+                                FixedArray::kHeaderSize + kPointerSize));
+    } else {
+      Label feedback_register_initialized;
+      // Put the AllocationSite from the feedback vector into rbx, or
+      // undefined.
+      __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
+                                FixedArray::kHeaderSize));
+      __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
+      __ j(equal, &feedback_register_initialized);
+      __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
+      __ bind(&feedback_register_initialized);
+    }
+
+    __ AssertUndefinedOrAllocationSite(rbx);
   }
@@ -4961,9 +4975,8 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
   }
 
   Label no_info;
-  // If the feedback slot is the megamorphic sentinel, or contains anything
-  // other than an AllocationSite, call an array constructor that doesn't use
-  // AllocationSites.
+  // If the feedback vector is the undefined value call an array constructor
+  // that doesn't use AllocationSites.
   __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
   __ j(equal, &no_info);
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 1e7cc6c661..846b7138ce 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -2822,6 +2822,13 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   Handle<Object> uninitialized =
      TypeFeedbackInfo::UninitializedSentinel(isolate());
   StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
+  if (FLAG_pretenuring_call_new) {
+    StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
+                            isolate()->factory()->NewAllocationSite());
+    ASSERT(expr->AllocationSiteFeedbackSlot() ==
+           expr->CallNewFeedbackSlot() + 1);
+  }
+
   __ Move(rbx, FeedbackVector());
   __ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot()));
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index 96af44be15..8f3e4c833a 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -2518,6 +2518,44 @@ TEST(OptimizedPretenuringNestedDoubleLiterals) {
 }
 
 
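The test that follows exercises the whole feedback loop: each constructed object gets a memento pointing at the site, the site counts creations and (during GC) surviving mementos, and once the survival ratio is high enough the site's decision flips to tenured, so optimized code allocates straight into old space. A much-simplified model of that decision; the counters correspond to real AllocationSite fields, but the 85% threshold is an assumption, not V8's constant:

#include <cassert>

bool DecidePretenure(int create_count, int found_count) {
  // Pretenure when most objects allocated from this site survive a scavenge.
  return create_count > 0 && found_count >= create_count * 0.85;
}

int main() {
  assert(!DecidePretenure(100, 10));  // short-lived: keep new-space allocation
  assert(DecidePretenure(100, 95));   // long-lived: tenure future allocations
  return 0;
}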
+// Make sure pretenuring feedback is gathered for constructed objects as well
+// as for literals.
+TEST(OptimizedPretenuringConstructorCalls) {
+  if (!FLAG_allocation_site_pretenuring || !i::FLAG_pretenuring_call_new) {
+    // FLAG_pretenuring_call_new needs to be synced with the snapshot.
+    return;
+  }
+  i::FLAG_allow_natives_syntax = true;
+  i::FLAG_max_new_space_size = 2048;
+  CcTest::InitializeVM();
+  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
+  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
+  v8::HandleScope scope(CcTest::isolate());
+
+  v8::Local<v8::Value> res = CompileRun(
+      "var number_elements = 20000;"
+      "var elements = new Array(number_elements);"
+      "function foo() {"
+      "  this.a = 3;"
+      "  this.b = {};"
+      "}"
+      "function f() {"
+      "  for (var i = 0; i < number_elements; i++) {"
+      "    elements[i] = new foo();"
+      "  }"
+      "  return elements[number_elements - 1];"
+      "};"
+      "f(); f(); f();"
+      "%OptimizeFunctionOnNextCall(f);"
+      "f();");
+
+  Handle<JSObject> o =
+      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
+
+  CHECK(CcTest::heap()->InOldPointerSpace(*o));
+}
+
+
 // Test regular array literals allocation.
 TEST(OptimizedAllocationArrayLiterals) {
   i::FLAG_allow_natives_syntax = true;
@@ -2545,6 +2583,7 @@ TEST(OptimizedAllocationArrayLiterals) {
 }
 
 
+// Test global pretenuring call new.
 TEST(OptimizedPretenuringCallNew) {
   i::FLAG_allow_natives_syntax = true;
   i::FLAG_allocation_site_pretenuring = false;
diff --git a/test/cctest/test-mementos.cc b/test/cctest/test-mementos.cc
index 9662effa50..1dc38f9af5 100644
--- a/test/cctest/test-mementos.cc
+++ b/test/cctest/test-mementos.cc
@@ -77,3 +77,47 @@ TEST(BadMementoAfterTopForceScavenge) {
   // Force GC to test the poisoned memento handling
   CcTest::i_isolate()->heap()->CollectGarbage(i::NEW_SPACE);
 }
+
+
+TEST(PretenuringCallNew) {
+  CcTest::InitializeVM();
+  if (!i::FLAG_allocation_site_pretenuring) return;
+  if (!i::FLAG_pretenuring_call_new) return;
+
+  v8::HandleScope scope(CcTest::isolate());
+  Isolate* isolate = CcTest::i_isolate();
+  Heap* heap = isolate->heap();
+
+  // We need to create several instances to get past the slack-tracking
+  // phase, where mementos aren't emitted.
+  int call_count = 10;
+  CHECK_GE(call_count, SharedFunctionInfo::kGenerousAllocationCount);
+  i::ScopedVector<char> test_buf(1024);
+  const char* program =
+      "function f() {"
+      "  this.a = 3;"
+      "  this.b = {};"
+      "  return this;"
+      "};"
+      "var a;"
+      "for(var i = 0; i < %d; i++) {"
+      "  a = new f();"
+      "}"
+      "a;";
+  i::OS::SNPrintF(test_buf, program, call_count);
+  v8::Local<v8::Value> res = CompileRun(test_buf.start());
+  Handle<JSObject> o =
+      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
+
+  // The object of class f should have a memento secreted behind it.
+  Address memento_address = o->address() + o->map()->instance_size();
+  AllocationMemento* memento =
+      reinterpret_cast<AllocationMemento*>(memento_address + kHeapObjectTag);
+  CHECK_EQ(memento->map(), heap->allocation_memento_map());
+
+  // Furthermore, how many mementos did we create? The count should match
+  // call_count - SharedFunctionInfo::kGenerousAllocationCount.
+  AllocationSite* site = memento->GetAllocationSite();
+  CHECK_EQ(call_count - SharedFunctionInfo::kGenerousAllocationCount,
+           site->pretenure_create_count()->value());
+}
diff --git a/test/mjsunit/allocation-site-info.js b/test/mjsunit/allocation-site-info.js
index cd086d3506..35b60ee266 100644
--- a/test/mjsunit/allocation-site-info.js
+++ b/test/mjsunit/allocation-site-info.js
@@ -232,14 +232,13 @@ if (support_smi_only_arrays) {
   obj = newarraycase_length_smidouble(2);
   assertKind(elements_kind.fast_double, obj);
-  // Try to continue the transition to fast object. This won't work for
-  // constructed arrays because constructor dispatch is done on the
-  // elements kind, and a DOUBLE array constructor won't create an allocation
-  // memento.
+  // Try to continue the transition to fast object.
+  // TODO(mvstanton): re-enable commented out code when
+  // FLAG_pretenuring_call_new is turned on in the build.
   obj = newarraycase_length_smidouble("coates");
   assertKind(elements_kind.fast, obj);
   obj = newarraycase_length_smidouble(2);
-  assertKind(elements_kind.fast_double, obj);
+  // assertKind(elements_kind.fast, obj);
 
 function newarraycase_length_smiobj(value) {
   var a = new Array(3);
diff --git a/test/mjsunit/array-constructor-feedback.js b/test/mjsunit/array-constructor-feedback.js
index 7cd421bd1b..45d5c58c77 100644
--- a/test/mjsunit/array-constructor-feedback.js
+++ b/test/mjsunit/array-constructor-feedback.js
@@ -82,8 +82,9 @@ function assertKind(expected, obj, name_opt) {
 
 if (support_smi_only_arrays) {
 
-  // Test: If a call site goes megamorphic, it loses the ability to
-  // use allocation site feedback.
+  // Test: If a call site goes megamorphic, it retains the ability to
+  // use allocation site feedback (if FLAG_allocation_site_pretenuring
+  // is on).
   (function() {
     function bar(t, len) {
       return new t(len);
@@ -95,10 +96,9 @@
     assertKind(elements_kind.fast_double, b);
     c = bar(Object, 3);
     b = bar(Array, 10);
-    assertKind(elements_kind.fast_smi_only, b);
-    b[0] = 3.5;
-    c = bar(Array, 10);
-    assertKind(elements_kind.fast_smi_only, c);
+    // TODO(mvstanton): re-enable when FLAG_allocation_site_pretenuring
+    // is on in the build.
+    // assertKind(elements_kind.fast_double, b);
   })();
 
 
@@ -123,13 +123,16 @@ if (support_smi_only_arrays) {
     bar0(Array);
     %OptimizeFunctionOnNextCall(bar0);
     b = bar0(Array);
-    // We also lost our ability to record kind feedback, as the site
-    // is megamorphic now.
-    assertKind(elements_kind.fast_smi_only, b);
-    assertOptimized(bar0);
-    b[0] = 3.5;
-    c = bar0(Array);
-    assertKind(elements_kind.fast_smi_only, c);
+    // This only makes sense to test if we allow crankshafting
+    if (4 != %GetOptimizationStatus(bar0)) {
+      // We also lost our ability to record kind feedback, as the site
+      // is megamorphic now.
+      assertKind(elements_kind.fast_smi_only, b);
+      assertOptimized(bar0);
+      b[0] = 3.5;
+      c = bar0(Array);
+      assertKind(elements_kind.fast_smi_only, c);
+    }
   })();
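For reference, TEST(PretenuringCallNew) above finds the memento purely by address arithmetic: it was allocated in the same Allocate() call, directly behind the object, so its raw address is the object's raw address plus the instance size recorded in the map. A self-contained model of that math and of the expected create count (all constants illustrative):

#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t kHeapObjectTag = 1;
  uintptr_t object_raw = 0x4000;  // o->address(), untagged
  uintptr_t instance_size = 48;   // o->map()->instance_size()
  uintptr_t memento_raw = object_raw + instance_size;  // directly behind
  uintptr_t memento_tagged = memento_raw + kHeapObjectTag;
  assert((memento_tagged & kHeapObjectTag) != 0);

  // Slack tracking: the first kGenerousAllocationCount constructions run
  // through the countdown stub, which emits no mementos (value assumed).
  int call_count = 10;
  int generous_allocation_count = 8;
  assert(call_count - generous_allocation_count == 2);  // mementos created
  return 0;
}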