From b134ae74b5fe4750588cef81a06b6fabd2507409 Mon Sep 17 00:00:00 2001 From: mvstanton Date: Thu, 2 Apr 2015 02:39:32 -0700 Subject: [PATCH] v8:3539 - hold constructor feedback in weak cells BUG=v8:3539 R=verwaest@chromium.org LOG=N Review URL: https://codereview.chromium.org/1029093002 Cr-Commit-Position: refs/heads/master@{#27581} --- src/arm/code-stubs-arm.cc | 64 ++++++++++------ src/arm64/code-stubs-arm64.cc | 93 +++++++++++++---------- src/ia32/code-stubs-ia32.cc | 78 ++++++++++--------- src/objects.cc | 2 +- src/type-feedback-vector.cc | 18 ++--- src/type-feedback-vector.h | 22 +++++- src/type-info.cc | 12 +++ src/x64/code-stubs-x64.cc | 87 +++++++++++---------- test/cctest/test-feedback-vector.cc | 10 ++- test/cctest/test-heap.cc | 114 ++++++++++++++++++++++++++++ 10 files changed, 343 insertions(+), 157 deletions(-) diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc index 0eb8295ea1..a111427960 100644 --- a/src/arm/code-stubs-arm.cc +++ b/src/arm/code-stubs-arm.cc @@ -2367,6 +2367,24 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { } +static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) { + // r0 : number of arguments to the construct function + // r2 : Feedback vector + // r3 : slot in feedback vector (Smi) + // r1 : the function to call + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); + + // Arguments register must be smi-tagged to call out. + __ SmiTag(r0); + __ Push(r3, r2, r1, r0); + + __ CallStub(stub); + + __ Pop(r3, r2, r1, r0); + __ SmiUntag(r0); +} + + static void GenerateRecordCallTarget(MacroAssembler* masm) { // Cache the called function in a feedback vector slot. Cache states // are uninitialized, monomorphic (indicated by a JSFunction), and @@ -2388,16 +2406,30 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { // A monomorphic cache hit or an already megamorphic state: invoke the // function without changing the state. - __ cmp(r4, r1); + Label check_allocation_site; + Register feedback_map = r5; + Register weak_value = r8; + __ ldr(weak_value, FieldMemOperand(r4, WeakCell::kValueOffset)); + __ cmp(r1, weak_value); __ b(eq, &done); + __ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex); + __ b(eq, &done); + __ ldr(feedback_map, FieldMemOperand(r4, 0)); + __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex); + __ b(ne, FLAG_pretenuring_call_new ? &miss : &check_allocation_site); + + // If r1 is not equal to the weak cell value, and the weak cell value is + // cleared, we have a new chance to become monomorphic. + __ JumpIfSmi(weak_value, &initialize); + __ jmp(&megamorphic); if (!FLAG_pretenuring_call_new) { + __ bind(&check_allocation_site); // If we came here, we need to see if we are the array function. // If we didn't have a matching function, and we didn't find the megamorph // sentinel, then we have in the slot either some other function or an // AllocationSite. Do a map check on the object in ecx. - __ ldr(r5, FieldMemOperand(r4, 0)); - __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); + __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex); __ b(ne, &miss); // Make sure the function is the Array() function @@ -2433,33 +2465,15 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { // The target function is the Array constructor, // Create an AllocationSite if we don't already have it, store it in the // slot. - { - FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); - - // Arguments register must be smi-tagged to call out. 
- __ SmiTag(r0); - __ Push(r3, r2, r1, r0); - - CreateAllocationSiteStub create_stub(masm->isolate()); - __ CallStub(&create_stub); - - __ Pop(r3, r2, r1, r0); - __ SmiUntag(r0); - } + CreateAllocationSiteStub create_stub(masm->isolate()); + CallStubInRecordCallTarget(masm, &create_stub); __ b(&done); __ bind(¬_array_function); } - __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); - __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); - __ str(r1, MemOperand(r4, 0)); - - __ Push(r4, r2, r1); - __ RecordWrite(r2, r4, r1, kLRHasNotBeenSaved, kDontSaveFPRegs, - EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); - __ Pop(r4, r2, r1); - + CreateWeakCellStub create_stub(masm->isolate()); + CallStubInRecordCallTarget(masm, &create_stub); __ bind(&done); } diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc index abc047a127..5e2e79ce47 100644 --- a/src/arm64/code-stubs-arm64.cc +++ b/src/arm64/code-stubs-arm64.cc @@ -2730,16 +2730,32 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { } -static void GenerateRecordCallTarget(MacroAssembler* masm, - Register argc, +static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub, + Register argc, Register function, + Register feedback_vector, + Register index) { + FrameScope scope(masm, StackFrame::INTERNAL); + + // Arguments register must be smi-tagged to call out. + __ SmiTag(argc); + __ Push(argc, function, feedback_vector, index); + + DCHECK(feedback_vector.Is(x2) && index.Is(x3)); + __ CallStub(stub); + + __ Pop(index, feedback_vector, function, argc); + __ SmiUntag(argc); +} + + +static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc, Register function, - Register feedback_vector, - Register index, - Register scratch1, - Register scratch2) { + Register feedback_vector, Register index, + Register scratch1, Register scratch2, + Register scratch3) { ASM_LOCATION("GenerateRecordCallTarget"); - DCHECK(!AreAliased(scratch1, scratch2, - argc, function, feedback_vector, index)); + DCHECK(!AreAliased(scratch1, scratch2, scratch3, argc, function, + feedback_vector, index)); // Cache the called function in a feedback vector slot. Cache states are // uninitialized, monomorphic (indicated by a JSFunction), and megamorphic. // argc : number of arguments to the construct function @@ -2754,22 +2770,37 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, masm->isolate()->heap()->uninitialized_symbol()); // Load the cache state. - __ Add(scratch1, feedback_vector, + Register feedback = scratch1; + Register feedback_map = scratch2; + Register feedback_value = scratch3; + __ Add(feedback, feedback_vector, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); - __ Ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize)); + __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); // A monomorphic cache hit or an already megamorphic state: invoke the // function without changing the state. - __ Cmp(scratch1, function); + Label check_allocation_site; + __ Ldr(feedback_value, FieldMemOperand(feedback, WeakCell::kValueOffset)); + __ Cmp(function, feedback_value); __ B(eq, &done); + __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); + __ B(eq, &done); + __ Ldr(feedback_map, FieldMemOperand(feedback, 0)); + __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex); + __ B(ne, FLAG_pretenuring_call_new ? 
&miss : &check_allocation_site); + + // If function is not equal to the weak cell value, and the weak cell value is + // cleared, we have a new chance to become monomorphic. + __ JumpIfSmi(feedback_value, &initialize); + __ B(&megamorphic); if (!FLAG_pretenuring_call_new) { + __ bind(&check_allocation_site); // If we came here, we need to see if we are the array function. // If we didn't have a matching function, and we didn't find the megamorph // sentinel, then we have in the slot either some other function or an // AllocationSite. Do a map check on the object in scratch1 register. - __ Ldr(scratch2, FieldMemOperand(scratch1, AllocationSite::kMapOffset)); - __ JumpIfNotRoot(scratch2, Heap::kAllocationSiteMapRootIndex, &miss); + __ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss); // Make sure the function is the Array() function __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); @@ -2805,39 +2836,17 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, // The target function is the Array constructor, // Create an AllocationSite if we don't already have it, store it in the // slot. - { - FrameScope scope(masm, StackFrame::INTERNAL); - CreateAllocationSiteStub create_stub(masm->isolate()); - - // Arguments register must be smi-tagged to call out. - __ SmiTag(argc); - __ Push(argc, function, feedback_vector, index); - - // CreateAllocationSiteStub expect the feedback vector in x2 and the slot - // index in x3. - DCHECK(feedback_vector.Is(x2) && index.Is(x3)); - __ CallStub(&create_stub); - - __ Pop(index, feedback_vector, function, argc); - __ SmiUntag(argc); - } + CreateAllocationSiteStub create_stub(masm->isolate()); + CallStubInRecordCallTarget(masm, &create_stub, argc, function, + feedback_vector, index); __ B(&done); __ Bind(¬_array_function); } - // An uninitialized cache is patched with the function. - - __ Add(scratch1, feedback_vector, - Operand::UntagSmiAndScale(index, kPointerSizeLog2)); - __ Add(scratch1, scratch1, FixedArray::kHeaderSize - kHeapObjectTag); - __ Str(function, MemOperand(scratch1, 0)); - - __ Push(function); - __ RecordWrite(feedback_vector, scratch1, function, kLRHasNotBeenSaved, - kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); - __ Pop(function); - + CreateWeakCellStub create_stub(masm->isolate()); + CallStubInRecordCallTarget(masm, &create_stub, argc, function, + feedback_vector, index); __ Bind(&done); } @@ -2976,7 +2985,7 @@ void CallConstructStub::Generate(MacroAssembler* masm) { &slow); if (RecordCallTarget()) { - GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5); + GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11); __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); if (FLAG_pretenuring_call_new) { diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc index 23dca4ec01..c640a29c10 100644 --- a/src/ia32/code-stubs-ia32.cc +++ b/src/ia32/code-stubs-ia32.cc @@ -1913,6 +1913,30 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) { } +static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) { + // eax : number of arguments to the construct function + // ebx : Feedback vector + // edx : slot in feedback vector (Smi) + // edi : the function to call + FrameScope scope(masm, StackFrame::INTERNAL); + + // Arguments register must be smi-tagged to call out. 
+ __ SmiTag(eax); + __ push(eax); + __ push(edi); + __ push(edx); + __ push(ebx); + + __ CallStub(stub); + + __ pop(ebx); + __ pop(edx); + __ pop(edi); + __ pop(eax); + __ SmiUntag(eax); +} + + static void GenerateRecordCallTarget(MacroAssembler* masm) { // Cache the called function in a feedback vector slot. Cache states // are uninitialized, monomorphic (indicated by a JSFunction), and @@ -1930,18 +1954,26 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { // A monomorphic cache hit or an already megamorphic state: invoke the // function without changing the state. - __ cmp(ecx, edi); + Label check_allocation_site; + __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset)); __ j(equal, &done, Label::kFar); - __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate))); + __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex); __ j(equal, &done, Label::kFar); + __ CompareRoot(FieldOperand(ecx, 0), Heap::kWeakCellMapRootIndex); + __ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site); + + // If edi is not equal to the weak cell value, and the weak cell value is + // cleared, we have a new chance to become monomorphic. + __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize); + __ jmp(&megamorphic); if (!FLAG_pretenuring_call_new) { + __ bind(&check_allocation_site); // If we came here, we need to see if we are the array function. // If we didn't have a matching function, and we didn't find the megamorph // sentinel, then we have in the slot either some other function or an // AllocationSite. Do a map check on the object in ecx. - Handle allocation_site_map = isolate->factory()->allocation_site_map(); - __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map)); + __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex); __ j(not_equal, &miss); // Make sure the function is the Array() function @@ -1955,7 +1987,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { // A monomorphic miss (i.e, here the cache is not uninitialized) goes // megamorphic. - __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate))); + __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex); __ j(equal, &initialize); // MegamorphicSentinel is an immortal immovable object (undefined) so no // write-barrier is needed. @@ -1977,43 +2009,15 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { // The target function is the Array constructor, // Create an AllocationSite if we don't already have it, store it in the // slot. - { - FrameScope scope(masm, StackFrame::INTERNAL); - - // Arguments register must be smi-tagged to call out. 
- __ SmiTag(eax); - __ push(eax); - __ push(edi); - __ push(edx); - __ push(ebx); - - CreateAllocationSiteStub create_stub(isolate); - __ CallStub(&create_stub); - - __ pop(ebx); - __ pop(edx); - __ pop(edi); - __ pop(eax); - __ SmiUntag(eax); - } + CreateAllocationSiteStub create_stub(isolate); + CallStubInRecordCallTarget(masm, &create_stub); __ jmp(&done); __ bind(¬_array_function); } - __ mov(FieldOperand(ebx, edx, times_half_pointer_size, - FixedArray::kHeaderSize), - edi); - // We won't need edx or ebx anymore, just save edi - __ push(edi); - __ push(ebx); - __ push(edx); - __ RecordWriteArray(ebx, edi, edx, kDontSaveFPRegs, - EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); - __ pop(edx); - __ pop(ebx); - __ pop(edi); - + CreateWeakCellStub create_stub(isolate); + CallStubInRecordCallTarget(masm, &create_stub); __ bind(&done); } diff --git a/src/objects.cc b/src/objects.cc index 371adbe95d..f3fad4d0c5 100644 --- a/src/objects.cc +++ b/src/objects.cc @@ -11124,7 +11124,7 @@ void SharedFunctionInfo::ClearTypeFeedbackInfo() { void SharedFunctionInfo::ClearTypeFeedbackInfoAtGCTime() { - feedback_vector()->ClearSlots(this); + feedback_vector()->ClearSlotsAtGCTime(this); feedback_vector()->ClearICSlotsAtGCTime(this); } diff --git a/src/type-feedback-vector.cc b/src/type-feedback-vector.cc index 6653bea0ee..1455063349 100644 --- a/src/type-feedback-vector.cc +++ b/src/type-feedback-vector.cc @@ -139,18 +139,21 @@ Handle TypeFeedbackVector::Copy( // This logic is copied from // StaticMarkingVisitor::VisitCodeTarget. -static bool ClearLogic(Heap* heap, int ic_age) { +static bool ClearLogic(Heap* heap) { return FLAG_cleanup_code_caches_at_gc && heap->isolate()->serializer_enabled(); } -void TypeFeedbackVector::ClearSlots(SharedFunctionInfo* shared) { +void TypeFeedbackVector::ClearSlotsImpl(SharedFunctionInfo* shared, + bool force_clear) { int slots = Slots(); - Isolate* isolate = GetIsolate(); - Object* uninitialized_sentinel = - TypeFeedbackVector::RawUninitializedSentinel(isolate->heap()); + Heap* heap = GetIsolate()->heap(); + if (!force_clear && !ClearLogic(heap)) return; + + Object* uninitialized_sentinel = + TypeFeedbackVector::RawUninitializedSentinel(heap); for (int i = 0; i < slots; i++) { FeedbackVectorSlot slot(i); Object* obj = Get(slot); @@ -172,10 +175,7 @@ void TypeFeedbackVector::ClearICSlotsImpl(SharedFunctionInfo* shared, bool force_clear) { Heap* heap = GetIsolate()->heap(); - // I'm not sure yet if this ic age is the correct one. - int ic_age = shared->ic_age(); - - if (!force_clear && !ClearLogic(heap, ic_age)) return; + if (!force_clear && !ClearLogic(heap)) return; int slots = ICSlots(); Code* host = shared->code(); diff --git a/src/type-feedback-vector.h b/src/type-feedback-vector.h index c26a2d3e0d..9ba5ef4d4b 100644 --- a/src/type-feedback-vector.h +++ b/src/type-feedback-vector.h @@ -195,7 +195,11 @@ class TypeFeedbackVector : public FixedArray { Handle vector); // Clears the vector slots and the vector ic slots. 
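// Illustrative sketch, not part of the original patch: the header change just
// below funnels two clearing entry points into one private implementation.
// ClearSlots() always wipes the slots, while ClearSlotsAtGCTime() only does so
// when the GC-time heuristic (ClearLogic) permits it. The toy type and main()
// here are hypothetical stand-ins for the real TypeFeedbackVector.
#include <cassert>

struct ToyFeedbackVector {
  bool gc_heuristic_allows_clearing = false;  // stands in for ClearLogic()
  int live_slots = 3;

  void ClearSlots() { ClearSlotsImpl(true); }           // forced clear
  void ClearSlotsAtGCTime() { ClearSlotsImpl(false); }  // heuristic clear

 private:
  void ClearSlotsImpl(bool force_clear) {
    if (!force_clear && !gc_heuristic_allows_clearing) return;
    live_slots = 0;  // reset every slot to the uninitialized sentinel
  }
};

int main() {
  ToyFeedbackVector v;
  v.ClearSlotsAtGCTime();  // heuristic says no: slots survive
  assert(v.live_slots == 3);
  v.ClearSlots();          // forced: slots are cleared
  assert(v.live_slots == 0);
  return 0;
}
// The hunk below wires the real ClearSlots()/ClearSlotsAtGCTime() entry points
// to ClearSlotsImpl(shared, force_clear) in the same way.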
-  void ClearSlots(SharedFunctionInfo* shared);
+  void ClearSlots(SharedFunctionInfo* shared) { ClearSlotsImpl(shared, true); }
+  void ClearSlotsAtGCTime(SharedFunctionInfo* shared) {
+    ClearSlotsImpl(shared, false);
+  }
+
   void ClearICSlots(SharedFunctionInfo* shared) {
     ClearICSlotsImpl(shared, true);
   }
@@ -237,12 +241,28 @@ class TypeFeedbackVector : public FixedArray {
   typedef BitSetComputer VectorICComputer;
 
+  void ClearSlotsImpl(SharedFunctionInfo* shared, bool force_clear);
   void ClearICSlotsImpl(SharedFunctionInfo* shared, bool force_clear);
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(TypeFeedbackVector);
 };
 
 
+// The following asserts protect an optimization in type feedback vector
+// code that looks into the contents of a slot assuming to find a String,
+// a Symbol, an AllocationSite, a WeakCell, or a FixedArray.
+STATIC_ASSERT(WeakCell::kSize >= 2 * kPointerSize);
+STATIC_ASSERT(WeakCell::kValueOffset == AllocationSite::kTransitionInfoOffset);
+STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
+STATIC_ASSERT(WeakCell::kValueOffset == Name::kHashFieldSlot);
+// Verify that an empty hash field looks like a tagged object, but can't
+// possibly be confused with a pointer.
+STATIC_ASSERT((Name::kEmptyHashField & kHeapObjectTag) == kHeapObjectTag);
+STATIC_ASSERT(Name::kEmptyHashField == 0x3);
+// Verify that a set hash field will not look like a tagged object.
+STATIC_ASSERT(Name::kHashNotComputedMask == kHeapObjectTag);
+
+
 // A FeedbackNexus is the combination of a TypeFeedbackVector and a slot.
 // Derived classes customize the update and retrieval of feedback.
 class FeedbackNexus {
diff --git a/src/type-info.cc b/src/type-info.cc
index 087e1db148..040e25ecf4 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -51,7 +51,19 @@ Handle<Object> TypeFeedbackOracle::GetInfo(TypeFeedbackId ast_id) {
 
 Handle<Object> TypeFeedbackOracle::GetInfo(FeedbackVectorSlot slot) {
   DCHECK(slot.ToInt() >= 0 && slot.ToInt() < feedback_vector_->length());
+  Handle<Object> undefined =
+      Handle<Object>::cast(isolate()->factory()->undefined_value());
   Object* obj = feedback_vector_->Get(slot);
+
+  // Slots do not embed direct pointers to functions. Instead a WeakCell is
+  // always used.
+  DCHECK(!obj->IsJSFunction());
+  if (obj->IsWeakCell()) {
+    WeakCell* cell = WeakCell::cast(obj);
+    if (cell->cleared()) return undefined;
+    obj = cell->value();
+  }
+
   return Handle<Object>(obj, isolate());
 }
 
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 6dc9204467..49904297fe 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -1778,6 +1778,31 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
 }
 
 
+static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
+  // rax : number of arguments to the construct function
+  // rbx : feedback vector
+  // rdx : slot in feedback vector (Smi)
+  // rdi : the function to call
+  FrameScope scope(masm, StackFrame::INTERNAL);
+
+  // Arguments register must be smi-tagged to call out.
+  __ Integer32ToSmi(rax, rax);
+  __ Push(rax);
+  __ Push(rdi);
+  __ Integer32ToSmi(rdx, rdx);
+  __ Push(rdx);
+  __ Push(rbx);
+
+  __ CallStub(stub);
+
+  __ Pop(rbx);
+  __ Pop(rdx);
+  __ Pop(rdi);
+  __ Pop(rax);
+  __ SmiToInteger32(rax, rax);
+}
+
+
 static void GenerateRecordCallTarget(MacroAssembler* masm) {
   // Cache the called function in a feedback vector slot.  Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
@@ -1797,19 +1822,28 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 
   // A monomorphic cache hit or an already megamorphic state: invoke the
   // function without changing the state.
-  __ cmpp(rcx, rdi);
-  __ j(equal, &done);
-  __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
-  __ j(equal, &done);
+  Label check_allocation_site;
+  __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset));
+  __ j(equal, &done, Label::kFar);
+  __ CompareRoot(rcx, Heap::kmegamorphic_symbolRootIndex);
+  __ j(equal, &done, Label::kFar);
+  __ CompareRoot(FieldOperand(rcx, 0), Heap::kWeakCellMapRootIndex);
+  __ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+
+  // If rdi is not equal to the weak cell value, and the weak cell value is
+  // cleared, we have a new chance to become monomorphic. Otherwise, we
+  // need to go megamorphic.
+  __ CheckSmi(FieldOperand(rcx, WeakCell::kValueOffset));
+  __ j(equal, &initialize);
+  __ jmp(&megamorphic);
 
   if (!FLAG_pretenuring_call_new) {
+    __ bind(&check_allocation_site);
     // If we came here, we need to see if we are the array function.
     // If we didn't have a matching function, and we didn't find the megamorph
     // sentinel, then we have in the slot either some other function or an
     // AllocationSite. Do a map check on the object in rcx.
-    Handle<Map> allocation_site_map =
-        masm->isolate()->factory()->allocation_site_map();
-    __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
+    __ CompareRoot(FieldOperand(rcx, 0), Heap::kAllocationSiteMapRootIndex);
     __ j(not_equal, &miss);
 
     // Make sure the function is the Array() function
@@ -1823,7 +1857,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 
   // A monomorphic miss (i.e, here the cache is not uninitialized) goes
   // megamorphic.
-  __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
+  __ CompareRoot(rcx, Heap::kuninitialized_symbolRootIndex);
   __ j(equal, &initialize);
   // MegamorphicSentinel is an immortal immovable object (undefined) so no
   // write-barrier is needed.
@@ -1842,43 +1876,16 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
     __ cmpp(rdi, rcx);
     __ j(not_equal, &not_array_function);
 
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-
-      // Arguments register must be smi-tagged to call out.
-      __ Integer32ToSmi(rax, rax);
-      __ Push(rax);
-      __ Push(rdi);
-      __ Integer32ToSmi(rdx, rdx);
-      __ Push(rdx);
-      __ Push(rbx);
-
-      CreateAllocationSiteStub create_stub(isolate);
-      __ CallStub(&create_stub);
-
-      __ Pop(rbx);
-      __ Pop(rdx);
-      __ Pop(rdi);
-      __ Pop(rax);
-      __ SmiToInteger32(rax, rax);
-    }
+    CreateAllocationSiteStub create_stub(isolate);
+    CallStubInRecordCallTarget(masm, &create_stub);
 
     __ jmp(&done_no_smi_convert);
 
     __ bind(&not_array_function);
   }
 
-  __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
-          rdi);
-
-  // We won't need rdx or rbx anymore, just save rdi
-  __ Push(rdi);
-  __ Push(rbx);
-  __ Push(rdx);
-  __ RecordWriteArray(rbx, rdi, rdx, kDontSaveFPRegs,
-                      EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
-  __ Pop(rdx);
-  __ Pop(rbx);
-  __ Pop(rdi);
+  CreateWeakCellStub create_stub(isolate);
+  CallStubInRecordCallTarget(masm, &create_stub);
+  __ jmp(&done_no_smi_convert);
 
   __ bind(&done);
   __ Integer32ToSmi(rdx, rdx);
diff --git a/test/cctest/test-feedback-vector.cc b/test/cctest/test-feedback-vector.cc
index f53dfde10e..464a0a724a 100644
--- a/test/cctest/test-feedback-vector.cc
+++ b/test/cctest/test-feedback-vector.cc
@@ -140,13 +140,19 @@ TEST(VectorSlotClearing) {
 
   // Fill with information
   vector->Set(FeedbackVectorSlot(0), Smi::FromInt(1));
-  vector->Set(FeedbackVectorSlot(1), *factory->fixed_array_map());
+  Handle<WeakCell> cell = factory->NewWeakCell(factory->fixed_array_map());
+  vector->Set(FeedbackVectorSlot(1), *cell);
   Handle<AllocationSite> site = factory->NewAllocationSite();
   vector->Set(FeedbackVectorSlot(2), *site);
 
+  // GC time clearing leaves slots alone.
+  vector->ClearSlotsAtGCTime(NULL);
+  Object* obj = vector->Get(FeedbackVectorSlot(1));
+  CHECK(obj->IsWeakCell() && !WeakCell::cast(obj)->cleared());
+
   vector->ClearSlots(NULL);
-  // The feedback vector slots are cleared. AllocationSites are granted
+  // The feedback vector slots are cleared. AllocationSites are still granted
   // an exemption from clearing, as are smis.
   CHECK_EQ(Smi::FromInt(1), vector->Get(FeedbackVectorSlot(0)));
   CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index 5895e77f03..d39e850782 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -3387,6 +3387,66 @@ static void CheckVectorICCleared(Handle<JSFunction> f, int ic_slot_index) {
 }
 
 
+TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
+  if (i::FLAG_always_opt) return;
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+
+  // Prepare function f that contains a monomorphic IC for object
+  // originating from the same native context.
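// Illustrative sketch, not part of the original patch: after this change a
// constructor's feedback slot holds one of four things: the uninitialized
// sentinel, a WeakCell wrapping the last constructor seen, an AllocationSite
// (for the Array constructor), or the megamorphic sentinel. A hypothetical,
// self-contained model of the recording logic the stubs above implement:
#include <cstdio>

enum class Slot {
  kUninitialized,
  kWeakCellToFn,
  kWeakCellCleared,
  kAllocationSite,
  kMegamorphic
};

// Returns the new slot state; `same_fn` says whether the incoming constructor
// matches the one cached in the weak cell.
Slot RecordCallTarget(Slot slot, bool same_fn) {
  switch (slot) {
    case Slot::kUninitialized:
      return Slot::kWeakCellToFn;       // go monomorphic
    case Slot::kWeakCellToFn:
      return same_fn ? slot : Slot::kMegamorphic;
    case Slot::kWeakCellCleared:
      return Slot::kWeakCellToFn;       // cleared cell: a second chance
    case Slot::kAllocationSite:
    case Slot::kMegamorphic:
      return slot;                      // (Array-specific path elided here)
  }
  return slot;
}

int main() {
  Slot s = Slot::kUninitialized;
  s = RecordCallTarget(s, true);   // first call: monomorphic
  s = RecordCallTarget(s, false);  // different constructor: megamorphic
  std::printf("%d\n", static_cast<int>(s));
  return 0;
}
// The test below compiles f(fun) and calls it twice so the slot for `new o()`
// ends up in the WeakCell (monomorphic) state, then checks it survives marking.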
+  CompileRun(
+      "function fun() { this.x = 1; };"
+      "function f(o) { return new o(); } f(fun); f(fun);");
+  Handle<JSFunction> f = v8::Utils::OpenHandle(
+      *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
+
+
+  Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
+  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
+
+  SimulateIncrementalMarking(CcTest::heap());
+  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
+
+  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
+}
+
+
+TEST(IncrementalMarkingClearsMonomorphicConstructor) {
+  if (i::FLAG_always_opt) return;
+  CcTest::InitializeVM();
+  Isolate* isolate = CcTest::i_isolate();
+  v8::HandleScope scope(CcTest::isolate());
+  v8::Local<v8::Value> fun1;
+
+  {
+    LocalContext env;
+    CompileRun("function fun() { this.x = 1; };");
+    fun1 = env->Global()->Get(v8_str("fun"));
+  }
+
+  // Prepare function f that contains a monomorphic constructor for object
+  // originating from a different native context.
+  CcTest::global()->Set(v8_str("fun1"), fun1);
+  CompileRun(
+      "function fun() { this.x = 1; };"
+      "function f(o) { return new o(); } f(fun1); f(fun1);");
+  Handle<JSFunction> f = v8::Utils::OpenHandle(
+      *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
+
+
+  Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
+  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
+
+  // Fire context dispose notification.
+  CcTest::isolate()->ContextDisposedNotification();
+  SimulateIncrementalMarking(CcTest::heap());
+  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
+
+  CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
+           vector->Get(FeedbackVectorSlot(0)));
+}
+
+
 TEST(IncrementalMarkingPreservesMonomorphicIC) {
   if (i::FLAG_always_opt) return;
   CcTest::InitializeVM();
@@ -4415,6 +4475,60 @@ static void ClearWeakIC(const v8::WeakCallbackData<v8::Object, void>& data) {
 }
 
 
+TEST(WeakFunctionInConstructor) {
+  if (i::FLAG_always_opt) return;
+  i::FLAG_stress_compaction = false;
+  CcTest::InitializeVM();
+  v8::Isolate* isolate = CcTest::isolate();
+  v8::HandleScope scope(isolate);
+  CompileRun(
+      "function createObj(obj) {"
+      "  return new obj();"
+      "}");
+  Handle<JSFunction> createObj =
+      v8::Utils::OpenHandle(*v8::Handle<v8::Function>::Cast(
+          CcTest::global()->Get(v8_str("createObj"))));
+
+  v8::Persistent<v8::Object> garbage;
+  {
+    v8::HandleScope scope(isolate);
+    const char* source =
+        " (function() {"
+        "   function hat() { this.x = 5; }"
+        "   createObj(hat);"
+        "   createObj(hat);"
+        "   return hat;"
+        " })();";
+    garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
+  }
+  weak_ic_cleared = false;
+  garbage.SetWeak(static_cast<void*>(&garbage), &ClearWeakIC);
+  Heap* heap = CcTest::i_isolate()->heap();
+  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+  CHECK(weak_ic_cleared);
+
+  // We've determined the constructor in createObj has had its weak cell
+  // cleared. Now, verify that one additional call with a new function
+  // allows monomorphicity.
+  Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
+      createObj->shared()->feedback_vector(), CcTest::i_isolate());
+  for (int i = 0; i < 20; i++) {
+    Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
+    CHECK(slot_value->IsWeakCell());
+    if (WeakCell::cast(slot_value)->cleared()) break;
+    heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+  }
+
+  Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
+  CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
+  CompileRun(
+      "function coat() { this.x = 6; }"
+      "createObj(coat);");
+  slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
+  CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
+}
+
+
 // Checks that the value returned by execution of the source is weak.
 void CheckWeakness(const char* source) {
   i::FLAG_stress_compaction = false;