diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 4f38985992..58d5ae5f31 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -766,6 +766,11 @@ DEFINE_BOOL(stress_compaction, false,
             "stress the GC compactor to flush out bugs (implies "
             "--force_marking_deque_overflows)")
+DEFINE_BOOL(manual_evacuation_candidates_selection, false,
+            "Test mode only flag. It allows a unit test to select evacuation "
+            "candidate pages (requires --stress_compaction).")
+
+
 
 //
 // Dev shell flags
 //
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index f75f323c45..b4fafec109 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -287,6 +287,60 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
 }
 
 
+void MarkCompactCollector::ClearInvalidSlotsBufferEntries(PagedSpace* space) {
+  PageIterator it(space);
+  while (it.has_next()) {
+    Page* p = it.next();
+    SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer());
+  }
+}
+
+
+void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {
+  heap_->store_buffer()->ClearInvalidStoreBufferEntries();
+
+  ClearInvalidSlotsBufferEntries(heap_->old_pointer_space());
+  ClearInvalidSlotsBufferEntries(heap_->old_data_space());
+  ClearInvalidSlotsBufferEntries(heap_->code_space());
+  ClearInvalidSlotsBufferEntries(heap_->cell_space());
+  ClearInvalidSlotsBufferEntries(heap_->map_space());
+
+  LargeObjectIterator it(heap_->lo_space());
+  for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
+    MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
+    SlotsBuffer::RemoveInvalidSlots(heap_, chunk->slots_buffer());
+  }
+}
+
+
+#ifdef VERIFY_HEAP
+static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) {
+  PageIterator it(space);
+  while (it.has_next()) {
+    Page* p = it.next();
+    SlotsBuffer::VerifySlots(heap, p->slots_buffer());
+  }
+}
+
+
+static void VerifyValidStoreAndSlotsBufferEntries(Heap* heap) {
+  heap->store_buffer()->VerifyValidStoreBufferEntries();
+
+  VerifyValidSlotsBufferEntries(heap, heap->old_pointer_space());
+  VerifyValidSlotsBufferEntries(heap, heap->old_data_space());
+  VerifyValidSlotsBufferEntries(heap, heap->code_space());
+  VerifyValidSlotsBufferEntries(heap, heap->cell_space());
+  VerifyValidSlotsBufferEntries(heap, heap->map_space());
+
+  LargeObjectIterator it(heap->lo_space());
+  for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
+    MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
+    SlotsBuffer::VerifySlots(heap, chunk->slots_buffer());
+  }
+}
+#endif
+
+
 void MarkCompactCollector::CollectGarbage() {
   // Make sure that Prepare() has been called. The individual steps below will
   // update the state as they proceed.
@@ -312,11 +366,11 @@ void MarkCompactCollector::CollectGarbage() {
   }
 #endif
 
-  heap_->store_buffer()->ClearInvalidStoreBufferEntries();
+  ClearInvalidStoreAndSlotsBufferEntries();
 
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
-    heap_->store_buffer()->VerifyValidStoreBufferEntries();
+    VerifyValidStoreAndSlotsBufferEntries(heap_);
   }
 #endif
 
@@ -723,6 +777,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
 
   int count = 0;
   int fragmentation = 0;
+  int page_number = 0;
   Candidate* least = NULL;
 
   PageIterator it(space);
@@ -737,9 +792,16 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
     CHECK(p->slots_buffer() == NULL);
 
     if (FLAG_stress_compaction) {
-      unsigned int counter = space->heap()->ms_count();
-      uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits;
-      if ((counter & 1) == (page_number & 1)) fragmentation = 1;
+      if (FLAG_manual_evacuation_candidates_selection) {
+        if (p->IsFlagSet(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING)) {
+          p->ClearFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+          fragmentation = 1;
+        }
+      } else {
+        unsigned int counter = space->heap()->ms_count();
+        if ((counter & 1) == (page_number & 1)) fragmentation = 1;
+        page_number++;
+      }
     } else if (mode == REDUCE_MEMORY_FOOTPRINT && !FLAG_always_compact) {
       // Don't try to release too many pages.
       if (estimated_release >= over_reserved) {
@@ -3035,10 +3097,14 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
 }
 
 
-bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot) {
+bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot,
+                                               HeapObject** out_object) {
   // This function does not support large objects right now.
   Space* owner = p->owner();
-  if (owner == heap_->lo_space() || owner == NULL) return true;
+  if (owner == heap_->lo_space() || owner == NULL) {
+    *out_object = NULL;
+    return true;
+  }
 
   uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot);
   unsigned int start_index = mark_bit_index >> Bitmap::kBitsPerCellLog2;
@@ -3092,6 +3158,7 @@ bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot) {
       (object->address() + object->Size()) > slot) {
     // If the slot is within the last found object in the cell, the slot is
     // in a live object.
+    *out_object = object;
     return true;
   }
   return false;
@@ -3133,28 +3200,38 @@ bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) {
 }
 
 
-bool MarkCompactCollector::IsSlotInLiveObject(HeapObject** address,
-                                              HeapObject* object) {
-  // If the target object is not black, the source slot must be part
-  // of a non-black (dead) object.
-  if (!Marking::IsBlack(Marking::MarkBitFrom(object))) {
-    return false;
-  }
-
+bool MarkCompactCollector::IsSlotInLiveObject(Address slot) {
+  HeapObject* object = NULL;
   // The target object is black but we don't know if the source slot is black.
   // The source object could have died and the slot could be part of a free
   // space. Find out based on mark bits if the slot is part of a live object.
-  if (!IsSlotInBlackObject(
-          Page::FromAddress(reinterpret_cast<Address>(address)),
-          reinterpret_cast<Address>(address))) {
+  if (!IsSlotInBlackObject(Page::FromAddress(slot), slot, &object)) {
     return false;
   }
 
+#if V8_DOUBLE_FIELDS_UNBOXING
+  // |object| is NULL only when the slot belongs to large object space.
+  DCHECK(object != NULL ||
+         Page::FromAnyPointerAddress(heap_, slot)->owner() ==
+             heap_->lo_space());
+  // We don't need to check large objects' layout descriptor since it can't
+  // contain in-object fields anyway.
+  if (object != NULL) {
+    // Filter out slots that happen to point to unboxed double fields.
+    LayoutDescriptorHelper helper(object->map());
+    bool has_only_tagged_fields = helper.all_fields_tagged();
+    if (!has_only_tagged_fields &&
+        !helper.IsTagged(static_cast<int>(slot - object->address()))) {
+      return false;
+    }
+  }
+#endif
+
   return true;
 }
 
 
-void MarkCompactCollector::VerifyIsSlotInLiveObject(HeapObject** address,
+void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot,
                                                     HeapObject* object) {
   // The target object has to be black.
   CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
@@ -3162,9 +3239,7 @@ void MarkCompactCollector::VerifyIsSlotInLiveObject(HeapObject** address,
   // The target object is black but we don't know if the source slot is black.
   // The source object could have died and the slot could be part of a free
   // space. Use the mark bit iterator to find out about liveness of the slot.
-  CHECK(IsSlotInBlackObjectSlow(
-      Page::FromAddress(reinterpret_cast<Address>(address)),
-      reinterpret_cast<Address>(address)));
+  CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot));
 }
 
 
@@ -4483,6 +4558,66 @@ bool SlotsBuffer::AddTo(SlotsBufferAllocator* allocator,
 }
 
 
+static Object* g_smi_slot = NULL;
+
+
+void SlotsBuffer::RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer) {
+  DCHECK_EQ(Smi::FromInt(0), g_smi_slot);
+
+  // Remove entries by replacing them with a dummy slot containing a smi.
+  const ObjectSlot kRemovedEntry = &g_smi_slot;
+
+  while (buffer != NULL) {
+    SlotsBuffer::ObjectSlot* slots = buffer->slots_;
+    intptr_t slots_count = buffer->idx_;
+
+    for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
+      ObjectSlot slot = slots[slot_idx];
+      if (!IsTypedSlot(slot)) {
+        Object* object = *slot;
+        if (object->IsHeapObject()) {
+          if (heap->InNewSpace(object) ||
+              !heap->mark_compact_collector()->IsSlotInLiveObject(
+                  reinterpret_cast<Address>(slot))) {
+            slots[slot_idx] = kRemovedEntry;
+          }
+        }
+      } else {
+        ++slot_idx;
+        DCHECK(slot_idx < slots_count);
+      }
+    }
+    buffer = buffer->next();
+  }
+}
+
+
+void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) {
+  DCHECK_EQ(Smi::FromInt(0), g_smi_slot);
+
+  while (buffer != NULL) {
+    SlotsBuffer::ObjectSlot* slots = buffer->slots_;
+    intptr_t slots_count = buffer->idx_;
+
+    for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
+      ObjectSlot slot = slots[slot_idx];
+      if (!IsTypedSlot(slot)) {
+        Object* object = *slot;
+        if (object->IsHeapObject()) {
+          CHECK(!heap->InNewSpace(object));
+          CHECK(heap->mark_compact_collector()->IsSlotInLiveObject(
+              reinterpret_cast<Address>(slot)));
+        }
+      } else {
+        ++slot_idx;
+        DCHECK(slot_idx < slots_count);
+      }
+    }
+    buffer = buffer->next();
+  }
+}
+
+
 static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
   if (RelocInfo::IsCodeTarget(rmode)) {
     return SlotsBuffer::CODE_TARGET_SLOT;
diff --git a/src/heap/mark-compact.h b/src/heap/mark-compact.h
index ddb993f5a2..3ffeeed7b1 100644
--- a/src/heap/mark-compact.h
+++ b/src/heap/mark-compact.h
@@ -363,6 +363,15 @@ class SlotsBuffer {
                             SlotsBuffer** buffer_address, SlotType type,
                             Address addr, AdditionMode mode);
 
+  // Eliminates all stale entries from the slots buffer, i.e., slots that
+  // are not part of live objects anymore. This method must be called after
+  // marking, when the whole transitive closure is known and must be called
+  // before sweeping when mark bits are still intact.
+  static void RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer);
+
+  // Ensures that there are no invalid slots in the chain of slots buffers.
+  static void VerifySlots(Heap* heap, SlotsBuffer* buffer);
+
   static const int kNumberOfElements = 1021;
 
  private:
@@ -679,10 +688,10 @@ class MarkCompactCollector {
   // The following four methods can just be called after marking, when the
   // whole transitive closure is known. They must be called before sweeping
   // when mark bits are still intact.
-  bool IsSlotInBlackObject(Page* p, Address slot);
+  bool IsSlotInBlackObject(Page* p, Address slot, HeapObject** out_object);
   bool IsSlotInBlackObjectSlow(Page* p, Address slot);
-  bool IsSlotInLiveObject(HeapObject** address, HeapObject* object);
-  void VerifyIsSlotInLiveObject(HeapObject** address, HeapObject* object);
+  bool IsSlotInLiveObject(Address slot);
+  void VerifyIsSlotInLiveObject(Address slot, HeapObject* object);
 
  private:
   class SweeperTask;
@@ -694,6 +703,8 @@ class MarkCompactCollector {
   bool WillBeDeoptimized(Code* code);
   void RemoveDeadInvalidatedCode();
   void ProcessInvalidatedCode(ObjectVisitor* visitor);
+  void ClearInvalidSlotsBufferEntries(PagedSpace* space);
+  void ClearInvalidStoreAndSlotsBufferEntries();
 
   void StartSweeperThreads();
 
diff --git a/src/heap/spaces.h b/src/heap/spaces.h
index ec8821d7d8..0272d59944 100644
--- a/src/heap/spaces.h
+++ b/src/heap/spaces.h
@@ -385,6 +385,12 @@ class MemoryChunk {
     // to grey transition is performed in the value.
     HAS_PROGRESS_BAR,
 
+    // This flag is intended to be used for testing. Works only when both
+    // FLAG_stress_compaction and FLAG_manual_evacuation_candidates_selection
+    // are set. It forces the page to become an evacuation candidate at next
+    // candidates selection cycle.
+    FORCE_EVACUATION_CANDIDATE_FOR_TESTING,
+
     // Last flag, keep at bottom.
     NUM_MEMORY_CHUNK_FLAGS
   };
diff --git a/src/heap/store-buffer.cc b/src/heap/store-buffer.cc
index dec11daa90..6bf3188717 100644
--- a/src/heap/store-buffer.cc
+++ b/src/heap/store-buffer.cc
@@ -361,12 +361,14 @@ void StoreBuffer::ClearInvalidStoreBufferEntries() {
   Address* new_top = old_start_;
   for (Address* current = old_start_; current < old_top_; current++) {
     Address addr = *current;
-    Object** slot = reinterpret_cast<Object**>(*current);
+    Object** slot = reinterpret_cast<Object**>(addr);
     Object* object = *slot;
-    if (heap_->InNewSpace(object)) {
-      if (heap_->mark_compact_collector()->IsSlotInLiveObject(
-              reinterpret_cast<HeapObject**>(slot),
-              reinterpret_cast<HeapObject*>(object))) {
+    if (heap_->InNewSpace(object) && object->IsHeapObject()) {
+      // If the target object is not black, the source slot must be part
+      // of a non-black (dead) object.
+      HeapObject* heap_object = HeapObject::cast(object);
+      if (Marking::IsBlack(Marking::MarkBitFrom(heap_object)) &&
+          heap_->mark_compact_collector()->IsSlotInLiveObject(addr)) {
         *new_top++ = addr;
       }
     }
@@ -389,10 +391,10 @@ void StoreBuffer::VerifyValidStoreBufferEntries() {
   for (Address* current = old_start_; current < old_top_; current++) {
     Object** slot = reinterpret_cast<Object**>(*current);
     Object* object = *slot;
+    CHECK(object->IsHeapObject());
     CHECK(heap_->InNewSpace(object));
     heap_->mark_compact_collector()->VerifyIsSlotInLiveObject(
-        reinterpret_cast<HeapObject**>(slot),
-        reinterpret_cast<HeapObject*>(object));
+        reinterpret_cast<Address>(slot), HeapObject::cast(object));
   }
 }
 
diff --git a/test/cctest/test-unboxed-doubles.cc b/test/cctest/test-unboxed-doubles.cc
index fdcac3af35..c52017a003 100644
--- a/test/cctest/test-unboxed-doubles.cc
+++ b/test/cctest/test-unboxed-doubles.cc
@@ -48,6 +48,12 @@ static Handle<Name> MakeName(const char* str, int suffix) {
 }
 
 
+Handle<JSObject> GetObject(const char* name) {
+  return v8::Utils::OpenHandle(
+      *v8::Handle<v8::Object>::Cast(CcTest::global()->Get(v8_str(name))));
+}
+
+
 static double GetDoubleFieldValue(JSObject* obj, FieldIndex field_index) {
   if (obj->IsUnboxedDoubleField(field_index)) {
     return obj->RawFastDoublePropertyAt(field_index);
@@ -1305,4 +1311,211 @@ TEST(WriteBarriersInCopyJSObject) {
   CHECK_EQ(boom_value, clone->RawFastDoublePropertyAt(index));
 }
 
+
+static void TestWriteBarrier(Handle<Map> map, Handle<Map> new_map,
+                             int tagged_descriptor, int double_descriptor,
+                             bool check_tagged_value = true) {
+  FLAG_stress_compaction = true;
+  FLAG_manual_evacuation_candidates_selection = true;
+  Isolate* isolate = CcTest::i_isolate();
+  Factory* factory = isolate->factory();
+  Heap* heap = CcTest::heap();
+  PagedSpace* old_pointer_space = heap->old_pointer_space();
+
+  // The plan: create |obj| by |map| in old space, create |obj_value| in
+  // new space and ensure that the write barrier is triggered when |obj_value|
+  // is written to property |tagged_descriptor| of |obj|.
+  // Then migrate the object to |new_map| and set a proper value for property
+  // |double_descriptor|. Call GC and ensure that it did not crash during
+  // store buffer entries updating.
+
+  Handle<JSObject> obj;
+  Handle<HeapObject> obj_value;
+  {
+    AlwaysAllocateScope always_allocate(isolate);
+    obj = factory->NewJSObjectFromMap(map, TENURED, false);
+    CHECK(old_pointer_space->Contains(*obj));
+
+    obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS);
+  }
+
+  CHECK(heap->InNewSpace(*obj_value));
+
+  {
+    FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
+    const int n = 153;
+    for (int i = 0; i < n; i++) {
+      obj->FastPropertyAtPut(index, *obj_value);
+    }
+  }
+
+  // Migrate |obj| to |new_map| which should shift fields and put the
+  // |boom_value| to the slot that was earlier recorded by write barrier.
+  JSObject::MigrateToMap(obj, new_map);
+
+  Address fake_object = reinterpret_cast<Address>(*obj_value) + kPointerSize;
+  double boom_value = bit_cast<double>(fake_object);
+
+  FieldIndex double_field_index =
+      FieldIndex::ForDescriptor(*new_map, double_descriptor);
+  CHECK(obj->IsUnboxedDoubleField(double_field_index));
+  obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);
+
+  // Trigger GC to evacuate all candidates.
+  CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");
+
+  if (check_tagged_value) {
+    FieldIndex tagged_field_index =
+        FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
+    CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
+  }
+  CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
+}
+
+
+static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
+                                        int tagged_descriptor,
+                                        int double_descriptor,
+                                        bool check_tagged_value = true) {
+  if (FLAG_never_compact || !FLAG_incremental_marking) return;
+  FLAG_stress_compaction = true;
+  FLAG_manual_evacuation_candidates_selection = true;
+  Isolate* isolate = CcTest::i_isolate();
+  Factory* factory = isolate->factory();
+  Heap* heap = CcTest::heap();
+  PagedSpace* old_pointer_space = heap->old_pointer_space();
+
+  // The plan: create |obj| by |map| in old space, create |obj_value| in
+  // old space and ensure that it ends up on an evacuation candidate page.
+  // Start incremental marking and ensure that the incremental write barrier
+  // is triggered when |obj_value| is written to property |tagged_descriptor|
+  // of |obj|. Then migrate the object to |new_map| and set a proper value for
+  // property |double_descriptor|. Call GC and ensure that it did not crash
+  // during slots buffer entries updating.
+
+  Handle<JSObject> obj;
+  Handle<HeapObject> obj_value;
+  Page* ec_page;
+  {
+    AlwaysAllocateScope always_allocate(isolate);
+    obj = factory->NewJSObjectFromMap(map, TENURED, false);
+    CHECK(old_pointer_space->Contains(*obj));
+
+    // Make sure |obj_value| is placed on an old-space evacuation candidate.
+    SimulateFullSpace(old_pointer_space);
+    obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
+    ec_page = Page::FromAddress(obj_value->address());
+    CHECK_NE(ec_page, Page::FromAddress(obj->address()));
+  }
+
+  // Heap is ready, force |ec_page| to become an evacuation candidate and
+  // simulate incremental marking.
+  ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  SimulateIncrementalMarking(heap);
+
+  // Check that everything is ready for triggering incremental write barrier
+  // (i.e. that both |obj| and |obj_value| are black and the marking phase is
+  // still active and |obj_value|'s page is indeed an evacuation candidate).
+  IncrementalMarking* marking = heap->incremental_marking();
+  CHECK(marking->IsMarking());
+  CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj)));
+  CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj_value)));
+  CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
+
+  // Trigger incremental write barrier, which should add a slot to |ec_page|'s
+  // slots buffer.
+  {
+    int slots_buffer_len = SlotsBuffer::SizeOfChain(ec_page->slots_buffer());
+    FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
+    const int n = SlotsBuffer::kNumberOfElements + 10;
+    for (int i = 0; i < n; i++) {
+      obj->FastPropertyAtPut(index, *obj_value);
+    }
+    // Ensure that the slot was actually added to the |ec_page|'s slots buffer.
+    CHECK_EQ(slots_buffer_len + n,
+             SlotsBuffer::SizeOfChain(ec_page->slots_buffer()));
+  }
+
+  // Migrate |obj| to |new_map| which should shift fields and put the
+  // |boom_value| to the slot that was earlier recorded by incremental write
+  // barrier.
+  JSObject::MigrateToMap(obj, new_map);
+
+  double boom_value = bit_cast<double>(UINT64_C(0xbaad0176a37c28e1));
+
+  FieldIndex double_field_index =
+      FieldIndex::ForDescriptor(*new_map, double_descriptor);
+  CHECK(obj->IsUnboxedDoubleField(double_field_index));
+  obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);
+
+  // Trigger GC to evacuate all candidates.
+  CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE, "boom");
+
+  // Ensure that the values are still there and correct.
+  CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
+
+  if (check_tagged_value) {
+    FieldIndex tagged_field_index =
+        FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
+    CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
+  }
+  CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
+}
+
+
+enum WriteBarrierKind { OLD_TO_OLD_WRITE_BARRIER, OLD_TO_NEW_WRITE_BARRIER };
+static void TestWriteBarrierObjectShiftFieldsRight(
+    WriteBarrierKind write_barrier_kind) {
+  CcTest::InitializeVM();
+  Isolate* isolate = CcTest::i_isolate();
+  v8::HandleScope scope(CcTest::isolate());
+
+  Handle<HeapType> any_type = HeapType::Any(isolate);
+
+  CompileRun("function func() { return 1; }");
+
+  Handle<JSObject> func = GetObject("func");
+
+  Handle<Map> map = Map::Create(isolate, 10);
+  map = Map::CopyWithConstant(map, MakeName("prop", 0), func, NONE,
+                              INSERT_TRANSITION).ToHandleChecked();
+  map = Map::CopyWithField(map, MakeName("prop", 1), any_type, NONE,
+                           Representation::Double(),
+                           INSERT_TRANSITION).ToHandleChecked();
+  map = Map::CopyWithField(map, MakeName("prop", 2), any_type, NONE,
+                           Representation::Tagged(),
+                           INSERT_TRANSITION).ToHandleChecked();
+
+  // Shift fields right by turning constant property to a field.
+  Handle<Map> new_map = Map::ReconfigureProperty(
+      map, 0, kData, NONE, Representation::Tagged(), any_type, FORCE_FIELD);
+
+  if (write_barrier_kind == OLD_TO_NEW_WRITE_BARRIER) {
+    TestWriteBarrier(map, new_map, 2, 1);
+  } else {
+    CHECK_EQ(OLD_TO_OLD_WRITE_BARRIER, write_barrier_kind);
+    TestIncrementalWriteBarrier(map, new_map, 2, 1);
+  }
+}
+
+
+// TODO(ishell): enable when this issue is fixed.
+DISABLED_TEST(WriteBarrierObjectShiftFieldsRight) {
+  TestWriteBarrierObjectShiftFieldsRight(OLD_TO_NEW_WRITE_BARRIER);
+}
+
+
+TEST(IncrementalWriteBarrierObjectShiftFieldsRight) {
+  TestWriteBarrierObjectShiftFieldsRight(OLD_TO_OLD_WRITE_BARRIER);
+}
+
+
+// TODO(ishell): add respective tests for property kind reconfiguring from
+// accessor field to double, once accessor fields are supported by
+// Map::ReconfigureProperty().
+
+
+// TODO(ishell): add respective tests for fast property removal case once
+// Map::ReconfigureProperty() supports that.
+
 #endif
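
Editor's note, not part of the patch: the sketch below condenses the slot-filtering predicate that the new SlotsBuffer::RemoveInvalidSlots() and StoreBuffer::ClearInvalidStoreBufferEntries() paths rely on, using only calls the CL itself introduces or touches. The helper name SlotIsStillRecordable is illustrative and is not added anywhere in the tree; this is a hedged summary, not a definitive restatement of V8's implementation.

// Illustrative summary only (assumes the same V8 internal headers as above).
// A recorded slot survives filtering when it lies inside a black (live)
// object and, with double-field unboxing enabled, refers to a tagged field
// rather than an unboxed double.
static bool SlotIsStillRecordable(Heap* heap, Address slot) {
  HeapObject* holder = NULL;
  // IsSlotInBlackObject() also reports the object containing the slot.
  if (!heap->mark_compact_collector()->IsSlotInBlackObject(
          Page::FromAddress(slot), slot, &holder)) {
    return false;  // The holder died during marking.
  }
#if V8_DOUBLE_FIELDS_UNBOXING
  if (holder != NULL) {  // NULL means the slot is in large object space.
    LayoutDescriptorHelper helper(holder->map());
    if (!helper.all_fields_tagged() &&
        !helper.IsTagged(static_cast<int>(slot - holder->address()))) {
      return false;  // The "slot" actually holds raw double bits.
    }
  }
#endif
  return true;
}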