diff --git a/src/heap/scavenger-inl.h b/src/heap/scavenger-inl.h
index 8a0a1da96b..015c440d9f 100644
--- a/src/heap/scavenger-inl.h
+++ b/src/heap/scavenger-inl.h
@@ -182,7 +182,11 @@ CopyAndForwardResult Scavenger::PromoteObject(Map map, THeapObjectSlot slot,
                  : CopyAndForwardResult::SUCCESS_OLD_GENERATION;
     }
     HeapObjectReference::Update(slot, target);
-    if (object_fields == ObjectFields::kMaybePointers) {
+
+    // During incremental marking we want to push every object in order to
+    // record slots for map words. Necessary for map space compaction.
+    if (object_fields == ObjectFields::kMaybePointers ||
+        is_compacting_including_map_space_) {
       promotion_list_local_.PushRegularObject(target, object_size);
     }
     promoted_size_ += object_size;
diff --git a/src/heap/scavenger.cc b/src/heap/scavenger.cc
index 12e8fe4a39..05c8200f3d 100644
--- a/src/heap/scavenger.cc
+++ b/src/heap/scavenger.cc
@@ -4,6 +4,7 @@
 
 #include "src/heap/scavenger.h"
 
+#include "src/common/globals.h"
 #include "src/handles/global-handles.h"
 #include "src/heap/array-buffer-sweeper.h"
 #include "src/heap/barrier.h"
@@ -12,7 +13,9 @@
 #include "src/heap/heap-inl.h"
 #include "src/heap/invalidated-slots-inl.h"
 #include "src/heap/mark-compact-inl.h"
+#include "src/heap/mark-compact.h"
 #include "src/heap/memory-chunk-inl.h"
+#include "src/heap/memory-chunk.h"
 #include "src/heap/objects-visiting-inl.h"
 #include "src/heap/remembered-set-inl.h"
 #include "src/heap/scavenger-inl.h"
@@ -33,6 +36,17 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
                                            bool record_slots)
       : scavenger_(scavenger), record_slots_(record_slots) {}
 
+  V8_INLINE void VisitMapPointer(HeapObject host) final {
+    if (!record_slots_) return;
+    MapWord map_word = host.map_word(kRelaxedLoad);
+    if (map_word.IsForwardingAddress()) {
+      // Surviving new large objects have forwarding pointers in the map word.
+      DCHECK(MemoryChunk::FromHeapObject(host)->InNewLargeObjectSpace());
+      return;
+    }
+    HandleSlot(host, HeapObjectSlot(host.map_slot()), map_word.ToMap());
+  }
+
   V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
                                ObjectSlot end) final {
     VisitPointersImpl(host, start, end);
@@ -119,10 +133,9 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
                                                                slot.address());
         }
       }
-      SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
-          HeapObject::cast(target)));
-    } else if (record_slots_ && MarkCompactCollector::IsOnEvacuationCandidate(
-                                    HeapObject::cast(target))) {
+      SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(target));
+    } else if (record_slots_ &&
+               MarkCompactCollector::IsOnEvacuationCandidate(target)) {
       // We should never try to record off-heap slots.
       DCHECK((std::is_same<THeapObjectSlot, HeapObjectSlot>::value));
       // Code slots never appear in new space because CodeDataContainers, the
@@ -498,6 +511,10 @@ void ScavengerCollector::SweepArrayBufferExtensions() {
 }
 
 void ScavengerCollector::HandleSurvivingNewLargeObjects() {
+  const bool is_compacting = heap_->incremental_marking()->IsCompacting();
+  MajorAtomicMarkingState* marking_state =
+      heap_->incremental_marking()->atomic_marking_state();
+
   for (SurvivingNewLargeObjectMapEntry update_info :
        surviving_new_large_objects_) {
     HeapObject object = update_info.first;
@@ -505,6 +522,12 @@ void ScavengerCollector::HandleSurvivingNewLargeObjects() {
     // Order is important here. We have to re-install the map to have access
     // to meta-data like size during page promotion.
     object.set_map_word(MapWord::FromMap(map), kRelaxedStore);
+
+    if (is_compacting && marking_state->IsBlack(object) &&
+        MarkCompactCollector::IsOnEvacuationCandidate(map)) {
+      RememberedSet<OLD_TO_OLD>::Insert<AccessMode::ATOMIC>(
+          MemoryChunk::FromHeapObject(object), object.map_slot().address());
+    }
     LargePage* page = LargePage::FromHeapObject(object);
     heap_->lo_space()->PromoteNewLargeObject(page);
   }
@@ -568,6 +591,8 @@ Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
       is_logging_(is_logging),
       is_incremental_marking_(heap->incremental_marking()->IsMarking()),
       is_compacting_(heap->incremental_marking()->IsCompacting()),
+      is_compacting_including_map_space_(is_compacting_ &&
+                                         FLAG_compact_map_space),
       shared_string_table_(shared_old_allocator_.get() != nullptr) {}
 
 void Scavenger::IterateAndScavengePromotedObject(HeapObject target, Map map,
@@ -583,7 +608,13 @@ void Scavenger::IterateAndScavengePromotedObject(HeapObject target, Map map,
       heap()->incremental_marking()->atomic_marking_state()->IsBlack(target);
 
   IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots);
-  target.IterateBodyFast(map, size, &visitor);
+
+  if (is_compacting_including_map_space_) {
+    // When we compact map space, we also want to visit the map word.
+    target.IterateFast(map, size, &visitor);
+  } else {
+    target.IterateBodyFast(map, size, &visitor);
+  }
 
   if (map.IsJSArrayBufferMap()) {
     DCHECK(!BasicMemoryChunk::FromHeapObject(target)->IsLargePage());
diff --git a/src/heap/scavenger.h b/src/heap/scavenger.h
index da1240c810..0f5a267610 100644
--- a/src/heap/scavenger.h
+++ b/src/heap/scavenger.h
@@ -204,6 +204,7 @@ class Scavenger {
   const bool is_logging_;
   const bool is_incremental_marking_;
   const bool is_compacting_;
+  const bool is_compacting_including_map_space_;
   const bool shared_string_table_;
 
   friend class IterateAndScavengePromotedObjectsVisitor;
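Note (illustration, not part of the patch): the change makes the scavenger treat a promoted object's map word as a recordable slot whenever incremental marking is compacting and map space compaction is enabled, so that a map sitting on an evacuation candidate page can still be found and its slot updated after the map moves. Below is a minimal, self-contained C++ sketch of that decision logic; every name in it (ToyMap, ToyObject, RecordedSlot, IteratePromotedObject) is a hypothetical stand-in, not a V8 API.

    #include <iostream>
    #include <vector>

    // Hypothetical stand-ins for V8 concepts; names are illustrative only.
    struct ToyMap {
      bool on_evacuation_candidate;  // page holding this map will be compacted
    };

    struct ToyObject {
      ToyMap* map;                   // the "map word"
      std::vector<ToyObject*> fields;
    };

    struct RecordedSlot {
      ToyObject* host;
      ToyMap** map_slot;
    };

    // Mirrors the idea of the patch: slots are only recorded while marking is
    // compacting, and the map word is additionally treated as a slot when map
    // space compaction is enabled.
    void IteratePromotedObject(ToyObject* obj, bool record_slots,
                               bool compacting_includes_map_space,
                               std::vector<RecordedSlot>* remembered) {
      if (record_slots && compacting_includes_map_space &&
          obj->map->on_evacuation_candidate) {
        // The map word points into a page that will be evacuated, so remember
        // the slot; otherwise the moved map could not be located and updated.
        remembered->push_back({obj, &obj->map});
      }
      // Regular body slots would be visited here as before (omitted).
    }

    int main() {
      ToyMap movable_map{true};
      ToyObject promoted{&movable_map, {}};

      std::vector<RecordedSlot> remembered;
      IteratePromotedObject(&promoted, /*record_slots=*/true,
                            /*compacting_includes_map_space=*/true, &remembered);

      std::cout << "recorded map-word slots: " << remembered.size() << "\n";
      return 0;
    }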