From d032734c1276f5d3185ac7d96478266976ef448e Mon Sep 17 00:00:00 2001
From: Michael Lippautz
Date: Mon, 6 Feb 2023 12:19:46 +0100
Subject: [PATCH] [heap] MinorMC: marking refactoring
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Move CppHeap code into scopes
- Move class declarations out of inline header
- Add TODO for working global handles processing

Bug: v8:12612
Change-Id: I97737d4f5ded940f0145ba093963f45338d44d57
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4221701
Reviewed-by: Dominik Inführ
Commit-Queue: Michael Lippautz
Cr-Commit-Position: refs/heads/main@{#85673}
---
 src/heap/mark-compact-inl.h | 58 ----------------------------------
 src/heap/mark-compact.cc    | 55 +++++++++++++++++---------------
 src/heap/mark-compact.h     | 63 +++++++++++++++++++++++++++++++++++--
 3 files changed, 91 insertions(+), 85 deletions(-)

diff --git a/src/heap/mark-compact-inl.h b/src/heap/mark-compact-inl.h
index da353373cc..2b3c4e3b9f 100644
--- a/src/heap/mark-compact-inl.h
+++ b/src/heap/mark-compact-inl.h
@@ -291,64 +291,6 @@ typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::end() {
 
 Isolate* CollectorBase::isolate() { return heap()->isolate(); }
 
-class YoungGenerationMarkingTask;
-
-class PageMarkingItem : public ParallelWorkItem {
- public:
-  explicit PageMarkingItem(MemoryChunk* chunk) : chunk_(chunk) {}
-  ~PageMarkingItem() = default;
-
-  void Process(YoungGenerationMarkingTask* task);
-
- private:
-  inline Heap* heap() { return chunk_->heap(); }
-
-  void MarkUntypedPointers(YoungGenerationMarkingTask* task);
-
-  void MarkTypedPointers(YoungGenerationMarkingTask* task);
-
-  template <typename TSlot>
-  V8_INLINE SlotCallbackResult
-  CheckAndMarkObject(YoungGenerationMarkingTask* task, TSlot slot);
-
-  MemoryChunk* chunk_;
-};
-
-enum class YoungMarkingJobType { kAtomic, kIncremental };
-
-class YoungGenerationMarkingJob : public v8::JobTask {
- public:
-  YoungGenerationMarkingJob(Isolate* isolate, Heap* heap,
-                            MarkingWorklists* global_worklists,
-                            std::vector<PageMarkingItem> marking_items,
-                            YoungMarkingJobType young_marking_job_type)
-      : isolate_(isolate),
-        heap_(heap),
-        global_worklists_(global_worklists),
-        marking_items_(std::move(marking_items)),
-        remaining_marking_items_(marking_items_.size()),
-        generator_(marking_items_.size()),
-        young_marking_job_type_(young_marking_job_type) {}
-
-  void Run(JobDelegate* delegate) override;
-  size_t GetMaxConcurrency(size_t worker_count) const override;
-  bool incremental() const {
-    return young_marking_job_type_ == YoungMarkingJobType::kIncremental;
-  }
-
- private:
-  void ProcessItems(JobDelegate* delegate);
-  void ProcessMarkingItems(YoungGenerationMarkingTask* task);
-
-  Isolate* isolate_;
-  Heap* heap_;
-  MarkingWorklists* global_worklists_;
-  std::vector<PageMarkingItem> marking_items_;
-  std::atomic_size_t remaining_marking_items_{0};
-  IndexGenerator generator_;
-  YoungMarkingJobType young_marking_job_type_;
-};
-
 }  // namespace internal
 }  // namespace v8
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 4a5a84a3ba..da0baf9d99 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -6110,7 +6110,7 @@ class YoungGenerationMarkingTask {
     }
   }
 
-  void EmptyMarkingWorklist() {
+  void DrainMarkingWorklist() {
     HeapObject object;
     while (marking_worklists_local_->Pop(&object) ||
            marking_worklists_local_->PopOnHold(&object)) {
@@ -6204,7 +6204,7 @@ size_t YoungGenerationMarkingJob::GetMaxConcurrency(size_t worker_count) const {
   const int kPagesPerTask = 2;
   size_t items = remaining_marking_items_.load(std::memory_order_relaxed);
   size_t num_tasks;
-  if (!incremental()) {
+  if (ShouldDrainMarkingWorklist()) {
     num_tasks = std::max(
         (items + 1) / kPagesPerTask,
         global_worklists_->shared()->Size() +
@@ -6228,8 +6228,8 @@ void YoungGenerationMarkingJob::ProcessItems(JobDelegate* delegate) {
     TimedScope scope(&marking_time);
     YoungGenerationMarkingTask task(isolate_, heap_, global_worklists_);
     ProcessMarkingItems(&task);
-    if (!incremental()) {
-      task.EmptyMarkingWorklist();
+    if (ShouldDrainMarkingWorklist()) {
+      task.DrainMarkingWorklist();
     } else {
       task.PublishMarkingWorklist();
     }
@@ -6254,8 +6254,8 @@ void YoungGenerationMarkingJob::ProcessMarkingItems(
       auto& work_item = marking_items_[i];
       if (!work_item.TryAcquire()) break;
       work_item.Process(task);
-      if (!incremental()) {
-        task->EmptyMarkingWorklist();
+      if (ShouldDrainMarkingWorklist()) {
+        task->DrainMarkingWorklist();
       }
       if (remaining_marking_items_.fetch_sub(1, std::memory_order_relaxed) <=
          1) {
@@ -6265,7 +6265,7 @@
   }
 }
 
-void MinorMarkCompactCollector::MarkRootSetInParallel(
+void MinorMarkCompactCollector::MarkLiveObjectsInParallel(
     RootMarkingVisitor* root_visitor, bool was_marked_incrementally) {
   std::vector<PageMarkingItem> marking_items;
 
@@ -6306,20 +6306,20 @@ void MinorMarkCompactCollector::MarkRootSetInParallel(
     }
   }
 
-  // CppGC starts parallel marking tasks that will trace TracedReferences. Start
-  // if after marking of traced handles.
-  if (heap_->cpp_heap()) {
-    CppHeap::From(heap_->cpp_heap())
-        ->EnterFinalPause(heap_->embedder_stack_state_);
-  }
-
   // Add tasks and run in parallel.
   {
+    TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_CLOSURE_PARALLEL);
+
+    // CppGC starts parallel marking tasks that will trace TracedReferences.
+    if (heap_->cpp_heap()) {
+      CppHeap::From(heap_->cpp_heap())
+          ->EnterFinalPause(heap_->embedder_stack_state_);
+    }
+
     // The main thread might hold local items, while GlobalPoolSize() ==
     // 0. Flush to ensure these items are visible globally and picked up
     // by the job.
     local_marking_worklists_->Publish();
-    TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_CLOSURE_PARALLEL);
     V8::GetCurrentPlatform()
         ->CreateJob(v8::TaskPriority::kUserBlocking,
                     std::make_unique<YoungGenerationMarkingJob>(
@@ -6357,19 +6357,24 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
 
   RootMarkingVisitor root_visitor(this);
 
-  MarkRootSetInParallel(&root_visitor, was_marked_incrementally);
+  MarkLiveObjectsInParallel(&root_visitor, was_marked_incrementally);
 
-  if (auto* cpp_heap = CppHeap::From(heap_->cpp_heap())) {
-    cpp_heap->FinishConcurrentMarkingIfNeeded();
-  }
-
-  // Mark rest on the main thread.
   {
+    // Finish marking the transitive closure on the main thread.
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_CLOSURE);
+    if (auto* cpp_heap = CppHeap::From(heap_->cpp_heap())) {
+      cpp_heap->FinishConcurrentMarkingIfNeeded();
+    }
     DrainMarkingWorklist();
   }
 
   {
+    // Process global handles.
+    //
+    // TODO(v8:12612): There should be no need for passing the root visitor here
+    // and restarting marking. If the nodes are considered dead, then they
+    // should merely be reset. Otherwise, they are alive and still point to
+    // their corresponding objects.
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_GLOBAL_HANDLES);
     isolate()->global_handles()->ProcessWeakYoungObjects(
         &root_visitor, &IsUnmarkedObjectForYoungGeneration);
@@ -6378,13 +6383,13 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
     DrainMarkingWorklist();
   }
 
-  if (v8_flags.minor_mc_trace_fragmentation) {
-    TraceFragmentation();
-  }
-
   if (was_marked_incrementally) {
     MarkingBarrier::DeactivateAll(heap());
   }
+
+  if (v8_flags.minor_mc_trace_fragmentation) {
+    TraceFragmentation();
+  }
 }
 
 void MinorMarkCompactCollector::DrainMarkingWorklist() {
diff --git a/src/heap/mark-compact.h b/src/heap/mark-compact.h
index fbf460b798..77fd5e51ef 100644
--- a/src/heap/mark-compact.h
+++ b/src/heap/mark-compact.h
@@ -11,6 +11,7 @@
 #include "include/v8-internal.h"
 #include "src/heap/base/worklist.h"
 #include "src/heap/concurrent-marking.h"
+#include "src/heap/index-generator.h"
 #include "src/heap/marking-state.h"
 #include "src/heap/marking-visitor.h"
 #include "src/heap/marking-worklist.h"
@@ -34,6 +35,7 @@ class PagedNewSpace;
 class ReadOnlySpace;
 class RecordMigratedSlotVisitor;
 class UpdatingItem;
+class YoungGenerationMarkingTask;
 
 class MarkBitCellIterator {
  public:
@@ -720,8 +722,8 @@
   Sweeper* sweeper() { return sweeper_; }
 
   void MarkLiveObjects();
-  void MarkRootSetInParallel(RootMarkingVisitor* root_visitor,
-                             bool was_marked_incrementally);
+  void MarkLiveObjectsInParallel(RootMarkingVisitor* root_visitor,
+                                 bool was_marked_incrementally);
   V8_INLINE void MarkRootObject(HeapObject obj);
   void DrainMarkingWorklist();
   void TraceFragmentation();
@@ -752,6 +754,63 @@
   friend class YoungGenerationMainMarkingVisitor;
 };
 
+class PageMarkingItem : public ParallelWorkItem {
+ public:
+  explicit PageMarkingItem(MemoryChunk* chunk) : chunk_(chunk) {}
+  ~PageMarkingItem() = default;
+
+  void Process(YoungGenerationMarkingTask* task);
+
+ private:
+  inline Heap* heap() { return chunk_->heap(); }
+
+  void MarkUntypedPointers(YoungGenerationMarkingTask* task);
+
+  void MarkTypedPointers(YoungGenerationMarkingTask* task);
+
+  template <typename TSlot>
+  V8_INLINE SlotCallbackResult
+  CheckAndMarkObject(YoungGenerationMarkingTask* task, TSlot slot);
+
+  MemoryChunk* chunk_;
+};
+
+enum class YoungMarkingJobType { kAtomic, kIncremental };
+
+class YoungGenerationMarkingJob : public v8::JobTask {
+ public:
+  YoungGenerationMarkingJob(Isolate* isolate, Heap* heap,
+                            MarkingWorklists* global_worklists,
+                            std::vector<PageMarkingItem> marking_items,
+                            YoungMarkingJobType young_marking_job_type)
+      : isolate_(isolate),
+        heap_(heap),
+        global_worklists_(global_worklists),
+        marking_items_(std::move(marking_items)),
+        remaining_marking_items_(marking_items_.size()),
+        generator_(marking_items_.size()),
+        young_marking_job_type_(young_marking_job_type) {}
+
+  void Run(JobDelegate* delegate) override;
+  size_t GetMaxConcurrency(size_t worker_count) const override;
+
+  bool ShouldDrainMarkingWorklist() const {
+    return young_marking_job_type_ == YoungMarkingJobType::kAtomic;
+  }
+
+ private:
+  void ProcessItems(JobDelegate* delegate);
+  void ProcessMarkingItems(YoungGenerationMarkingTask* task);
+
+  Isolate* isolate_;
+  Heap* heap_;
+  MarkingWorklists* global_worklists_;
+  std::vector<PageMarkingItem> marking_items_;
+  std::atomic_size_t remaining_marking_items_{0};
+  IndexGenerator generator_;
+  YoungMarkingJobType young_marking_job_type_;
+};
+
 }  // namespace internal
 }  // namespace v8