[heap] MinorMC: marking refactoring

- Move CppHeap code into scopes
- Move class declarations out of inline header
- Add TODO for working global handles processing

Bug: v8:12612
Change-Id: I97737d4f5ded940f0145ba093963f45338d44d57
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4221701
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85673}
This commit is contained in:
Michael Lippautz 2023-02-06 12:19:46 +01:00 committed by V8 LUCI CQ
parent 229f91a339
commit d032734c12
3 changed files with 91 additions and 85 deletions

View File

@ -291,64 +291,6 @@ typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::end() {
// Convenience accessor: the isolate is obtained through the collector's heap.
Isolate* CollectorBase::isolate() {
  return heap()->isolate();
}
class YoungGenerationMarkingTask;
// A parallel work item covering a single MemoryChunk. Worker tasks call
// Process() to mark young-generation objects referenced from the chunk's
// untyped and typed pointers via the given YoungGenerationMarkingTask.
class PageMarkingItem : public ParallelWorkItem {
public:
explicit PageMarkingItem(MemoryChunk* chunk) : chunk_(chunk) {}
~PageMarkingItem() = default;
// Processes the chunk, using |task| for marking.
void Process(YoungGenerationMarkingTask* task);
private:
inline Heap* heap() { return chunk_->heap(); }
void MarkUntypedPointers(YoungGenerationMarkingTask* task);
void MarkTypedPointers(YoungGenerationMarkingTask* task);
// Checks a single slot and marks the referenced object if necessary.
// TSlot abstracts over the different slot representations.
template <typename TSlot>
V8_INLINE SlotCallbackResult
CheckAndMarkObject(YoungGenerationMarkingTask* task, TSlot slot);
// The chunk this item is responsible for; not owned.
MemoryChunk* chunk_;
};
// Whether the young-generation marking job runs as part of an atomic pause
// or during incremental marking.
enum class YoungMarkingJobType { kAtomic, kIncremental };
// v8::JobTask that distributes PageMarkingItems among parallel worker tasks.
class YoungGenerationMarkingJob : public v8::JobTask {
public:
YoungGenerationMarkingJob(Isolate* isolate, Heap* heap,
MarkingWorklists* global_worklists,
std::vector<PageMarkingItem> marking_items,
YoungMarkingJobType young_marking_job_type)
: isolate_(isolate),
heap_(heap),
global_worklists_(global_worklists),
marking_items_(std::move(marking_items)),
remaining_marking_items_(marking_items_.size()),
generator_(marking_items_.size()),
young_marking_job_type_(young_marking_job_type) {}
// v8::JobTask implementation.
void Run(JobDelegate* delegate) override;
size_t GetMaxConcurrency(size_t worker_count) const override;
// True when the job was started for incremental marking.
bool incremental() const {
return young_marking_job_type_ == YoungMarkingJobType::kIncremental;
}
private:
void ProcessItems(JobDelegate* delegate);
void ProcessMarkingItems(YoungGenerationMarkingTask* task);
Isolate* isolate_;
Heap* heap_;
// Marking worklists shared across all tasks; not owned.
MarkingWorklists* global_worklists_;
std::vector<PageMarkingItem> marking_items_;
// Number of items not yet fully processed; read by GetMaxConcurrency()
// to size the worker pool.
std::atomic_size_t remaining_marking_items_{0};
IndexGenerator generator_;
YoungMarkingJobType young_marking_job_type_;
};
} // namespace internal
} // namespace v8

View File

@ -6110,7 +6110,7 @@ class YoungGenerationMarkingTask {
}
}
void EmptyMarkingWorklist() {
void DrainMarkingWorklist() {
HeapObject object;
while (marking_worklists_local_->Pop(&object) ||
marking_worklists_local_->PopOnHold(&object)) {
@ -6204,7 +6204,7 @@ size_t YoungGenerationMarkingJob::GetMaxConcurrency(size_t worker_count) const {
const int kPagesPerTask = 2;
size_t items = remaining_marking_items_.load(std::memory_order_relaxed);
size_t num_tasks;
if (!incremental()) {
if (ShouldDrainMarkingWorklist()) {
num_tasks = std::max(
(items + 1) / kPagesPerTask,
global_worklists_->shared()->Size() +
@ -6228,8 +6228,8 @@ void YoungGenerationMarkingJob::ProcessItems(JobDelegate* delegate) {
TimedScope scope(&marking_time);
YoungGenerationMarkingTask task(isolate_, heap_, global_worklists_);
ProcessMarkingItems(&task);
if (!incremental()) {
task.EmptyMarkingWorklist();
if (ShouldDrainMarkingWorklist()) {
task.DrainMarkingWorklist();
} else {
task.PublishMarkingWorklist();
}
@ -6254,8 +6254,8 @@ void YoungGenerationMarkingJob::ProcessMarkingItems(
auto& work_item = marking_items_[i];
if (!work_item.TryAcquire()) break;
work_item.Process(task);
if (!incremental()) {
task->EmptyMarkingWorklist();
if (ShouldDrainMarkingWorklist()) {
task->DrainMarkingWorklist();
}
if (remaining_marking_items_.fetch_sub(1, std::memory_order_relaxed) <=
1) {
@ -6265,7 +6265,7 @@ void YoungGenerationMarkingJob::ProcessMarkingItems(
}
}
void MinorMarkCompactCollector::MarkRootSetInParallel(
void MinorMarkCompactCollector::MarkLiveObjectsInParallel(
RootMarkingVisitor* root_visitor, bool was_marked_incrementally) {
std::vector<PageMarkingItem> marking_items;
@ -6306,20 +6306,20 @@ void MinorMarkCompactCollector::MarkRootSetInParallel(
}
}
// CppGC starts parallel marking tasks that will trace TracedReferences. Start
// it after marking of traced handles.
if (heap_->cpp_heap()) {
CppHeap::From(heap_->cpp_heap())
->EnterFinalPause(heap_->embedder_stack_state_);
}
// Add tasks and run in parallel.
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_CLOSURE_PARALLEL);
// CppGC starts parallel marking tasks that will trace TracedReferences.
if (heap_->cpp_heap()) {
CppHeap::From(heap_->cpp_heap())
->EnterFinalPause(heap_->embedder_stack_state_);
}
// The main thread might hold local items, while GlobalPoolSize() ==
// 0. Flush to ensure these items are visible globally and picked up
// by the job.
local_marking_worklists_->Publish();
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_CLOSURE_PARALLEL);
V8::GetCurrentPlatform()
->CreateJob(v8::TaskPriority::kUserBlocking,
std::make_unique<YoungGenerationMarkingJob>(
@ -6357,19 +6357,24 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
RootMarkingVisitor root_visitor(this);
MarkRootSetInParallel(&root_visitor, was_marked_incrementally);
MarkLiveObjectsInParallel(&root_visitor, was_marked_incrementally);
if (auto* cpp_heap = CppHeap::From(heap_->cpp_heap())) {
cpp_heap->FinishConcurrentMarkingIfNeeded();
}
// Mark rest on the main thread.
{
// Finish marking the transitive closure on the main thread.
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_CLOSURE);
if (auto* cpp_heap = CppHeap::From(heap_->cpp_heap())) {
cpp_heap->FinishConcurrentMarkingIfNeeded();
}
DrainMarkingWorklist();
}
{
// Process global handles.
//
// TODO(v8:12612): There should be no need for passing the root visitor here
// and restarting marking. If the nodes are considered dead, then they
// should merely be reset. Otherwise, they are alive and still point to
// their corresponding objects.
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_GLOBAL_HANDLES);
isolate()->global_handles()->ProcessWeakYoungObjects(
&root_visitor, &IsUnmarkedObjectForYoungGeneration);
@ -6378,13 +6383,13 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
DrainMarkingWorklist();
}
if (v8_flags.minor_mc_trace_fragmentation) {
TraceFragmentation();
}
if (was_marked_incrementally) {
MarkingBarrier::DeactivateAll(heap());
}
if (v8_flags.minor_mc_trace_fragmentation) {
TraceFragmentation();
}
}
void MinorMarkCompactCollector::DrainMarkingWorklist() {

View File

@ -11,6 +11,7 @@
#include "include/v8-internal.h"
#include "src/heap/base/worklist.h"
#include "src/heap/concurrent-marking.h"
#include "src/heap/index-generator.h"
#include "src/heap/marking-state.h"
#include "src/heap/marking-visitor.h"
#include "src/heap/marking-worklist.h"
@ -34,6 +35,7 @@ class PagedNewSpace;
class ReadOnlySpace;
class RecordMigratedSlotVisitor;
class UpdatingItem;
class YoungGenerationMarkingTask;
class MarkBitCellIterator {
public:
@ -720,8 +722,8 @@ class MinorMarkCompactCollector final : public CollectorBase {
Sweeper* sweeper() { return sweeper_; }
void MarkLiveObjects();
void MarkRootSetInParallel(RootMarkingVisitor* root_visitor,
bool was_marked_incrementally);
void MarkLiveObjectsInParallel(RootMarkingVisitor* root_visitor,
bool was_marked_incrementally);
V8_INLINE void MarkRootObject(HeapObject obj);
void DrainMarkingWorklist();
void TraceFragmentation();
@ -752,6 +754,63 @@ class MinorMarkCompactCollector final : public CollectorBase {
friend class YoungGenerationMainMarkingVisitor;
};
// A parallel work item covering a single MemoryChunk. Worker tasks call
// Process() to mark young-generation objects referenced from the chunk's
// untyped and typed pointers via the given YoungGenerationMarkingTask.
class PageMarkingItem : public ParallelWorkItem {
public:
explicit PageMarkingItem(MemoryChunk* chunk) : chunk_(chunk) {}
~PageMarkingItem() = default;
// Processes the chunk, using |task| for marking.
void Process(YoungGenerationMarkingTask* task);
private:
inline Heap* heap() { return chunk_->heap(); }
void MarkUntypedPointers(YoungGenerationMarkingTask* task);
void MarkTypedPointers(YoungGenerationMarkingTask* task);
// Checks a single slot and marks the referenced object if necessary.
// TSlot abstracts over the different slot representations.
template <typename TSlot>
V8_INLINE SlotCallbackResult
CheckAndMarkObject(YoungGenerationMarkingTask* task, TSlot slot);
// The chunk this item is responsible for; not owned.
MemoryChunk* chunk_;
};
// Whether the young-generation marking job runs as part of an atomic pause
// or during incremental marking.
enum class YoungMarkingJobType { kAtomic, kIncremental };
// v8::JobTask that distributes PageMarkingItems among parallel worker tasks.
class YoungGenerationMarkingJob : public v8::JobTask {
public:
YoungGenerationMarkingJob(Isolate* isolate, Heap* heap,
MarkingWorklists* global_worklists,
std::vector<PageMarkingItem> marking_items,
YoungMarkingJobType young_marking_job_type)
: isolate_(isolate),
heap_(heap),
global_worklists_(global_worklists),
marking_items_(std::move(marking_items)),
remaining_marking_items_(marking_items_.size()),
generator_(marking_items_.size()),
young_marking_job_type_(young_marking_job_type) {}
// v8::JobTask implementation.
void Run(JobDelegate* delegate) override;
size_t GetMaxConcurrency(size_t worker_count) const override;
// True for atomic jobs: workers then drain the marking worklist themselves
// instead of merely publishing it for later processing.
bool ShouldDrainMarkingWorklist() const {
return young_marking_job_type_ == YoungMarkingJobType::kAtomic;
}
private:
void ProcessItems(JobDelegate* delegate);
void ProcessMarkingItems(YoungGenerationMarkingTask* task);
Isolate* isolate_;
Heap* heap_;
// Marking worklists shared across all tasks; not owned.
MarkingWorklists* global_worklists_;
std::vector<PageMarkingItem> marking_items_;
// Number of items not yet fully processed; read by GetMaxConcurrency()
// to size the worker pool.
std::atomic_size_t remaining_marking_items_{0};
IndexGenerator generator_;
YoungMarkingJobType young_marking_job_type_;
};
} // namespace internal
} // namespace v8