[heap] Use MarkingWorklists in MinorMC

This CL is part of an effort to enable concurrent marking in MinorMC.

For this purpose we plan to reuse the IncrementalMarking class which
already implements a part of the concurrent marking code for MajorMC.
IncrementalMarking internally uses the MarkingWorklists class.

This CL adapts the stop-the-world marking implementation of
MinorMC to use the MarkingWorklists class.

Bug: v8:13012
Change-Id: I3c4eb33142f2630e89aa3771b6065b9f82dc0847
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3747862
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Leon Bettscheider <bettscheider@google.com>
Cr-Commit-Position: refs/heads/main@{#81646}
This commit is contained in:
Leon Bettscheider 2022-07-11 15:36:36 +00:00 committed by V8 LUCI CQ
parent 8ab9821b9d
commit 110fa66e13
5 changed files with 94 additions and 99 deletions

View File

@@ -43,7 +43,7 @@ void MarkCompactCollector::MarkRootObject(Root root, HeapObject obj) {
void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) { void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) {
if (Heap::InYoungGeneration(obj) && if (Heap::InYoungGeneration(obj) &&
non_atomic_marking_state_.WhiteToBlack(obj)) { non_atomic_marking_state_.WhiteToBlack(obj)) {
main_thread_worklist_local_.Push(obj); main_thread_worklists_local_->Push(obj);
} }
} }

View File

@@ -4971,17 +4971,16 @@ void MarkCompactCollector::UpdatePointersInClientHeap(Isolate* client) {
chunk->ReleaseInvalidatedSlots<OLD_TO_SHARED>(); chunk->ReleaseInvalidatedSlots<OLD_TO_SHARED>();
RememberedSet<OLD_TO_SHARED>::IterateTyped( RememberedSet<OLD_TO_SHARED>::IterateTyped(chunk, [this](SlotType slot_type,
chunk, [this](SlotType slot_type, Address slot) { Address slot) {
// Using UpdateStrongSlot is OK here, because there are no weak // Using UpdateStrongSlot is OK here, because there are no weak
// typed slots. // typed slots.
PtrComprCageBase cage_base = heap_->isolate(); PtrComprCageBase cage_base = heap_->isolate();
return UpdateTypedSlotHelper::UpdateTypedSlot( return UpdateTypedSlotHelper::UpdateTypedSlot(
heap_, slot_type, slot, [cage_base](FullMaybeObjectSlot slot) { heap_, slot_type, slot, [cage_base](FullMaybeObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
slot); });
}); });
});
} }
#ifdef VERIFY_HEAP #ifdef VERIFY_HEAP
@@ -5350,9 +5349,9 @@ class YoungGenerationMarkingVisitor final
: public NewSpaceVisitor<YoungGenerationMarkingVisitor> { : public NewSpaceVisitor<YoungGenerationMarkingVisitor> {
public: public:
YoungGenerationMarkingVisitor(Isolate* isolate, MarkingState* marking_state, YoungGenerationMarkingVisitor(Isolate* isolate, MarkingState* marking_state,
MarkingWorklist::Local* worklist_local) MarkingWorklists::Local* worklists_local)
: NewSpaceVisitor(isolate), : NewSpaceVisitor(isolate),
worklist_local_(worklist_local), worklists_local_(worklists_local),
marking_state_(marking_state) {} marking_state_(marking_state) {}
V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start, V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
@@ -5420,14 +5419,16 @@ class YoungGenerationMarkingVisitor final
inline void MarkObjectViaMarkingWorklist(HeapObject object) { inline void MarkObjectViaMarkingWorklist(HeapObject object) {
if (marking_state_->WhiteToBlack(object)) { if (marking_state_->WhiteToBlack(object)) {
worklist_local_->Push(object); worklists_local_->Push(object);
} }
} }
MarkingWorklist::Local* worklist_local_; MarkingWorklists::Local* worklists_local_;
MarkingState* marking_state_; MarkingState* marking_state_;
}; };
MinorMarkCompactCollector::~MinorMarkCompactCollector() = default;
void MinorMarkCompactCollector::SetUp() {} void MinorMarkCompactCollector::SetUp() {}
void MinorMarkCompactCollector::TearDown() {} void MinorMarkCompactCollector::TearDown() {}
@@ -5437,19 +5438,10 @@ constexpr size_t MinorMarkCompactCollector::kMaxParallelTasks;
MinorMarkCompactCollector::MinorMarkCompactCollector(Heap* heap) MinorMarkCompactCollector::MinorMarkCompactCollector(Heap* heap)
: heap_(heap), : heap_(heap),
worklist_(new MarkingWorklist()),
main_thread_worklist_local_(worklist_),
marking_state_(heap->isolate()), marking_state_(heap->isolate()),
non_atomic_marking_state_(heap->isolate()), non_atomic_marking_state_(heap->isolate()),
main_marking_visitor_(new YoungGenerationMarkingVisitor(
heap->isolate(), marking_state(), &main_thread_worklist_local_)),
page_parallel_job_semaphore_(0) {} page_parallel_job_semaphore_(0) {}
MinorMarkCompactCollector::~MinorMarkCompactCollector() {
delete worklist_;
delete main_marking_visitor_;
}
void MinorMarkCompactCollector::CleanupPromotedPages() { void MinorMarkCompactCollector::CleanupPromotedPages() {
for (Page* p : promoted_pages_) { for (Page* p : promoted_pages_) {
p->ClearFlag(Page::PAGE_NEW_NEW_PROMOTION); p->ClearFlag(Page::PAGE_NEW_NEW_PROMOTION);
@@ -5483,8 +5475,8 @@ class YoungGenerationMigrationObserver final : public MigrationObserver {
inline void Move(AllocationSpace dest, HeapObject src, HeapObject dst, inline void Move(AllocationSpace dest, HeapObject src, HeapObject dst,
int size) final { int size) final {
// Migrate color to old generation marking in case the object survived young // Migrate color to old generation marking in case the object survived
// generation garbage collection. // young generation garbage collection.
if (heap_->incremental_marking()->IsMarking()) { if (heap_->incremental_marking()->IsMarking()) {
DCHECK( DCHECK(
heap_->incremental_marking()->atomic_marking_state()->IsWhite(dst)); heap_->incremental_marking()->atomic_marking_state()->IsWhite(dst));
@@ -5515,8 +5507,8 @@ class YoungGenerationRecordMigratedSlotVisitor final
} }
private: private:
// Only record slots for host objects that are considered as live by the full // Only record slots for host objects that are considered as live by the
// collector. // full collector.
inline bool IsLive(HeapObject object) { inline bool IsLive(HeapObject object) {
return collector_->non_atomic_marking_state()->IsBlack(object); return collector_->non_atomic_marking_state()->IsBlack(object);
} }
@@ -5678,8 +5670,8 @@ void MinorMarkCompactCollector::CollectGarbage() {
heap()->concurrent_marking()->ClearMemoryChunkData(p); heap()->concurrent_marking()->ClearMemoryChunkData(p);
} }
} }
// Since we promote all surviving large objects immediately, all remaining // Since we promote all surviving large objects immediately, all
// large objects must be dead. // remaining large objects must be dead.
// TODO(v8:11685): Don't free all as soon as we have an intermediate // TODO(v8:11685): Don't free all as soon as we have an intermediate
// generation. // generation.
heap()->new_lo_space()->FreeDeadObjects([](HeapObject) { return true; }); heap()->new_lo_space()->FreeDeadObjects([](HeapObject) { return true; });
@@ -5780,8 +5772,8 @@ void MinorMarkCompactCollector::ClearNonLiveReferences() {
{ {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_CLEAR_STRING_TABLE); TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_CLEAR_STRING_TABLE);
// Internalized strings are always stored in old space, so there is no need // Internalized strings are always stored in old space, so there is no
// to clean them here. // need to clean them here.
YoungGenerationExternalStringTableCleaner external_visitor(this); YoungGenerationExternalStringTableCleaner external_visitor(this);
heap()->external_string_table_.IterateYoung(&external_visitor); heap()->external_string_table_.IterateYoung(&external_visitor);
heap()->external_string_table_.CleanUpYoung(); heap()->external_string_table_.CleanUpYoung();
@@ -5846,10 +5838,11 @@ class YoungGenerationMarkingTask {
public: public:
YoungGenerationMarkingTask(Isolate* isolate, YoungGenerationMarkingTask(Isolate* isolate,
MinorMarkCompactCollector* collector, MinorMarkCompactCollector* collector,
MarkingWorklist* global_worklist) MarkingWorklists* global_worklists)
: marking_worklist_local_(global_worklist), : marking_worklists_local_(
std::make_unique<MarkingWorklists::Local>(global_worklists)),
marking_state_(collector->marking_state()), marking_state_(collector->marking_state()),
visitor_(isolate, marking_state_, &marking_worklist_local_) {} visitor_(isolate, marking_state_, marking_worklists_local()) {}
void MarkObject(Object object) { void MarkObject(Object object) {
if (!Heap::InYoungGeneration(object)) return; if (!Heap::InYoungGeneration(object)) return;
@@ -5861,13 +5854,17 @@ class YoungGenerationMarkingTask {
void EmptyMarkingWorklist() { void EmptyMarkingWorklist() {
HeapObject object; HeapObject object;
while (marking_worklist_local_.Pop(&object)) { while (marking_worklists_local_->Pop(&object)) {
visitor_.Visit(object); visitor_.Visit(object);
} }
} }
MarkingWorklists::Local* marking_worklists_local() {
return marking_worklists_local_.get();
}
private: private:
MarkingWorklist::Local marking_worklist_local_; std::unique_ptr<MarkingWorklists::Local> marking_worklists_local_;
MarkingState* marking_state_; MarkingState* marking_state_;
YoungGenerationMarkingVisitor visitor_; YoungGenerationMarkingVisitor visitor_;
}; };
@@ -5939,11 +5936,11 @@ class YoungGenerationMarkingJob : public v8::JobTask {
public: public:
YoungGenerationMarkingJob(Isolate* isolate, YoungGenerationMarkingJob(Isolate* isolate,
MinorMarkCompactCollector* collector, MinorMarkCompactCollector* collector,
MarkingWorklist* global_worklist, MarkingWorklists* global_worklists,
std::vector<PageMarkingItem> marking_items) std::vector<PageMarkingItem> marking_items)
: isolate_(isolate), : isolate_(isolate),
collector_(collector), collector_(collector),
global_worklist_(global_worklist), global_worklists_(global_worklists),
marking_items_(std::move(marking_items)), marking_items_(std::move(marking_items)),
remaining_marking_items_(marking_items_.size()), remaining_marking_items_(marking_items_.size()),
generator_(marking_items_.size()) {} generator_(marking_items_.size()) {}
@@ -5966,8 +5963,12 @@ class YoungGenerationMarkingJob : public v8::JobTask {
// the amount of marking that is required. // the amount of marking that is required.
const int kPagesPerTask = 2; const int kPagesPerTask = 2;
size_t items = remaining_marking_items_.load(std::memory_order_relaxed); size_t items = remaining_marking_items_.load(std::memory_order_relaxed);
size_t num_tasks = size_t num_tasks = std::max(
std::max((items + 1) / kPagesPerTask, global_worklist_->Size()); (items + 1) / kPagesPerTask,
global_worklists_->shared()->Size() +
global_worklists_->on_hold()
->Size()); // TODO(v8:13012): If this is used with concurrent
// marking, we need to remove on_hold() here.
if (!FLAG_parallel_marking) { if (!FLAG_parallel_marking) {
num_tasks = std::min<size_t>(1, num_tasks); num_tasks = std::min<size_t>(1, num_tasks);
} }
@@ -5980,7 +5981,7 @@ class YoungGenerationMarkingJob : public v8::JobTask {
double marking_time = 0.0; double marking_time = 0.0;
{ {
TimedScope scope(&marking_time); TimedScope scope(&marking_time);
YoungGenerationMarkingTask task(isolate_, collector_, global_worklist_); YoungGenerationMarkingTask task(isolate_, collector_, global_worklists_);
ProcessMarkingItems(&task); ProcessMarkingItems(&task);
task.EmptyMarkingWorklist(); task.EmptyMarkingWorklist();
} }
@@ -6009,7 +6010,7 @@ class YoungGenerationMarkingJob : public v8::JobTask {
Isolate* isolate_; Isolate* isolate_;
MinorMarkCompactCollector* collector_; MinorMarkCompactCollector* collector_;
MarkingWorklist* global_worklist_; MarkingWorklists* global_worklists_;
std::vector<PageMarkingItem> marking_items_; std::vector<PageMarkingItem> marking_items_;
std::atomic_size_t remaining_marking_items_{0}; std::atomic_size_t remaining_marking_items_{0};
IndexGenerator generator_; IndexGenerator generator_;
@@ -6043,19 +6044,18 @@ void MinorMarkCompactCollector::MarkRootSetInParallel(
// Add tasks and run in parallel. // Add tasks and run in parallel.
{ {
// The main thread might hold local items, while GlobalPoolSize() == 0. // The main thread might hold local items, while GlobalPoolSize() ==
// Flush to ensure these items are visible globally and picked up by the // 0. Flush to ensure these items are visible globally and picked up
// job. // by the job.
main_thread_worklist_local_.Publish(); main_thread_worklists_local_->Publish();
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_ROOTS); TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_ROOTS);
V8::GetCurrentPlatform() V8::GetCurrentPlatform()
->PostJob(v8::TaskPriority::kUserBlocking, ->PostJob(v8::TaskPriority::kUserBlocking,
std::make_unique<YoungGenerationMarkingJob>( std::make_unique<YoungGenerationMarkingJob>(
isolate(), this, worklist(), std::move(marking_items))) isolate(), this, worklists(), std::move(marking_items)))
->Join(); ->Join();
DCHECK(worklist()->IsEmpty()); DCHECK(main_thread_worklists_local_->IsEmpty());
DCHECK(main_thread_worklist_local_.IsLocalEmpty());
} }
} }
} }
@@ -6063,6 +6063,11 @@ void MinorMarkCompactCollector::MarkRootSetInParallel(
void MinorMarkCompactCollector::MarkLiveObjects() { void MinorMarkCompactCollector::MarkLiveObjects() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK); TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK);
main_thread_worklists_local_ =
std::make_unique<MarkingWorklists::Local>(&worklists_);
main_marking_visitor_ = std::make_unique<YoungGenerationMarkingVisitor>(
heap()->isolate(), marking_state(), main_thread_worklists_local());
PostponeInterruptsScope postpone(isolate()); PostponeInterruptsScope postpone(isolate());
RootMarkingVisitor root_visitor(this); RootMarkingVisitor root_visitor(this);
@@ -6085,19 +6090,22 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
if (FLAG_minor_mc_trace_fragmentation) { if (FLAG_minor_mc_trace_fragmentation) {
TraceFragmentation(); TraceFragmentation();
} }
main_thread_worklists_local_.reset();
main_marking_visitor_.reset();
} }
void MinorMarkCompactCollector::DrainMarkingWorklist() { void MinorMarkCompactCollector::DrainMarkingWorklist() {
PtrComprCageBase cage_base(isolate()); PtrComprCageBase cage_base(isolate());
HeapObject object; HeapObject object;
while (main_thread_worklist_local_.Pop(&object)) { while (main_thread_worklists_local_->Pop(&object)) {
DCHECK(!object.IsFreeSpaceOrFiller(cage_base)); DCHECK(!object.IsFreeSpaceOrFiller(cage_base));
DCHECK(object.IsHeapObject()); DCHECK(object.IsHeapObject());
DCHECK(heap()->Contains(object)); DCHECK(heap()->Contains(object));
DCHECK(non_atomic_marking_state()->IsBlack(object)); DCHECK(non_atomic_marking_state()->IsBlack(object));
main_marking_visitor()->Visit(object); main_marking_visitor_->Visit(object);
} }
DCHECK(main_thread_worklist_local_.IsLocalEmpty()); DCHECK(main_thread_worklists_local_->IsEmpty());
} }
void MinorMarkCompactCollector::TraceFragmentation() { void MinorMarkCompactCollector::TraceFragmentation() {
@@ -6144,12 +6152,14 @@ void MinorMarkCompactCollector::TraceFragmentation() {
allocatable_bytes += area_end - p->area_start(); allocatable_bytes += area_end - p->area_start();
CHECK_EQ(allocatable_bytes, live_bytes + free_bytes_of_class[0]); CHECK_EQ(allocatable_bytes, live_bytes + free_bytes_of_class[0]);
} }
PrintIsolate( PrintIsolate(isolate(),
isolate(), "Minor Mark-Compact Fragmentation: allocatable_bytes=%zu "
"Minor Mark-Compact Fragmentation: allocatable_bytes=%zu live_bytes=%zu " "live_bytes=%zu "
"free_bytes=%zu free_bytes_1K=%zu free_bytes_2K=%zu free_bytes_4K=%zu\n", "free_bytes=%zu free_bytes_1K=%zu free_bytes_2K=%zu "
allocatable_bytes, live_bytes, free_bytes_of_class[0], "free_bytes_4K=%zu\n",
free_bytes_of_class[1], free_bytes_of_class[2], free_bytes_of_class[3]); allocatable_bytes, live_bytes, free_bytes_of_class[0],
free_bytes_of_class[1], free_bytes_of_class[2],
free_bytes_of_class[3]);
} }
void MinorMarkCompactCollector::Evacuate() { void MinorMarkCompactCollector::Evacuate() {
@@ -6243,9 +6253,10 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk,
collector_->MakeIterable(static_cast<Page*>(chunk), collector_->MakeIterable(static_cast<Page*>(chunk),
FreeSpaceTreatmentMode::kZapFreeSpace); FreeSpaceTreatmentMode::kZapFreeSpace);
} else if (heap()->incremental_marking()->IsMarking()) { } else if (heap()->incremental_marking()->IsMarking()) {
// When incremental marking is on, we need to clear the mark bits of // When incremental marking is on, we need to clear the mark bits
// the full collector. We cannot yet discard the young generation mark // of the full collector. We cannot yet discard the young
// bits as they are still relevant for pointers updating. // generation mark bits as they are still relevant for pointers
// updating.
collector_->MakeIterable(static_cast<Page*>(chunk), collector_->MakeIterable(static_cast<Page*>(chunk),
FreeSpaceTreatmentMode::kIgnoreFreeSpace); FreeSpaceTreatmentMode::kIgnoreFreeSpace);
} }
@@ -6263,8 +6274,8 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk,
FreeSpaceTreatmentMode::kZapFreeSpace); FreeSpaceTreatmentMode::kZapFreeSpace);
} else if (heap()->incremental_marking()->IsMarking()) { } else if (heap()->incremental_marking()->IsMarking()) {
// When incremental marking is on, we need to clear the mark bits of // When incremental marking is on, we need to clear the mark bits of
// the full collector. We cannot yet discard the young generation mark // the full collector. We cannot yet discard the young generation
// bits as they are still relevant for pointers updating. // mark bits as they are still relevant for pointers updating.
collector_->MakeIterable(static_cast<Page*>(chunk), collector_->MakeIterable(static_cast<Page*>(chunk),
FreeSpaceTreatmentMode::kIgnoreFreeSpace); FreeSpaceTreatmentMode::kIgnoreFreeSpace);
} }

View File

@@ -777,17 +777,17 @@ class MinorMarkCompactCollector final {
std::unique_ptr<UpdatingItem> CreateRememberedSetUpdatingItem( std::unique_ptr<UpdatingItem> CreateRememberedSetUpdatingItem(
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode); MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode);
MarkingWorklists::Local* main_thread_worklists_local() {
return main_thread_worklists_local_.get();
}
private: private:
class RootMarkingVisitor; class RootMarkingVisitor;
static const int kNumMarkers = 8; static const int kNumMarkers = 8;
static const int kMainMarker = 0; static const int kMainMarker = 0;
inline MarkingWorklist* worklist() { return worklist_; } inline MarkingWorklists* worklists() { return &worklists_; }
inline YoungGenerationMarkingVisitor* main_marking_visitor() {
return main_marking_visitor_;
}
void MarkLiveObjects(); void MarkLiveObjects();
void MarkRootSetInParallel(RootMarkingVisitor* root_visitor); void MarkRootSetInParallel(RootMarkingVisitor* root_visitor);
@@ -813,13 +813,13 @@ class MinorMarkCompactCollector final {
Heap* heap_; Heap* heap_;
MarkingWorklist* worklist_; MarkingWorklists worklists_;
MarkingWorklist::Local main_thread_worklist_local_; std::unique_ptr<MarkingWorklists::Local> main_thread_worklists_local_;
std::unique_ptr<YoungGenerationMarkingVisitor> main_marking_visitor_;
MarkingState marking_state_; MarkingState marking_state_;
NonAtomicMarkingState non_atomic_marking_state_; NonAtomicMarkingState non_atomic_marking_state_;
YoungGenerationMarkingVisitor* main_marking_visitor_;
base::Semaphore page_parallel_job_semaphore_; base::Semaphore page_parallel_job_semaphore_;
std::vector<Page*> new_space_evacuation_pages_; std::vector<Page*> new_space_evacuation_pages_;
std::vector<Page*> promoted_pages_; std::vector<Page*> promoted_pages_;

View File

@@ -21,14 +21,6 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
MarkingWorklists::~MarkingWorklists() {
DCHECK(shared_.IsEmpty());
DCHECK(on_hold_.IsEmpty());
DCHECK(other_.IsEmpty());
DCHECK(worklists_.empty());
DCHECK(context_worklists_.empty());
}
void MarkingWorklists::Clear() { void MarkingWorklists::Clear() {
shared_.Clear(); shared_.Clear();
on_hold_.Clear(); on_hold_.Clear();
@@ -106,7 +98,6 @@ MarkingWorklists::Local::Local(
std::unique_ptr<CppMarkingState> cpp_marking_state) std::unique_ptr<CppMarkingState> cpp_marking_state)
: on_hold_(global->on_hold()), : on_hold_(global->on_hold()),
wrapper_(global->wrapper()), wrapper_(global->wrapper()),
is_per_context_mode_(false),
cpp_marking_state_(std::move(cpp_marking_state)) { cpp_marking_state_(std::move(cpp_marking_state)) {
if (global->context_worklists().empty()) { if (global->context_worklists().empty()) {
MarkingWorklist::Local shared(global->shared()); MarkingWorklist::Local shared(global->shared());
@@ -126,17 +117,6 @@ MarkingWorklists::Local::Local(
} }
} }
MarkingWorklists::Local::~Local() {
DCHECK(active_.IsLocalEmpty());
if (is_per_context_mode_) {
for (auto& cw : worklist_by_context_) {
if (cw.first != active_context_) {
DCHECK(cw.second->IsLocalEmpty());
}
}
}
}
void MarkingWorklists::Local::Publish() { void MarkingWorklists::Local::Publish() {
active_.Publish(); active_.Publish();
on_hold_.Publish(); on_hold_.Publish();

View File

@@ -66,7 +66,7 @@ struct ContextWorklistPair {
}; };
// A helper class that owns all global marking worklists. // A helper class that owns all global marking worklists.
class V8_EXPORT_PRIVATE MarkingWorklists { class V8_EXPORT_PRIVATE MarkingWorklists final {
public: public:
class Local; class Local;
// Fake addresses of special contexts used for per-context accounting. // Fake addresses of special contexts used for per-context accounting.
@@ -77,7 +77,9 @@ class V8_EXPORT_PRIVATE MarkingWorklists {
static const Address kOtherContext = 8; static const Address kOtherContext = 8;
MarkingWorklists() = default; MarkingWorklists() = default;
~MarkingWorklists();
// Worklists implicitly check for emptiness on destruction.
~MarkingWorklists() = default;
// Calls the specified callback on each element of the deques and replaces // Calls the specified callback on each element of the deques and replaces
// the element with the result of the callback. If the callback returns // the element with the result of the callback. If the callback returns
@@ -141,16 +143,18 @@ class V8_EXPORT_PRIVATE MarkingWorklists {
// - active_owner == worlist_by_context[active_context_].get() // - active_owner == worlist_by_context[active_context_].get()
// - *active_owner is empty (all fields are null) because its content has // - *active_owner is empty (all fields are null) because its content has
// been moved to active_. // been moved to active_.
class V8_EXPORT_PRIVATE MarkingWorklists::Local { class V8_EXPORT_PRIVATE MarkingWorklists::Local final {
public: public:
static constexpr Address kSharedContext = MarkingWorklists::kSharedContext; static constexpr Address kSharedContext = MarkingWorklists::kSharedContext;
static constexpr Address kOtherContext = MarkingWorklists::kOtherContext; static constexpr Address kOtherContext = MarkingWorklists::kOtherContext;
static constexpr std::nullptr_t kNoCppMarkingState = nullptr; static constexpr std::nullptr_t kNoCppMarkingState = nullptr;
Local( explicit Local(
MarkingWorklists* global, MarkingWorklists* global,
std::unique_ptr<CppMarkingState> cpp_marking_state = kNoCppMarkingState); std::unique_ptr<CppMarkingState> cpp_marking_state = kNoCppMarkingState);
~Local();
// Local worklists implicitly check for emptiness on destruction.
~Local() = default;
inline void Push(HeapObject object); inline void Push(HeapObject object);
inline bool Pop(HeapObject* object); inline bool Pop(HeapObject* object);
@@ -200,7 +204,7 @@ class V8_EXPORT_PRIVATE MarkingWorklists::Local {
MarkingWorklist::Local active_; MarkingWorklist::Local active_;
Address active_context_; Address active_context_;
MarkingWorklist::Local* active_owner_; MarkingWorklist::Local* active_owner_;
bool is_per_context_mode_; bool is_per_context_mode_ = false;
std::unordered_map<Address, std::unique_ptr<MarkingWorklist::Local>> std::unordered_map<Address, std::unique_ptr<MarkingWorklist::Local>>
worklist_by_context_; worklist_by_context_;