[heap] MinorMC: Split up PageMarkingItem further

PageMarkingItem is used to process OLD_TO_NEW regular and typed slots
sets. These slot sets are disjoint and do not share state that needs to
be modified, i.e., can be processed in parallel.

Rework PageMarkingItem to allow for parallel processing of slot sets on
a single page. Remove the lock as it should not be necessary.

The CL does not change the cost function for computing tasks.

Drive-by: Optimize marking a single object on filtering.

Bug: v8:12612
Change-Id: I6595d857d6df23d9d427bcdf5ecb3c9ea1c3c9ad
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4224451
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85694}
This commit is contained in:
Michael Lippautz 2023-02-06 15:54:47 +01:00 committed by V8 LUCI CQ
parent 2262ba881c
commit 908fc3c89c
3 changed files with 42 additions and 21 deletions

View File

@@ -275,8 +275,18 @@ void IncrementalMarking::MarkRoots() {
std::vector<PageMarkingItem> marking_items;
RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(
heap_, [&marking_items](MemoryChunk* chunk) {
marking_items.emplace_back(chunk);
heap(), [&marking_items](MemoryChunk* chunk) {
if (chunk->slot_set<OLD_TO_NEW>()) {
marking_items.emplace_back(
chunk, PageMarkingItem::SlotsType::kRegularSlots);
} else {
chunk->ReleaseInvalidatedSlots<OLD_TO_NEW>();
}
if (chunk->typed_slot_set<OLD_TO_NEW>()) {
marking_items.emplace_back(chunk,
PageMarkingItem::SlotsType::kTypedSlots);
}
});
V8::GetCurrentPlatform()

View File

@@ -6097,9 +6097,7 @@ class YoungGenerationMarkingTask {
marking_state_(heap->marking_state()),
visitor_(isolate, marking_state_, marking_worklists_local()) {}
void MarkObject(Object object) {
if (!Heap::InYoungGeneration(object)) return;
HeapObject heap_object = HeapObject::cast(object);
void MarkYoungObject(HeapObject heap_object) {
if (marking_state_->WhiteToGrey(heap_object)) {
visitor_.Visit(heap_object);
// Objects transition to black when visited.
@@ -6130,13 +6128,17 @@ class YoungGenerationMarkingTask {
};
void PageMarkingItem::Process(YoungGenerationMarkingTask* task) {
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"), "PageMarkingItem::Process");
base::MutexGuard guard(chunk_->mutex());
MarkUntypedPointers(task);
MarkTypedPointers(task);
if (slots_type_ == SlotsType::kRegularSlots) {
MarkUntypedPointers(task);
} else {
MarkTypedPointers(task);
}
}
void PageMarkingItem::MarkUntypedPointers(YoungGenerationMarkingTask* task) {
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
"PageMarkingItem::MarkUntypedPointers");
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew(
chunk_, InvalidatedSlotsFilter::LivenessCheck::kNo);
RememberedSet<OLD_TO_NEW>::Iterate(
@@ -6152,6 +6154,8 @@ void PageMarkingItem::MarkUntypedPointers(YoungGenerationMarkingTask* task) {
}
void PageMarkingItem::MarkTypedPointers(YoungGenerationMarkingTask* task) {
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
"PageMarkingItem::MarkTypedPointers");
RememberedSet<OLD_TO_NEW>::IterateTyped(
chunk_, [this, task](SlotType slot_type, Address slot) {
return UpdateTypedSlotHelper::UpdateTypedSlot(
@@ -6169,15 +6173,10 @@ V8_INLINE SlotCallbackResult PageMarkingItem::CheckAndMarkObject(
std::is_same<TSlot, MaybeObjectSlot>::value,
"Only FullMaybeObjectSlot and MaybeObjectSlot are expected here");
MaybeObject object = *slot;
if (Heap::InYoungGeneration(object)) {
// Marking happens before flipping the young generation, so the object
// has to be in a to page.
DCHECK(Heap::InToPage(object));
HeapObject heap_object;
bool success = object.GetHeapObject(&heap_object);
USE(success);
DCHECK(success);
task->MarkObject(heap_object);
HeapObject heap_object;
if (object.GetHeapObject(&heap_object) &&
Heap::InYoungGeneration(heap_object)) {
task->MarkYoungObject(heap_object);
return KEEP_SLOT;
}
return REMOVE_SLOT;
@@ -6298,7 +6297,17 @@ void MinorMarkCompactCollector::MarkLiveObjectsInParallel(
// Create items for each page.
RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(
heap(), [&marking_items](MemoryChunk* chunk) {
marking_items.emplace_back(chunk);
if (chunk->slot_set<OLD_TO_NEW>()) {
marking_items.emplace_back(
chunk, PageMarkingItem::SlotsType::kRegularSlots);
} else {
chunk->ReleaseInvalidatedSlots<OLD_TO_NEW>();
}
if (chunk->typed_slot_set<OLD_TO_NEW>()) {
marking_items.emplace_back(
chunk, PageMarkingItem::SlotsType::kTypedSlots);
}
});
}
}

View File

@@ -754,7 +754,10 @@ class MinorMarkCompactCollector final : public CollectorBase {
class PageMarkingItem : public ParallelWorkItem {
public:
explicit PageMarkingItem(MemoryChunk* chunk) : chunk_(chunk) {}
enum class SlotsType { kRegularSlots, kTypedSlots };
PageMarkingItem(MemoryChunk* chunk, SlotsType slots_type)
: chunk_(chunk), slots_type_(slots_type) {}
~PageMarkingItem() = default;
void Process(YoungGenerationMarkingTask* task);
@@ -763,14 +766,13 @@ class PageMarkingItem : public ParallelWorkItem {
inline Heap* heap() { return chunk_->heap(); }
void MarkUntypedPointers(YoungGenerationMarkingTask* task);
void MarkTypedPointers(YoungGenerationMarkingTask* task);
template <typename TSlot>
V8_INLINE SlotCallbackResult
CheckAndMarkObject(YoungGenerationMarkingTask* task, TSlot slot);
MemoryChunk* chunk_;
const SlotsType slots_type_;
};
enum class YoungMarkingJobType { kAtomic, kIncremental };