[heap] Rename MarkingDeque to MarkingWorklist.

This prepares the ground for switching the mark-compact collector to the
Worklist data structure instead of the existing marking deque.

BUG=chromium:694255

Change-Id: I0ac4c563018a9619962fb4bf388b5f3cceffb86d
Reviewed-on: https://chromium-review.googlesource.com/544933
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46178}
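For orientation, below is a minimal sketch of the kind of segmented worklist the follow-up work targets. All names in it (Worklist, Segment, kSegmentCapacity) are hypothetical and the code is deliberately simplified to a single local segment plus a mutex-protected shared pool; V8's actual heap worklist differs in detail, so read this as an illustration of the idea, not the implementation.

#include <cstddef>
#include <mutex>
#include <utility>
#include <vector>

class HeapObject;  // Opaque payload; stands in for v8::internal::HeapObject.

// A segmented worklist: objects accumulate in a small local segment, and
// full segments are published to a shared pool that other markers could
// draw from.
class Worklist {
 public:
  static constexpr std::size_t kSegmentCapacity = 64;

  void Push(HeapObject* object) {
    if (local_.size() == kSegmentCapacity) {
      // Publish the full local segment and start a fresh one.
      std::lock_guard<std::mutex> guard(mutex_);
      global_.push_back(std::move(local_));
      local_.clear();
    }
    local_.push_back(object);
  }

  // Returns nullptr once both the local segment and the shared pool are empty.
  HeapObject* Pop() {
    if (local_.empty()) {
      std::lock_guard<std::mutex> guard(mutex_);
      if (global_.empty()) return nullptr;
      local_ = std::move(global_.back());
      global_.pop_back();
    }
    HeapObject* object = local_.back();
    local_.pop_back();
    return object;
  }

  bool IsEmpty() {
    std::lock_guard<std::mutex> guard(mutex_);
    return local_.empty() && global_.empty();
  }

 private:
  using Segment = std::vector<HeapObject*>;
  Segment local_;                // One local segment; V8 keeps one per task.
  std::vector<Segment> global_;  // Shared pool of published full segments.
  std::mutex mutex_;
};

The accessors renamed in this CL (Push, Pop, IsEmpty, and friends on marking_worklist()) map naturally onto an interface of this shape, which is presumably what makes the later data-structure swap largely mechanical.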


@ -13,7 +13,7 @@ void LocalEmbedderHeapTracer::TracePrologue() {
if (!InUse()) return;
CHECK(cached_wrappers_to_trace_.empty());
num_v8_marking_deque_was_empty_ = 0;
num_v8_marking_worklist_was_empty_ = 0;
remote_tracer_->TracePrologue();
}


@ -19,7 +19,7 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
typedef std::pair<void*, void*> WrapperInfo;
LocalEmbedderHeapTracer()
: remote_tracer_(nullptr), num_v8_marking_deque_was_empty_(0) {}
: remote_tracer_(nullptr), num_v8_marking_worklist_was_empty_(0) {}
void SetRemoteTracer(EmbedderHeapTracer* tracer) { remote_tracer_ = tracer; }
bool InUse() { return remote_tracer_ != nullptr; }
@ -45,12 +45,14 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
// are too many of them.
bool RequiresImmediateWrapperProcessing();
void NotifyV8MarkingDequeWasEmpty() { num_v8_marking_deque_was_empty_++; }
void NotifyV8MarkingWorklistWasEmpty() {
num_v8_marking_worklist_was_empty_++;
}
bool ShouldFinalizeIncrementalMarking() {
static const size_t kMaxIncrementalFixpointRounds = 3;
return !FLAG_incremental_marking_wrappers || !InUse() ||
NumberOfWrappersToTrace() == 0 ||
num_v8_marking_deque_was_empty_ > kMaxIncrementalFixpointRounds;
num_v8_marking_worklist_was_empty_ > kMaxIncrementalFixpointRounds;
}
private:
@ -58,7 +60,7 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
EmbedderHeapTracer* remote_tracer_;
WrapperCache cached_wrappers_to_trace_;
size_t num_v8_marking_deque_was_empty_;
size_t num_v8_marking_worklist_was_empty_;
};
} // namespace internal


@ -1536,7 +1536,7 @@ void Heap::MarkCompactEpilogue() {
PreprocessStackTraces();
DCHECK(incremental_marking()->IsStopped());
mark_compact_collector()->marking_deque()->StopUsing();
mark_compact_collector()->marking_worklist()->StopUsing();
}
@ -1790,7 +1790,7 @@ void Heap::Scavenge() {
promotion_queue_.Destroy();
incremental_marking()->UpdateMarkingDequeAfterScavenge();
incremental_marking()->UpdateMarkingWorklistAfterScavenge();
ScavengeWeakObjectRetainer weak_object_retainer(this);
ProcessYoungWeakReferences(&weak_object_retainer);
@ -4254,11 +4254,11 @@ void Heap::FinalizeIncrementalMarkingIfComplete(
if (incremental_marking()->IsMarking() &&
(incremental_marking()->IsReadyToOverApproximateWeakClosure() ||
(!incremental_marking()->finalize_marking_completed() &&
mark_compact_collector()->marking_deque()->IsEmpty() &&
mark_compact_collector()->marking_worklist()->IsEmpty() &&
local_embedder_heap_tracer()->ShouldFinalizeIncrementalMarking()))) {
FinalizeIncrementalMarking(gc_reason);
} else if (incremental_marking()->IsComplete() ||
(mark_compact_collector()->marking_deque()->IsEmpty() &&
(mark_compact_collector()->marking_worklist()->IsEmpty() &&
local_embedder_heap_tracer()
->ShouldFinalizeIncrementalMarking())) {
CollectAllGarbage(current_gc_flags_, gc_reason, current_gc_callback_flags_);
@ -5760,11 +5760,11 @@ bool Heap::SetUp() {
tracer_ = new GCTracer(this);
scavenge_collector_ = new Scavenger(this);
mark_compact_collector_ = new MarkCompactCollector(this);
incremental_marking_->set_marking_deque(
mark_compact_collector_->marking_deque());
incremental_marking_->set_marking_worklist(
mark_compact_collector_->marking_worklist());
#ifdef V8_CONCURRENT_MARKING
concurrent_marking_ =
new ConcurrentMarking(this, mark_compact_collector_->marking_deque());
new ConcurrentMarking(this, mark_compact_collector_->marking_worklist());
#else
concurrent_marking_ = new ConcurrentMarking(this, nullptr);
#endif


@ -33,7 +33,7 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address, size_t) {
IncrementalMarking::IncrementalMarking(Heap* heap)
: heap_(heap),
marking_deque_(nullptr),
marking_worklist_(nullptr),
initial_old_generation_size_(0),
bytes_marked_ahead_of_schedule_(0),
unscanned_bytes_of_large_object_(0),
@ -132,7 +132,7 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
if (ObjectMarking::WhiteToGrey<kAtomicity>(obj, marking_state(obj))) {
marking_deque()->Push(obj);
marking_worklist()->Push(obj);
return true;
}
return false;
@ -143,9 +143,9 @@ void IncrementalMarking::MarkBlackAndPush(HeapObject* obj) {
ObjectMarking::WhiteToGrey<kAtomicity>(obj, marking_state(obj));
if (ObjectMarking::GreyToBlack<kAtomicity>(obj, marking_state(obj))) {
#ifdef V8_CONCURRENT_MARKING
marking_deque()->Push(obj, MarkingThread::kMain, TargetDeque::kBailout);
marking_worklist()->Push(obj, MarkingThread::kMain, TargetDeque::kBailout);
#else
if (!marking_deque()->Push(obj)) {
if (!marking_worklist()->Push(obj)) {
ObjectMarking::BlackToGrey<kAtomicity>(obj, marking_state(obj));
}
#endif
@ -208,7 +208,7 @@ void IncrementalMarking::NotifyLeftTrimming(HeapObject* from, HeapObject* to) {
DCHECK(success);
USE(success);
}
marking_deque()->Push(to);
marking_worklist()->Push(to);
RestartIfNotMarking();
}
}
@ -246,13 +246,14 @@ class IncrementalMarkingMarkingVisitor
HeapObject::RawField(object, end_offset));
start_offset = end_offset;
end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
scan_until_end = heap->incremental_marking()->marking_deque()->IsFull();
scan_until_end =
heap->incremental_marking()->marking_worklist()->IsFull();
} while (scan_until_end && start_offset < object_size);
chunk->set_progress_bar(start_offset);
if (start_offset < object_size) {
if (ObjectMarking::IsGrey<IncrementalMarking::kAtomicity>(
object, heap->incremental_marking()->marking_state(object))) {
heap->incremental_marking()->marking_deque()->Unshift(object);
heap->incremental_marking()->marking_worklist()->Unshift(object);
} else {
DCHECK(ObjectMarking::IsBlack<IncrementalMarking::kAtomicity>(
object, heap->incremental_marking()->marking_state(object)));
@ -571,7 +572,7 @@ void IncrementalMarking::StartMarking() {
PatchIncrementalMarkingRecordWriteStubs(heap_, mode);
marking_deque()->StartUsing();
marking_worklist()->StartUsing();
ActivateIncrementalWriteBarrier();
@ -782,7 +783,7 @@ void IncrementalMarking::FinalizeIncrementally() {
ProcessWeakCells();
int marking_progress =
heap_->mark_compact_collector()->marking_deque()->Size() +
heap_->mark_compact_collector()->marking_worklist()->Size() +
static_cast<int>(
heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace());
@ -812,13 +813,13 @@ void IncrementalMarking::FinalizeIncrementally() {
}
}
void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
if (!IsMarking()) return;
Map* filler_map = heap_->one_pointer_filler_map();
marking_deque()->Update([this, filler_map](HeapObject* obj) -> HeapObject* {
marking_worklist()->Update([this,
filler_map](HeapObject* obj) -> HeapObject* {
DCHECK(obj->IsHeapObject());
// Only pointers to from space have to be updated.
if (heap_->InFromSpace(obj)) {
@ -908,11 +909,11 @@ void IncrementalMarking::RevisitObject(HeapObject* obj) {
IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
}
intptr_t IncrementalMarking::ProcessMarkingDeque(
intptr_t IncrementalMarking::ProcessMarkingWorklist(
intptr_t bytes_to_process, ForceCompletionAction completion) {
intptr_t bytes_processed = 0;
while (bytes_processed < bytes_to_process || completion == FORCE_COMPLETION) {
HeapObject* obj = marking_deque()->Pop();
HeapObject* obj = marking_worklist()->Pop();
if (obj == nullptr) break;
// Left trimming may result in white, grey, or black filler objects on the
// marking deque. Ignore these objects.
@ -942,7 +943,7 @@ void IncrementalMarking::Hurry() {
// forced e.g. in tests. It should not happen when COMPLETE was set when
// incremental marking finished and a regular GC was triggered after that
// because should_hurry_ will force a full GC.
if (!marking_deque()->IsEmpty()) {
if (!marking_worklist()->IsEmpty()) {
double start = 0.0;
if (FLAG_trace_incremental_marking) {
start = heap_->MonotonicallyIncreasingTimeInMs();
@ -952,7 +953,7 @@ void IncrementalMarking::Hurry() {
}
// TODO(gc) hurry can mark objects it encounters black as mutator
// was stopped.
ProcessMarkingDeque(0, FORCE_COMPLETION);
ProcessMarkingWorklist(0, FORCE_COMPLETION);
state_ = COMPLETE;
if (FLAG_trace_incremental_marking) {
double end = heap_->MonotonicallyIncreasingTimeInMs();
@ -1104,7 +1105,7 @@ double IncrementalMarking::AdvanceIncrementalMarking(
remaining_time_in_ms =
deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
} while (remaining_time_in_ms >= kStepSizeInMs && !IsComplete() &&
!marking_deque()->IsEmpty());
!marking_worklist()->IsEmpty());
return remaining_time_in_ms;
}
@ -1201,12 +1202,12 @@ size_t IncrementalMarking::Step(size_t bytes_to_process,
size_t bytes_processed = 0;
if (state_ == MARKING) {
bytes_processed = ProcessMarkingDeque(bytes_to_process);
bytes_processed = ProcessMarkingWorklist(bytes_to_process);
if (step_origin == StepOrigin::kTask) {
bytes_marked_ahead_of_schedule_ += bytes_processed;
}
if (marking_deque()->IsEmpty()) {
if (marking_worklist()->IsEmpty()) {
if (heap_->local_embedder_heap_tracer()
->ShouldFinalizeIncrementalMarking()) {
if (completion == FORCE_COMPLETION ||
@ -1220,7 +1221,7 @@ size_t IncrementalMarking::Step(size_t bytes_to_process,
IncrementIdleMarkingDelayCounter();
}
} else {
heap_->local_embedder_heap_tracer()->NotifyV8MarkingDequeWasEmpty();
heap_->local_embedder_heap_tracer()->NotifyV8MarkingWorklistWasEmpty();
}
}
}


@ -138,7 +138,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
void FinalizeIncrementally();
void UpdateMarkingDequeAfterScavenge();
void UpdateMarkingWorklistAfterScavenge();
void Hurry();
@ -262,13 +262,14 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
void AbortBlackAllocation();
MarkingDeque* marking_deque() {
SLOW_DCHECK(marking_deque_ != nullptr);
return marking_deque_;
MarkCompactCollector::MarkingWorklist* marking_worklist() {
SLOW_DCHECK(marking_worklist_ != nullptr);
return marking_worklist_;
}
void set_marking_deque(MarkingDeque* marking_deque) {
marking_deque_ = marking_deque;
void set_marking_worklist(
MarkCompactCollector::MarkingWorklist* marking_worklist) {
marking_worklist_ = marking_worklist;
}
private:
@ -311,7 +312,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
static void SetNewSpacePageFlags(MemoryChunk* chunk, bool is_marking);
INLINE(intptr_t ProcessMarkingDeque(
INLINE(intptr_t ProcessMarkingWorklist(
intptr_t bytes_to_process,
ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION));
@ -328,7 +329,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
size_t StepSizeToMakeProgress();
Heap* heap_;
MarkingDeque* marking_deque_;
MarkCompactCollector::MarkingWorklist* marking_worklist_;
double start_time_ms_;
size_t initial_old_generation_size_;


@ -15,7 +15,7 @@ namespace internal {
void MarkCompactCollector::PushBlack(HeapObject* obj) {
DCHECK((ObjectMarking::IsBlack<AccessMode::NON_ATOMIC>(
obj, MarkingState::Internal(obj))));
if (!marking_deque()->Push(obj)) {
if (!marking_worklist()->Push(obj)) {
ObjectMarking::BlackToGrey<AccessMode::NON_ATOMIC>(
obj, MarkingState::Internal(obj));
}
@ -23,7 +23,7 @@ void MarkCompactCollector::PushBlack(HeapObject* obj) {
void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
DCHECK(ObjectMarking::IsBlack(obj, MarkingState::Internal(obj)));
if (!marking_deque()->Unshift(obj)) {
if (!marking_worklist()->Unshift(obj)) {
ObjectMarking::BlackToGrey(obj, MarkingState::Internal(obj));
}
}


@ -404,7 +404,7 @@ MarkCompactCollector::MarkCompactCollector(Heap* heap)
compacting_(false),
black_allocation_(false),
have_code_to_deoptimize_(false),
marking_deque_(heap),
marking_worklist_(heap),
sweeper_(heap) {
old_to_new_slots_ = -1;
}
@ -414,14 +414,14 @@ void MarkCompactCollector::SetUp() {
DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
marking_deque()->SetUp();
marking_worklist()->SetUp();
}
void MinorMarkCompactCollector::SetUp() {}
void MarkCompactCollector::TearDown() {
AbortCompaction();
marking_deque()->TearDown();
marking_worklist()->TearDown();
}
void MinorMarkCompactCollector::TearDown() {}
@ -943,7 +943,7 @@ void MarkCompactCollector::Prepare() {
AbortTransitionArrays();
AbortCompaction();
heap_->local_embedder_heap_tracer()->AbortTracing();
marking_deque()->Clear();
marking_worklist()->Clear();
was_marked_incrementally_ = false;
}
@ -1252,7 +1252,7 @@ class MarkCompactCollector::RootMarkingVisitor : public ObjectVisitor,
MarkCompactMarkingVisitor::IterateBody(map, object);
// Mark all the objects reachable from the map and body. May leave
// overflowed objects in the heap.
collector_->EmptyMarkingDeque();
collector_->EmptyMarkingWorklist();
}
}
@ -1420,20 +1420,20 @@ template <class T>
void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) {
// The caller should ensure that the marking stack is initially not full,
// so that we don't waste effort pointlessly scanning for objects.
DCHECK(!marking_deque()->IsFull());
DCHECK(!marking_worklist()->IsFull());
Map* filler_map = heap()->one_pointer_filler_map();
for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) {
if ((object->map() != filler_map) &&
ObjectMarking::GreyToBlack(object, MarkingState::Internal(object))) {
PushBlack(object);
if (marking_deque()->IsFull()) return;
if (marking_worklist()->IsFull()) return;
}
}
}
void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) {
DCHECK(!marking_deque()->IsFull());
DCHECK(!marking_worklist()->IsFull());
for (auto object_and_size :
LiveObjectRange<kGreyObjects>(p, marking_state(p))) {
HeapObject* const object = object_and_size.first;
@ -1441,7 +1441,7 @@ void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) {
DCHECK(success);
USE(success);
PushBlack(object);
if (marking_deque()->IsFull()) return;
if (marking_worklist()->IsFull()) return;
}
}
@ -1985,7 +1985,7 @@ class EvacuateRecordOnlyVisitor final : public HeapObjectVisitor {
void MarkCompactCollector::DiscoverGreyObjectsInSpace(PagedSpace* space) {
for (Page* p : *space) {
DiscoverGreyObjectsOnPage(p);
if (marking_deque()->IsFull()) return;
if (marking_worklist()->IsFull()) return;
}
}
@ -1994,7 +1994,7 @@ void MarkCompactCollector::DiscoverGreyObjectsInNewSpace() {
NewSpace* space = heap()->new_space();
for (Page* page : PageRange(space->bottom(), space->top())) {
DiscoverGreyObjectsOnPage(page);
if (marking_deque()->IsFull()) return;
if (marking_worklist()->IsFull()) return;
}
}
@ -2013,7 +2013,7 @@ void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) {
MarkingState::Internal(string_table))) {
// Explicitly mark the prefix.
string_table->IteratePrefix(visitor);
ProcessMarkingDeque();
ProcessMarkingWorklist();
}
}
@ -2026,9 +2026,9 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
MarkStringTable(visitor);
// There may be overflowed objects in the heap. Visit them now.
while (marking_deque()->overflowed()) {
RefillMarkingDeque();
EmptyMarkingDeque();
while (marking_worklist()->overflowed()) {
RefillMarkingWorklist();
EmptyMarkingWorklist();
}
}
@ -2036,9 +2036,9 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
// Before: the marking stack contains zero or more heap object pointers.
// After: the marking stack is empty, and all objects reachable from the
// marking stack have been marked, or are overflowed in the heap.
void MarkCompactCollector::EmptyMarkingDeque() {
void MarkCompactCollector::EmptyMarkingWorklist() {
HeapObject* object;
while ((object = marking_deque()->Pop()) != nullptr) {
while ((object = marking_worklist()->Pop()) != nullptr) {
DCHECK(!object->IsFiller());
DCHECK(object->IsHeapObject());
DCHECK(heap()->Contains(object));
@ -2049,7 +2049,7 @@ void MarkCompactCollector::EmptyMarkingDeque() {
MarkObject(map);
MarkCompactMarkingVisitor::IterateBody(map, object);
}
DCHECK(marking_deque()->IsEmpty());
DCHECK(marking_worklist()->IsEmpty());
}
@ -2058,44 +2058,44 @@ void MarkCompactCollector::EmptyMarkingDeque() {
// before sweeping completes. If sweeping completes, there are no remaining
// overflowed objects in the heap so the overflow flag on the markings stack
// is cleared.
void MarkCompactCollector::RefillMarkingDeque() {
void MarkCompactCollector::RefillMarkingWorklist() {
isolate()->CountUsage(v8::Isolate::UseCounterFeature::kMarkDequeOverflow);
DCHECK(marking_deque()->overflowed());
DCHECK(marking_worklist()->overflowed());
DiscoverGreyObjectsInNewSpace();
if (marking_deque()->IsFull()) return;
if (marking_worklist()->IsFull()) return;
DiscoverGreyObjectsInSpace(heap()->old_space());
if (marking_deque()->IsFull()) return;
if (marking_worklist()->IsFull()) return;
DiscoverGreyObjectsInSpace(heap()->code_space());
if (marking_deque()->IsFull()) return;
if (marking_worklist()->IsFull()) return;
DiscoverGreyObjectsInSpace(heap()->map_space());
if (marking_deque()->IsFull()) return;
if (marking_worklist()->IsFull()) return;
LargeObjectIterator lo_it(heap()->lo_space());
DiscoverGreyObjectsWithIterator(&lo_it);
if (marking_deque()->IsFull()) return;
if (marking_worklist()->IsFull()) return;
marking_deque()->ClearOverflowed();
marking_worklist()->ClearOverflowed();
}
// Mark all objects reachable (transitively) from objects on the marking
// stack. Before: the marking stack contains zero or more heap object
// pointers. After: the marking stack is empty and there are no overflowed
// objects in the heap.
void MarkCompactCollector::ProcessMarkingDeque() {
EmptyMarkingDeque();
while (marking_deque()->overflowed()) {
RefillMarkingDeque();
EmptyMarkingDeque();
void MarkCompactCollector::ProcessMarkingWorklist() {
EmptyMarkingWorklist();
while (marking_worklist()->overflowed()) {
RefillMarkingWorklist();
EmptyMarkingWorklist();
}
DCHECK(marking_deque()->IsEmpty());
DCHECK(marking_worklist()->IsEmpty());
}
// Mark all objects reachable (transitively) from objects on the marking
// stack including references only considered in the atomic marking pause.
void MarkCompactCollector::ProcessEphemeralMarking(
bool only_process_harmony_weak_collections) {
DCHECK(marking_deque()->IsEmpty() && !marking_deque()->overflowed());
DCHECK(marking_worklist()->IsEmpty() && !marking_worklist()->overflowed());
bool work_to_do = true;
while (work_to_do) {
if (!only_process_harmony_weak_collections) {
@ -2115,10 +2115,10 @@ void MarkCompactCollector::ProcessEphemeralMarking(
heap_->local_embedder_heap_tracer()->ClearCachedWrappersToTrace();
}
ProcessWeakCollections();
work_to_do = !marking_deque()->IsEmpty();
ProcessMarkingDeque();
work_to_do = !marking_worklist()->IsEmpty();
ProcessMarkingWorklist();
}
CHECK(marking_deque()->IsEmpty());
CHECK(marking_worklist()->IsEmpty());
CHECK_EQ(0, heap()->local_embedder_heap_tracer()->NumberOfWrappersToTrace());
}
@ -2134,7 +2134,7 @@ void MarkCompactCollector::ProcessTopOptimizedFrame(
if (!code->CanDeoptAt(it.frame()->pc())) {
Code::BodyDescriptor::IterateBody(code, visitor);
}
ProcessMarkingDeque();
ProcessMarkingWorklist();
return;
}
}
@ -2219,7 +2219,7 @@ class YoungGenerationMarkingVisitor final : public NewSpaceVisitor {
Object* target = *slot;
if (heap_->InNewSpace(target)) {
HeapObject* target_object = HeapObject::cast(target);
MarkObjectViaMarkingDeque(target_object);
MarkObjectViaMarkingWorklist(target_object);
}
}
@ -2231,7 +2231,7 @@ class YoungGenerationMarkingVisitor final : public NewSpaceVisitor {
return MarkingState::External(object);
}
inline void MarkObjectViaMarkingDeque(HeapObject* object) {
inline void MarkObjectViaMarkingWorklist(HeapObject* object) {
if (ObjectMarking::WhiteToGrey<AccessMode::ATOMIC>(object,
marking_state(object))) {
// Marking deque overflow is unsupported for the young generation.
@ -2240,7 +2240,7 @@ class YoungGenerationMarkingVisitor final : public NewSpaceVisitor {
}
Heap* heap_;
WorklistView worklist_;
MinorMarkCompactCollector::MarkingWorklist worklist_;
};
class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor {
@ -2273,7 +2273,7 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor {
if (ObjectMarking::WhiteToGrey<AccessMode::NON_ATOMIC>(
object, marking_state(object))) {
collector_->main_marking_visitor()->Visit(object);
collector_->EmptyMarkingDeque();
collector_->EmptyMarkingWorklist();
}
}
@ -2296,11 +2296,11 @@ class YoungGenerationMarkingTask : public ItemParallelJob::Task {
public:
YoungGenerationMarkingTask(Isolate* isolate,
MinorMarkCompactCollector* collector,
Worklist* marking_deque, int task_id)
Worklist* global_worklist, int task_id)
: ItemParallelJob::Task(isolate),
collector_(collector),
marking_deque_(marking_deque, task_id),
visitor_(isolate->heap(), marking_deque, task_id) {
marking_worklist_(global_worklist, task_id),
visitor_(isolate->heap(), global_worklist, task_id) {
local_live_bytes_.reserve(isolate->heap()->new_space()->Capacity() /
Page::kPageSize);
}
@ -2313,10 +2313,10 @@ class YoungGenerationMarkingTask : public ItemParallelJob::Task {
while ((item = GetItem<MarkingItem>()) != nullptr) {
item->Process(this);
item->MarkFinished();
EmptyLocalMarkingDeque();
EmptyLocalMarkingWorklist();
}
EmptyMarkingDeque();
DCHECK(marking_deque_.IsLocalEmpty());
EmptyMarkingWorklist();
DCHECK(marking_worklist_.IsLocalEmpty());
FlushLiveBytes();
}
if (FLAG_trace_minor_mc_parallel_marking) {
@ -2340,17 +2340,17 @@ class YoungGenerationMarkingTask : public ItemParallelJob::Task {
return MarkingState::External(object);
}
void EmptyLocalMarkingDeque() {
void EmptyLocalMarkingWorklist() {
HeapObject* object = nullptr;
while (marking_deque_.Pop(&object)) {
while (marking_worklist_.Pop(&object)) {
const int size = visitor_.Visit(object);
IncrementLiveBytes(object, size);
}
}
void EmptyMarkingDeque() {
void EmptyMarkingWorklist() {
HeapObject* object = nullptr;
while (marking_deque_.Pop(&object)) {
while (marking_worklist_.Pop(&object)) {
const int size = visitor_.Visit(object);
IncrementLiveBytes(object, size);
}
@ -2369,7 +2369,7 @@ class YoungGenerationMarkingTask : public ItemParallelJob::Task {
}
MinorMarkCompactCollector* collector_;
WorklistView marking_deque_;
MinorMarkCompactCollector::MarkingWorklist marking_worklist_;
YoungGenerationMarkingVisitor visitor_;
std::unordered_map<Page*, intptr_t, Page::Hasher> local_live_bytes_;
};
@ -2621,7 +2621,7 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_WEAK);
heap()->IterateEncounteredWeakCollections(&root_visitor);
ProcessMarkingDeque();
ProcessMarkingWorklist();
}
{
@ -2630,18 +2630,18 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
&IsUnmarkedObjectForYoungGeneration);
isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots(
&root_visitor);
ProcessMarkingDeque();
ProcessMarkingWorklist();
}
}
void MinorMarkCompactCollector::ProcessMarkingDeque() {
EmptyMarkingDeque();
void MinorMarkCompactCollector::ProcessMarkingWorklist() {
EmptyMarkingWorklist();
}
void MinorMarkCompactCollector::EmptyMarkingDeque() {
WorklistView worklist_view(worklist(), kMainMarker);
void MinorMarkCompactCollector::EmptyMarkingWorklist() {
MarkingWorklist marking_worklist(worklist(), kMainMarker);
HeapObject* object = nullptr;
while (worklist_view.Pop(&object)) {
while (marking_worklist.Pop(&object)) {
DCHECK(!object->IsFiller());
DCHECK(object->IsHeapObject());
DCHECK(heap()->Contains(object));
@ -2651,7 +2651,7 @@ void MinorMarkCompactCollector::EmptyMarkingDeque() {
object, marking_state(object))));
main_marking_visitor()->Visit(object);
}
DCHECK(worklist_view.IsLocalEmpty());
DCHECK(marking_worklist.IsLocalEmpty());
}
void MinorMarkCompactCollector::CollectGarbage() {
@ -2680,7 +2680,7 @@ void MinorMarkCompactCollector::CollectGarbage() {
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARKING_DEQUE);
heap()->incremental_marking()->UpdateMarkingDequeAfterScavenge();
heap()->incremental_marking()->UpdateMarkingWorklistAfterScavenge();
}
{
@ -2847,7 +2847,7 @@ void MarkCompactCollector::MarkLiveObjects() {
state_ = MARK_LIVE_OBJECTS;
#endif
marking_deque()->StartUsing();
marking_worklist()->StartUsing();
heap_->local_embedder_heap_tracer()->EnterFinalPause();
@ -2883,7 +2883,7 @@ void MarkCompactCollector::MarkLiveObjects() {
GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES);
heap()->isolate()->global_handles()->IdentifyWeakHandles(
&IsUnmarkedHeapObject);
ProcessMarkingDeque();
ProcessMarkingWorklist();
}
// Then we mark the objects.
@ -2891,7 +2891,7 @@ void MarkCompactCollector::MarkLiveObjects() {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS);
heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor);
ProcessMarkingDeque();
ProcessMarkingWorklist();
}
// Repeat Harmony weak maps marking to mark unmarked objects reachable from


@ -33,12 +33,6 @@ class ThreadLocalTop;
class Worklist;
class YoungGenerationMarkingVisitor;
#ifdef V8_CONCURRENT_MARKING
using MarkingDeque = ConcurrentMarkingDeque;
#else
using MarkingDeque = SequentialMarkingDeque;
#endif
class ObjectMarking : public AllStatic {
public:
V8_INLINE static MarkBit MarkBitFrom(HeapObject* obj,
@ -290,8 +284,8 @@ class MarkCompactCollectorBase {
virtual void MarkLiveObjects() = 0;
// Mark objects reachable (transitively) from objects in the marking
// stack.
virtual void EmptyMarkingDeque() = 0;
virtual void ProcessMarkingDeque() = 0;
virtual void EmptyMarkingWorklist() = 0;
virtual void ProcessMarkingWorklist() = 0;
// Clear non-live references held in side data structures.
virtual void ClearNonLiveReferences() = 0;
virtual void EvacuatePrologue() = 0;
@ -346,6 +340,7 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
void CleanupSweepToIteratePages();
private:
using MarkingWorklist = WorklistView;
class RootMarkingVisitorSeedOnly;
class RootMarkingVisitor;
@ -360,8 +355,8 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
void MarkLiveObjects() override;
void MarkRootSetInParallel();
void ProcessMarkingDeque() override;
void EmptyMarkingDeque() override;
void ProcessMarkingWorklist() override;
void EmptyMarkingWorklist() override;
void ClearNonLiveReferences() override;
void EvacuatePrologue() override;
@ -386,6 +381,12 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
// Collector for young and old generation.
class MarkCompactCollector final : public MarkCompactCollectorBase {
public:
#ifdef V8_CONCURRENT_MARKING
using MarkingWorklist = ConcurrentMarkingDeque;
#else
using MarkingWorklist = SequentialMarkingDeque;
#endif
class RootMarkingVisitor;
class Sweeper {
@ -539,7 +540,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
bool evacuation() const { return evacuation_; }
MarkingDeque* marking_deque() { return &marking_deque_; }
MarkingWorklist* marking_worklist() { return &marking_worklist_; }
Sweeper& sweeper() { return sweeper_; }
@ -595,7 +596,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// the string table are weak.
void MarkStringTable(RootMarkingVisitor* visitor);
void ProcessMarkingDeque() override;
void ProcessMarkingWorklist() override;
// Mark objects reachable (transitively) from objects in the marking stack
// or overflowed in the heap. This respects references only considered in
@ -615,15 +616,15 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// This function empties the marking stack, but may leave overflowed objects
// in the heap, in which case the marking stack's overflow flag will be set.
void EmptyMarkingDeque() override;
void EmptyMarkingWorklist() override;
// Refill the marking stack with overflowed objects from the heap. This
// function either leaves the marking stack full or clears the overflow
// flag on the marking stack.
void RefillMarkingDeque();
void RefillMarkingWorklist();
// Helper methods for refilling the marking stack by discovering grey objects
// on various pages of the heap. Used by {RefillMarkingDeque} only.
// on various pages of the heap. Used by {RefillMarkingWorklist} only.
template <class T>
void DiscoverGreyObjectsWithIterator(T* it);
void DiscoverGreyObjectsOnPage(MemoryChunk* p);
@ -713,7 +714,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
bool have_code_to_deoptimize_;
MarkingDeque marking_deque_;
MarkingWorklist marking_worklist_;
// Candidates for pages that should be evacuated.
List<Page*> evacuation_candidates_;


@ -2370,7 +2370,7 @@ TEST(IdleNotificationFinishMarking) {
IncrementalMarking::DO_NOT_FORCE_COMPLETION, StepOrigin::kV8);
CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
} while (
!CcTest::heap()->mark_compact_collector()->marking_deque()->IsEmpty());
!CcTest::heap()->mark_compact_collector()->marking_worklist()->IsEmpty());
// The next invocations of incremental marking are not going to complete
// marking