cppgc: Refactor visitation 3/3

Split off MarkingWorklists from Marker and introduce MarkerBase.

MarkerBase refers only to interface types for passing along visitors.
The concrete Marker provides the implementation for these interfaces.
The unified heap marker internally uses different marking visitors but
provides an implementation of the same interface.

Change-Id: Ibc4b2c88e2e69bd303a95da7d167a701934f4a07
Bug: chromium:1056170
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2270539
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68676}
Michael Lippautz authored 2020-07-03 12:25:25 +02:00, committed by Commit Bot
parent 5ab2769012, commit 8cf4ca8f75
12 changed files with 272 additions and 177 deletions
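The split is easiest to see as a condensed sketch before reading the diff:
MarkerBase reaches its visitors only through virtual accessors, and each
concrete marker decides which visitor implementations back them. The sketch
below uses stand-in visitor types rather than the real cppgc headers.

// Minimal sketch of the pattern; Visitor and ConservativeTracingVisitor
// are stand-ins for the real cppgc types appearing in the diff below.
class Visitor {};
class ConservativeTracingVisitor {};

// MarkerBase owns the marking algorithm but never names concrete visitors.
class MarkerBase {
 public:
  virtual ~MarkerBase() = default;

 protected:
  virtual Visitor& visitor() = 0;
  virtual ConservativeTracingVisitor& conservative_visitor() = 0;
};

// Each concrete marker (Marker, UnifiedHeapMarker) owns its visitors and
// exposes them through the accessors.
class Marker final : public MarkerBase {
 protected:
  Visitor& visitor() final { return marking_visitor_; }
  ConservativeTracingVisitor& conservative_visitor() final {
    return conservative_marking_visitor_;
  }

 private:
  Visitor marking_visitor_;
  ConservativeTracingVisitor conservative_marking_visitor_;
};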


@@ -4215,6 +4215,8 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/marking-state.h",
"src/heap/cppgc/marking-visitor.cc",
"src/heap/cppgc/marking-visitor.h",
"src/heap/cppgc/marking-worklists.cc",
"src/heap/cppgc/marking-worklists.h",
"src/heap/cppgc/object-allocator.cc",
"src/heap/cppgc/object-allocator.h",
"src/heap/cppgc/object-start-bitmap.h",


@@ -59,21 +59,34 @@ class CppgcPlatformAdapter final : public cppgc::Platform {
v8::Isolate* isolate_;
};
class UnifiedHeapMarker : public cppgc::internal::Marker {
class UnifiedHeapMarker : public cppgc::internal::MarkerBase {
public:
explicit UnifiedHeapMarker(cppgc::internal::HeapBase& heap);
void AddObject(void*);
// TODO(chromium:1056170): Implement unified heap specific
// CreateMutatorThreadMarkingVisitor and AdvanceMarkingWithDeadline.
protected:
cppgc::Visitor& visitor() final { return marking_visitor_; }
cppgc::internal::ConservativeTracingVisitor& conservative_visitor() final {
return conservative_marking_visitor_;
}
heap::base::StackVisitor& stack_visitor() final {
return conservative_marking_visitor_;
}
private:
// TODO(chromium:1056170): Implement unified heap specific marking visitors.
cppgc::internal::MarkingVisitor marking_visitor_;
cppgc::internal::ConservativeMarkingVisitor conservative_marking_visitor_;
};
UnifiedHeapMarker::UnifiedHeapMarker(cppgc::internal::HeapBase& heap)
: cppgc::internal::Marker(heap) {}
: cppgc::internal::MarkerBase(heap),
marking_visitor_(heap, marking_state()),
conservative_marking_visitor_(heap, marking_state(), marking_visitor_) {}
void UnifiedHeapMarker::AddObject(void* object) {
mutator_marking_state_->MarkAndPush(
mutator_marking_state_.MarkAndPush(
cppgc::internal::HeapObjectHeader::FromPayload(object));
}
@@ -97,7 +110,7 @@ void CppHeap::RegisterV8References(
}
void CppHeap::TracePrologue(TraceFlags flags) {
marker_ = std::make_unique<UnifiedHeapMarker>(AsBase());
marker_.reset(new UnifiedHeapMarker(AsBase()));
const UnifiedHeapMarker::MarkingConfig marking_config{
UnifiedHeapMarker::MarkingConfig::CollectionType::kMajor,
cppgc::Heap::StackState::kNoHeapPointers,


@@ -35,7 +35,7 @@ namespace testing {
class TestWithHeap;
}
class Marker;
class MarkerBase;
class PageBackend;
class PreFinalizerHandler;
class StatsCollector;
@@ -90,7 +90,7 @@ class V8_EXPORT_PRIVATE HeapBase {
return prefinalizer_handler_.get();
}
Marker* marker() const { return marker_.get(); }
MarkerBase* marker() const { return marker_.get(); }
ObjectAllocator& object_allocator() { return object_allocator_; }
@@ -128,7 +128,7 @@ class V8_EXPORT_PRIVATE HeapBase {
std::unique_ptr<StatsCollector> stats_collector_;
std::unique_ptr<heap::base::Stack> stack_;
std::unique_ptr<PreFinalizerHandler> prefinalizer_handler_;
std::unique_ptr<Marker> marker_;
std::unique_ptr<MarkerBase> marker_;
ObjectAllocator object_allocator_;
Sweeper sweeper_;


@@ -50,7 +50,7 @@ void ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
}
// Visit remembered set that was recorded in the generational barrier.
void VisitRememberedSlots(HeapBase& heap, MarkingVisitor* visitor) {
void VisitRememberedSlots(HeapBase& heap, MarkingState& marking_state) {
#if defined(CPPGC_YOUNG_GENERATION)
for (void* slot : heap.remembered_slots()) {
auto& slot_header = BasePage::FromInnerAddress(&heap, slot)
@@ -60,10 +60,11 @@ void VisitRememberedSlots(HeapBase& heap, MarkingVisitor* visitor) {
// top level (with the guarantee that no objects are currently in
// construction). This can be ensured by running young GCs from safe points
// or by reintroducing nested allocation scopes that avoid finalization.
DCHECK(!MarkingVisitor::IsInConstruction(slot_header));
DCHECK(!slot_header
            .IsInConstruction<HeapObjectHeader::AccessMode::kNonAtomic>());
void* value = *reinterpret_cast<void**>(slot);
visitor->DynamicallyMarkAddress(static_cast<Address>(value));
marking_state.DynamicallyMarkAddress(static_cast<Address>(value));
}
#endif
}
@@ -99,28 +100,25 @@ bool DrainWorklistWithDeadline(v8::base::TimeTicks deadline, Worklist* worklist,
} // namespace
constexpr int Marker::kMutatorThreadId;
Marker::Marker(HeapBase& heap)
MarkerBase::MarkerBase(HeapBase& heap)
: heap_(heap),
mutator_marking_state_(std::make_unique<MarkingState>(
heap, &marking_worklist_, &not_fully_constructed_worklist_,
&weak_callback_worklist_, Marker::kMutatorThreadId)),
marking_visitor_(CreateMutatorThreadMarkingVisitor()),
conservative_marking_visitor_(
std::make_unique<ConservativeMarkingVisitor>(
heap, *mutator_marking_state_.get(), *marking_visitor_.get())) {}
mutator_marking_state_(
heap, marking_worklists_.marking_worklist(),
marking_worklists_.not_fully_constructed_worklist(),
marking_worklists_.weak_callback_worklist(),
MarkingWorklists::kMutatorThreadId) {}
Marker::~Marker() {
MarkerBase::~MarkerBase() {
// The fixed point iteration may have found not-fully-constructed objects.
// Such objects should have already been found through the stack scan though
// and should thus already be marked.
if (!not_fully_constructed_worklist_.IsEmpty()) {
if (!marking_worklists_.not_fully_constructed_worklist()->IsEmpty()) {
#if DEBUG
DCHECK_NE(MarkingConfig::StackState::kNoHeapPointers, config_.stack_state);
NotFullyConstructedItem item;
NotFullyConstructedWorklist::View view(&not_fully_constructed_worklist_,
kMutatorThreadId);
MarkingWorklists::NotFullyConstructedItem item;
MarkingWorklists::NotFullyConstructedWorklist::View view(
marking_worklists_.not_fully_constructed_worklist(),
MarkingWorklists::kMutatorThreadId);
while (view.Pop(&item)) {
const HeapObjectHeader& header =
BasePage::FromPayload(item)->ObjectHeaderFromInnerAddress(
@@ -128,12 +126,12 @@ Marker::~Marker() {
DCHECK(header.IsMarked());
}
#else
not_fully_constructed_worklist_.Clear();
marking_worklists_.not_fully_constructed_worklist()->Clear();
#endif
}
}
void Marker::StartMarking(MarkingConfig config) {
void MarkerBase::StartMarking(MarkingConfig config) {
heap().stats_collector()->NotifyMarkingStarted();
config_ = config;
@@ -141,66 +139,62 @@ void Marker::StartMarking(MarkingConfig config) {
EnterIncrementalMarkingIfNeeded(config, heap());
}
void Marker::EnterAtomicPause(MarkingConfig config) {
void MarkerBase::EnterAtomicPause(MarkingConfig config) {
ExitIncrementalMarkingIfNeeded(config_, heap());
config_ = config;
// VisitRoots also resets the LABs.
VisitRoots();
if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
FlushNotFullyConstructedObjects();
marking_worklists_.FlushNotFullyConstructedObjects();
} else {
MarkNotFullyConstructedObjects();
}
}
void Marker::LeaveAtomicPause() {
void MarkerBase::LeaveAtomicPause() {
ResetRememberedSet(heap());
heap().stats_collector()->NotifyMarkingCompleted(
mutator_marking_state_->marked_bytes());
mutator_marking_state_.marked_bytes());
}
void Marker::FinishMarking(MarkingConfig config) {
void MarkerBase::FinishMarking(MarkingConfig config) {
EnterAtomicPause(config);
AdvanceMarkingWithDeadline(v8::base::TimeDelta::Max());
LeaveAtomicPause();
}
void Marker::ProcessWeakness() {
heap().GetWeakPersistentRegion().Trace(marking_visitor_.get());
void MarkerBase::ProcessWeakness() {
heap().GetWeakPersistentRegion().Trace(&visitor());
// Call weak callbacks on objects that may now be pointing to dead objects.
WeakCallbackItem item;
MarkingWorklists::WeakCallbackItem item;
LivenessBroker broker = LivenessBrokerFactory::Create();
WeakCallbackWorklist::View view(&weak_callback_worklist_, kMutatorThreadId);
MarkingWorklists::WeakCallbackWorklist::View view(
marking_worklists_.weak_callback_worklist(),
MarkingWorklists::kMutatorThreadId);
while (view.Pop(&item)) {
item.callback(broker, item.parameter);
}
// Weak callbacks should not add any new objects for marking.
DCHECK(marking_worklist_.IsEmpty());
DCHECK(marking_worklists_.marking_worklist()->IsEmpty());
}
void Marker::VisitRoots() {
void MarkerBase::VisitRoots() {
// Reset LABs before scanning roots. LABs are cleared to allow
// ObjectStartBitmap handling without considering LABs.
heap().object_allocator().ResetLinearAllocationBuffers();
heap().GetStrongPersistentRegion().Trace(marking_visitor_.get());
heap().GetStrongPersistentRegion().Trace(&visitor());
if (config_.stack_state != MarkingConfig::StackState::kNoHeapPointers) {
heap().stack()->IteratePointers(conservative_marking_visitor_.get());
heap().stack()->IteratePointers(&stack_visitor());
}
if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
VisitRememberedSlots(heap(), marking_visitor_.get());
VisitRememberedSlots(heap(), mutator_marking_state_);
}
}
std::unique_ptr<MutatorThreadMarkingVisitor>
Marker::CreateMutatorThreadMarkingVisitor() {
return std::make_unique<MutatorThreadMarkingVisitor>(this);
}
bool Marker::AdvanceMarkingWithDeadline(v8::base::TimeDelta duration) {
MutatorThreadMarkingVisitor* visitor = marking_visitor_.get();
bool MarkerBase::AdvanceMarkingWithDeadline(v8::base::TimeDelta duration) {
v8::base::TimeTicks deadline = v8::base::TimeTicks::Now() + duration;
do {
@@ -208,70 +202,65 @@ bool Marker::AdvanceMarkingWithDeadline(v8::base::TimeDelta duration) {
// |marking_worklist_|. This merely re-adds items with the proper
// callbacks.
if (!DrainWorklistWithDeadline(
deadline, &previously_not_fully_constructed_worklist_,
[this](NotFullyConstructedItem& item) {
mutator_marking_state_->DynamicallyMarkAddress(
deadline,
marking_worklists_.previously_not_fully_constructed_worklist(),
[this](MarkingWorklists::NotFullyConstructedItem& item) {
mutator_marking_state_.DynamicallyMarkAddress(
reinterpret_cast<ConstAddress>(item));
},
kMutatorThreadId))
MarkingWorklists::kMutatorThreadId))
return false;
if (!DrainWorklistWithDeadline(
deadline, &marking_worklist_,
[this, visitor](const MarkingItem& item) {
deadline, marking_worklists_.marking_worklist(),
[this](const MarkingWorklists::MarkingItem& item) {
const HeapObjectHeader& header =
HeapObjectHeader::FromPayload(item.base_object_payload);
DCHECK(!header.IsInConstruction<
HeapObjectHeader::AccessMode::kNonAtomic>());
item.callback(visitor, item.base_object_payload);
mutator_marking_state_->AccountMarkedBytes(header);
item.callback(&visitor(), item.base_object_payload);
mutator_marking_state_.AccountMarkedBytes(header);
},
kMutatorThreadId))
MarkingWorklists::kMutatorThreadId))
return false;
if (!DrainWorklistWithDeadline(
deadline, &write_barrier_worklist_,
[this, visitor](HeapObjectHeader* header) {
deadline, marking_worklists_.write_barrier_worklist(),
[this](HeapObjectHeader* header) {
DCHECK(header);
DCHECK(!header->IsInConstruction<
HeapObjectHeader::AccessMode::kNonAtomic>());
const GCInfo& gcinfo =
GlobalGCInfoTable::GCInfoFromIndex(header->GetGCInfoIndex());
gcinfo.trace(visitor, header->Payload());
mutator_marking_state_->AccountMarkedBytes(*header);
gcinfo.trace(&visitor(), header->Payload());
mutator_marking_state_.AccountMarkedBytes(*header);
},
kMutatorThreadId))
MarkingWorklists::kMutatorThreadId))
return false;
} while (!marking_worklist_.IsLocalViewEmpty(kMutatorThreadId));
} while (!marking_worklists_.marking_worklist()->IsLocalViewEmpty(
MarkingWorklists::kMutatorThreadId));
return true;
}
void Marker::FlushNotFullyConstructedObjects() {
if (!not_fully_constructed_worklist_.IsLocalViewEmpty(kMutatorThreadId)) {
not_fully_constructed_worklist_.FlushToGlobal(kMutatorThreadId);
previously_not_fully_constructed_worklist_.MergeGlobalPool(
&not_fully_constructed_worklist_);
}
DCHECK(not_fully_constructed_worklist_.IsLocalViewEmpty(kMutatorThreadId));
}
void Marker::MarkNotFullyConstructedObjects() {
NotFullyConstructedItem item;
NotFullyConstructedWorklist::View view(&not_fully_constructed_worklist_,
kMutatorThreadId);
void MarkerBase::MarkNotFullyConstructedObjects() {
MarkingWorklists::NotFullyConstructedItem item;
MarkingWorklists::NotFullyConstructedWorklist::View view(
marking_worklists_.not_fully_constructed_worklist(),
MarkingWorklists::kMutatorThreadId);
while (view.Pop(&item)) {
conservative_marking_visitor_->TraceConservativelyIfNeeded(item);
conservative_visitor().TraceConservativelyIfNeeded(item);
}
}
void Marker::ClearAllWorklistsForTesting() {
marking_worklist_.Clear();
not_fully_constructed_worklist_.Clear();
previously_not_fully_constructed_worklist_.Clear();
write_barrier_worklist_.Clear();
weak_callback_worklist_.Clear();
void MarkerBase::ClearAllWorklistsForTesting() {
marking_worklists_.ClearForTesting();
}
Marker::Marker(HeapBase& heap)
: MarkerBase(heap),
marking_visitor_(heap, marking_state()),
conservative_marking_visitor_(heap, marking_state(), marking_visitor_) {}
} // namespace internal
} // namespace cppgc
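
The hunks above show only the call sites of DrainWorklistWithDeadline; its
body is outside the diff. A plausible shape, inferred from those call sites
(deadline, worklist pointer, per-item callback, task id) and not taken from
the change itself:

// Hypothetical sketch; the batch constant and exact structure are
// assumptions, only the signature is visible in the hunks above.
template <typename Worklist, typename Callback>
bool DrainWorklistWithDeadline(v8::base::TimeTicks deadline,
                               Worklist* worklist, Callback callback,
                               int task_id) {
  // Reading the clock per item would be too costly, so check in batches.
  constexpr size_t kDeadlineCheckInterval = 1250;
  size_t processed = 0;
  typename Worklist::View view(worklist, task_id);
  typename Worklist::EntryType item;
  while (view.Pop(&item)) {
    callback(item);
    if (++processed == kDeadlineCheckInterval) {
      processed = 0;
      if (deadline <= v8::base::TimeTicks::Now()) return false;  // interrupted
    }
  }
  return true;  // drained completely within the deadline
}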


@@ -10,17 +10,17 @@
#include "include/cppgc/heap.h"
#include "include/cppgc/trace-trait.h"
#include "include/cppgc/visitor.h"
#include "src/base/macros.h"
#include "src/base/platform/time.h"
#include "src/heap/cppgc/marking-state.h"
#include "src/heap/cppgc/marking-visitor.h"
#include "src/heap/cppgc/marking-worklists.h"
#include "src/heap/cppgc/worklist.h"
namespace cppgc {
namespace internal {
class HeapBase;
class HeapObjectHeader;
class MarkingState;
class MutatorThreadMarkingVisitor;
// Marking algorithm. Example for a valid call sequence creating the marking
// phase:
@@ -31,31 +31,8 @@ class MutatorThreadMarkingVisitor;
// 5. LeaveAtomicPause()
//
// Alternatively, FinishMarking combines steps 3.-5.
class V8_EXPORT_PRIVATE Marker {
static constexpr int kNumConcurrentMarkers = 0;
static constexpr int kNumMarkers = 1 + kNumConcurrentMarkers;
class V8_EXPORT_PRIVATE MarkerBase {
public:
static constexpr int kMutatorThreadId = 0;
using MarkingItem = cppgc::TraceDescriptor;
using NotFullyConstructedItem = const void*;
struct WeakCallbackItem {
cppgc::WeakCallback callback;
const void* parameter;
};
// Segment size of 512 entries necessary to avoid throughput regressions.
// Since the work list is currently a temporary object this is not a problem.
using MarkingWorklist =
Worklist<MarkingItem, 512 /* local entries */, kNumMarkers>;
using NotFullyConstructedWorklist =
Worklist<NotFullyConstructedItem, 16 /* local entries */, kNumMarkers>;
using WeakCallbackWorklist =
Worklist<WeakCallbackItem, 64 /* local entries */, kNumMarkers>;
using WriteBarrierWorklist =
Worklist<HeapObjectHeader*, 64 /*local entries */, kNumMarkers>;
struct MarkingConfig {
enum class CollectionType : uint8_t {
kMinor,
@@ -75,11 +52,10 @@ class V8_EXPORT_PRIVATE Marker {
MarkingType marking_type = MarkingType::kAtomic;
};
explicit Marker(HeapBase& heap);
virtual ~Marker();
virtual ~MarkerBase();
Marker(const Marker&) = delete;
Marker& operator=(const Marker&) = delete;
MarkerBase(const MarkerBase&) = delete;
MarkerBase& operator=(const MarkerBase&) = delete;
// Initialize marking according to the given config. This method will
// trigger incremental/concurrent marking if needed.
@@ -107,45 +83,46 @@ class V8_EXPORT_PRIVATE Marker {
void ProcessWeakness();
HeapBase& heap() { return heap_; }
MarkingWorklist* marking_worklist() { return &marking_worklist_; }
NotFullyConstructedWorklist* not_fully_constructed_worklist() {
return &not_fully_constructed_worklist_;
}
WriteBarrierWorklist* write_barrier_worklist() {
return &write_barrier_worklist_;
}
WeakCallbackWorklist* weak_callback_worklist() {
return &weak_callback_worklist_;
}
MarkingState& marking_state() const { return *mutator_marking_state_.get(); }
MarkingState& marking_state() { return mutator_marking_state_; }
MarkingWorklists& marking_worklists() { return marking_worklists_; }
cppgc::Visitor& VisitorForTesting() { return visitor(); }
void ClearAllWorklistsForTesting();
MutatorThreadMarkingVisitor* GetMarkingVisitorForTesting() {
return marking_visitor_.get();
}
protected:
virtual std::unique_ptr<MutatorThreadMarkingVisitor>
CreateMutatorThreadMarkingVisitor();
explicit MarkerBase(HeapBase& heap);
virtual cppgc::Visitor& visitor() = 0;
virtual ConservativeTracingVisitor& conservative_visitor() = 0;
virtual heap::base::StackVisitor& stack_visitor() = 0;
void VisitRoots();
void FlushNotFullyConstructedObjects();
void MarkNotFullyConstructedObjects();
HeapBase& heap_;
MarkingConfig config_ = MarkingConfig::Default();
std::unique_ptr<MarkingState> mutator_marking_state_;
std::unique_ptr<MutatorThreadMarkingVisitor> marking_visitor_;
std::unique_ptr<ConservativeMarkingVisitor> conservative_marking_visitor_;
MarkingWorklists marking_worklists_;
MarkingState mutator_marking_state_;
};
MarkingWorklist marking_worklist_;
NotFullyConstructedWorklist not_fully_constructed_worklist_;
NotFullyConstructedWorklist previously_not_fully_constructed_worklist_;
WriteBarrierWorklist write_barrier_worklist_;
WeakCallbackWorklist weak_callback_worklist_;
class V8_EXPORT_PRIVATE Marker final : public MarkerBase {
public:
explicit Marker(HeapBase&);
protected:
cppgc::Visitor& visitor() final { return marking_visitor_; }
ConservativeTracingVisitor& conservative_visitor() final {
return conservative_marking_visitor_;
}
heap::base::StackVisitor& stack_visitor() final {
return conservative_marking_visitor_;
}
private:
MarkingVisitor marking_visitor_;
ConservativeMarkingVisitor conservative_marking_visitor_;
};
} // namespace internal
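
Taken together, the new public surface is driven as documented in the
call-sequence comment at the top of the header. A minimal sketch of a full
atomic marking phase (config values mirror those used elsewhere in this
change; the helper itself is hypothetical):

// Hypothetical driver for an atomic marking phase on the standalone heap.
void RunMarkingPhase(HeapBase& heap) {
  Marker marker(heap);
  const MarkerBase::MarkingConfig config{
      MarkerBase::MarkingConfig::CollectionType::kMajor,
      MarkerBase::MarkingConfig::StackState::kMayContainHeapPointers,
      MarkerBase::MarkingConfig::MarkingType::kAtomic};
  marker.StartMarking(config);   // set up marking, possibly incremental
  marker.FinishMarking(config);  // EnterAtomicPause + drain + LeaveAtomicPause
  marker.ProcessWeakness();      // weak callbacks run after marking is done
}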


@@ -10,7 +10,7 @@
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/liveness-broker.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/marking-worklists.h"
namespace cppgc {
namespace internal {
@@ -18,9 +18,9 @@ namespace internal {
// C++ marking implementation.
class MarkingState {
public:
inline MarkingState(HeapBase& heap, Marker::MarkingWorklist*,
Marker::NotFullyConstructedWorklist*,
Marker::WeakCallbackWorklist*, int);
inline MarkingState(HeapBase& heap, MarkingWorklists::MarkingWorklist*,
MarkingWorklists::NotFullyConstructedWorklist*,
MarkingWorklists::WeakCallbackWorklist*, int);
MarkingState(const MarkingState&) = delete;
MarkingState& operator=(const MarkingState&) = delete;
@@ -47,17 +47,19 @@ class MarkingState {
HeapBase& heap_;
#endif // DEBUG
Marker::MarkingWorklist::View marking_worklist_;
Marker::NotFullyConstructedWorklist::View not_fully_constructed_worklist_;
Marker::WeakCallbackWorklist::View weak_callback_worklist_;
MarkingWorklists::MarkingWorklist::View marking_worklist_;
MarkingWorklists::NotFullyConstructedWorklist::View
not_fully_constructed_worklist_;
MarkingWorklists::WeakCallbackWorklist::View weak_callback_worklist_;
size_t marked_bytes_ = 0;
};
MarkingState::MarkingState(
HeapBase& heap, Marker::MarkingWorklist* marking_worklist,
Marker::NotFullyConstructedWorklist* not_fully_constructed_worklist,
Marker::WeakCallbackWorklist* weak_callback_worklist, int task_id)
HeapBase& heap, MarkingWorklists::MarkingWorklist* marking_worklist,
MarkingWorklists::NotFullyConstructedWorklist*
not_fully_constructed_worklist,
MarkingWorklists::WeakCallbackWorklist* weak_callback_worklist, int task_id)
:
#ifdef DEBUG
heap_(heap),


@@ -0,0 +1,32 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/marking-worklists.h"
namespace cppgc {
namespace internal {
constexpr int MarkingWorklists::kMutatorThreadId;
void MarkingWorklists::ClearForTesting() {
marking_worklist_.Clear();
not_fully_constructed_worklist_.Clear();
previously_not_fully_constructed_worklist_.Clear();
write_barrier_worklist_.Clear();
weak_callback_worklist_.Clear();
}
void MarkingWorklists::FlushNotFullyConstructedObjects() {
if (!not_fully_constructed_worklist_.IsLocalViewEmpty(kMutatorThreadId)) {
not_fully_constructed_worklist_.FlushToGlobal(kMutatorThreadId);
previously_not_fully_constructed_worklist_.MergeGlobalPool(
&not_fully_constructed_worklist_);
}
DCHECK(not_fully_constructed_worklist_.IsLocalViewEmpty(
MarkingWorklists::kMutatorThreadId));
}
} // namespace internal
} // namespace cppgc


@@ -0,0 +1,72 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_MARKING_WORKLISTS_H_
#define V8_HEAP_CPPGC_MARKING_WORKLISTS_H_
#include "include/cppgc/visitor.h"
#include "src/heap/cppgc/worklist.h"
namespace cppgc {
namespace internal {
class HeapObjectHeader;
class MarkingWorklists {
static constexpr int kNumConcurrentMarkers = 0;
static constexpr int kNumMarkers = 1 + kNumConcurrentMarkers;
public:
static constexpr int kMutatorThreadId = 0;
using MarkingItem = cppgc::TraceDescriptor;
using NotFullyConstructedItem = const void*;
struct WeakCallbackItem {
cppgc::WeakCallback callback;
const void* parameter;
};
// Segment size of 512 entries necessary to avoid throughput regressions.
// Since the work list is currently a temporary object this is not a problem.
using MarkingWorklist =
Worklist<MarkingItem, 512 /* local entries */, kNumMarkers>;
using NotFullyConstructedWorklist =
Worklist<NotFullyConstructedItem, 16 /* local entries */, kNumMarkers>;
using WeakCallbackWorklist =
Worklist<WeakCallbackItem, 64 /* local entries */, kNumMarkers>;
using WriteBarrierWorklist =
Worklist<HeapObjectHeader*, 64 /*local entries */, kNumMarkers>;
MarkingWorklist* marking_worklist() { return &marking_worklist_; }
NotFullyConstructedWorklist* not_fully_constructed_worklist() {
return &not_fully_constructed_worklist_;
}
NotFullyConstructedWorklist* previously_not_fully_constructed_worklist() {
return &previously_not_fully_constructed_worklist_;
}
WriteBarrierWorklist* write_barrier_worklist() {
return &write_barrier_worklist_;
}
WeakCallbackWorklist* weak_callback_worklist() {
return &weak_callback_worklist_;
}
// Moves objects in not_fully_constructed_worklist_ to
// previously_not_fully_constructed_worklist_.
void FlushNotFullyConstructedObjects();
void ClearForTesting();
private:
MarkingWorklist marking_worklist_;
NotFullyConstructedWorklist not_fully_constructed_worklist_;
NotFullyConstructedWorklist previously_not_fully_constructed_worklist_;
WriteBarrierWorklist write_barrier_worklist_;
WeakCallbackWorklist weak_callback_worklist_;
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_MARKING_WORKLISTS_H_
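
As the call sites throughout this change show, every producer and consumer
accesses these worklists through a Worklist::View bound to a task id. A
minimal usage sketch (the helper is hypothetical; MarkingItem is the
cppgc::TraceDescriptor defined above):

// Hypothetical helper demonstrating the View-based access pattern.
void DrainMarkingWorklistOnce(MarkingWorklists& worklists,
                              cppgc::Visitor& visitor) {
  MarkingWorklists::MarkingWorklist::View view(
      worklists.marking_worklist(), MarkingWorklists::kMutatorThreadId);
  MarkingWorklists::MarkingItem item;
  while (view.Pop(&item)) {
    // A MarkingItem carries the object payload and its trace callback.
    item.callback(&visitor, item.base_object_payload);
  }
}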


@@ -21,7 +21,7 @@ namespace internal {
namespace {
void MarkValue(const BasePage* page, Marker* marker, const void* value) {
void MarkValue(const BasePage* page, MarkerBase* marker, const void* value) {
#if defined(CPPGC_CAGED_HEAP)
DCHECK(reinterpret_cast<CagedHeapLocalData*>(
reinterpret_cast<uintptr_t>(value) &
@@ -40,14 +40,17 @@ void MarkValue(const BasePage* page, Marker* marker, const void* value) {
// It is assumed that objects on not_fully_constructed_worklist_ are not
// marked.
header.Unmark();
Marker::NotFullyConstructedWorklist::View not_fully_constructed_worklist(
marker->not_fully_constructed_worklist(), Marker::kMutatorThreadId);
MarkingWorklists::NotFullyConstructedWorklist::View
not_fully_constructed_worklist(
marker->marking_worklists().not_fully_constructed_worklist(),
MarkingWorklists::kMutatorThreadId);
not_fully_constructed_worklist.Push(header.Payload());
return;
}
Marker::WriteBarrierWorklist::View write_barrier_worklist(
marker->write_barrier_worklist(), Marker::kMutatorThreadId);
MarkingWorklists::WriteBarrierWorklist::View write_barrier_worklist(
marker->marking_worklists().write_barrier_worklist(),
MarkingWorklists::kMutatorThreadId);
write_barrier_worklist.Push(&header);
}


@@ -222,7 +222,7 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
GCedWithCallback* object = MakeGarbageCollected<GCedWithCallback>(
GetAllocationHandle(), [&marker](GCedWithCallback* obj) {
Member<GCedWithCallback> member(obj);
marker.GetMarkingVisitorForTesting()->Trace(member);
marker.VisitorForTesting().Trace(member);
});
EXPECT_FALSE(HeapObjectHeader::FromPayload(object).IsMarked());
marker.FinishMarking({MarkingConfig::CollectionType::kMajor,
@@ -239,7 +239,7 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) {
MakeGarbageCollected<GCedWithCallback>(
GetAllocationHandle(), [&marker](GCedWithCallback* obj) {
Member<GCedWithCallback> member(obj);
marker.GetMarkingVisitorForTesting()->Trace(member);
marker.VisitorForTesting().Trace(member);
EXPECT_FALSE(HeapObjectHeader::FromPayload(obj).IsMarked());
marker.FinishMarking(config);
EXPECT_TRUE(HeapObjectHeader::FromPayload(obj).IsMarked());


@@ -43,7 +43,7 @@ class TestWithHeap : public TestWithPlatform {
return allocation_handle_;
}
std::unique_ptr<Marker>& GetMarkerRef() {
std::unique_ptr<MarkerBase>& GetMarkerRef() {
return Heap::From(GetHeap())->marker_;
}


@@ -21,7 +21,7 @@ namespace {
class IncrementalMarkingScope {
public:
explicit IncrementalMarkingScope(Marker* marker) : marker_(marker) {
explicit IncrementalMarkingScope(MarkerBase* marker) : marker_(marker) {
marker_->StartMarking(kIncrementalConfig);
}
@@ -35,18 +35,21 @@ class IncrementalMarkingScope {
Marker::MarkingConfig::StackState::kNoHeapPointers,
Marker::MarkingConfig::MarkingType::kIncremental};
Marker* marker_;
MarkerBase* marker_;
};
constexpr Marker::MarkingConfig IncrementalMarkingScope::kIncrementalConfig;
class ExpectWriteBarrierFires final : private IncrementalMarkingScope {
public:
ExpectWriteBarrierFires(Marker* marker, std::initializer_list<void*> objects)
ExpectWriteBarrierFires(MarkerBase* marker,
std::initializer_list<void*> objects)
: IncrementalMarkingScope(marker),
marking_worklist_(marker->marking_worklist(), Marker::kMutatorThreadId),
write_barrier_worklist_(marker->write_barrier_worklist(),
Marker::kMutatorThreadId),
marking_worklist_(marker->marking_worklists().marking_worklist(),
MarkingWorklists::kMutatorThreadId),
write_barrier_worklist_(
marker->marking_worklists().write_barrier_worklist(),
MarkingWorklists::kMutatorThreadId),
objects_(objects) {
EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
@@ -58,7 +61,7 @@ class ExpectWriteBarrierFires final : private IncrementalMarkingScope {
~ExpectWriteBarrierFires() V8_NOEXCEPT {
{
Marker::MarkingItem item;
MarkingWorklists::MarkingItem item;
while (marking_worklist_.Pop(&item)) {
auto pos = std::find(objects_.begin(), objects_.end(),
item.base_object_payload);
@@ -82,20 +85,22 @@ class ExpectWriteBarrierFires final : private IncrementalMarkingScope {
}
private:
Marker::MarkingWorklist::View marking_worklist_;
Marker::WriteBarrierWorklist::View write_barrier_worklist_;
MarkingWorklists::MarkingWorklist::View marking_worklist_;
MarkingWorklists::WriteBarrierWorklist::View write_barrier_worklist_;
std::vector<void*> objects_;
std::vector<HeapObjectHeader*> headers_;
};
class ExpectNoWriteBarrierFires final : private IncrementalMarkingScope {
public:
ExpectNoWriteBarrierFires(Marker* marker,
ExpectNoWriteBarrierFires(MarkerBase* marker,
std::initializer_list<void*> objects)
: IncrementalMarkingScope(marker),
marking_worklist_(marker->marking_worklist(), Marker::kMutatorThreadId),
write_barrier_worklist_(marker->write_barrier_worklist(),
Marker::kMutatorThreadId) {
marking_worklist_(marker->marking_worklists().marking_worklist(),
MarkingWorklists::kMutatorThreadId),
write_barrier_worklist_(
marker->marking_worklists().write_barrier_worklist(),
MarkingWorklists::kMutatorThreadId) {
EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
for (void* object : objects) {
@@ -113,8 +118,8 @@ class ExpectNoWriteBarrierFires final : private IncrementalMarkingScope {
}
private:
Marker::MarkingWorklist::View marking_worklist_;
Marker::WriteBarrierWorklist::View write_barrier_worklist_;
MarkingWorklists::MarkingWorklist::View marking_worklist_;
MarkingWorklists::WriteBarrierWorklist::View write_barrier_worklist_;
std::vector<std::pair<HeapObjectHeader*, bool /* was marked */>> headers_;
};
@@ -151,11 +156,11 @@ class WriteBarrierTest : public testing::TestWithHeap {
GetMarkerRef().reset();
}
Marker* marker() const { return marker_; }
MarkerBase* marker() const { return marker_; }
private:
Heap* internal_heap_;
Marker* marker_;
MarkerBase* marker_;
};
// =============================================================================