cppgc: Initial incremental marking implementation.

This CL adds a basic implementation of incremental marking for standalone GC.
Follow-up CLs include:
* Use bytes instead of time as the deadline
* Port the incremental marking schedule from Blink
* Mark on allocation
* Guarantees for progress/termination for standalone GC
* etc...

Calling StartIncrementalGarbageCollection triggers StartMarking, which
schedules incremental marking steps as non-nestable tasks.
For unified heap, marking keeps running until it runs out of work, but
it does not finalize independently.
For standalone GC, once incremental marking runs out of work, it
schedules a new task that finalizes marking and triggers the rest of
the GC.
Standalone users can also force finalization before incremental marking
has finished via FinalizeIncrementalGarbageCollectionIfRunning.
Calling CollectGarbage also finalizes an ongoing incremental GC if one
exists; otherwise it triggers an atomic GC.
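
For illustration, a minimal sketch of how a standalone caller could
drive this flow. It uses the internal GarbageCollector::Config and
Heap names from the diff below; the DriveIncrementalGC helper and the
specific enum values chosen here are assumptions for the sketch, not
part of this CL:

  // Sketch only; assumes #include "src/heap/cppgc/heap.h" and that
  // Config::MarkingType::kIncremental is a valid standalone config.
  using Config = cppgc::internal::GarbageCollector::Config;

  void DriveIncrementalGC(cppgc::internal::Heap& heap) {
    // StartMarking() posts non-nestable incremental marking tasks on
    // the foreground task runner.
    heap.StartIncrementalGarbageCollection(
        {Config::CollectionType::kMajor,
         Config::StackState::kNoHeapPointers,
         Config::MarkingType::kIncremental,  // must not be kAtomic here
         Config::SweepingType::kAtomic});

    // ...marking advances in scheduled tasks; once it runs out of work,
    // standalone GC finalizes itself in a follow-up task...

    // Optionally force finalization before incremental marking is done.
    heap.FinalizeIncrementalGarbageCollectionIfRunning(
        {Config::CollectionType::kMajor,
         Config::StackState::kNoHeapPointers,
         Config::MarkingType::kIncremental,
         Config::SweepingType::kAtomic});

    // Alternatively, CollectGarbage() with MarkingType::kAtomic
    // finalizes an ongoing incremental GC, or runs a full atomic GC
    // if none is in progress.
  }

In this CL each standalone marking step uses a hardcoded 2ms deadline
(see IncrementalMarkingTask::Run below); replacing it with a byte-based
schedule is left to a follow-up.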

See the following doc for an explanation of the various methods:
https://docs.google.com/document/d/1ZhJY2fOoD8sH53ZxMh2927Zl8sXqA7azJgcQTWx-YKs/edit?usp=sharing

Bug: chromium:1056170
Change-Id: I75ead414eb9da9f8b7f71c4638b9830fce7708ca
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2298009
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69480}
Commit 132727fd46 (parent f0bade979d), authored by Omer Katz on
2020-08-19 14:59:00 +02:00 and committed by Commit Bot.
16 changed files with 380 additions and 82 deletions.


@ -91,6 +91,9 @@ class V8_BASE_EXPORT TimeDelta final {
return TimeDelta(nanoseconds / TimeConstants::kNanosecondsPerMicrosecond);
}
static TimeDelta FromSecondsD(double seconds) {
return FromDouble(seconds * TimeConstants::kMicrosecondsPerSecond);
}
static TimeDelta FromMillisecondsD(double milliseconds) {
return FromDouble(milliseconds *
TimeConstants::kMicrosecondsPerMillisecond);


@ -65,7 +65,8 @@ class CppgcPlatformAdapter final : public cppgc::Platform {
class UnifiedHeapMarker final : public cppgc::internal::MarkerBase {
public:
explicit UnifiedHeapMarker(Heap& v8_heap, cppgc::internal::HeapBase& heap);
UnifiedHeapMarker(Heap& v8_heap, cppgc::internal::HeapBase& cpp_heap,
cppgc::Platform* platform, MarkingConfig config);
~UnifiedHeapMarker() final = default;
@ -87,8 +88,10 @@ class UnifiedHeapMarker final : public cppgc::internal::MarkerBase {
};
UnifiedHeapMarker::UnifiedHeapMarker(Heap& v8_heap,
cppgc::internal::HeapBase& heap)
: cppgc::internal::MarkerBase(heap),
cppgc::internal::HeapBase& heap,
cppgc::Platform* platform,
MarkingConfig config)
: cppgc::internal::MarkerBase(heap, platform, config),
unified_heap_mutator_marking_state_(v8_heap),
marking_visitor_(heap, mutator_marking_state_,
unified_heap_mutator_marking_state_),
@ -121,12 +124,13 @@ void CppHeap::RegisterV8References(
}
void CppHeap::TracePrologue(TraceFlags flags) {
marker_.reset(new UnifiedHeapMarker(*isolate_.heap(), AsBase()));
const UnifiedHeapMarker::MarkingConfig marking_config{
UnifiedHeapMarker::MarkingConfig::CollectionType::kMajor,
cppgc::Heap::StackState::kNoHeapPointers,
UnifiedHeapMarker::MarkingConfig::MarkingType::kAtomic};
marker_->StartMarking(marking_config);
UnifiedHeapMarker::MarkingConfig::MarkingType::kIncremental};
marker_ = std::make_unique<UnifiedHeapMarker>(
*isolate_.heap(), AsBase(), platform_.get(), marking_config);
marker_->StartMarking();
marking_done_ = false;
}
@ -139,11 +143,7 @@ bool CppHeap::AdvanceTracing(double deadline_in_ms) {
bool CppHeap::IsTracingDone() { return marking_done_; }
void CppHeap::EnterFinalPause(EmbedderStackState stack_state) {
const UnifiedHeapMarker::MarkingConfig marking_config{
UnifiedHeapMarker::MarkingConfig::CollectionType::kMajor,
cppgc::Heap::StackState::kNoHeapPointers,
UnifiedHeapMarker::MarkingConfig::MarkingType::kAtomic};
marker_->EnterAtomicPause(marking_config);
marker_->EnterAtomicPause(cppgc::Heap::StackState::kNoHeapPointers);
}
void CppHeap::TraceEpilogue(TraceSummary* trace_summary) {


@ -33,6 +33,12 @@ class V8_EXPORT_PRIVATE CppHeap final : public cppgc::internal::HeapBase,
void EnterFinalPause(EmbedderStackState stack_state) final;
private:
void FinalizeIncrementalGarbageCollectionIfNeeded(
cppgc::Heap::StackState) final {
// For unified heap, CppHeap shouldn't finalize independently (i.e.
// finalization is not needed) thus this method is left empty.
}
Isolate& isolate_;
bool marking_done_ = false;
};


@ -12,6 +12,7 @@
#include "include/cppgc/internal/persistent-node.h"
#include "include/cppgc/macros.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/object-allocator.h"
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/sweeper.h"
@ -36,7 +37,6 @@ namespace testing {
class TestWithHeap;
} // namespace testing
class MarkerBase;
class PageBackend;
class PreFinalizerHandler;
class StatsCollector;
@ -119,6 +119,9 @@ class V8_EXPORT_PRIVATE HeapBase {
protected:
void VerifyMarking(cppgc::Heap::StackState);
virtual void FinalizeIncrementalGarbageCollectionIfNeeded(
cppgc::Heap::StackState) = 0;
bool in_no_gc_scope() const { return no_gc_scope_ > 0; }
RawHeap raw_heap_;
@ -145,6 +148,7 @@ class V8_EXPORT_PRIVATE HeapBase {
size_t no_gc_scope_ = 0;
friend class MarkerBase::IncrementalMarkingTask;
friend class testing::TestWithHeap;
};


@ -40,8 +40,9 @@ std::unique_ptr<Heap> Heap::Create(std::shared_ptr<cppgc::Platform> platform,
void Heap::ForceGarbageCollectionSlow(const char* source, const char* reason,
Heap::StackState stack_state) {
internal::Heap::From(this)->CollectGarbage(
{internal::GarbageCollector::Config::CollectionType::kMajor,
stack_state});
{internal::GarbageCollector::Config::CollectionType::kMajor, stack_state,
internal::GarbageCollector::Config::MarkingType::kAtomic,
internal::GarbageCollector::Config::SweepingType::kAtomic});
}
AllocationHandle& Heap::GetAllocationHandle() {
@ -84,14 +85,55 @@ Heap::Heap(std::shared_ptr<cppgc::Platform> platform,
Heap::~Heap() {
NoGCScope no_gc(*this);
// Finish already running GC if any, but don't finalize live objects.
sweeper_.Finish();
sweeper_.FinishIfRunning();
}
void Heap::CollectGarbage(Config config) {
DCHECK_EQ(Config::MarkingType::kAtomic, config.marking_type);
CheckConfig(config);
config_ = config;
if (in_no_gc_scope()) return;
if (!gc_in_progress_) StartGarbageCollection(config);
DCHECK(marker_);
FinalizeGarbageCollection(config.stack_state);
}
void Heap::StartIncrementalGarbageCollection(Config config) {
DCHECK(!gc_in_progress_);
DCHECK_NE(Config::MarkingType::kAtomic, config.marking_type);
CheckConfig(config);
config_ = config;
if (in_no_gc_scope()) return;
StartGarbageCollection(config);
}
void Heap::FinalizeIncrementalGarbageCollectionIfRunning(Config config) {
if (!gc_in_progress_) return;
DCHECK(!in_no_gc_scope());
DCHECK_NE(Config::MarkingType::kAtomic, config_.marking_type);
config_ = config;
FinalizeGarbageCollection(config.stack_state);
}
void Heap::StartGarbageCollection(Config config) {
DCHECK(!gc_in_progress_);
DCHECK(!in_no_gc_scope());
// Finish sweeping in case it is still running.
sweeper_.FinishIfRunning();
gc_in_progress_ = true;
epoch_++;
#if defined(CPPGC_YOUNG_GENERATION)
@ -99,13 +141,18 @@ void Heap::CollectGarbage(Config config) {
Unmarker unmarker(&raw_heap());
#endif
// "Marking".
marker_ = std::make_unique<Marker>(AsBase());
const Marker::MarkingConfig marking_config{
config.collection_type, config.stack_state, config.marking_type};
marker_->StartMarking(marking_config);
marker_->FinishMarking(marking_config);
// "Sweeping and finalization".
marker_ = std::make_unique<Marker>(AsBase(), platform_.get(), marking_config);
marker_->StartMarking();
}
void Heap::FinalizeGarbageCollection(Config::StackState stack_state) {
DCHECK(gc_in_progress_);
DCHECK(!in_no_gc_scope());
config_.stack_state = stack_state;
DCHECK(marker_);
marker_->FinishMarking(stack_state);
{
// Pre finalizers are forbidden from allocating objects.
ObjectAllocator::NoAllocationScope no_allocation_scope_(object_allocator_);
@ -115,12 +162,13 @@ void Heap::CollectGarbage(Config config) {
marker_.reset();
// TODO(chromium:1056170): replace build flag with dedicated flag.
#if DEBUG
VerifyMarking(config.stack_state);
VerifyMarking(stack_state);
#endif
{
NoGCScope no_gc(*this);
sweeper_.Start(config.sweeping_type);
sweeper_.Start(config_.sweeping_type);
}
gc_in_progress_ = false;
}
} // namespace internal


@ -32,14 +32,26 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase,
HeapBase& AsBase() { return *this; }
const HeapBase& AsBase() const { return *this; }
void CollectGarbage(Config config) final;
void CollectGarbage(Config) final;
void StartIncrementalGarbageCollection(Config);
void FinalizeIncrementalGarbageCollectionIfRunning(Config);
size_t epoch() const final { return epoch_; }
private:
void StartGarbageCollection(Config);
void FinalizeGarbageCollection(Config::StackState);
void FinalizeIncrementalGarbageCollectionIfNeeded(
Config::StackState stack_state) final {
FinalizeGarbageCollection(stack_state);
}
Config config_;
GCInvoker gc_invoker_;
HeapGrowing growing_;
bool gc_in_progress_ = false;
size_t epoch_ = 0;
};


@ -7,6 +7,7 @@
#include <memory>
#include "include/cppgc/internal/process-heap.h"
#include "include/cppgc/platform.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap-visitor.h"
@ -25,28 +26,32 @@ namespace internal {
namespace {
void EnterIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
bool EnterIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
HeapBase& heap) {
if (config.marking_type == Marker::MarkingConfig::MarkingType::kIncremental ||
config.marking_type ==
Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
ProcessHeap::EnterIncrementalOrConcurrentMarking();
}
#if defined(CPPGC_CAGED_HEAP)
heap.caged_heap().local_data().is_marking_in_progress = true;
heap.caged_heap().local_data().is_marking_in_progress = true;
#endif
return true;
}
return false;
}
void ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
bool ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
HeapBase& heap) {
if (config.marking_type == Marker::MarkingConfig::MarkingType::kIncremental ||
config.marking_type ==
Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
ProcessHeap::ExitIncrementalOrConcurrentMarking();
}
#if defined(CPPGC_CAGED_HEAP)
heap.caged_heap().local_data().is_marking_in_progress = false;
heap.caged_heap().local_data().is_marking_in_progress = false;
#endif
return true;
}
return false;
}
// Visit remembered set that was recorded in the generational barrier.
@ -109,8 +114,38 @@ void TraceMarkedObject(Visitor* visitor, const HeapObjectHeader* header) {
} // namespace
MarkerBase::MarkerBase(HeapBase& heap)
MarkerBase::IncrementalMarkingTask::IncrementalMarkingTask(MarkerBase* marker)
: marker_(marker), handle_(Handle::NonEmptyTag{}) {}
// static
MarkerBase::IncrementalMarkingTask::Handle
MarkerBase::IncrementalMarkingTask::Post(v8::TaskRunner* runner,
MarkerBase* marker) {
auto task = std::make_unique<IncrementalMarkingTask>(marker);
auto handle = task->handle_;
runner->PostNonNestableTask(std::move(task));
return handle;
}
void MarkerBase::IncrementalMarkingTask::Run() {
if (handle_.IsCanceled()) return;
// TODO(chromium:1056170): Replace hardcoded duration with schedule.
if (marker_->IncrementalMarkingStep(
MarkingConfig::StackState::kNoHeapPointers,
v8::base::TimeDelta::FromMillisecondsD(2))) {
// Incremental marking is done so should finalize GC.
marker_->heap().FinalizeIncrementalGarbageCollectionIfNeeded(
MarkingConfig::StackState::kNoHeapPointers);
}
}
MarkerBase::MarkerBase(HeapBase& heap, cppgc::Platform* platform,
MarkingConfig config)
: heap_(heap),
config_(config),
platform_(platform),
foreground_task_runner_(platform_->GetForegroundTaskRunner()),
mutator_marking_state_(
heap, marking_worklists_.marking_worklist(),
marking_worklists_.not_fully_constructed_worklist(),
@ -137,20 +172,28 @@ MarkerBase::~MarkerBase() {
}
}
void MarkerBase::StartMarking(MarkingConfig config) {
void MarkerBase::StartMarking() {
heap().stats_collector()->NotifyMarkingStarted();
config_ = config;
VisitRoots();
EnterIncrementalMarkingIfNeeded(config, heap());
is_marking_started_ = true;
if (EnterIncrementalMarkingIfNeeded(config_, heap())) {
// Performing incremental or concurrent marking.
// Scanning the stack is expensive so we only do it at the atomic pause.
VisitRoots(MarkingConfig::StackState::kNoHeapPointers);
ScheduleIncrementalMarkingTask();
}
}
void MarkerBase::EnterAtomicPause(MarkingConfig config) {
ExitIncrementalMarkingIfNeeded(config_, heap());
config_ = config;
void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) {
if (ExitIncrementalMarkingIfNeeded(config_, heap())) {
// Cancel remaining incremental tasks.
if (incremental_marking_handle_) incremental_marking_handle_.Cancel();
}
config_.stack_state = stack_state;
config_.marking_type = MarkingConfig::MarkingType::kAtomic;
// VisitRoots also resets the LABs.
VisitRoots();
VisitRoots(config_.stack_state);
if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
marking_worklists_.FlushNotFullyConstructedObjects();
} else {
@ -159,15 +202,18 @@ void MarkerBase::EnterAtomicPause(MarkingConfig config) {
}
void MarkerBase::LeaveAtomicPause() {
DCHECK(!incremental_marking_handle_);
ResetRememberedSet(heap());
heap().stats_collector()->NotifyMarkingCompleted(
mutator_marking_state_.marked_bytes());
}
void MarkerBase::FinishMarking(MarkingConfig config) {
EnterAtomicPause(config);
AdvanceMarkingWithDeadline(v8::base::TimeDelta::Max());
void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) {
DCHECK(is_marking_started_);
EnterAtomicPause(stack_state);
ProcessWorklistsWithDeadline(v8::base::TimeDelta::Max());
LeaveAtomicPause();
is_marking_started_ = false;
}
void MarkerBase::ProcessWeakness() {
@ -186,13 +232,13 @@ void MarkerBase::ProcessWeakness() {
DCHECK(marking_worklists_.marking_worklist()->IsEmpty());
}
void MarkerBase::VisitRoots() {
void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) {
// Reset LABs before scanning roots. LABs are cleared to allow
// ObjectStartBitmap handling without considering LABs.
heap().object_allocator().ResetLinearAllocationBuffers();
heap().GetStrongPersistentRegion().Trace(&visitor());
if (config_.stack_state != MarkingConfig::StackState::kNoHeapPointers) {
if (stack_state != MarkingConfig::StackState::kNoHeapPointers) {
heap().stack()->IteratePointers(&stack_visitor());
}
if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
@ -200,7 +246,39 @@ void MarkerBase::VisitRoots() {
}
}
void MarkerBase::ScheduleIncrementalMarkingTask() {
if (!platform_ || !foreground_task_runner_) return;
DCHECK(!incremental_marking_handle_);
incremental_marking_handle_ =
IncrementalMarkingTask::Post(foreground_task_runner_.get(), this);
}
bool MarkerBase::IncrementalMarkingStepForTesting(
MarkingConfig::StackState stack_state, v8::base::TimeDelta deadline) {
return IncrementalMarkingStep(stack_state, deadline);
}
bool MarkerBase::IncrementalMarkingStep(MarkingConfig::StackState stack_state,
v8::base::TimeDelta duration) {
if (stack_state == MarkingConfig::StackState::kNoHeapPointers) {
marking_worklists_.FlushNotFullyConstructedObjects();
}
config_.stack_state = stack_state;
return AdvanceMarkingWithDeadline(duration);
}
bool MarkerBase::AdvanceMarkingWithDeadline(v8::base::TimeDelta duration) {
bool is_done = ProcessWorklistsWithDeadline(duration);
if (!is_done) {
// If marking is atomic, |is_done| should always be true.
DCHECK_NE(MarkingConfig::MarkingType::kAtomic, config_.marking_type);
ScheduleIncrementalMarkingTask();
}
return is_done;
}
bool MarkerBase::ProcessWorklistsWithDeadline(v8::base::TimeDelta duration) {
v8::base::TimeTicks deadline = v8::base::TimeTicks::Now() + duration;
do {
@ -264,8 +342,8 @@ void MarkerBase::ClearAllWorklistsForTesting() {
marking_worklists_.ClearForTesting();
}
Marker::Marker(HeapBase& heap)
: MarkerBase(heap),
Marker::Marker(HeapBase& heap, cppgc::Platform* platform, MarkingConfig config)
: MarkerBase(heap, platform, config),
marking_visitor_(heap, mutator_marking_state_),
conservative_marking_visitor_(heap, mutator_marking_state_,
marking_visitor_) {}


@ -15,6 +15,7 @@
#include "src/heap/cppgc/marking-state.h"
#include "src/heap/cppgc/marking-visitor.h"
#include "src/heap/cppgc/marking-worklists.h"
#include "src/heap/cppgc/task-handle.h"
#include "src/heap/cppgc/worklist.h"
namespace cppgc {
@ -47,9 +48,9 @@ class V8_EXPORT_PRIVATE MarkerBase {
static constexpr MarkingConfig Default() { return {}; }
CollectionType collection_type = CollectionType::kMajor;
const CollectionType collection_type = CollectionType::kMajor;
StackState stack_state = StackState::kMayContainHeapPointers;
MarkingType marking_type = MarkingType::kAtomic;
MarkingType marking_type = MarkingType::kIncremental;
};
virtual ~MarkerBase();
@ -59,16 +60,16 @@ class V8_EXPORT_PRIVATE MarkerBase {
// Initialize marking according to the given config. This method will
// trigger incremental/concurrent marking if needed.
void StartMarking(MarkingConfig config);
void StartMarking();
// Signals entering the atomic marking pause. The method
// - stops incremental/concurrent marking;
// - flushes back any in-construction worklists if needed;
// - Updates the MarkingConfig if the stack state has changed;
void EnterAtomicPause(MarkingConfig config);
void EnterAtomicPause(MarkingConfig::StackState);
// Makes marking progress.
virtual bool AdvanceMarkingWithDeadline(v8::base::TimeDelta);
bool AdvanceMarkingWithDeadline(v8::base::TimeDelta);
// Signals leaving the atomic marking pause. This method expects no more
// objects to be marked and merely updates marking states if needed.
@ -78,7 +79,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
// - EnterAtomicPause()
// - AdvanceMarkingWithDeadline()
// - LeaveAtomicPause()
void FinishMarking(MarkingConfig config);
void FinishMarking(MarkingConfig::StackState);
void ProcessWeakness();
@ -92,27 +93,57 @@ class V8_EXPORT_PRIVATE MarkerBase {
cppgc::Visitor& VisitorForTesting() { return visitor(); }
void ClearAllWorklistsForTesting();
bool IncrementalMarkingStepForTesting(MarkingConfig::StackState,
v8::base::TimeDelta);
class IncrementalMarkingTask final : public v8::Task {
public:
using Handle = SingleThreadedHandle;
explicit IncrementalMarkingTask(MarkerBase*);
static Handle Post(v8::TaskRunner*, MarkerBase*);
private:
void Run() final;
MarkerBase* const marker_;
// TODO(chromium:1056170): Change to CancelableTask.
Handle handle_;
};
protected:
explicit MarkerBase(HeapBase& heap);
MarkerBase(HeapBase&, cppgc::Platform*, MarkingConfig);
virtual cppgc::Visitor& visitor() = 0;
virtual ConservativeTracingVisitor& conservative_visitor() = 0;
virtual heap::base::StackVisitor& stack_visitor() = 0;
void VisitRoots();
bool ProcessWorklistsWithDeadline(v8::base::TimeDelta);
void VisitRoots(MarkingConfig::StackState);
void MarkNotFullyConstructedObjects();
void ScheduleIncrementalMarkingTask();
bool IncrementalMarkingStep(MarkingConfig::StackState, v8::base::TimeDelta);
HeapBase& heap_;
MarkingConfig config_ = MarkingConfig::Default();
cppgc::Platform* platform_;
std::shared_ptr<v8::TaskRunner> foreground_task_runner_;
IncrementalMarkingTask::Handle incremental_marking_handle_;
MarkingWorklists marking_worklists_;
MarkingState mutator_marking_state_;
bool is_marking_started_ = false;
};
class V8_EXPORT_PRIVATE Marker final : public MarkerBase {
public:
explicit Marker(HeapBase&);
Marker(HeapBase&, cppgc::Platform*, MarkingConfig = MarkingConfig::Default());
protected:
cppgc::Visitor& visitor() final { return marking_visitor_; }


@ -136,7 +136,7 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
// TODO(chromium:1056170): Add lazy sweep.
// 4. Complete sweeping.
raw_heap_->heap()->sweeper().Finish();
raw_heap_->heap()->sweeper().FinishIfRunning();
// 5. Add a new page to this heap.
auto* new_page = NormalPage::Create(page_backend_, space);


@ -499,9 +499,15 @@ class Sweeper::SweeperImpl final {
}
}
void Finish() {
void FinishIfRunning() {
if (!is_in_progress_) return;
Finish();
}
void Finish() {
DCHECK(is_in_progress_);
// First, call finalizers on the mutator thread.
SweepFinalizer finalizer(platform_);
finalizer.FinalizeHeap(&space_states_);
@ -600,7 +606,7 @@ Sweeper::Sweeper(RawHeap* heap, cppgc::Platform* platform,
Sweeper::~Sweeper() = default;
void Sweeper::Start(Config config) { impl_->Start(config); }
void Sweeper::Finish() { impl_->Finish(); }
void Sweeper::FinishIfRunning() { impl_->FinishIfRunning(); }
} // namespace internal
} // namespace cppgc


@ -30,7 +30,7 @@ class V8_EXPORT_PRIVATE Sweeper final {
// Sweeper::Start assumes the heap holds no linear allocation buffers.
void Start(Config);
void Finish();
void FinishIfRunning();
private:
class SweeperImpl;


@ -79,7 +79,7 @@ class ConcurrentSweeperTest : public testing::TestWithHeap {
void FinishSweeping() {
Heap* heap = Heap::From(GetHeap());
Sweeper& sweeper = heap->sweeper();
sweeper.Finish();
sweeper.FinishIfRunning();
}
const RawHeap& GetRawHeap() const {


@ -27,9 +27,9 @@ class MarkerTest : public testing::TestWithHeap {
const MarkingConfig config = {MarkingConfig::CollectionType::kMajor,
stack_state};
auto* heap = Heap::From(GetHeap());
Marker marker(heap->AsBase());
marker.StartMarking(config);
marker.FinishMarking(config);
Marker marker(*heap, GetPlatformHandle().get(), config);
marker.StartMarking();
marker.FinishMarking(stack_state);
marker.ProcessWeakness();
// Pretend do finish sweeping as StatsCollector verifies that Notify*
// methods are called in the right order.
@ -216,50 +216,157 @@ class GCedWithCallback : public GarbageCollected<GCedWithCallback> {
} // namespace
TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
Marker marker(Heap::From(GetHeap())->AsBase());
marker.StartMarking({MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kMayContainHeapPointers});
static const Marker::MarkingConfig config = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kMayContainHeapPointers};
Marker marker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
marker.StartMarking();
GCedWithCallback* object = MakeGarbageCollected<GCedWithCallback>(
GetAllocationHandle(), [&marker](GCedWithCallback* obj) {
Member<GCedWithCallback> member(obj);
marker.VisitorForTesting().Trace(member);
});
EXPECT_TRUE(HeapObjectHeader::FromPayload(object).IsMarked());
marker.FinishMarking({MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kMayContainHeapPointers});
marker.FinishMarking(MarkingConfig::StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromPayload(object).IsMarked());
}
TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) {
Marker marker(Heap::From(GetHeap())->AsBase());
static const Marker::MarkingConfig config = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kMayContainHeapPointers};
marker.StartMarking(config);
Marker marker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
marker.StartMarking();
MakeGarbageCollected<GCedWithCallback>(
GetAllocationHandle(), [&marker](GCedWithCallback* obj) {
Member<GCedWithCallback> member(obj);
marker.VisitorForTesting().Trace(member);
EXPECT_TRUE(HeapObjectHeader::FromPayload(obj).IsMarked());
marker.FinishMarking(config);
marker.FinishMarking(
MarkingConfig::StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromPayload(obj).IsMarked());
});
}
TEST_F(MarkerTest, SentinelNotClearedOnWeakPersistentHandling) {
Marker marker(Heap::From(GetHeap())->AsBase());
Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
auto* tmp = MakeGarbageCollected<GCed>(GetAllocationHandle());
root->SetWeakChild(tmp);
static const Marker::MarkingConfig config = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kNoHeapPointers};
marker.StartMarking(config);
marker.FinishMarking(config);
Marker marker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
auto* tmp = MakeGarbageCollected<GCed>(GetAllocationHandle());
root->SetWeakChild(tmp);
marker.StartMarking();
marker.FinishMarking(MarkingConfig::StackState::kNoHeapPointers);
root->SetWeakChild(kSentinelPointer);
marker.ProcessWeakness();
EXPECT_EQ(kSentinelPointer, root->weak_child());
}
// Incremental Marking
class IncrementalMarkingTest : public testing::TestWithHeap {
public:
using MarkingConfig = Marker::MarkingConfig;
static constexpr MarkingConfig IncrementalPreciseMarkingConfig = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kNoHeapPointers,
MarkingConfig::MarkingType::kIncremental};
static constexpr MarkingConfig IncrementalConservativeMarkingConfig = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kMayContainHeapPointers,
MarkingConfig::MarkingType::kIncremental};
void FinishSteps(Marker& marker, MarkingConfig::StackState stack_state) {
SingleStep(marker, stack_state, v8::base::TimeDelta::Max());
}
void FinishMarking(Marker& marker) {
marker.FinishMarking(MarkingConfig::StackState::kMayContainHeapPointers);
marker.ProcessWeakness();
// Pretend do finish sweeping as StatsCollector verifies that Notify*
// methods are called in the right order.
Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted();
}
private:
bool SingleStep(Marker& marker, MarkingConfig::StackState stack_state,
v8::base::TimeDelta deadline) {
return marker.IncrementalMarkingStepForTesting(stack_state, deadline);
}
};
constexpr IncrementalMarkingTest::MarkingConfig
IncrementalMarkingTest::IncrementalPreciseMarkingConfig;
constexpr IncrementalMarkingTest::MarkingConfig
IncrementalMarkingTest::IncrementalConservativeMarkingConfig;
TEST_F(IncrementalMarkingTest, RootIsMarkedAfterStartMarking) {
Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
EXPECT_FALSE(HeapObjectHeader::FromPayload(root).IsMarked());
Marker marker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
IncrementalPreciseMarkingConfig);
marker.StartMarking();
EXPECT_TRUE(HeapObjectHeader::FromPayload(root).IsMarked());
FinishMarking(marker);
}
TEST_F(IncrementalMarkingTest, MemberIsMarkedAfterMarkingSteps) {
Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
HeapObjectHeader& header = HeapObjectHeader::FromPayload(root->child());
EXPECT_FALSE(header.IsMarked());
Marker marker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
IncrementalPreciseMarkingConfig);
marker.StartMarking();
FinishSteps(marker, MarkingConfig::StackState::kNoHeapPointers);
EXPECT_TRUE(header.IsMarked());
FinishMarking(marker);
}
TEST_F(IncrementalMarkingTest,
MemberWithWriteBarrierIsMarkedAfterMarkingSteps) {
Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
Marker marker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
IncrementalPreciseMarkingConfig);
marker.StartMarking();
root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
HeapObjectHeader& header = HeapObjectHeader::FromPayload(root->child());
EXPECT_FALSE(header.IsMarked());
FinishSteps(marker, MarkingConfig::StackState::kNoHeapPointers);
EXPECT_TRUE(header.IsMarked());
FinishMarking(marker);
}
namespace {
class Holder : public GarbageCollected<Holder> {
public:
void Trace(Visitor* visitor) const { visitor->Trace(member_); }
Member<GCedWithCallback> member_;
};
} // namespace
TEST_F(IncrementalMarkingTest, IncrementalStepDuringAllocation) {
Persistent<Holder> holder =
MakeGarbageCollected<Holder>(GetAllocationHandle());
Marker marker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
IncrementalPreciseMarkingConfig);
marker.StartMarking();
const HeapObjectHeader* header;
MakeGarbageCollected<GCedWithCallback>(
GetAllocationHandle(),
[this, &holder, &header, &marker](GCedWithCallback* obj) {
header = &HeapObjectHeader::FromPayload(obj);
holder->member_ = obj;
EXPECT_FALSE(header->IsMarked());
FinishSteps(marker, MarkingConfig::StackState::kMayContainHeapPointers);
EXPECT_TRUE(header->IsMarked());
});
FinishSteps(marker, MarkingConfig::StackState::kNoHeapPointers);
EXPECT_TRUE(header->IsMarked());
FinishMarking(marker);
}
} // namespace internal
} // namespace cppgc


@ -23,7 +23,8 @@ namespace {
class MarkingVisitorTest : public testing::TestWithHeap {
public:
MarkingVisitorTest()
: marker_(std::make_unique<Marker>(Heap::From(GetHeap())->AsBase())) {}
: marker_(std::make_unique<Marker>(*Heap::From(GetHeap()),
GetPlatformHandle().get())) {}
~MarkingVisitorTest() override { marker_->ClearAllWorklistsForTesting(); }
Marker* GetMarker() { return marker_.get(); }


@ -49,7 +49,7 @@ class SweeperTest : public testing::TestWithHeap {
heap->stats_collector()->NotifyMarkingStarted();
heap->stats_collector()->NotifyMarkingCompleted(0);
sweeper.Start(Sweeper::Config::kAtomic);
sweeper.Finish();
sweeper.FinishIfRunning();
}
void MarkObject(void* payload) {


@ -22,19 +22,19 @@ namespace {
class IncrementalMarkingScope {
public:
explicit IncrementalMarkingScope(MarkerBase* marker) : marker_(marker) {
marker_->StartMarking(kIncrementalConfig);
marker_->StartMarking();
}
~IncrementalMarkingScope() V8_NOEXCEPT {
marker_->FinishMarking(kIncrementalConfig);
marker_->FinishMarking(kIncrementalConfig.stack_state);
}
private:
static constexpr Marker::MarkingConfig kIncrementalConfig{
Marker::MarkingConfig::CollectionType::kMajor,
Marker::MarkingConfig::StackState::kNoHeapPointers,
Marker::MarkingConfig::MarkingType::kIncremental};
private:
MarkerBase* marker_;
};
@ -149,7 +149,9 @@ class GCed : public GarbageCollected<GCed> {
class WriteBarrierTest : public testing::TestWithHeap {
public:
WriteBarrierTest() : internal_heap_(Heap::From(GetHeap())) {
GetMarkerRef() = std::make_unique<Marker>(internal_heap_->AsBase());
GetMarkerRef() =
std::make_unique<Marker>(*internal_heap_, GetPlatformHandle().get(),
IncrementalMarkingScope::kIncrementalConfig);
marker_ = GetMarkerRef().get();
}