cppgc: Move configs to heap-config.h
Change-Id: Ibaea8f237d3bbee983f763a178eda0f7ca97d419
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3911515
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83397}
parent f08547afd4
commit 2a24668a21
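The commit flattens the nested config types (GarbageCollector::Config, Marker::MarkingConfig, Heap::Config) into free-standing definitions in src/heap/cppgc/heap-config.h: CollectionType, StackState, MarkingConfig, SweepingConfig, and GCConfig. A minimal sketch of what a call site looks like after the move — illustrative only; the TriggerForcedGC helper and the heap reference are assumptions, not part of the commit:

    // Sketch against the definitions this commit adds to heap-config.h.
    #include "src/heap/cppgc/heap-config.h"
    #include "src/heap/cppgc/heap.h"

    // Hypothetical helper: starts from a named preset and adjusts individual
    // fields, as the object-allocator call sites in this diff do.
    void TriggerForcedGC(cppgc::internal::Heap& heap) {
      cppgc::internal::GCConfig config =
          cppgc::internal::GCConfig::PreciseAtomicConfig();
      config.free_memory_handling =
          cppgc::internal::GCConfig::FreeMemoryHandling::kDiscardWherePossible;
      config.is_forced_gc = cppgc::internal::GCConfig::IsForcedGC::kForced;
      heap.CollectGarbage(config);
    }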
@@ -286,7 +286,8 @@ class UnifiedHeapConservativeMarkingVisitor final
 class UnifiedHeapMarker final : public cppgc::internal::MarkerBase {
  public:
   UnifiedHeapMarker(Heap* v8_heap, cppgc::internal::HeapBase& cpp_heap,
-                    cppgc::Platform* platform, MarkingConfig config);
+                    cppgc::Platform* platform,
+                    cppgc::internal::MarkingConfig config);

   ~UnifiedHeapMarker() final = default;

@@ -324,7 +325,7 @@ class UnifiedHeapMarker final : public cppgc::internal::MarkerBase {
 UnifiedHeapMarker::UnifiedHeapMarker(Heap* v8_heap,
                                      cppgc::internal::HeapBase& heap,
                                      cppgc::Platform* platform,
-                                     MarkingConfig config)
+                                     cppgc::internal::MarkingConfig config)
     : cppgc::internal::MarkerBase(heap, platform, config),
       mutator_unified_heap_marking_state_(v8_heap, nullptr),
       marking_visitor_(config.collection_type == CppHeap::CollectionType::kMajor
@@ -625,11 +626,11 @@ void CppHeap::InitializeTracing(CollectionType collection_type,

   current_gc_flags_ = gc_flags;

-  const UnifiedHeapMarker::MarkingConfig marking_config{
+  const cppgc::internal::MarkingConfig marking_config{
       *collection_type_, StackState::kNoHeapPointers, SelectMarkingType(),
       IsForceGC(current_gc_flags_)
-          ? UnifiedHeapMarker::MarkingConfig::IsForcedGC::kForced
-          : UnifiedHeapMarker::MarkingConfig::IsForcedGC::kNotForced};
+          ? cppgc::internal::MarkingConfig::IsForcedGC::kForced
+          : cppgc::internal::MarkingConfig::IsForcedGC::kNotForced};
   DCHECK_IMPLIES(!isolate_,
                  (MarkingType::kAtomic == marking_config.marking_type) ||
                      force_incremental_marking_for_testing_);
@@ -1004,14 +1005,15 @@ CppHeap::PauseConcurrentMarkingScope::PauseConcurrentMarkingScope(
   }
 }

-void CppHeap::CollectGarbage(Config config) {
+void CppHeap::CollectGarbage(cppgc::internal::GCConfig config) {
   if (in_no_gc_scope() || !isolate_) return;

   // TODO(mlippautz): Respect full config.
-  const int flags = (config.free_memory_handling ==
-                     Config::FreeMemoryHandling::kDiscardWherePossible)
-                        ? Heap::kReduceMemoryFootprintMask
-                        : Heap::kNoGCFlags;
+  const int flags =
+      (config.free_memory_handling ==
+       cppgc::internal::GCConfig::FreeMemoryHandling::kDiscardWherePossible)
+          ? Heap::kReduceMemoryFootprintMask
+          : Heap::kNoGCFlags;
   isolate_->heap()->CollectAllGarbage(
       flags, GarbageCollectionReason::kCppHeapAllocationFailure);
 }
@@ -1020,7 +1022,9 @@ const cppgc::EmbedderStackState* CppHeap::override_stack_state() const {
   return HeapBase::override_stack_state();
 }

-void CppHeap::StartIncrementalGarbageCollection(Config) { UNIMPLEMENTED(); }
+void CppHeap::StartIncrementalGarbageCollection(cppgc::internal::GCConfig) {
+  UNIMPLEMENTED();
+}
 size_t CppHeap::epoch() const { UNIMPLEMENTED(); }

 }  // namespace internal
@@ -43,9 +43,8 @@ class V8_EXPORT_PRIVATE CppHeap final
   };

   using GarbageCollectionFlags = base::Flags<GarbageCollectionFlagValues>;
-  using StackState = cppgc::internal::GarbageCollector::Config::StackState;
-  using CollectionType =
-      cppgc::internal::GarbageCollector::Config::CollectionType;
+  using StackState = cppgc::internal::StackState;
+  using CollectionType = cppgc::internal::CollectionType;

   class MetricRecorderAdapter final : public cppgc::internal::MetricRecorder {
    public:
@@ -139,9 +138,7 @@ class V8_EXPORT_PRIVATE CppHeap final
   void FinishSweepingIfRunning();
   void FinishSweepingIfOutOfWork();

-  void InitializeTracing(
-      cppgc::internal::GarbageCollector::Config::CollectionType,
-      GarbageCollectionFlags);
+  void InitializeTracing(CollectionType, GarbageCollectionFlags);
   void StartTracing();
   bool AdvanceTracing(double max_duration);
   bool IsTracingDone();
@@ -168,9 +165,9 @@ class V8_EXPORT_PRIVATE CppHeap final
   std::unique_ptr<CppMarkingState> CreateCppMarkingStateForMutatorThread();

   // cppgc::internal::GarbageCollector interface.
-  void CollectGarbage(Config) override;
+  void CollectGarbage(cppgc::internal::GCConfig) override;
   const cppgc::EmbedderStackState* override_stack_state() const override;
-  void StartIncrementalGarbageCollection(Config) override;
+  void StartIncrementalGarbageCollection(cppgc::internal::GCConfig) override;
   size_t epoch() const override;

  private:
@@ -194,8 +191,7 @@ class V8_EXPORT_PRIVATE CppHeap final
   Isolate* isolate_ = nullptr;
   bool marking_done_ = false;
   // |collection_type_| is initialized when marking is in progress.
-  base::Optional<cppgc::internal::GarbageCollector::Config::CollectionType>
-      collection_type_;
+  base::Optional<CollectionType> collection_type_;
   GarbageCollectionFlags current_gc_flags_;

   // Buffered allocated bytes. Reporting allocated bytes to V8 can trigger a GC
@@ -57,7 +57,7 @@ class UnifiedHeapVerificationVisitor final : public JSVisitor {

 UnifiedHeapMarkingVerifier::UnifiedHeapMarkingVerifier(
     cppgc::internal::HeapBase& heap_base,
-    cppgc::internal::Heap::Config::CollectionType collection_type)
+    cppgc::internal::CollectionType collection_type)
     : MarkingVerifierBase(
           heap_base, collection_type, state_,
           std::make_unique<UnifiedHeapVerificationVisitor>(state_)) {}
@@ -14,7 +14,7 @@ class V8_EXPORT_PRIVATE UnifiedHeapMarkingVerifier final
     : public cppgc::internal::MarkingVerifierBase {
  public:
   UnifiedHeapMarkingVerifier(cppgc::internal::HeapBase&,
-                             cppgc::internal::Heap::Config::CollectionType);
+                             cppgc::internal::CollectionType);
   ~UnifiedHeapMarkingVerifier() final = default;

  private:
@@ -452,13 +452,11 @@ Compactor::Compactor(RawHeap& heap) : heap_(heap) {
   }
 }

-bool Compactor::ShouldCompact(
-    GarbageCollector::Config::MarkingType marking_type,
-    GarbageCollector::Config::StackState stack_state) const {
+bool Compactor::ShouldCompact(GCConfig::MarkingType marking_type,
+                              StackState stack_state) const {
   if (compactable_spaces_.empty() ||
-      (marking_type == GarbageCollector::Config::MarkingType::kAtomic &&
-       stack_state ==
-           GarbageCollector::Config::StackState::kMayContainHeapPointers)) {
+      (marking_type == GCConfig::MarkingType::kAtomic &&
+       stack_state == StackState::kMayContainHeapPointers)) {
     // The following check ensures that tests that want to test compaction are
     // not interrupted by garbage collections that cannot use compaction.
     DCHECK(!enable_for_next_gc_for_testing_);
@@ -474,9 +472,8 @@ bool Compactor::ShouldCompact(
   return free_list_size > kFreeListSizeThreshold;
 }

-void Compactor::InitializeIfShouldCompact(
-    GarbageCollector::Config::MarkingType marking_type,
-    GarbageCollector::Config::StackState stack_state) {
+void Compactor::InitializeIfShouldCompact(GCConfig::MarkingType marking_type,
+                                          StackState stack_state) {
   DCHECK(!is_enabled_);

   if (!ShouldCompact(marking_type, stack_state)) return;
@@ -487,9 +484,8 @@ void Compactor::InitializeIfShouldCompact(
   is_cancelled_ = false;
 }

-void Compactor::CancelIfShouldNotCompact(
-    GarbageCollector::Config::MarkingType marking_type,
-    GarbageCollector::Config::StackState stack_state) {
+void Compactor::CancelIfShouldNotCompact(GCConfig::MarkingType marking_type,
+                                         StackState stack_state) {
   if (!is_enabled_ || ShouldCompact(marking_type, stack_state)) return;

   is_cancelled_ = true;
@@ -12,6 +12,8 @@
 namespace cppgc {
 namespace internal {

+class NormalPageSpace;
+
 class V8_EXPORT_PRIVATE Compactor final {
   using CompactableSpaceHandling = SweepingConfig::CompactableSpaceHandling;

@@ -22,10 +24,8 @@ class V8_EXPORT_PRIVATE Compactor final {
   Compactor(const Compactor&) = delete;
   Compactor& operator=(const Compactor&) = delete;

-  void InitializeIfShouldCompact(GarbageCollector::Config::MarkingType,
-                                 GarbageCollector::Config::StackState);
-  void CancelIfShouldNotCompact(GarbageCollector::Config::MarkingType,
-                                GarbageCollector::Config::StackState);
+  void InitializeIfShouldCompact(GCConfig::MarkingType, StackState);
+  void CancelIfShouldNotCompact(GCConfig::MarkingType, StackState);
   // Returns whether spaces need to be processed by the Sweeper after
   // compaction.
   CompactableSpaceHandling CompactSpacesIfEnabled();
@@ -38,8 +38,7 @@ class V8_EXPORT_PRIVATE Compactor final {
   bool IsEnabledForTesting() const { return is_enabled_; }

  private:
-  bool ShouldCompact(GarbageCollector::Config::MarkingType,
-                     GarbageCollector::Config::StackState) const;
+  bool ShouldCompact(GCConfig::MarkingType, StackState) const;

   RawHeap& heap_;
   // Compactor does not own the compactable spaces. The heap owns all spaces.
@@ -7,7 +7,6 @@

 #include "include/cppgc/common.h"
 #include "src/heap/cppgc/heap-config.h"
-#include "src/heap/cppgc/marker.h"

 namespace cppgc {
 namespace internal {
@@ -16,62 +15,9 @@ namespace internal {
 // needed to mock/fake GC for testing.
 class GarbageCollector {
  public:
-  struct Config {
-    using CollectionType = Marker::MarkingConfig::CollectionType;
-    using StackState = cppgc::Heap::StackState;
-    using MarkingType = Marker::MarkingConfig::MarkingType;
-    using SweepingType = SweepingConfig::SweepingType;
-    using FreeMemoryHandling = SweepingConfig::FreeMemoryHandling;
-    using IsForcedGC = Marker::MarkingConfig::IsForcedGC;
-
-    static constexpr Config ConservativeAtomicConfig() {
-      return {CollectionType::kMajor, StackState::kMayContainHeapPointers,
-              MarkingType::kAtomic, SweepingType::kAtomic};
-    }
-
-    static constexpr Config PreciseAtomicConfig() {
-      return {CollectionType::kMajor, StackState::kNoHeapPointers,
-              MarkingType::kAtomic, SweepingType::kAtomic};
-    }
-
-    static constexpr Config ConservativeIncrementalConfig() {
-      return {CollectionType::kMajor, StackState::kMayContainHeapPointers,
-              MarkingType::kIncremental, SweepingType::kAtomic};
-    }
-
-    static constexpr Config PreciseIncrementalConfig() {
-      return {CollectionType::kMajor, StackState::kNoHeapPointers,
-              MarkingType::kIncremental, SweepingType::kAtomic};
-    }
-
-    static constexpr Config
-    PreciseIncrementalMarkingConcurrentSweepingConfig() {
-      return {CollectionType::kMajor, StackState::kNoHeapPointers,
-              MarkingType::kIncremental,
-              SweepingType::kIncrementalAndConcurrent};
-    }
-
-    static constexpr Config MinorPreciseAtomicConfig() {
-      return {CollectionType::kMinor, StackState::kNoHeapPointers,
-              MarkingType::kAtomic, SweepingType::kAtomic};
-    }
-
-    static constexpr Config MinorConservativeAtomicConfig() {
-      return {CollectionType::kMinor, StackState::kMayContainHeapPointers,
-              MarkingType::kAtomic, SweepingType::kAtomic};
-    }
-
-    CollectionType collection_type = CollectionType::kMajor;
-    StackState stack_state = StackState::kMayContainHeapPointers;
-    MarkingType marking_type = MarkingType::kAtomic;
-    SweepingType sweeping_type = SweepingType::kAtomic;
-    FreeMemoryHandling free_memory_handling = FreeMemoryHandling::kDoNotDiscard;
-    IsForcedGC is_forced_gc = IsForcedGC::kNotForced;
-  };
-
   // Executes a garbage collection specified in config.
-  virtual void CollectGarbage(Config) = 0;
-  virtual void StartIncrementalGarbageCollection(Config) = 0;
+  virtual void CollectGarbage(GCConfig) = 0;
+  virtual void StartIncrementalGarbageCollection(GCConfig) = 0;

   // The current epoch that the GC maintains. The epoch is increased on every
   // GC invocation.
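With GarbageCollector::Config removed, existing callers only swap the nested spelling for the flat one; the preset names keep their signatures. A one-line migration sketch (the invoker object here stands in for any GarbageCollector user, as in the unit tests at the end of this diff):

    // Before this commit:
    //   invoker.CollectGarbage(GarbageCollector::Config::PreciseAtomicConfig());
    // After this commit:
    invoker.CollectGarbage(cppgc::internal::GCConfig::PreciseAtomicConfig());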
@@ -8,7 +8,6 @@

 #include "include/cppgc/common.h"
 #include "include/cppgc/platform.h"
-#include "src/heap/cppgc/heap.h"
 #include "src/heap/cppgc/task-handle.h"

 namespace cppgc {
@@ -22,8 +21,8 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector {
   GCInvokerImpl(const GCInvokerImpl&) = delete;
   GCInvokerImpl& operator=(const GCInvokerImpl&) = delete;

-  void CollectGarbage(GarbageCollector::Config) final;
-  void StartIncrementalGarbageCollection(GarbageCollector::Config) final;
+  void CollectGarbage(GCConfig) final;
+  void StartIncrementalGarbageCollection(GCConfig) final;
   size_t epoch() const final { return collector_->epoch(); }
   const EmbedderStackState* override_stack_state() const final {
     return collector_->override_stack_state();
@@ -35,7 +34,7 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector {
   using Handle = SingleThreadedHandle;

   static Handle Post(GarbageCollector* collector, cppgc::TaskRunner* runner,
-                     GarbageCollector::Config config) {
+                     GCConfig config) {
     auto task =
         std::make_unique<GCInvoker::GCInvokerImpl::GCTask>(collector, config);
     auto handle = task->GetHandle();
@@ -43,8 +42,7 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector {
     return handle;
   }

-  explicit GCTask(GarbageCollector* collector,
-                  GarbageCollector::Config config)
+  explicit GCTask(GarbageCollector* collector, GCConfig config)
       : collector_(collector),
         config_(config),
         handle_(Handle::NonEmptyTag{}),
@@ -63,7 +61,7 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector {
   Handle GetHandle() { return handle_; }

   GarbageCollector* collector_;
-  GarbageCollector::Config config_;
+  GCConfig config_;
   Handle handle_;
   size_t saved_epoch_;
 };
@@ -87,10 +85,9 @@ GCInvoker::GCInvokerImpl::~GCInvokerImpl() {
   }
 }

-void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) {
+void GCInvoker::GCInvokerImpl::CollectGarbage(GCConfig config) {
   DCHECK_EQ(config.marking_type, cppgc::Heap::MarkingType::kAtomic);
-  if ((config.stack_state ==
-       GarbageCollector::Config::StackState::kNoHeapPointers) ||
+  if ((config.stack_state == StackState::kNoHeapPointers) ||
       (stack_support_ ==
        cppgc::Heap::StackSupport::kSupportsConservativeStackScan)) {
     collector_->CollectGarbage(config);
@@ -98,8 +95,7 @@ void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) {
              platform_->GetForegroundTaskRunner()->NonNestableTasksEnabled()) {
     if (!gc_task_handle_) {
       // Force a precise GC since it will run in a non-nestable task.
-      config.stack_state =
-          GarbageCollector::Config::StackState::kNoHeapPointers;
+      config.stack_state = StackState::kNoHeapPointers;
       DCHECK_NE(cppgc::Heap::StackSupport::kSupportsConservativeStackScan,
                 stack_support_);
       gc_task_handle_ = GCTask::Post(
@@ -109,7 +105,7 @@ void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) {
 }

 void GCInvoker::GCInvokerImpl::StartIncrementalGarbageCollection(
-    GarbageCollector::Config config) {
+    GCConfig config) {
   DCHECK_NE(config.marking_type, cppgc::Heap::MarkingType::kAtomic);
   if ((stack_support_ !=
        cppgc::Heap::StackSupport::kSupportsConservativeStackScan) &&
@@ -134,12 +130,11 @@ GCInvoker::GCInvoker(GarbageCollector* collector, cppgc::Platform* platform,

 GCInvoker::~GCInvoker() = default;

-void GCInvoker::CollectGarbage(GarbageCollector::Config config) {
+void GCInvoker::CollectGarbage(GCConfig config) {
   impl_->CollectGarbage(config);
 }

-void GCInvoker::StartIncrementalGarbageCollection(
-    GarbageCollector::Config config) {
+void GCInvoker::StartIncrementalGarbageCollection(GCConfig config) {
   impl_->StartIncrementalGarbageCollection(config);
 }

@@ -34,8 +34,8 @@ class V8_EXPORT_PRIVATE GCInvoker final : public GarbageCollector {
   GCInvoker(const GCInvoker&) = delete;
   GCInvoker& operator=(const GCInvoker&) = delete;

-  void CollectGarbage(GarbageCollector::Config) final;
-  void StartIncrementalGarbageCollection(GarbageCollector::Config) final;
+  void CollectGarbage(GCConfig) final;
+  void StartIncrementalGarbageCollection(GCConfig) final;
   size_t epoch() const final;
   const EmbedderStackState* override_stack_state() const final;

@@ -250,10 +250,9 @@ void HeapBase::Terminate() {
 #endif  // defined(CPPGC_YOUNG_GENERATION)

   in_atomic_pause_ = true;
-  stats_collector()->NotifyMarkingStarted(
-      GarbageCollector::Config::CollectionType::kMajor,
-      GarbageCollector::Config::MarkingType::kAtomic,
-      GarbageCollector::Config::IsForcedGC::kForced);
+  stats_collector()->NotifyMarkingStarted(CollectionType::kMajor,
+                                          GCConfig::MarkingType::kAtomic,
+                                          GCConfig::IsForcedGC::kForced);
   object_allocator().ResetLinearAllocationBuffers();
   stats_collector()->NotifyMarkingCompleted(0);
   ExecutePreFinalizers();
@@ -1,4 +1,4 @@
-// Copyright 2020 the V8 project authors. All rights reserved.
+// Copyright 2022 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

@@ -9,6 +9,28 @@

 namespace cppgc::internal {

+using StackState = cppgc::Heap::StackState;
+
+enum class CollectionType : uint8_t {
+  kMinor,
+  kMajor,
+};
+
+struct MarkingConfig {
+  using MarkingType = cppgc::Heap::MarkingType;
+  enum class IsForcedGC : uint8_t {
+    kNotForced,
+    kForced,
+  };
+
+  static constexpr MarkingConfig Default() { return {}; }
+
+  const CollectionType collection_type = CollectionType::kMajor;
+  StackState stack_state = StackState::kMayContainHeapPointers;
+  MarkingType marking_type = MarkingType::kIncremental;
+  IsForcedGC is_forced_gc = IsForcedGC::kNotForced;
+};
+
 struct SweepingConfig {
   using SweepingType = cppgc::Heap::SweepingType;
   enum class CompactableSpaceHandling { kSweep, kIgnore };
@@ -20,6 +42,62 @@ struct SweepingConfig {
   FreeMemoryHandling free_memory_handling = FreeMemoryHandling::kDoNotDiscard;
 };

+struct GCConfig {
+  using MarkingType = MarkingConfig::MarkingType;
+  using SweepingType = SweepingConfig::SweepingType;
+  using FreeMemoryHandling = SweepingConfig::FreeMemoryHandling;
+  using IsForcedGC = MarkingConfig::IsForcedGC;
+
+  static constexpr GCConfig ConservativeAtomicConfig() {
+    return {CollectionType::kMajor, StackState::kMayContainHeapPointers,
+            MarkingType::kAtomic, SweepingType::kAtomic};
+  }
+
+  static constexpr GCConfig PreciseAtomicConfig() {
+    return {CollectionType::kMajor, StackState::kNoHeapPointers,
+            MarkingType::kAtomic, SweepingType::kAtomic};
+  }
+
+  static constexpr GCConfig ConservativeIncrementalConfig() {
+    return {CollectionType::kMajor, StackState::kMayContainHeapPointers,
+            MarkingType::kIncremental, SweepingType::kAtomic};
+  }
+
+  static constexpr GCConfig PreciseIncrementalConfig() {
+    return {CollectionType::kMajor, StackState::kNoHeapPointers,
+            MarkingType::kIncremental, SweepingType::kAtomic};
+  }
+
+  static constexpr GCConfig
+  PreciseIncrementalMarkingConcurrentSweepingConfig() {
+    return {CollectionType::kMajor, StackState::kNoHeapPointers,
+            MarkingType::kIncremental, SweepingType::kIncrementalAndConcurrent};
+  }
+
+  static constexpr GCConfig PreciseConcurrentConfig() {
+    return {CollectionType::kMajor, StackState::kNoHeapPointers,
+            MarkingType::kIncrementalAndConcurrent,
+            SweepingType::kIncrementalAndConcurrent};
+  }
+
+  static constexpr GCConfig MinorPreciseAtomicConfig() {
+    return {CollectionType::kMinor, StackState::kNoHeapPointers,
+            MarkingType::kAtomic, SweepingType::kAtomic};
+  }
+
+  static constexpr GCConfig MinorConservativeAtomicConfig() {
+    return {CollectionType::kMinor, StackState::kMayContainHeapPointers,
+            MarkingType::kAtomic, SweepingType::kAtomic};
+  }
+
+  CollectionType collection_type = CollectionType::kMajor;
+  StackState stack_state = StackState::kMayContainHeapPointers;
+  MarkingType marking_type = MarkingType::kAtomic;
+  SweepingType sweeping_type = SweepingType::kAtomic;
+  FreeMemoryHandling free_memory_handling = FreeMemoryHandling::kDoNotDiscard;
+  IsForcedGC is_forced_gc = IsForcedGC::kNotForced;
+};
+
 }  // namespace cppgc::internal

 #endif  // V8_HEAP_CPPGC_HEAP_CONFIG_H_
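The new header also keeps MarkingConfig usable on its own via aggregate initialization, which is how Heap::StartGarbageCollection builds it later in this diff. An illustrative snippet (not part of the commit):

    // Field order: collection_type, stack_state, marking_type, is_forced_gc.
    const cppgc::internal::MarkingConfig marking_config{
        cppgc::internal::CollectionType::kMajor,
        cppgc::internal::StackState::kNoHeapPointers,
        cppgc::internal::MarkingConfig::MarkingType::kIncremental};
    // Omitted trailing fields keep their defaults (IsForcedGC::kNotForced).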
@@ -93,14 +93,12 @@ void HeapGrowing::HeapGrowingImpl::AllocatedObjectSizeIncreased(size_t) {
   size_t allocated_object_size = stats_collector_->allocated_object_size();
   if (allocated_object_size > limit_for_atomic_gc_) {
     collector_->CollectGarbage(
-        {GarbageCollector::Config::CollectionType::kMajor,
-         GarbageCollector::Config::StackState::kMayContainHeapPointers,
-         GarbageCollector::Config::MarkingType::kAtomic, sweeping_support_});
+        {CollectionType::kMajor, StackState::kMayContainHeapPointers,
+         GCConfig::MarkingType::kAtomic, sweeping_support_});
   } else if (allocated_object_size > limit_for_incremental_gc_) {
     if (marking_support_ == cppgc::Heap::MarkingType::kAtomic) return;
     collector_->StartIncrementalGarbageCollection(
-        {GarbageCollector::Config::CollectionType::kMajor,
-         GarbageCollector::Config::StackState::kMayContainHeapPointers,
+        {CollectionType::kMajor, StackState::kMayContainHeapPointers,
          marking_support_, sweeping_support_});
   }
 }
@@ -45,11 +45,10 @@ std::unique_ptr<Heap> Heap::Create(std::shared_ptr<cppgc::Platform> platform,
 void Heap::ForceGarbageCollectionSlow(const char* source, const char* reason,
                                       Heap::StackState stack_state) {
   internal::Heap::From(this)->CollectGarbage(
-      {internal::GarbageCollector::Config::CollectionType::kMajor, stack_state,
-       MarkingType::kAtomic, SweepingType::kAtomic,
-       internal::GarbageCollector::Config::FreeMemoryHandling::
-           kDiscardWherePossible,
-       internal::GarbageCollector::Config::IsForcedGC::kForced});
+      {internal::CollectionType::kMajor, stack_state, MarkingType::kAtomic,
+       SweepingType::kAtomic,
+       internal::GCConfig::FreeMemoryHandling::kDiscardWherePossible,
+       internal::GCConfig::IsForcedGC::kForced});
 }

 AllocationHandle& Heap::GetAllocationHandle() {
@@ -62,12 +61,11 @@ namespace internal {

 namespace {

-void CheckConfig(Heap::Config config, HeapBase::MarkingType marking_support,
+void CheckConfig(GCConfig config, HeapBase::MarkingType marking_support,
                  HeapBase::SweepingType sweeping_support) {
-  CHECK_WITH_MSG(
-      (config.collection_type != Heap::Config::CollectionType::kMinor) ||
-          (config.stack_state == Heap::Config::StackState::kNoHeapPointers),
-      "Minor GCs with stack is currently not supported");
+  CHECK_WITH_MSG((config.collection_type != CollectionType::kMinor) ||
+                     (config.stack_state == StackState::kNoHeapPointers),
+                 "Minor GCs with stack is currently not supported");
   CHECK_LE(static_cast<int>(config.marking_type),
            static_cast<int>(marking_support));
   CHECK_LE(static_cast<int>(config.sweeping_type),
@@ -94,17 +92,16 @@ Heap::~Heap() {
   // Gracefully finish already running GC if any, but don't finalize live
   // objects.
   FinalizeIncrementalGarbageCollectionIfRunning(
-      {Config::CollectionType::kMajor,
-       Config::StackState::kMayContainHeapPointers,
-       Config::MarkingType::kAtomic, Config::SweepingType::kAtomic});
+      {CollectionType::kMajor, StackState::kMayContainHeapPointers,
+       GCConfig::MarkingType::kAtomic, GCConfig::SweepingType::kAtomic});
   {
     subtle::NoGarbageCollectionScope no_gc(*this);
     sweeper_.FinishIfRunning();
   }
 }

-void Heap::CollectGarbage(Config config) {
-  DCHECK_EQ(Config::MarkingType::kAtomic, config.marking_type);
+void Heap::CollectGarbage(GCConfig config) {
+  DCHECK_EQ(GCConfig::MarkingType::kAtomic, config.marking_type);
   CheckConfig(config, marking_support_, sweeping_support_);

   if (in_no_gc_scope()) return;
@@ -118,9 +115,9 @@ void Heap::CollectGarbage(Config config) {
   FinalizeGarbageCollection(config.stack_state);
 }

-void Heap::StartIncrementalGarbageCollection(Config config) {
-  DCHECK_NE(Config::MarkingType::kAtomic, config.marking_type);
-  DCHECK_NE(marking_support_, Config::MarkingType::kAtomic);
+void Heap::StartIncrementalGarbageCollection(GCConfig config) {
+  DCHECK_NE(GCConfig::MarkingType::kAtomic, config.marking_type);
+  DCHECK_NE(marking_support_, GCConfig::MarkingType::kAtomic);
   CheckConfig(config, marking_support_, sweeping_support_);

   if (IsMarking() || in_no_gc_scope()) return;
@@ -130,19 +127,19 @@ void Heap::StartIncrementalGarbageCollection(Config config) {
   StartGarbageCollection(config);
 }

-void Heap::FinalizeIncrementalGarbageCollectionIfRunning(Config config) {
+void Heap::FinalizeIncrementalGarbageCollectionIfRunning(GCConfig config) {
   CheckConfig(config, marking_support_, sweeping_support_);

   if (!IsMarking()) return;

   DCHECK(!in_no_gc_scope());

-  DCHECK_NE(Config::MarkingType::kAtomic, config_.marking_type);
+  DCHECK_NE(GCConfig::MarkingType::kAtomic, config_.marking_type);
   config_ = config;
   FinalizeGarbageCollection(config.stack_state);
 }

-void Heap::StartGarbageCollection(Config config) {
+void Heap::StartGarbageCollection(GCConfig config) {
   DCHECK(!IsMarking());
   DCHECK(!in_no_gc_scope());

@@ -152,18 +149,17 @@ void Heap::StartGarbageCollection(Config config) {
   epoch_++;

 #if defined(CPPGC_YOUNG_GENERATION)
-  if (config.collection_type == Config::CollectionType::kMajor)
+  if (config.collection_type == CollectionType::kMajor)
     SequentialUnmarker unmarker(raw_heap());
 #endif  // defined(CPPGC_YOUNG_GENERATION)

-  const Marker::MarkingConfig marking_config{
-      config.collection_type, config.stack_state, config.marking_type,
-      config.is_forced_gc};
+  const MarkingConfig marking_config{config.collection_type, config.stack_state,
+                                     config.marking_type, config.is_forced_gc};
   marker_ = std::make_unique<Marker>(AsBase(), platform_.get(), marking_config);
   marker_->StartMarking();
 }

-void Heap::FinalizeGarbageCollection(Config::StackState stack_state) {
+void Heap::FinalizeGarbageCollection(StackState stack_state) {
   DCHECK(IsMarking());
   DCHECK(!in_no_gc_scope());
   CHECK(!in_disallow_gc_scope());
@@ -220,7 +216,7 @@ void Heap::EnableGenerationalGC() {
 void Heap::DisableHeapGrowingForTesting() { growing_.DisableForTesting(); }

 void Heap::FinalizeIncrementalGarbageCollectionIfNeeded(
-    Config::StackState stack_state) {
+    StackState stack_state) {
   StatsCollector::EnabledScope stats_scope(
       stats_collector(), StatsCollector::kMarkIncrementalFinalize);
   FinalizeGarbageCollection(stack_state);
@@ -229,10 +225,9 @@ void Heap::FinalizeIncrementalGarbageCollectionIfNeeded(
 void Heap::StartIncrementalGarbageCollectionForTesting() {
   DCHECK(!IsMarking());
   DCHECK(!in_no_gc_scope());
-  StartGarbageCollection({Config::CollectionType::kMajor,
-                          Config::StackState::kNoHeapPointers,
-                          Config::MarkingType::kIncrementalAndConcurrent,
-                          Config::SweepingType::kIncrementalAndConcurrent});
+  StartGarbageCollection({CollectionType::kMajor, StackState::kNoHeapPointers,
+                          GCConfig::MarkingType::kIncrementalAndConcurrent,
+                          GCConfig::SweepingType::kIncrementalAndConcurrent});
 }

 void Heap::FinalizeIncrementalGarbageCollectionForTesting(
@@ -32,9 +32,9 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase,
   HeapBase& AsBase() { return *this; }
   const HeapBase& AsBase() const { return *this; }

-  void CollectGarbage(Config) final;
-  void StartIncrementalGarbageCollection(Config) final;
-  void FinalizeIncrementalGarbageCollectionIfRunning(Config);
+  void CollectGarbage(GCConfig) final;
+  void StartIncrementalGarbageCollection(GCConfig) final;
+  void FinalizeIncrementalGarbageCollectionIfRunning(GCConfig);

   size_t epoch() const final { return epoch_; }
   const EmbedderStackState* override_stack_state() const final {
@@ -46,15 +46,15 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase,
   void DisableHeapGrowingForTesting();

  private:
-  void StartGarbageCollection(Config);
-  void FinalizeGarbageCollection(Config::StackState);
+  void StartGarbageCollection(GCConfig);
+  void FinalizeGarbageCollection(StackState);

-  void FinalizeIncrementalGarbageCollectionIfNeeded(Config::StackState) final;
+  void FinalizeIncrementalGarbageCollectionIfNeeded(StackState) final;

   void StartIncrementalGarbageCollectionForTesting() final;
   void FinalizeIncrementalGarbageCollectionForTesting(EmbedderStackState) final;

-  Config config_;
+  GCConfig config_;
   GCInvoker gc_invoker_;
   HeapGrowing growing_;
   bool generational_gc_enabled_ = false;
@@ -32,11 +32,10 @@ namespace internal {

 namespace {

-bool EnterIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
-                                     HeapBase& heap) {
-  if (config.marking_type == Marker::MarkingConfig::MarkingType::kIncremental ||
+bool EnterIncrementalMarkingIfNeeded(MarkingConfig config, HeapBase& heap) {
+  if (config.marking_type == MarkingConfig::MarkingType::kIncremental ||
       config.marking_type ==
-          Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
+          MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
     WriteBarrier::FlagUpdater::Enter();
     heap.set_incremental_marking_in_progress(true);
     return true;
@@ -44,11 +43,10 @@ bool EnterIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
   return false;
 }

-bool ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
-                                    HeapBase& heap) {
-  if (config.marking_type == Marker::MarkingConfig::MarkingType::kIncremental ||
+bool ExitIncrementalMarkingIfNeeded(MarkingConfig config, HeapBase& heap) {
+  if (config.marking_type == MarkingConfig::MarkingType::kIncremental ||
       config.marking_type ==
-          Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
+          MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
     WriteBarrier::FlagUpdater::Exit();
     heap.set_incremental_marking_in_progress(false);
     return true;
@@ -87,7 +85,7 @@ class MarkerBase::IncrementalMarkingTask final : public cppgc::Task {
  public:
   using Handle = SingleThreadedHandle;

-  IncrementalMarkingTask(MarkerBase*, MarkingConfig::StackState);
+  IncrementalMarkingTask(MarkerBase*, StackState);

   static Handle Post(cppgc::TaskRunner*, MarkerBase*);

@@ -95,13 +93,13 @@ class MarkerBase::IncrementalMarkingTask final : public cppgc::Task {
   void Run() final;

   MarkerBase* const marker_;
-  MarkingConfig::StackState stack_state_;
+  StackState stack_state_;
   // TODO(chromium:1056170): Change to CancelableTask.
   Handle handle_;
 };

 MarkerBase::IncrementalMarkingTask::IncrementalMarkingTask(
-    MarkerBase* marker, MarkingConfig::StackState stack_state)
+    MarkerBase* marker, StackState stack_state)
     : marker_(marker),
       stack_state_(stack_state),
       handle_(Handle::NonEmptyTag{}) {}
@@ -117,10 +115,9 @@ MarkerBase::IncrementalMarkingTask::Post(cppgc::TaskRunner* runner,
   DCHECK_IMPLIES(marker->heap().stack_support() !=
                      HeapBase::StackSupport::kSupportsConservativeStackScan,
                  runner->NonNestableTasksEnabled());
-  MarkingConfig::StackState stack_state_for_task =
-      runner->NonNestableTasksEnabled()
-          ? MarkingConfig::StackState::kNoHeapPointers
-          : MarkingConfig::StackState::kMayContainHeapPointers;
+  const auto stack_state_for_task = runner->NonNestableTasksEnabled()
+                                        ? StackState::kNoHeapPointers
+                                        : StackState::kMayContainHeapPointers;
   auto task =
       std::make_unique<IncrementalMarkingTask>(marker, stack_state_for_task);
   auto handle = task->handle_;
@@ -152,9 +149,8 @@ MarkerBase::MarkerBase(HeapBase& heap, cppgc::Platform* platform,
       foreground_task_runner_(platform_->GetForegroundTaskRunner()),
       mutator_marking_state_(heap, marking_worklists_,
                              heap.compactor().compaction_worklists()) {
-  DCHECK_IMPLIES(
-      config_.collection_type == MarkingConfig::CollectionType::kMinor,
-      heap_.generational_gc_supported());
+  DCHECK_IMPLIES(config_.collection_type == CollectionType::kMinor,
+                 heap_.generational_gc_supported());
 }

 MarkerBase::~MarkerBase() {
@@ -163,7 +159,7 @@ MarkerBase::~MarkerBase() {
   // and should thus already be marked.
   if (!marking_worklists_.not_fully_constructed_worklist()->IsEmpty()) {
 #if DEBUG
-    DCHECK_NE(MarkingConfig::StackState::kNoHeapPointers, config_.stack_state);
+    DCHECK_NE(StackState::kNoHeapPointers, config_.stack_state);
     std::unordered_set<HeapObjectHeader*> objects =
         mutator_marking_state_.not_fully_constructed_worklist().Extract();
     for (HeapObjectHeader* object : objects) DCHECK(object->IsMarked());
@@ -229,7 +225,7 @@ void MarkerBase::StartMarking() {
     // Performing incremental or concurrent marking.
     schedule_.NotifyIncrementalMarkingStart();
     // Scanning the stack is expensive so we only do it at the atomic pause.
-    VisitRoots(MarkingConfig::StackState::kNoHeapPointers);
+    VisitRoots(StackState::kNoHeapPointers);
     ScheduleIncrementalMarkingTask();
     if (config_.marking_type ==
         MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
@@ -244,14 +240,14 @@ void MarkerBase::StartMarking() {
 }

 void MarkerBase::HandleNotFullyConstructedObjects() {
-  if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
+  if (config_.stack_state == StackState::kNoHeapPointers) {
     mutator_marking_state_.FlushNotFullyConstructedObjects();
   } else {
     MarkNotFullyConstructedObjects();
   }
 }

-void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) {
+void MarkerBase::EnterAtomicPause(StackState stack_state) {
   StatsCollector::EnabledScope top_stats_scope(heap().stats_collector(),
                                                StatsCollector::kAtomicMark);
   StatsCollector::EnabledScope stats_scope(heap().stats_collector(),
@@ -310,7 +306,7 @@ void MarkerBase::LeaveAtomicPause() {
     heap().SetStackStateOfPrevGC(config_.stack_state);
 }

-void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) {
+void MarkerBase::FinishMarking(StackState stack_state) {
   DCHECK(is_marking_);
   EnterAtomicPause(stack_state);
   {
@@ -383,7 +379,7 @@ void MarkerBase::ProcessWeakness() {
 #if defined(CPPGC_YOUNG_GENERATION)
   if (heap().generational_gc_supported()) {
     auto& remembered_set = heap().remembered_set();
-    if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
+    if (config_.collection_type == CollectionType::kMinor) {
       // Custom callbacks assume that untraced pointers point to not yet freed
       // objects. They must make sure that upon callback completion no
       // UntracedMember points to a freed object. This may not hold true if a
@@ -425,7 +421,7 @@ void MarkerBase::ProcessWeakness() {
   DCHECK(marking_worklists_.marking_worklist()->IsEmpty());
 }

-void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) {
+void MarkerBase::VisitRoots(StackState stack_state) {
   StatsCollector::EnabledScope stats_scope(heap().stats_collector(),
                                            StatsCollector::kMarkVisitRoots);

@@ -442,13 +438,13 @@ void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) {
     }
   }

-  if (stack_state != MarkingConfig::StackState::kNoHeapPointers) {
+  if (stack_state != StackState::kNoHeapPointers) {
     StatsCollector::DisabledScope stack_stats_scope(
         heap().stats_collector(), StatsCollector::kMarkVisitStack);
     heap().stack()->IteratePointers(&stack_visitor());
   }
 #if defined(CPPGC_YOUNG_GENERATION)
-  if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
+  if (config_.collection_type == CollectionType::kMinor) {
     StatsCollector::EnabledScope stats_scope(
         heap().stats_collector(), StatsCollector::kMarkVisitRememberedSets);
     heap().remembered_set().Visit(visitor(), mutator_marking_state_);
@@ -482,13 +478,12 @@ void MarkerBase::ScheduleIncrementalMarkingTask() {
   IncrementalMarkingTask::Post(foreground_task_runner_.get(), this);
 }

-bool MarkerBase::IncrementalMarkingStepForTesting(
-    MarkingConfig::StackState stack_state) {
+bool MarkerBase::IncrementalMarkingStepForTesting(StackState stack_state) {
   return IncrementalMarkingStep(stack_state);
 }

-bool MarkerBase::IncrementalMarkingStep(MarkingConfig::StackState stack_state) {
-  if (stack_state == MarkingConfig::StackState::kNoHeapPointers) {
+bool MarkerBase::IncrementalMarkingStep(StackState stack_state) {
+  if (stack_state == StackState::kNoHeapPointers) {
     mutator_marking_state_.FlushNotFullyConstructedObjects();
   }
   config_.stack_state = stack_state;
@@ -15,6 +15,7 @@
 #include "src/heap/base/worklist.h"
 #include "src/heap/cppgc/concurrent-marker.h"
 #include "src/heap/cppgc/globals.h"
+#include "src/heap/cppgc/heap-config.h"
 #include "src/heap/cppgc/incremental-marking-schedule.h"
 #include "src/heap/cppgc/marking-state.h"
 #include "src/heap/cppgc/marking-visitor.h"
@@ -39,26 +40,6 @@ class V8_EXPORT_PRIVATE MarkerBase {
  public:
   class IncrementalMarkingTask;

-  struct MarkingConfig {
-    enum class CollectionType : uint8_t {
-      kMinor,
-      kMajor,
-    };
-    using StackState = cppgc::Heap::StackState;
-    using MarkingType = cppgc::Heap::MarkingType;
-    enum class IsForcedGC : uint8_t {
-      kNotForced,
-      kForced,
-    };
-
-    static constexpr MarkingConfig Default() { return {}; }
-
-    const CollectionType collection_type = CollectionType::kMajor;
-    StackState stack_state = StackState::kMayContainHeapPointers;
-    MarkingType marking_type = MarkingType::kIncremental;
-    IsForcedGC is_forced_gc = IsForcedGC::kNotForced;
-  };
-
   enum class WriteBarrierType {
     kDijkstra,
     kSteele,
@@ -89,7 +70,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
   // - stops incremental/concurrent marking;
   // - flushes back any in-construction worklists if needed;
   // - Updates the MarkingConfig if the stack state has changed;
-  void EnterAtomicPause(MarkingConfig::StackState);
+  void EnterAtomicPause(StackState);

   // Makes marking progress. A `marked_bytes_limit` of 0 means that the limit
   // is determined by the internal marking scheduler.
@@ -113,7 +94,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
   // - AdvanceMarkingWithLimits()
   // - ProcessWeakness()
   // - LeaveAtomicPause()
-  void FinishMarking(MarkingConfig::StackState);
+  void FinishMarking(StackState);

   void ProcessWeakness();

@@ -134,7 +115,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
   void SetMainThreadMarkingDisabledForTesting(bool);
   void WaitForConcurrentMarkingForTesting();
   void ClearAllWorklistsForTesting();
-  bool IncrementalMarkingStepForTesting(MarkingConfig::StackState);
+  bool IncrementalMarkingStepForTesting(StackState);

   MarkingWorklists& MarkingWorklistsForTesting() { return marking_worklists_; }
   MutatorMarkingState& MutatorMarkingStateForTesting() {
@@ -157,7 +138,7 @@ class V8_EXPORT_PRIVATE MarkerBase {

   bool ProcessWorklistsWithDeadline(size_t, v8::base::TimeTicks);

-  void VisitRoots(MarkingConfig::StackState);
+  void VisitRoots(StackState);

   bool VisitCrossThreadPersistentsIfNeeded();

@@ -165,7 +146,7 @@ class V8_EXPORT_PRIVATE MarkerBase {

   void ScheduleIncrementalMarkingTask();

-  bool IncrementalMarkingStep(MarkingConfig::StackState);
+  bool IncrementalMarkingStep(StackState);

   void AdvanceMarkingOnAllocation();

@@ -36,7 +36,7 @@ void VerificationState::VerifyMarked(const void* base_object_payload) const {
 }

 MarkingVerifierBase::MarkingVerifierBase(
-    HeapBase& heap, Heap::Config::CollectionType collection_type,
+    HeapBase& heap, CollectionType collection_type,
     VerificationState& verification_state,
     std::unique_ptr<cppgc::Visitor> visitor)
     : ConservativeTracingVisitor(heap, *heap.page_backend(), *visitor.get()),
@@ -45,7 +45,7 @@ MarkingVerifierBase::MarkingVerifierBase(
       collection_type_(collection_type) {}

 void MarkingVerifierBase::Run(
-    Heap::Config::StackState stack_state, uintptr_t stack_end,
+    StackState stack_state, uintptr_t stack_end,
     v8::base::Optional<size_t> expected_marked_bytes) {
   Traverse(heap_.raw_heap());
 // Avoid verifying the stack when running with TSAN as the TSAN runtime changes
@@ -61,7 +61,7 @@ void MarkingVerifierBase::Run(
 // TODO(chromium:1325007): Investigate if Oilpan verification can be moved
 // before V8 compaction or compaction never runs with stack.
 #if !defined(THREAD_SANITIZER) && !defined(CPPGC_POINTER_COMPRESSION)
-  if (stack_state == Heap::Config::StackState::kMayContainHeapPointers) {
+  if (stack_state == StackState::kMayContainHeapPointers) {
     in_construction_objects_ = &in_construction_objects_stack_;
     heap_.stack()->IteratePointersUnsafe(this, stack_end);
     // The objects found through the unsafe iteration are only a subset of the
@@ -114,7 +114,7 @@ bool MarkingVerifierBase::VisitHeapObjectHeader(HeapObjectHeader& header) {
   DCHECK(!header.IsFree());

 #if defined(CPPGC_YOUNG_GENERATION)
-  if (collection_type_ == Heap::Config::CollectionType::kMinor) {
+  if (collection_type_ == CollectionType::kMinor) {
     auto& caged_heap = CagedHeap::Instance();
     const auto age = CagedHeapLocalData::Get().age_table.GetAge(
         caged_heap.OffsetFromAddress(header.ObjectStart()));
@@ -185,7 +185,7 @@ class VerificationVisitor final : public cppgc::Visitor {
 }  // namespace

 MarkingVerifier::MarkingVerifier(HeapBase& heap_base,
-                                 Heap::Config::CollectionType collection_type)
+                                 CollectionType collection_type)
     : MarkingVerifierBase(heap_base, collection_type, state_,
                           std::make_unique<VerificationVisitor>(state_)) {}

@@ -41,11 +41,11 @@ class V8_EXPORT_PRIVATE MarkingVerifierBase
   MarkingVerifierBase(const MarkingVerifierBase&) = delete;
   MarkingVerifierBase& operator=(const MarkingVerifierBase&) = delete;

-  void Run(Heap::Config::StackState, uintptr_t, v8::base::Optional<size_t>);
+  void Run(StackState, uintptr_t, v8::base::Optional<size_t>);

  protected:
-  MarkingVerifierBase(HeapBase&, Heap::Config::CollectionType,
-                      VerificationState&, std::unique_ptr<cppgc::Visitor>);
+  MarkingVerifierBase(HeapBase&, CollectionType, VerificationState&,
+                      std::unique_ptr<cppgc::Visitor>);

  private:
   void VisitInConstructionConservatively(HeapObjectHeader&,
@@ -63,12 +63,12 @@ class V8_EXPORT_PRIVATE MarkingVerifierBase
       &in_construction_objects_heap_;
   size_t verifier_found_marked_bytes_ = 0;
   bool verifier_found_marked_bytes_are_exact_ = true;
-  Heap::Config::CollectionType collection_type_;
+  CollectionType collection_type_;
 };

 class V8_EXPORT_PRIVATE MarkingVerifier final : public MarkingVerifierBase {
  public:
-  MarkingVerifier(HeapBase&, Heap::Config::CollectionType);
+  MarkingVerifier(HeapBase&, CollectionType);
   ~MarkingVerifier() final = default;

  private:
@@ -148,9 +148,9 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
   void* result = TryAllocateLargeObject(page_backend_, large_space,
                                         stats_collector_, size, gcinfo);
   if (!result) {
-    auto config = GarbageCollector::Config::ConservativeAtomicConfig();
+    auto config = GCConfig::ConservativeAtomicConfig();
     config.free_memory_handling =
-        GarbageCollector::Config::FreeMemoryHandling::kDiscardWherePossible;
+        GCConfig::FreeMemoryHandling::kDiscardWherePossible;
     garbage_collector_.CollectGarbage(config);
     result = TryAllocateLargeObject(page_backend_, large_space,
                                     stats_collector_, size, gcinfo);
@@ -170,9 +170,9 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
   }

   if (!TryRefillLinearAllocationBuffer(space, request_size)) {
-    auto config = GarbageCollector::Config::ConservativeAtomicConfig();
+    auto config = GCConfig::ConservativeAtomicConfig();
     config.free_memory_handling =
-        GarbageCollector::Config::FreeMemoryHandling::kDiscardWherePossible;
+        GCConfig::FreeMemoryHandling::kDiscardWherePossible;
     garbage_collector_.CollectGarbage(config);
     if (!TryRefillLinearAllocationBuffer(space, request_size)) {
       oom_handler_("Oilpan: Normal allocation.");
@@ -171,8 +171,7 @@ int64_t SumPhases(const MetricRecorder::GCCycle::Phases& phases) {
 }

 MetricRecorder::GCCycle GetCycleEventForMetricRecorder(
-    StatsCollector::CollectionType type,
-    StatsCollector::MarkingType marking_type,
+    CollectionType type, StatsCollector::MarkingType marking_type,
     StatsCollector::SweepingType sweeping_type, int64_t atomic_mark_us,
     int64_t atomic_weak_us, int64_t atomic_compact_us, int64_t atomic_sweep_us,
     int64_t incremental_mark_us, int64_t incremental_sweep_us,
@@ -181,7 +180,7 @@ MetricRecorder::GCCycle GetCycleEventForMetricRecorder(
     int64_t objects_freed_bytes, int64_t memory_before_bytes,
     int64_t memory_after_bytes, int64_t memory_freed_bytes) {
   MetricRecorder::GCCycle event;
-  event.type = (type == StatsCollector::CollectionType::kMajor)
+  event.type = (type == CollectionType::kMajor)
                    ? MetricRecorder::GCCycle::Type::kMajor
                    : MetricRecorder::GCCycle::Type::kMinor;
   // MainThread.Incremental:
@@ -68,12 +68,11 @@ namespace internal {

 // Sink for various time and memory statistics.
 class V8_EXPORT_PRIVATE StatsCollector final {
-  using IsForcedGC = GarbageCollector::Config::IsForcedGC;
+  using IsForcedGC = GCConfig::IsForcedGC;

  public:
-  using CollectionType = GarbageCollector::Config::CollectionType;
-  using MarkingType = GarbageCollector::Config::MarkingType;
-  using SweepingType = GarbageCollector::Config::SweepingType;
+  using MarkingType = GCConfig::MarkingType;
+  using SweepingType = GCConfig::SweepingType;

 #if defined(CPPGC_DECLARE_ENUM)
   static_assert(false, "CPPGC_DECLARE_ENUM macro is already defined");
@@ -45,9 +45,8 @@ CppHeap::GarbageCollectionFlags ConvertTraceFlags(
 void LocalEmbedderHeapTracer::PrepareForTrace(
     EmbedderHeapTracer::TraceFlags flags) {
   if (cpp_heap_)
-    cpp_heap()->InitializeTracing(
-        cppgc::internal::GarbageCollector::Config::CollectionType::kMajor,
-        ConvertTraceFlags(flags));
+    cpp_heap()->InitializeTracing(cppgc::internal::CollectionType::kMajor,
+                                  ConvertTraceFlags(flags));
 }

 void LocalEmbedderHeapTracer::TracePrologue(
@@ -74,9 +74,8 @@ class CompactorTest : public testing::TestWithPlatform {

   void StartCompaction() {
     compactor().EnableForNextGCForTesting();
-    compactor().InitializeIfShouldCompact(
-        GarbageCollector::Config::MarkingType::kIncremental,
-        GarbageCollector::Config::StackState::kNoHeapPointers);
+    compactor().InitializeIfShouldCompact(GCConfig::MarkingType::kIncremental,
+                                          StackState::kNoHeapPointers);
     EXPECT_TRUE(compactor().IsEnabledForTesting());
   }

@@ -86,12 +85,11 @@ class CompactorTest : public testing::TestWithPlatform {
     CompactableGCed::g_destructor_callcount = 0u;
     StartCompaction();
     heap()->StartIncrementalGarbageCollection(
-        GarbageCollector::Config::PreciseIncrementalConfig());
+        GCConfig::PreciseIncrementalConfig());
   }

   void EndGC() {
-    heap()->marker()->FinishMarking(
-        GarbageCollector::Config::StackState::kNoHeapPointers);
+    heap()->marker()->FinishMarking(StackState::kNoHeapPointers);
     heap()->GetMarkerRefForTesting().reset();
     FinishCompaction();
     // Sweeping also verifies the object start bitmap.
@@ -125,13 +123,12 @@ namespace internal {
 TEST_F(CompactorTest, NothingToCompact) {
   StartCompaction();
   heap()->stats_collector()->NotifyMarkingStarted(
-      GarbageCollector::Config::CollectionType::kMajor,
-      GarbageCollector::Config::MarkingType::kAtomic,
-      GarbageCollector::Config::IsForcedGC::kNotForced);
+      CollectionType::kMajor, GCConfig::MarkingType::kAtomic,
+      GCConfig::IsForcedGC::kNotForced);
   heap()->stats_collector()->NotifyMarkingCompleted(0);
   FinishCompaction();
   heap()->stats_collector()->NotifySweepingCompleted(
-      GarbageCollector::Config::SweepingType::kAtomic);
+      GCConfig::SweepingType::kAtomic);
 }

 TEST_F(CompactorTest, NonEmptySpaceAllLive) {
@@ -27,20 +27,15 @@ class ConcurrentMarkingTest : public testing::TestWithHeap {
   static constexpr int kNumStep = 10;
 #endif  // defined(THREAD_SANITIZER)

-  using Config = Heap::Config;
-  static constexpr Config ConcurrentPreciseConfig = {
-      Config::CollectionType::kMajor, Config::StackState::kNoHeapPointers,
-      Config::MarkingType::kIncrementalAndConcurrent,
-      Config::SweepingType::kIncrementalAndConcurrent};
-
   void StartConcurrentGC() {
     Heap* heap = Heap::From(GetHeap());
     heap->DisableHeapGrowingForTesting();
-    heap->StartIncrementalGarbageCollection(ConcurrentPreciseConfig);
+    heap->StartIncrementalGarbageCollection(
+        GCConfig::PreciseConcurrentConfig());
     heap->marker()->SetMainThreadMarkingDisabledForTesting(true);
   }

-  bool SingleStep(Config::StackState stack_state) {
+  bool SingleStep(StackState stack_state) {
     MarkerBase* marker = Heap::From(GetHeap())->marker();
     DCHECK(marker);
     return marker->IncrementalMarkingStepForTesting(stack_state);
@@ -50,14 +45,10 @@ class ConcurrentMarkingTest : public testing::TestWithHeap {
     Heap* heap = Heap::From(GetHeap());
     heap->marker()->SetMainThreadMarkingDisabledForTesting(false);
     heap->FinalizeIncrementalGarbageCollectionIfRunning(
-        ConcurrentPreciseConfig);
+        GCConfig::PreciseConcurrentConfig());
   }
 };

-// static
-constexpr ConcurrentMarkingTest::Config
-    ConcurrentMarkingTest::ConcurrentPreciseConfig;
-
 template <typename T>
 struct GCedHolder : public GarbageCollected<GCedHolder<T>> {
   void Trace(cppgc::Visitor* visitor) const { visitor->Trace(object); }
@@ -110,7 +101,7 @@ TEST_F(ConcurrentMarkingTest, MarkingObjects) {
       last_object = &(*last_object)->child_;
     }
     // Use SingleStep to re-post concurrent jobs.
-    SingleStep(Config::StackState::kNoHeapPointers);
+    SingleStep(StackState::kNoHeapPointers);
   }
   FinishGC();
 }
@@ -129,7 +120,7 @@ TEST_F(ConcurrentMarkingTest, MarkingInConstructionObjects) {
       });
     }
     // Use SingleStep to re-post concurrent jobs.
-    SingleStep(Config::StackState::kNoHeapPointers);
+    SingleStep(StackState::kNoHeapPointers);
   }
   FinishGC();
 }
@@ -145,7 +136,7 @@ TEST_F(ConcurrentMarkingTest, MarkingMixinObjects) {
       last_object = &(*last_object)->child_;
     }
     // Use SingleStep to re-post concurrent jobs.
-    SingleStep(Config::StackState::kNoHeapPointers);
+    SingleStep(StackState::kNoHeapPointers);
   }
   FinishGC();
 }
@@ -73,9 +73,8 @@ class ConcurrentSweeperTest : public testing::TestWithHeap {
     // Pretend do finish marking as StatsCollector verifies that Notify*
     // methods are called in the right order.
     heap->stats_collector()->NotifyMarkingStarted(
-        GarbageCollector::Config::CollectionType::kMajor,
-        GarbageCollector::Config::MarkingType::kAtomic,
-        GarbageCollector::Config::IsForcedGC::kNotForced);
+        CollectionType::kMajor, GCConfig::MarkingType::kAtomic,
+        GCConfig::IsForcedGC::kNotForced);
     heap->stats_collector()->NotifyMarkingCompleted(0);
     Sweeper& sweeper = heap->sweeper();
     const SweepingConfig sweeping_config{
@@ -49,11 +49,8 @@ class EphemeronHolderTraceEphemeron
};

class EphemeronPairTest : public testing::TestWithHeap {
using MarkingConfig = Marker::MarkingConfig;

static constexpr Marker::MarkingConfig IncrementalPreciseMarkingConfig = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kNoHeapPointers,
static constexpr MarkingConfig IncrementalPreciseMarkingConfig = {
CollectionType::kMajor, StackState::kNoHeapPointers,
MarkingConfig::MarkingType::kIncremental};

public:
@@ -63,11 +60,11 @@ class EphemeronPairTest : public testing::TestWithHeap {
}

void FinishMarking() {
marker_->FinishMarking(MarkingConfig::StackState::kNoHeapPointers);
marker_->FinishMarking(StackState::kNoHeapPointers);
// Pretend do finish sweeping as StatsCollector verifies that Notify*
// methods are called in the right order.
Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kIncremental);
GCConfig::SweepingType::kIncremental);
}

void InitializeMarker(HeapBase& heap, cppgc::Platform* platform) {
@@ -81,15 +78,14 @@ class EphemeronPairTest : public testing::TestWithHeap {
private:
bool SingleStep() {
return marker_->IncrementalMarkingStepForTesting(
MarkingConfig::StackState::kNoHeapPointers);
StackState::kNoHeapPointers);
}

std::unique_ptr<Marker> marker_;
};

// static
constexpr Marker::MarkingConfig
EphemeronPairTest::IncrementalPreciseMarkingConfig;
constexpr MarkingConfig EphemeronPairTest::IncrementalPreciseMarkingConfig;

} // namespace

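A side note on the `// static` definition kept above: before C++17, a static constexpr data member that is odr-used needs exactly this kind of out-of-line definition; since C++17 the in-class declaration is implicitly inline and the extra line is redundant but still legal. A self-contained illustration, not taken from the commit:

struct Config {
  int value;
};

struct Test {
  // In-class declaration with initializer; implicitly inline since C++17.
  static constexpr Config kConfig = {42};
};

// Required under C++14 whenever kConfig is odr-used (e.g. bound to a
// const reference); redundant, though harmless, under C++17.
constexpr Config Test::kConfig;

int main() { return Test::kConfig.value == 42 ? 0 : 1; }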
@@ -18,9 +18,8 @@ namespace {

class MockGarbageCollector : public GarbageCollector {
public:
MOCK_METHOD(void, CollectGarbage, (GarbageCollector::Config), (override));
MOCK_METHOD(void, StartIncrementalGarbageCollection,
(GarbageCollector::Config), (override));
MOCK_METHOD(void, CollectGarbage, (GCConfig), (override));
MOCK_METHOD(void, StartIncrementalGarbageCollection, (GCConfig), (override));
MOCK_METHOD(size_t, epoch, (), (const, override));
MOCK_METHOD(const EmbedderStackState*, override_stack_state, (),
(const, override));
@@ -73,9 +72,8 @@ TEST(GCInvokerTest, PrecideGCIsInvokedSynchronously) {
GCInvoker invoker(&gc, &platform,
cppgc::Heap::StackSupport::kNoConservativeStackScan);
EXPECT_CALL(gc, CollectGarbage(::testing::Field(
&GarbageCollector::Config::stack_state,
GarbageCollector::Config::StackState::kNoHeapPointers)));
invoker.CollectGarbage(GarbageCollector::Config::PreciseAtomicConfig());
&GCConfig::stack_state, StackState::kNoHeapPointers)));
invoker.CollectGarbage(GCConfig::PreciseAtomicConfig());
}

TEST(GCInvokerTest, ConservativeGCIsInvokedSynchronouslyWhenSupported) {
@@ -85,9 +83,8 @@ TEST(GCInvokerTest, ConservativeGCIsInvokedSynchronouslyWhenSupported) {
cppgc::Heap::StackSupport::kSupportsConservativeStackScan);
EXPECT_CALL(
gc, CollectGarbage(::testing::Field(
&GarbageCollector::Config::stack_state,
GarbageCollector::Config::StackState::kMayContainHeapPointers)));
invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig());
&GCConfig::stack_state, StackState::kMayContainHeapPointers)));
invoker.CollectGarbage(GCConfig::ConservativeAtomicConfig());
}

TEST(GCInvokerTest, ConservativeGCIsScheduledAsPreciseGCViaPlatform) {
@@ -100,7 +97,7 @@ TEST(GCInvokerTest, ConservativeGCIsScheduledAsPreciseGCViaPlatform) {
EXPECT_CALL(gc, epoch).WillOnce(::testing::Return(0));
EXPECT_CALL(*static_cast<MockTaskRunner*>(runner.get()),
PostNonNestableTask(::testing::_));
invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig());
invoker.CollectGarbage(GCConfig::ConservativeAtomicConfig());
}

TEST(GCInvokerTest, ConservativeGCIsInvokedAsPreciseGCViaPlatform) {
@@ -110,7 +107,7 @@ TEST(GCInvokerTest, ConservativeGCIsInvokedAsPreciseGCViaPlatform) {
cppgc::Heap::StackSupport::kNoConservativeStackScan);
EXPECT_CALL(gc, epoch).WillRepeatedly(::testing::Return(0));
EXPECT_CALL(gc, CollectGarbage);
invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig());
invoker.CollectGarbage(GCConfig::ConservativeAtomicConfig());
platform.RunAllForegroundTasks();
}

@@ -125,20 +122,18 @@ TEST(GCInvokerTest, IncrementalGCIsStarted) {
cppgc::Heap::StackSupport::kSupportsConservativeStackScan);
EXPECT_CALL(
gc, StartIncrementalGarbageCollection(::testing::Field(
&GarbageCollector::Config::stack_state,
GarbageCollector::Config::StackState::kMayContainHeapPointers)));
&GCConfig::stack_state, StackState::kMayContainHeapPointers)));
invoker_with_support.StartIncrementalGarbageCollection(
GarbageCollector::Config::ConservativeIncrementalConfig());
GCConfig::ConservativeIncrementalConfig());
// Conservative stack scanning *not* supported.
GCInvoker invoker_without_support(
&gc, &platform, cppgc::Heap::StackSupport::kNoConservativeStackScan);
EXPECT_CALL(
gc, StartIncrementalGarbageCollection(::testing::Field(
&GarbageCollector::Config::stack_state,
GarbageCollector::Config::StackState::kMayContainHeapPointers)))
EXPECT_CALL(gc,
StartIncrementalGarbageCollection(::testing::Field(
&GCConfig::stack_state, StackState::kMayContainHeapPointers)))
.Times(0);
invoker_without_support.StartIncrementalGarbageCollection(
GarbageCollector::Config::ConservativeIncrementalConfig());
GCConfig::ConservativeIncrementalConfig());
}

} // namespace internal
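The GCInvoker expectations above all match on a single member of the new aggregate via gMock's ::testing::Field. A reduced, self-contained version of the pattern; the GCConfig here is a stand-in for the real type:

#include "gmock/gmock.h"
#include "gtest/gtest.h"

namespace {

enum class StackState { kNoHeapPointers, kMayContainHeapPointers };

struct GCConfig {  // Stand-in for cppgc::internal::GCConfig.
  StackState stack_state = StackState::kMayContainHeapPointers;
};

class Collector {
 public:
  virtual ~Collector() = default;
  virtual void CollectGarbage(GCConfig) = 0;
};

class MockCollector : public Collector {
 public:
  MOCK_METHOD(void, CollectGarbage, (GCConfig), (override));
};

TEST(FieldMatcherDemo, MatchesOnStackState) {
  MockCollector gc;
  // Matches any GCConfig whose stack_state equals kNoHeapPointers.
  EXPECT_CALL(gc, CollectGarbage(::testing::Field(
                      &GCConfig::stack_state, StackState::kNoHeapPointers)));
  gc.CollectGarbage(GCConfig{StackState::kNoHeapPointers});
}

}  // namespace

With the flattened type, &GCConfig::stack_state is a plain pointer-to-member, which is what lets the matcher expressions shrink to a single line in the hunks above.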
@@ -22,19 +22,16 @@ class FakeGarbageCollector : public GarbageCollector {

void SetLiveBytes(size_t live_bytes) { live_bytes_ = live_bytes; }

void CollectGarbage(GarbageCollector::Config config) override {
stats_collector_->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced);
void CollectGarbage(GCConfig config) override {
stats_collector_->NotifyMarkingStarted(CollectionType::kMajor,
GCConfig::MarkingType::kAtomic,
GCConfig::IsForcedGC::kNotForced);
stats_collector_->NotifyMarkingCompleted(live_bytes_);
stats_collector_->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
stats_collector_->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
callcount_++;
}

void StartIncrementalGarbageCollection(
GarbageCollector::Config config) override {
void StartIncrementalGarbageCollection(GCConfig config) override {
UNREACHABLE();
}

@@ -51,9 +48,8 @@ class FakeGarbageCollector : public GarbageCollector {

class MockGarbageCollector : public GarbageCollector {
public:
MOCK_METHOD(void, CollectGarbage, (GarbageCollector::Config), (override));
MOCK_METHOD(void, StartIncrementalGarbageCollection,
(GarbageCollector::Config), (override));
MOCK_METHOD(void, CollectGarbage, (GCConfig), (override));
MOCK_METHOD(void, StartIncrementalGarbageCollection, (GCConfig), (override));
MOCK_METHOD(size_t, epoch, (), (const, override));
MOCK_METHOD(const EmbedderStackState*, override_stack_state, (),
(const, override));
@@ -79,8 +75,7 @@ TEST(HeapGrowingTest, ConservativeGCInvoked) {
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_CALL(
gc, CollectGarbage(::testing::Field(
&GarbageCollector::Config::stack_state,
GarbageCollector::Config::StackState::kMayContainHeapPointers)));
&GCConfig::stack_state, StackState::kMayContainHeapPointers)));
FakeAllocate(&stats_collector, 100 * kMB);
}

@@ -97,8 +92,7 @@ TEST(HeapGrowingTest, InitialHeapSize) {
FakeAllocate(&stats_collector, kObjectSize - 1);
EXPECT_CALL(
gc, CollectGarbage(::testing::Field(
&GarbageCollector::Config::stack_state,
GarbageCollector::Config::StackState::kMayContainHeapPointers)));
&GCConfig::stack_state, StackState::kMayContainHeapPointers)));
FakeAllocate(&stats_collector, kObjectSize);
}

@@ -146,9 +140,8 @@ TEST(HeapGrowingTest, IncrementalGCStarted) {
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_CALL(
gc, CollectGarbage(::testing::Field(
&GarbageCollector::Config::stack_state,
GarbageCollector::Config::StackState::kMayContainHeapPointers)))
gc, CollectGarbage(::testing::Field(&GCConfig::stack_state,
StackState::kMayContainHeapPointers)))
.Times(0);
EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_));
// Allocate 1 byte less the limit for atomic gc to trigger incremental gc.
@@ -163,9 +156,8 @@ TEST(HeapGrowingTest, IncrementalGCFinalized) {
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_CALL(
gc, CollectGarbage(::testing::Field(
&GarbageCollector::Config::stack_state,
GarbageCollector::Config::StackState::kMayContainHeapPointers)))
gc, CollectGarbage(::testing::Field(&GCConfig::stack_state,
StackState::kMayContainHeapPointers)))
.Times(0);
EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_));
// Allocate 1 byte less the limit for atomic gc to trigger incremental gc.
@@ -174,8 +166,7 @@ TEST(HeapGrowingTest, IncrementalGCFinalized) {
::testing::Mock::VerifyAndClearExpectations(&gc);
EXPECT_CALL(
gc, CollectGarbage(::testing::Field(
&GarbageCollector::Config::stack_state,
GarbageCollector::Config::StackState::kMayContainHeapPointers)));
&GCConfig::stack_state, StackState::kMayContainHeapPointers)));
EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_)).Times(0);
// Allocate the rest needed to trigger atomic gc ().
FakeAllocate(&stats_collector, StatsCollector::kAllocationThresholdBytes);
@@ -27,11 +27,11 @@ class GCHeapTest : public testing::TestWithHeap {
public:
void ConservativeGC() {
internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::ConservativeAtomicConfig());
GCConfig::ConservativeAtomicConfig());
}
void PreciseGC() {
internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::PreciseAtomicConfig());
GCConfig::PreciseAtomicConfig());
}
};

@@ -74,7 +74,7 @@ namespace {
const void* ConservativeGCReturningObject(cppgc::Heap* heap,
const void* object) {
internal::Heap::From(heap)->CollectGarbage(
Heap::Config::ConservativeAtomicConfig());
GCConfig::ConservativeAtomicConfig());
return object;
}

@@ -113,7 +113,7 @@ class LargeObjectGCDuringCtor final
: child_(MakeGarbageCollected<GCedWithFinalizer>(
heap->GetAllocationHandle())) {
internal::Heap::From(heap)->CollectGarbage(
Heap::Config::ConservativeAtomicConfig());
GCConfig::ConservativeAtomicConfig());
}

void Trace(Visitor* visitor) const { visitor->Trace(child_); }
@@ -235,8 +235,8 @@ TEST_F(GCHeapTest, IsGarbageCollectionAllowed) {
}

TEST_F(GCHeapTest, IsMarking) {
GarbageCollector::Config config = GarbageCollector::Config::
PreciseIncrementalMarkingConcurrentSweepingConfig();
GCConfig config =
GCConfig::PreciseIncrementalMarkingConcurrentSweepingConfig();
auto* heap = Heap::From(GetHeap());
EXPECT_FALSE(subtle::HeapState::IsMarking(*heap));
heap->StartIncrementalGarbageCollection(config);
@@ -248,8 +248,8 @@ TEST_F(GCHeapTest, IsMarking) {
}

TEST_F(GCHeapTest, IsSweeping) {
GarbageCollector::Config config = GarbageCollector::Config::
PreciseIncrementalMarkingConcurrentSweepingConfig();
GCConfig config =
GCConfig::PreciseIncrementalMarkingConcurrentSweepingConfig();
auto* heap = Heap::From(GetHeap());
EXPECT_FALSE(subtle::HeapState::IsSweeping(*heap));
heap->StartIncrementalGarbageCollection(config);
@@ -280,8 +280,8 @@ class GCedExpectSweepingOnOwningThread final
} // namespace

TEST_F(GCHeapTest, IsSweepingOnOwningThread) {
GarbageCollector::Config config = GarbageCollector::Config::
PreciseIncrementalMarkingConcurrentSweepingConfig();
GCConfig config =
GCConfig::PreciseIncrementalMarkingConcurrentSweepingConfig();
auto* heap = Heap::From(GetHeap());
MakeGarbageCollected<GCedExpectSweepingOnOwningThread>(
heap->GetAllocationHandle(), *heap);
@@ -316,8 +316,7 @@ class ExpectAtomicPause final : public GarbageCollected<ExpectAtomicPause> {
} // namespace

TEST_F(GCHeapTest, IsInAtomicPause) {
GarbageCollector::Config config =
GarbageCollector::Config::PreciseIncrementalConfig();
GCConfig config = GCConfig::PreciseIncrementalConfig();
auto* heap = Heap::From(GetHeap());
MakeGarbageCollected<ExpectAtomicPause>(heap->object_allocator(), *heap);
EXPECT_FALSE(subtle::HeapState::IsInAtomicPause(*heap));
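The heap tests now reach for named presets such as GCConfig::PreciseAtomicConfig() and GCConfig::ConservativeAtomicConfig(). A sketch of that named-preset pattern with an assumed member layout (the helper names mirror the diff; the struct itself is illustrative, not the real header):

enum class CollectionType { kMinor, kMajor };
enum class StackState { kNoHeapPointers, kMayContainHeapPointers };
enum class MarkingType { kAtomic, kIncremental, kIncrementalAndConcurrent };
enum class SweepingType { kAtomic, kIncremental, kIncrementalAndConcurrent };

struct GCConfig {
  // "Precise" ignores the stack; "conservative" scans it for pointers.
  static constexpr GCConfig PreciseAtomicConfig() {
    return {CollectionType::kMajor, StackState::kNoHeapPointers,
            MarkingType::kAtomic, SweepingType::kAtomic};
  }
  static constexpr GCConfig ConservativeAtomicConfig() {
    return {CollectionType::kMajor, StackState::kMayContainHeapPointers,
            MarkingType::kAtomic, SweepingType::kAtomic};
  }

  CollectionType collection_type;
  StackState stack_state;
  MarkingType marking_type;
  SweepingType sweeping_type;
};

int main() {
  constexpr GCConfig precise = GCConfig::PreciseAtomicConfig();
  constexpr GCConfig conservative = GCConfig::ConservativeAtomicConfig();
  static_assert(precise.stack_state == StackState::kNoHeapPointers,
                "precise GCs must not scan the stack");
  static_assert(conservative.stack_state == StackState::kMayContainHeapPointers,
                "conservative GCs treat the stack as potential pointers");
  return 0;
}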
@@ -25,18 +25,15 @@ namespace internal {
namespace {
class MarkerTest : public testing::TestWithHeap {
public:
using MarkingConfig = Marker::MarkingConfig;

void DoMarking(MarkingConfig::StackState stack_state) {
const MarkingConfig config = {MarkingConfig::CollectionType::kMajor,
stack_state};
void DoMarking(StackState stack_state) {
const MarkingConfig config = {CollectionType::kMajor, stack_state};
auto* heap = Heap::From(GetHeap());
InitializeMarker(*heap, GetPlatformHandle().get(), config);
marker_->FinishMarking(stack_state);
// Pretend do finish sweeping as StatsCollector verifies that Notify*
// methods are called in the right order.
heap->stats_collector()->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
GCConfig::SweepingType::kAtomic);
}

void InitializeMarker(HeapBase& heap, cppgc::Platform* platform,
@@ -80,7 +77,7 @@ TEST_F(MarkerTest, PersistentIsMarked) {
Persistent<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
EXPECT_FALSE(header.IsMarked());
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(header.IsMarked());
}

@@ -89,7 +86,7 @@ TEST_F(MarkerTest, ReachableMemberIsMarked) {
parent->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
HeapObjectHeader& header = HeapObjectHeader::FromObject(parent->child());
EXPECT_FALSE(header.IsMarked());
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(header.IsMarked());
}

@@ -97,14 +94,14 @@ TEST_F(MarkerTest, UnreachableMemberIsNotMarked) {
Member<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
EXPECT_FALSE(header.IsMarked());
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
DoMarking(StackState::kNoHeapPointers);
EXPECT_FALSE(header.IsMarked());
}

TEST_F(MarkerTest, ObjectReachableFromStackIsMarked) {
GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle());
EXPECT_FALSE(HeapObjectHeader::FromObject(object).IsMarked());
DoMarking(MarkingConfig::StackState::kMayContainHeapPointers);
DoMarking(StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromObject(object).IsMarked());
access(object);
}
@@ -113,7 +110,7 @@ TEST_F(MarkerTest, ObjectReachableOnlyFromStackIsNotMarkedIfStackIsEmpty) {
GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
EXPECT_FALSE(header.IsMarked());
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
DoMarking(StackState::kNoHeapPointers);
EXPECT_FALSE(header.IsMarked());
access(object);
}
@@ -123,14 +120,14 @@ TEST_F(MarkerTest, WeakReferenceToUnreachableObjectIsCleared) {
WeakPersistent<GCed> weak_object =
MakeGarbageCollected<GCed>(GetAllocationHandle());
EXPECT_TRUE(weak_object);
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
DoMarking(StackState::kNoHeapPointers);
EXPECT_FALSE(weak_object);
}
{
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
parent->SetWeakChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
EXPECT_TRUE(parent->weak_child());
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
DoMarking(StackState::kNoHeapPointers);
EXPECT_FALSE(parent->weak_child());
}
}
@@ -141,7 +138,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
Persistent<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle());
WeakPersistent<GCed> weak_object(object);
EXPECT_TRUE(weak_object);
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(weak_object);
}
{
@@ -149,7 +146,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
parent->SetWeakChild(object);
EXPECT_TRUE(parent->weak_child());
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(parent->weak_child());
}
// Reachable from Member
@@ -159,7 +156,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
MakeGarbageCollected<GCed>(GetAllocationHandle()));
parent->SetChild(weak_object);
EXPECT_TRUE(weak_object);
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(weak_object);
}
{
@@ -167,7 +164,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
parent->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
parent->SetWeakChild(parent->child());
EXPECT_TRUE(parent->weak_child());
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(parent->weak_child());
}
// Reachable from stack
@@ -175,7 +172,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle());
WeakPersistent<GCed> weak_object(object);
EXPECT_TRUE(weak_object);
DoMarking(MarkingConfig::StackState::kMayContainHeapPointers);
DoMarking(StackState::kMayContainHeapPointers);
EXPECT_TRUE(weak_object);
access(object);
}
@@ -184,7 +181,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
parent->SetWeakChild(object);
EXPECT_TRUE(parent->weak_child());
DoMarking(MarkingConfig::StackState::kMayContainHeapPointers);
DoMarking(StackState::kMayContainHeapPointers);
EXPECT_TRUE(parent->weak_child());
access(object);
}
@@ -199,7 +196,7 @@ TEST_F(MarkerTest, DeepHierarchyIsMarked) {
parent->SetWeakChild(parent->child());
parent = parent->child();
}
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromObject(root).IsMarked());
parent = root;
for (int i = 0; i < kHierarchyDepth; ++i) {
@@ -213,7 +210,7 @@ TEST_F(MarkerTest, NestedObjectsOnStackAreMarked) {
GCed* root = MakeGarbageCollected<GCed>(GetAllocationHandle());
root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
root->child()->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
DoMarking(MarkingConfig::StackState::kMayContainHeapPointers);
DoMarking(StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromObject(root).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(root->child()).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(root->child()->child()).IsMarked());
@@ -244,9 +241,8 @@ class GCedWithCallback : public GarbageCollected<GCedWithCallback> {
} // namespace

TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
static const Marker::MarkingConfig config = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kMayContainHeapPointers};
static const MarkingConfig config = {CollectionType::kMajor,
StackState::kMayContainHeapPointers};
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
GCedWithCallback* object = MakeGarbageCollected<GCedWithCallback>(
GetAllocationHandle(), [marker = marker()](GCedWithCallback* obj) {
@@ -254,22 +250,20 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
marker->Visitor().Trace(member);
});
EXPECT_FALSE(HeapObjectHeader::FromObject(object).IsMarked());
marker()->FinishMarking(MarkingConfig::StackState::kMayContainHeapPointers);
marker()->FinishMarking(StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromObject(object).IsMarked());
}

TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) {
static const Marker::MarkingConfig config = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kMayContainHeapPointers};
static const MarkingConfig config = {CollectionType::kMajor,
StackState::kMayContainHeapPointers};
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
MakeGarbageCollected<GCedWithCallback>(
GetAllocationHandle(), [marker = marker()](GCedWithCallback* obj) {
Member<GCedWithCallback> member(obj);
marker->Visitor().Trace(member);
EXPECT_FALSE(HeapObjectHeader::FromObject(obj).IsMarked());
marker->FinishMarking(
MarkingConfig::StackState::kMayContainHeapPointers);
marker->FinishMarking(StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromObject(obj).IsMarked());
});
}
@@ -318,36 +312,34 @@ V8_NOINLINE void RegisterInConstructionObject(

TEST_F(MarkerTest,
InConstructionObjectIsEventuallyMarkedDifferentNonEmptyStack) {
static const Marker::MarkingConfig config = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kMayContainHeapPointers};
static const MarkingConfig config = {CollectionType::kMajor,
StackState::kMayContainHeapPointers};
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);

GCObliviousObjectStorage storage;
RegisterInConstructionObject(GetAllocationHandle(), marker()->Visitor(),
storage);
EXPECT_FALSE(HeapObjectHeader::FromObject(storage.object()).IsMarked());
marker()->FinishMarking(MarkingConfig::StackState::kMayContainHeapPointers);
marker()->FinishMarking(StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromObject(storage.object()).IsMarked());
}

TEST_F(MarkerTest, SentinelNotClearedOnWeakPersistentHandling) {
static const Marker::MarkingConfig config = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kNoHeapPointers,
static const MarkingConfig config = {
CollectionType::kMajor, StackState::kNoHeapPointers,
MarkingConfig::MarkingType::kIncremental};
Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
auto* tmp = MakeGarbageCollected<GCed>(GetAllocationHandle());
root->SetWeakChild(tmp);
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
while (!marker()->IncrementalMarkingStepForTesting(
MarkingConfig::StackState::kNoHeapPointers)) {
StackState::kNoHeapPointers)) {
}
// {root} object must be marked at this point because we do not allow
// encountering kSentinelPointer in WeakMember on regular Trace() calls.
ASSERT_TRUE(HeapObjectHeader::FromObject(root.Get()).IsMarked());
root->SetWeakChild(kSentinelPointer);
marker()->FinishMarking(MarkingConfig::StackState::kNoHeapPointers);
marker()->FinishMarking(StackState::kNoHeapPointers);
EXPECT_EQ(kSentinelPointer, root->weak_child());
}

@@ -383,15 +375,14 @@ class ObjectWithEphemeronPair final
} // namespace

TEST_F(MarkerTest, MarkerProcessesAllEphemeronPairs) {
static const Marker::MarkingConfig config = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kNoHeapPointers,
MarkingConfig::MarkingType::kAtomic};
static const MarkingConfig config = {CollectionType::kMajor,
StackState::kNoHeapPointers,
MarkingConfig::MarkingType::kAtomic};
Persistent<ObjectWithEphemeronPair> obj =
MakeGarbageCollected<ObjectWithEphemeronPair>(GetAllocationHandle(),
GetAllocationHandle());
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
marker()->FinishMarking(MarkingConfig::StackState::kNoHeapPointers);
marker()->FinishMarking(StackState::kNoHeapPointers);
ResetMarker();
}

@@ -399,26 +390,22 @@ TEST_F(MarkerTest, MarkerProcessesAllEphemeronPairs) {

class IncrementalMarkingTest : public testing::TestWithHeap {
public:
using MarkingConfig = Marker::MarkingConfig;

static constexpr MarkingConfig IncrementalPreciseMarkingConfig = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kNoHeapPointers,
CollectionType::kMajor, StackState::kNoHeapPointers,
MarkingConfig::MarkingType::kIncremental};

void FinishSteps(MarkingConfig::StackState stack_state) {
void FinishSteps(StackState stack_state) {
while (!SingleStep(stack_state)) {
}
}

void FinishMarking() {
GetMarkerRef()->FinishMarking(
MarkingConfig::StackState::kMayContainHeapPointers);
GetMarkerRef()->FinishMarking(StackState::kMayContainHeapPointers);
// Pretend do finish sweeping as StatsCollector verifies that Notify*
// methods are called in the right order.
GetMarkerRef().reset();
Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kIncremental);
GCConfig::SweepingType::kIncremental);
}

void InitializeMarker(HeapBase& heap, cppgc::Platform* platform,
@@ -430,13 +417,12 @@ class IncrementalMarkingTest : public testing::TestWithHeap {
MarkerBase* marker() const { return Heap::From(GetHeap())->marker(); }

private:
bool SingleStep(MarkingConfig::StackState stack_state) {
bool SingleStep(StackState stack_state) {
return GetMarkerRef()->IncrementalMarkingStepForTesting(stack_state);
}
};

constexpr IncrementalMarkingTest::MarkingConfig
IncrementalMarkingTest::IncrementalPreciseMarkingConfig;
constexpr MarkingConfig IncrementalMarkingTest::IncrementalPreciseMarkingConfig;

TEST_F(IncrementalMarkingTest, RootIsMarkedAfterMarkingStarted) {
Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
@@ -454,7 +440,7 @@ TEST_F(IncrementalMarkingTest, MemberIsMarkedAfterMarkingSteps) {
EXPECT_FALSE(header.IsMarked());
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
IncrementalPreciseMarkingConfig);
FinishSteps(MarkingConfig::StackState::kNoHeapPointers);
FinishSteps(StackState::kNoHeapPointers);
EXPECT_TRUE(header.IsMarked());
FinishMarking();
}
@@ -465,7 +451,7 @@ TEST_F(IncrementalMarkingTest,
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
IncrementalPreciseMarkingConfig);
root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
FinishSteps(MarkingConfig::StackState::kNoHeapPointers);
FinishSteps(StackState::kNoHeapPointers);
HeapObjectHeader& header = HeapObjectHeader::FromObject(root->child());
EXPECT_TRUE(header.IsMarked());
FinishMarking();
@@ -491,10 +477,10 @@ TEST_F(IncrementalMarkingTest, IncrementalStepDuringAllocation) {
header = &HeapObjectHeader::FromObject(obj);
holder->member_ = obj;
EXPECT_FALSE(header->IsMarked());
FinishSteps(MarkingConfig::StackState::kMayContainHeapPointers);
FinishSteps(StackState::kMayContainHeapPointers);
EXPECT_FALSE(header->IsMarked());
});
FinishSteps(MarkingConfig::StackState::kNoHeapPointers);
FinishSteps(StackState::kNoHeapPointers);
EXPECT_TRUE(header->IsMarked());
FinishMarking();
}
@@ -502,7 +488,7 @@ TEST_F(IncrementalMarkingTest, IncrementalStepDuringAllocation) {
TEST_F(IncrementalMarkingTest, MarkingRunsOutOfWorkEventually) {
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
IncrementalPreciseMarkingConfig);
FinishSteps(MarkingConfig::StackState::kNoHeapPointers);
FinishSteps(StackState::kNoHeapPointers);
FinishMarking();
}

@@ -20,12 +20,10 @@ namespace {

class MarkingVerifierTest : public testing::TestWithHeap {
public:
using StackState = Heap::Config::StackState;

V8_NOINLINE void VerifyMarking(HeapBase& heap, StackState stack_state,
size_t expected_marked_bytes) {
Heap::From(GetHeap())->object_allocator().ResetLinearAllocationBuffers();
MarkingVerifier verifier(heap, Heap::Config::CollectionType::kMajor);
MarkingVerifier verifier(heap, CollectionType::kMajor);
verifier.Run(stack_state, v8::base::Stack::GetCurrentStackPosition(),
expected_marked_bytes);
}
@@ -140,16 +138,14 @@ TEST_F(MarkingVerifierTest, DoesntDieOnInConstructionObjectWithWriteBarrier) {
Persistent<Holder<GCedWithCallbackAndChild>> persistent =
MakeGarbageCollected<Holder<GCedWithCallbackAndChild>>(
GetAllocationHandle());
GarbageCollector::Config config =
GarbageCollector::Config::PreciseIncrementalConfig();
GCConfig config = GCConfig::PreciseIncrementalConfig();
Heap::From(GetHeap())->StartIncrementalGarbageCollection(config);
MakeGarbageCollected<GCedWithCallbackAndChild>(
GetAllocationHandle(), MakeGarbageCollected<GCed>(GetAllocationHandle()),
[&persistent](GCedWithCallbackAndChild* obj) {
persistent->object = obj;
});
GetMarkerRef()->IncrementalMarkingStepForTesting(
GarbageCollector::Config::StackState::kNoHeapPointers);
GetMarkerRef()->IncrementalMarkingStepForTesting(StackState::kNoHeapPointers);
Heap::From(GetHeap())->FinalizeIncrementalGarbageCollectionIfRunning(config);
}

@@ -51,15 +51,13 @@ class MetricRecorderTest : public testing::TestWithHeap {
}

void StartGC() {
stats->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kIncremental,
GarbageCollector::Config::IsForcedGC::kNotForced);
stats->NotifyMarkingStarted(CollectionType::kMajor,
GCConfig::MarkingType::kIncremental,
GCConfig::IsForcedGC::kNotForced);
}
void EndGC(size_t marked_bytes) {
stats->NotifyMarkingCompleted(marked_bytes);
stats->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kIncremental);
stats->NotifySweepingCompleted(GCConfig::SweepingType::kIncremental);
}

StatsCollector* stats;
@@ -308,8 +306,7 @@ TEST_F(MetricRecorderTest, ObjectSizeMetricsWithAllocations) {
stats->NotifyAllocation(150);
stats->NotifyAllocatedMemory(1000);
stats->NotifyFreedMemory(400);
stats->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
stats->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
EXPECT_EQ(1300u, MetricRecorderImpl::GCCycle_event.objects.before_bytes);
EXPECT_EQ(800, MetricRecorderImpl::GCCycle_event.objects.after_bytes);
EXPECT_EQ(500u, MetricRecorderImpl::GCCycle_event.objects.freed_bytes);
@@ -107,12 +107,11 @@ class MinorGCTest : public testing::TestWithHeap {
}

void CollectMinor() {
Heap::From(GetHeap())->CollectGarbage(
Heap::Config::MinorPreciseAtomicConfig());
Heap::From(GetHeap())->CollectGarbage(GCConfig::MinorPreciseAtomicConfig());
}

void CollectMajor() {
Heap::From(GetHeap())->CollectGarbage(Heap::Config::PreciseAtomicConfig());
Heap::From(GetHeap())->CollectGarbage(GCConfig::PreciseAtomicConfig());
}

const auto& RememberedSlots() const {
@@ -79,7 +79,7 @@ class V8_NODISCARD CppgcTracingScopesTest : public testing::TestWithHeap {
GetMarkerRef()->FinishMarking(Config::StackState::kNoHeapPointers);
GetMarkerRef().reset();
Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
GCConfig::SweepingType::kAtomic);
}

void ResetDelegatingTracingController(const char* expected_name = nullptr) {
@@ -228,13 +228,11 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {

TEST_F(CppgcTracingScopesTest, InitalScopesAreZero) {
StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
stats_collector->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced);
stats_collector->NotifyMarkingStarted(GCConfig::CollectionType::kMajor,
GCConfig::MarkingType::kAtomic,
GCConfig::IsForcedGC::kNotForced);
stats_collector->NotifyMarkingCompleted(0);
stats_collector->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
stats_collector->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
const StatsCollector::Event& event =
stats_collector->GetPreviousEventForTesting();
for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) {
@@ -249,10 +247,9 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
for (int scope_id = 0; scope_id < StatsCollector::kNumHistogramScopeIds;
++scope_id) {
StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
stats_collector->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kIncremental,
GarbageCollector::Config::IsForcedGC::kNotForced);
stats_collector->NotifyMarkingStarted(GCConfig::CollectionType::kMajor,
GCConfig::MarkingType::kIncremental,
GCConfig::IsForcedGC::kNotForced);
DelegatingTracingControllerImpl::check_expectations = false;
{
StatsCollector::EnabledScope scope(
@@ -265,7 +262,7 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
}
stats_collector->NotifyMarkingCompleted(0);
stats_collector->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kIncremental);
GCConfig::SweepingType::kIncremental);
const StatsCollector::Event& event =
stats_collector->GetPreviousEventForTesting();
for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) {
@@ -284,10 +281,9 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
for (int scope_id = 0;
scope_id < StatsCollector::kNumHistogramConcurrentScopeIds; ++scope_id) {
StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
stats_collector->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced);
stats_collector->NotifyMarkingStarted(GCConfig::CollectionType::kMajor,
GCConfig::MarkingType::kAtomic,
GCConfig::IsForcedGC::kNotForced);
DelegatingTracingControllerImpl::check_expectations = false;
{
StatsCollector::EnabledConcurrentScope scope(
@@ -299,8 +295,7 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
}
}
stats_collector->NotifyMarkingCompleted(0);
stats_collector->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
stats_collector->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
const StatsCollector::Event& event =
stats_collector->GetPreviousEventForTesting();
for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) {
@@ -38,23 +38,21 @@ class StatsCollectorTest : public ::testing::Test {
} // namespace

TEST_F(StatsCollectorTest, NoMarkedBytes) {
stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced);
stats.NotifyMarkingStarted(CollectionType::kMajor,
GCConfig::MarkingType::kAtomic,
GCConfig::IsForcedGC::kNotForced);
stats.NotifyMarkingCompleted(kNoMarkedBytes);
stats.NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
auto event = stats.GetPreviousEventForTesting();
EXPECT_EQ(0u, event.marked_bytes);
}

TEST_F(StatsCollectorTest, EventPrevGCMarkedObjectSize) {
stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced);
stats.NotifyMarkingStarted(CollectionType::kMajor,
GCConfig::MarkingType::kAtomic,
GCConfig::IsForcedGC::kNotForced);
stats.NotifyMarkingCompleted(1024);
stats.NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
auto event = stats.GetPreviousEventForTesting();
EXPECT_EQ(1024u, event.marked_bytes);
}
@@ -74,54 +72,50 @@ TEST_F(StatsCollectorTest, AlllocationReportAboveAllocationThresholdBytes) {
}

TEST_F(StatsCollectorTest, InitialAllocatedObjectSize) {
stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced);
stats.NotifyMarkingStarted(CollectionType::kMajor,
GCConfig::MarkingType::kAtomic,
GCConfig::IsForcedGC::kNotForced);
EXPECT_EQ(0u, stats.allocated_object_size());
stats.NotifyMarkingCompleted(kNoMarkedBytes);
EXPECT_EQ(0u, stats.allocated_object_size());
stats.NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
EXPECT_EQ(0u, stats.allocated_object_size());
}

TEST_F(StatsCollectorTest, AllocatedObjectSize) {
stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced);
stats.NotifyMarkingStarted(CollectionType::kMajor,
GCConfig::MarkingType::kAtomic,
GCConfig::IsForcedGC::kNotForced);
FakeAllocate(kMinReportedSize);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
stats.NotifyMarkingCompleted(kMinReportedSize);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
stats.NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
}

TEST_F(StatsCollectorTest, AllocatedObjectSizeNoMarkedBytes) {
stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced);
stats.NotifyMarkingStarted(CollectionType::kMajor,
GCConfig::MarkingType::kAtomic,
GCConfig::IsForcedGC::kNotForced);
FakeAllocate(kMinReportedSize);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
stats.NotifyMarkingCompleted(kNoMarkedBytes);
EXPECT_EQ(0u, stats.allocated_object_size());
stats.NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
EXPECT_EQ(0u, stats.allocated_object_size());
}

TEST_F(StatsCollectorTest, AllocatedObjectSizeAllocateAfterMarking) {
stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced);
stats.NotifyMarkingStarted(CollectionType::kMajor,
GCConfig::MarkingType::kAtomic,
GCConfig::IsForcedGC::kNotForced);
FakeAllocate(kMinReportedSize);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
stats.NotifyMarkingCompleted(kMinReportedSize);
FakeAllocate(kMinReportedSize);
EXPECT_EQ(2 * kMinReportedSize, stats.allocated_object_size());
stats.NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
EXPECT_EQ(2 * kMinReportedSize, stats.allocated_object_size());
}

@@ -153,12 +147,11 @@ TEST_F(StatsCollectorTest, ObserveAllocatedObjectSizeIncreaseAndDecrease) {
namespace {

void FakeGC(StatsCollector* stats, size_t marked_bytes) {
stats->NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced);
stats->NotifyMarkingStarted(CollectionType::kMajor,
GCConfig::MarkingType::kAtomic,
GCConfig::IsForcedGC::kNotForced);
stats->NotifyMarkingCompleted(marked_bytes);
stats->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
stats->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
}

} // namespace
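The NotifyMarkingStarted / NotifyMarkingCompleted / NotifySweepingCompleted triples repeated above exist because, as the comments in this diff put it, StatsCollector verifies that the Notify* methods are called in the right order. A hypothetical reduction of that ordering contract, independent of the real class:

#include <cassert>
#include <cstddef>

class PhaseTracker {
 public:
  enum class Phase { kNotRunning, kMarking, kSweeping };

  void NotifyMarkingStarted() {
    assert(phase_ == Phase::kNotRunning);
    phase_ = Phase::kMarking;
  }
  void NotifyMarkingCompleted(std::size_t marked_bytes) {
    assert(phase_ == Phase::kMarking);
    marked_bytes_ = marked_bytes;
    phase_ = Phase::kSweeping;
  }
  void NotifySweepingCompleted() {
    assert(phase_ == Phase::kSweeping);
    phase_ = Phase::kNotRunning;
  }
  std::size_t marked_bytes() const { return marked_bytes_; }

 private:
  Phase phase_ = Phase::kNotRunning;
  std::size_t marked_bytes_ = 0;
};

int main() {
  PhaseTracker tracker;
  tracker.NotifyMarkingStarted();
  tracker.NotifyMarkingCompleted(1024);  // Mirrors EventPrevGCMarkedObjectSize.
  tracker.NotifySweepingCompleted();
  assert(tracker.marked_bytes() == 1024);
  return 0;
}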
@@ -48,9 +48,8 @@ class SweeperTest : public testing::TestWithHeap {
// Pretend do finish marking as StatsCollector verifies that Notify*
// methods are called in the right order.
heap->stats_collector()->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced);
CollectionType::kMajor, GCConfig::MarkingType::kAtomic,
GCConfig::IsForcedGC::kNotForced);
heap->stats_collector()->NotifyMarkingCompleted(0);
const SweepingConfig sweeping_config{
SweepingConfig::SweepingType::kAtomic,
@@ -226,8 +225,7 @@ class GCInDestructor final : public GarbageCollected<GCInDestructor> {
~GCInDestructor() {
// Instead of directly calling GC, allocations should be supported here as
// well.
heap_->CollectGarbage(
internal::GarbageCollector::Config::ConservativeAtomicConfig());
heap_->CollectGarbage(internal::GCConfig::ConservativeAtomicConfig());
}
void Trace(Visitor*) const {}

@@ -299,11 +297,10 @@ TEST_F(SweeperTest, LazySweepingDuringAllocation) {
testing::TestPlatform::DisableBackgroundTasksScope no_concurrent_sweep_scope(
GetPlatformHandle().get());
g_destructor_callcount = 0;
static constexpr Heap::Config config = {
Heap::Config::CollectionType::kMajor,
Heap::Config::StackState::kNoHeapPointers,
Heap::Config::MarkingType::kAtomic,
Heap::Config::SweepingType::kIncrementalAndConcurrent};
static constexpr GCConfig config = {
CollectionType::kMajor, StackState::kNoHeapPointers,
GCConfig::MarkingType::kAtomic,
GCConfig::SweepingType::kIncrementalAndConcurrent};
Heap::From(GetHeap())->CollectGarbage(config);
// Incremental sweeping is active and the space should have two pages with
// no room for an additional GCedObject. Allocating a new GCedObject should
@@ -334,14 +331,13 @@ TEST_F(SweeperTest, LazySweepingNormalPages) {
PreciseGC();
EXPECT_EQ(0u, g_destructor_callcount);
MakeGarbageCollected<GCedObject>(GetAllocationHandle());
static constexpr Heap::Config config = {
Heap::Config::CollectionType::kMajor,
Heap::Config::StackState::kNoHeapPointers,
Heap::Config::MarkingType::kAtomic,
static constexpr GCConfig config = {
CollectionType::kMajor, StackState::kNoHeapPointers,
GCConfig::MarkingType::kAtomic,
// Sweeping type must not include concurrent as that could lead to the
// concurrent sweeper holding onto pages in rare cases which delays
// reclamation of objects.
Heap::Config::SweepingType::kIncremental};
GCConfig::SweepingType::kIncremental};
Heap::From(GetHeap())->CollectGarbage(config);
EXPECT_EQ(0u, g_destructor_callcount);
MakeGarbageCollected<GCedObject>(GetAllocationHandle());
@@ -442,10 +438,9 @@ TEST_F(SweeperTest, CrossThreadPersistentCanBeClearedFromOtherThread) {
testing::TestPlatform::DisableBackgroundTasksScope no_concurrent_sweep_scope(
GetPlatformHandle().get());
Heap::From(GetHeap())->CollectGarbage(
{Heap::Config::CollectionType::kMajor,
Heap::Config::StackState::kNoHeapPointers,
Heap::Config::MarkingType::kAtomic,
Heap::Config::SweepingType::kIncrementalAndConcurrent});
{CollectionType::kMajor, StackState::kNoHeapPointers,
GCConfig::MarkingType::kAtomic,
GCConfig::SweepingType::kIncrementalAndConcurrent});
// `holder` is unreachable (as the stack is not scanned) and will be
// reclaimed. Its payload memory is generally poisoned at this point. The
// CrossThreadPersistent slot should be unpoisoned.
@@ -470,11 +465,10 @@ TEST_F(SweeperTest, WeakCrossThreadPersistentCanBeClearedFromOtherThread) {

testing::TestPlatform::DisableBackgroundTasksScope no_concurrent_sweep_scope(
GetPlatformHandle().get());
static constexpr Heap::Config config = {
Heap::Config::CollectionType::kMajor,
Heap::Config::StackState::kNoHeapPointers,
Heap::Config::MarkingType::kAtomic,
Heap::Config::SweepingType::kIncrementalAndConcurrent};
static constexpr GCConfig config = {
CollectionType::kMajor, StackState::kNoHeapPointers,
GCConfig::MarkingType::kAtomic,
GCConfig::SweepingType::kIncrementalAndConcurrent};
Heap::From(GetHeap())->CollectGarbage(config);
// `holder` is unreachable (as the stack is not scanned) and will be
// reclaimed. Its payload memory is generally poisoned at this point. The
@@ -483,10 +477,9 @@ TEST_F(SweeperTest, WeakCrossThreadPersistentCanBeClearedFromOtherThread) {
// GC in the remote heap should also clear `holder->weak_ref`. The slot for
// `weak_ref` should be unpoisoned by the GC.
Heap::From(remote_heap.get())
->CollectGarbage({Heap::Config::CollectionType::kMajor,
Heap::Config::StackState::kNoHeapPointers,
Heap::Config::MarkingType::kAtomic,
Heap::Config::SweepingType::kAtomic});
->CollectGarbage({CollectionType::kMajor, StackState::kNoHeapPointers,
GCConfig::MarkingType::kAtomic,
GCConfig::SweepingType::kAtomic});

// Finish the sweeper which will find the CrossThreadPersistent in cleared
// state.
@@ -28,7 +28,7 @@ TEST_F(TestingTest,
auto* gced = MakeGarbageCollected<GCed>(GetHeap()->GetAllocationHandle());
WeakPersistent<GCed> weak{gced};
internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::PreciseAtomicConfig());
GCConfig::PreciseAtomicConfig());
EXPECT_FALSE(weak);
}
{
@@ -38,7 +38,7 @@ TEST_F(TestingTest,
GetHeap()->GetHeapHandle(),
EmbedderStackState::kMayContainHeapPointers);
internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::PreciseAtomicConfig());
GCConfig::PreciseAtomicConfig());
EXPECT_FALSE(weak);
}
{
@@ -47,7 +47,7 @@ TEST_F(TestingTest,
cppgc::testing::OverrideEmbedderStackStateScope override_stack(
GetHeap()->GetHeapHandle(), EmbedderStackState::kNoHeapPointers);
internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::ConservativeAtomicConfig());
GCConfig::ConservativeAtomicConfig());
EXPECT_TRUE(weak);
}
}
@@ -87,10 +87,9 @@ class TestWithHeap : public TestWithPlatform {
// size of the heap and corresponding pages.
void ConservativeMemoryDiscardingGC() {
internal::Heap::From(GetHeap())->CollectGarbage(
{GarbageCollector::Config::CollectionType::kMajor,
Heap::StackState::kMayContainHeapPointers,
{CollectionType::kMajor, Heap::StackState::kMayContainHeapPointers,
cppgc::Heap::MarkingType::kAtomic, cppgc::Heap::SweepingType::kAtomic,
GarbageCollector::Config::FreeMemoryHandling::kDiscardWherePossible});
GCConfig::FreeMemoryHandling::kDiscardWherePossible});
}

cppgc::Heap* GetHeap() const { return heap_.get(); }
@@ -18,26 +18,23 @@ namespace internal {
namespace {
class WeakContainerTest : public testing::TestWithHeap {
public:
using Config = Marker::MarkingConfig;

void StartMarking() {
CHECK_EQ(0u,
Heap::From(GetHeap())->AsBase().stats_collector()->marked_bytes());
Config config = {Config::CollectionType::kMajor,
Config::StackState::kNoHeapPointers,
Config::MarkingType::kIncremental};
MarkingConfig config = {CollectionType::kMajor, StackState::kNoHeapPointers,
MarkingConfig::MarkingType::kIncremental};
GetMarkerRef() = std::make_unique<Marker>(
Heap::From(GetHeap())->AsBase(), GetPlatformHandle().get(), config);
GetMarkerRef()->StartMarking();
}

void FinishMarking(Config::StackState stack_state) {
void FinishMarking(StackState stack_state) {
GetMarkerRef()->FinishMarking(stack_state);
marked_bytes_ =
Heap::From(GetHeap())->AsBase().stats_collector()->marked_bytes();
GetMarkerRef().reset();
Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic);
GCConfig::SweepingType::kAtomic);
}

size_t GetMarkedBytes() const { return marked_bytes_; }
@@ -96,7 +93,7 @@ TEST_F(WeakContainerTest, TraceableGCedTraced) {
obj->n_trace_calls = 0u;
StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr);
FinishMarking(Config::StackState::kNoHeapPointers);
FinishMarking(StackState::kNoHeapPointers);
EXPECT_NE(0u, obj->n_trace_calls);
EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes());
}
@@ -107,7 +104,7 @@ TEST_F(WeakContainerTest, NonTraceableGCedNotTraced) {
obj->n_trace_calls = 0u;
StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr);
FinishMarking(Config::StackState::kNoHeapPointers);
FinishMarking(StackState::kNoHeapPointers);
EXPECT_EQ(0u, obj->n_trace_calls);
EXPECT_EQ(SizeOf<NonTraceableGCed>(), GetMarkedBytes());
}
@@ -118,7 +115,7 @@ TEST_F(WeakContainerTest, NonTraceableGCedNotTracedConservatively) {
obj->n_trace_calls = 0u;
StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr);
FinishMarking(Config::StackState::kMayContainHeapPointers);
FinishMarking(StackState::kMayContainHeapPointers);
EXPECT_NE(0u, obj->n_trace_calls);
EXPECT_EQ(SizeOf<NonTraceableGCed>(), GetMarkedBytes());
}
@@ -129,7 +126,7 @@ TEST_F(WeakContainerTest, PreciseGCTracesWeakContainerWhenTraced) {
obj->n_trace_calls = 0u;
StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr);
FinishMarking(Config::StackState::kNoHeapPointers);
FinishMarking(StackState::kNoHeapPointers);
EXPECT_EQ(1u, obj->n_trace_calls);
EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes());
}
@@ -140,7 +137,7 @@ TEST_F(WeakContainerTest, ConservativeGCTracesWeakContainer) {
obj->n_trace_calls = 0u;
StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr);
FinishMarking(Config::StackState::kMayContainHeapPointers);
FinishMarking(StackState::kMayContainHeapPointers);
EXPECT_EQ(2u, obj->n_trace_calls);
EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes());
}
@@ -155,7 +152,7 @@ TEST_F(WeakContainerTest, ConservativeGCTracesWeakContainerOnce) {
obj->n_trace_calls = 0u;
StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr);
FinishMarking(Config::StackState::kMayContainHeapPointers);
FinishMarking(StackState::kMayContainHeapPointers);
EXPECT_EQ(1u, obj->n_trace_calls);
EXPECT_EQ(SizeOf<NonTraceableGCed>(), GetMarkedBytes());
}
@@ -183,7 +180,7 @@ TEST_F(WeakContainerTest, WeakContainerWeakCallbackCalled) {
StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, WeakCallback::callback,
obj);
FinishMarking(Config::StackState::kMayContainHeapPointers);
FinishMarking(StackState::kMayContainHeapPointers);
EXPECT_NE(0u, WeakCallback::n_callback_called);
EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes());
EXPECT_EQ(obj, WeakCallback::obj);
@@ -26,11 +26,11 @@ class WorkloadsTest : public testing::TestWithHeap {
public:
void ConservativeGC() {
internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::ConservativeAtomicConfig());
GCConfig::ConservativeAtomicConfig());
}
void PreciseGC() {
internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::PreciseAtomicConfig());
GCConfig::PreciseAtomicConfig());
}
};

@@ -29,16 +29,15 @@ class V8_NODISCARD IncrementalMarkingScope {
marker_->FinishMarking(kIncrementalConfig.stack_state);
}

static constexpr Marker::MarkingConfig kIncrementalConfig{
Marker::MarkingConfig::CollectionType::kMajor,
Marker::MarkingConfig::StackState::kNoHeapPointers,
Marker::MarkingConfig::MarkingType::kIncremental};
static constexpr MarkingConfig kIncrementalConfig{
CollectionType::kMajor, StackState::kNoHeapPointers,
MarkingConfig::MarkingType::kIncremental};

private:
MarkerBase* marker_;
};

constexpr Marker::MarkingConfig IncrementalMarkingScope::kIncrementalConfig;
constexpr MarkingConfig IncrementalMarkingScope::kIncrementalConfig;

class V8_NODISCARD ExpectWriteBarrierFires final
: private IncrementalMarkingScope {