cppgc: Add write barrier
This moves from Blink: 1) implementation of the marking write barrier; 2) WriteBarrierWorklist to Marker; 3) incremental/concurrent marking options. Bug: chromium:1056170 Change-Id: Ia3e31ffd920a99803420b1453695fe2fb8d843b8 Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2218064 Commit-Queue: Anton Bikineev <bikineev@chromium.org> Reviewed-by: Omer Katz <omerkatz@chromium.org> Reviewed-by: Ulan Degenbaev <ulan@chromium.org> Reviewed-by: Michael Lippautz <mlippautz@chromium.org> Cr-Commit-Position: refs/heads/master@{#68108}
This commit is contained in:
parent
f672cefd30
commit
ad54f1bb17
5
BUILD.gn
5
BUILD.gn
@ -4080,12 +4080,15 @@ v8_source_set("cppgc_base") {
|
||||
"include/cppgc/heap.h",
|
||||
"include/cppgc/internal/accessors.h",
|
||||
"include/cppgc/internal/api-contants.h",
|
||||
"include/cppgc/internal/atomic-entry-flag.h",
|
||||
"include/cppgc/internal/compiler-specific.h",
|
||||
"include/cppgc/internal/finalizer-traits.h",
|
||||
"include/cppgc/internal/gc-info.h",
|
||||
"include/cppgc/internal/persistent-node.h",
|
||||
"include/cppgc/internal/pointer-policies.h",
|
||||
"include/cppgc/internal/prefinalizer-handler.h",
|
||||
"include/cppgc/internal/process-heap.h",
|
||||
"include/cppgc/internal/write-barrier.h",
|
||||
"include/cppgc/liveness-broker.h",
|
||||
"include/cppgc/liveness-broker.h",
|
||||
"include/cppgc/macros.h",
|
||||
@ -4134,6 +4137,7 @@ v8_source_set("cppgc_base") {
|
||||
"src/heap/cppgc/pointer-policies.cc",
|
||||
"src/heap/cppgc/prefinalizer-handler.cc",
|
||||
"src/heap/cppgc/prefinalizer-handler.h",
|
||||
"src/heap/cppgc/process-heap.cc",
|
||||
"src/heap/cppgc/raw-heap.cc",
|
||||
"src/heap/cppgc/raw-heap.h",
|
||||
"src/heap/cppgc/sanitizers.h",
|
||||
@ -4146,6 +4150,7 @@ v8_source_set("cppgc_base") {
|
||||
"src/heap/cppgc/virtual-memory.h",
|
||||
"src/heap/cppgc/visitor.cc",
|
||||
"src/heap/cppgc/worklist.h",
|
||||
"src/heap/cppgc/write-barrier.cc",
|
||||
]
|
||||
|
||||
if (is_clang || !is_win) {
|
||||
|
48
include/cppgc/internal/atomic-entry-flag.h
Normal file
48
include/cppgc/internal/atomic-entry-flag.h
Normal file
@ -0,0 +1,48 @@
|
||||
// Copyright 2020 the V8 project authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
|
||||
#ifndef INCLUDE_CPPGC_INTERNAL_ATOMIC_ENTRY_FLAG_H_
|
||||
#define INCLUDE_CPPGC_INTERNAL_ATOMIC_ENTRY_FLAG_H_
|
||||
|
||||
#include <atomic>
|
||||
|
||||
namespace cppgc {
|
||||
namespace internal {
|
||||
|
||||
// A flag which provides a fast check whether a scope may be entered on the
|
||||
// current thread, without needing to access thread-local storage or mutex. Can
|
||||
// have false positives (i.e., spuriously report that it might be entered), so
|
||||
// it is expected that this will be used in tandem with a precise check that the
|
||||
// scope is in fact entered on that thread.
|
||||
//
|
||||
// Example:
|
||||
// g_frobnicating_flag.MightBeEntered() &&
|
||||
// ThreadLocalFrobnicator().IsFrobnicating()
|
||||
//
|
||||
// Relaxed atomic operations are sufficient, since:
|
||||
// - all accesses remain atomic
|
||||
// - each thread must observe its own operations in order
|
||||
// - no thread ever exits the flag more times than it enters (if used correctly)
|
||||
// And so if a thread observes zero, it must be because it has observed an equal
|
||||
// number of exits as entries.
|
||||
class AtomicEntryFlag final {
|
||||
public:
|
||||
void Enter() { entries_.fetch_add(1, std::memory_order_relaxed); }
|
||||
void Exit() { entries_.fetch_sub(1, std::memory_order_relaxed); }
|
||||
|
||||
// Returns false only if the current thread is not between a call to Enter
|
||||
// and a call to Exit. Returns true if this thread or another thread may
|
||||
// currently be in the scope guarded by this flag.
|
||||
bool MightBeEntered() const {
|
||||
return entries_.load(std::memory_order_relaxed) != 0;
|
||||
}
|
||||
|
||||
private:
|
||||
std::atomic_int entries_{0};
|
||||
};
|
||||
|
||||
} // namespace internal
|
||||
} // namespace cppgc
|
||||
|
||||
#endif // INCLUDE_CPPGC_INTERNAL_ATOMIC_ENTRY_FLAG_H_
|
@ -8,6 +8,7 @@
|
||||
#include <cstdint>
|
||||
#include <type_traits>
|
||||
|
||||
#include "cppgc/internal/write-barrier.h"
|
||||
#include "cppgc/source-location.h"
|
||||
#include "v8config.h" // NOLINT(build/include_directory)
|
||||
|
||||
@ -26,8 +27,8 @@ struct DijkstraWriteBarrierPolicy {
|
||||
// Since in initializing writes the source object is always white, having no
|
||||
// barrier doesn't break the tri-color invariant.
|
||||
}
|
||||
static void AssigningBarrier(const void*, const void*) {
|
||||
// TODO(chromium:1056170): Add actual implementation.
|
||||
static void AssigningBarrier(const void* slot, const void* value) {
|
||||
WriteBarrier::MarkingBarrier(slot, value);
|
||||
}
|
||||
};
|
||||
|
||||
|
34
include/cppgc/internal/process-heap.h
Normal file
34
include/cppgc/internal/process-heap.h
Normal file
@ -0,0 +1,34 @@
|
||||
// Copyright 2020 the V8 project authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
|
||||
#ifndef INCLUDE_CPPGC_INTERNAL_PROCESS_HEAP_H_
|
||||
#define INCLUDE_CPPGC_INTERNAL_PROCESS_HEAP_H_
|
||||
|
||||
#include "cppgc/internal/atomic-entry-flag.h"
|
||||
#include "v8config.h" // NOLINT(build/include_directory)
|
||||
|
||||
namespace cppgc {
|
||||
namespace internal {
|
||||
|
||||
class V8_EXPORT ProcessHeap final {
|
||||
public:
|
||||
static void EnterIncrementalOrConcurrentMarking() {
|
||||
concurrent_marking_flag_.Enter();
|
||||
}
|
||||
static void ExitIncrementalOrConcurrentMarking() {
|
||||
concurrent_marking_flag_.Exit();
|
||||
}
|
||||
|
||||
static bool IsAnyIncrementalOrConcurrentMarking() {
|
||||
return concurrent_marking_flag_.MightBeEntered();
|
||||
}
|
||||
|
||||
private:
|
||||
static AtomicEntryFlag concurrent_marking_flag_;
|
||||
};
|
||||
|
||||
} // namespace internal
|
||||
} // namespace cppgc
|
||||
|
||||
#endif // INCLUDE_CPPGC_INTERNAL_PROCESS_HEAP_H_
|
34
include/cppgc/internal/write-barrier.h
Normal file
34
include/cppgc/internal/write-barrier.h
Normal file
@ -0,0 +1,34 @@
|
||||
// Copyright 2020 the V8 project authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
|
||||
#ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
|
||||
#define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
|
||||
|
||||
#include "cppgc/internal/process-heap.h"
|
||||
#include "v8config.h" // NOLINT(build/include_directory)
|
||||
|
||||
namespace cppgc {
|
||||
namespace internal {
|
||||
|
||||
class BasePage;
|
||||
class Heap;
|
||||
|
||||
class V8_EXPORT WriteBarrier final {
|
||||
public:
|
||||
static V8_INLINE void MarkingBarrier(const void* slot, const void* value) {
|
||||
if (V8_LIKELY(!ProcessHeap::IsAnyIncrementalOrConcurrentMarking())) return;
|
||||
|
||||
MarkingBarrierSlow(slot, value);
|
||||
}
|
||||
|
||||
private:
|
||||
WriteBarrier() = delete;
|
||||
|
||||
static void MarkingBarrierSlow(const void* slot, const void* value);
|
||||
};
|
||||
|
||||
} // namespace internal
|
||||
} // namespace cppgc
|
||||
|
||||
#endif // INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
|
@ -149,10 +149,10 @@ void Heap::CollectGarbage(GCConfig config) {
|
||||
|
||||
epoch_++;
|
||||
|
||||
// TODO(chromium:1056170): Replace with proper mark-sweep algorithm.
|
||||
// "Marking".
|
||||
marker_ = std::make_unique<Marker>(this);
|
||||
Marker::MarkingConfig marking_config(config.stack_state);
|
||||
const Marker::MarkingConfig marking_config{config.stack_state,
|
||||
config.marking_type};
|
||||
marker_->StartMarking(marking_config);
|
||||
marker_->FinishMarking(marking_config);
|
||||
// "Sweeping and finalization".
|
||||
@ -165,7 +165,7 @@ void Heap::CollectGarbage(GCConfig config) {
|
||||
marker_.reset();
|
||||
{
|
||||
NoGCScope no_gc(this);
|
||||
sweeper_.Start(config.sweep_type);
|
||||
sweeper_.Start(config.sweeping_type);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -30,6 +30,10 @@
|
||||
namespace cppgc {
|
||||
namespace internal {
|
||||
|
||||
namespace testing {
|
||||
class TestWithHeap;
|
||||
}
|
||||
|
||||
class Stack;
|
||||
|
||||
class V8_EXPORT_PRIVATE LivenessBrokerFactory {
|
||||
@ -73,14 +77,14 @@ class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap {
|
||||
|
||||
struct GCConfig {
|
||||
using StackState = Heap::StackState;
|
||||
using SweepType = Sweeper::Config;
|
||||
using MarkingType = Marker::MarkingConfig::MarkingType;
|
||||
using SweepingType = Sweeper::Config;
|
||||
|
||||
static GCConfig Default() {
|
||||
return {StackState::kMayContainHeapPointers, SweepType::kAtomic};
|
||||
}
|
||||
static constexpr GCConfig Default() { return {}; }
|
||||
|
||||
StackState stack_state = StackState::kMayContainHeapPointers;
|
||||
SweepType sweep_type = SweepType::kAtomic;
|
||||
MarkingType marking_type = MarkingType::kAtomic;
|
||||
SweepingType sweeping_type = SweepingType::kAtomic;
|
||||
};
|
||||
|
||||
static Heap* From(cppgc::Heap* heap) { return static_cast<Heap*>(heap); }
|
||||
@ -154,6 +158,9 @@ class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap {
|
||||
|
||||
size_t no_gc_scope_ = 0;
|
||||
size_t no_allocation_scope_ = 0;
|
||||
|
||||
friend class WriteBarrier;
|
||||
friend class testing::TestWithHeap;
|
||||
};
|
||||
|
||||
} // namespace internal
|
||||
|
@ -4,6 +4,7 @@
|
||||
|
||||
#include "src/heap/cppgc/marker.h"
|
||||
|
||||
#include "include/cppgc/internal/process-heap.h"
|
||||
#include "src/heap/cppgc/heap-object-header-inl.h"
|
||||
#include "src/heap/cppgc/heap-visitor.h"
|
||||
#include "src/heap/cppgc/heap.h"
|
||||
@ -27,6 +28,23 @@ void ResetLocalAllocationBuffers(Heap* heap) {
|
||||
ResetLocalAllocationBufferVisitor visitor;
|
||||
visitor.Traverse(&heap->raw_heap());
|
||||
}
|
||||
|
||||
void EnterIncrementalMarkingIfNeeded(Marker::MarkingConfig config) {
|
||||
if (config.marking_type == Marker::MarkingConfig::MarkingType::kIncremental ||
|
||||
config.marking_type ==
|
||||
Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
|
||||
ProcessHeap::EnterIncrementalOrConcurrentMarking();
|
||||
}
|
||||
}
|
||||
|
||||
void ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config) {
|
||||
if (config.marking_type == Marker::MarkingConfig::MarkingType::kIncremental ||
|
||||
config.marking_type ==
|
||||
Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
|
||||
ProcessHeap::ExitIncrementalOrConcurrentMarking();
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
namespace {
|
||||
@ -62,7 +80,7 @@ Marker::~Marker() {
|
||||
// and should thus already be marked.
|
||||
if (!not_fully_constructed_worklist_.IsEmpty()) {
|
||||
#if DEBUG
|
||||
DCHECK_NE(MarkingConfig::StackState::kNoHeapPointers, config_.stack_state_);
|
||||
DCHECK_NE(MarkingConfig::StackState::kNoHeapPointers, config_.stack_state);
|
||||
NotFullyConstructedItem item;
|
||||
NotFullyConstructedWorklist::View view(¬_fully_constructed_worklist_,
|
||||
kMutatorThreadId);
|
||||
@ -81,15 +99,17 @@ Marker::~Marker() {
|
||||
void Marker::StartMarking(MarkingConfig config) {
|
||||
config_ = config;
|
||||
VisitRoots();
|
||||
EnterIncrementalMarkingIfNeeded(config);
|
||||
}
|
||||
|
||||
void Marker::FinishMarking(MarkingConfig config) {
|
||||
ExitIncrementalMarkingIfNeeded(config_);
|
||||
config_ = config;
|
||||
|
||||
// Reset LABs before trying to conservatively mark in-construction objects.
|
||||
// This is also needed in preparation for sweeping.
|
||||
ResetLocalAllocationBuffers(heap_);
|
||||
if (config_.stack_state_ == MarkingConfig::StackState::kNoHeapPointers) {
|
||||
if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
|
||||
FlushNotFullyConstructedObjects();
|
||||
} else {
|
||||
MarkNotFullyConstructedObjects();
|
||||
@ -117,7 +137,7 @@ void Marker::VisitRoots() {
|
||||
ResetLocalAllocationBuffers(heap_);
|
||||
|
||||
heap_->GetStrongPersistentRegion().Trace(marking_visitor_.get());
|
||||
if (config_.stack_state_ != MarkingConfig::StackState::kNoHeapPointers)
|
||||
if (config_.stack_state != MarkingConfig::StackState::kNoHeapPointers)
|
||||
heap_->stack()->IteratePointers(marking_visitor_.get());
|
||||
}
|
||||
|
||||
@ -154,6 +174,19 @@ bool Marker::AdvanceMarkingWithDeadline(v8::base::TimeDelta duration) {
|
||||
},
|
||||
kMutatorThreadId))
|
||||
return false;
|
||||
|
||||
if (!DrainWorklistWithDeadline(
|
||||
deadline, &write_barrier_worklist_,
|
||||
[visitor](HeapObjectHeader* header) {
|
||||
DCHECK(header);
|
||||
DCHECK(!MutatorThreadMarkingVisitor::IsInConstruction(*header));
|
||||
const GCInfo& gcinfo =
|
||||
GlobalGCInfoTable::GCInfoFromIndex(header->GetGCInfoIndex());
|
||||
gcinfo.trace(visitor, header->Payload());
|
||||
visitor->AccountMarkedBytes(*header);
|
||||
},
|
||||
kMutatorThreadId))
|
||||
return false;
|
||||
} while (!marking_worklist_.IsLocalViewEmpty(kMutatorThreadId));
|
||||
|
||||
return true;
|
||||
@ -182,6 +215,7 @@ void Marker::ClearAllWorklistsForTesting() {
|
||||
marking_worklist_.Clear();
|
||||
not_fully_constructed_worklist_.Clear();
|
||||
previously_not_fully_constructed_worklist_.Clear();
|
||||
write_barrier_worklist_.Clear();
|
||||
weak_callback_worklist_.Clear();
|
||||
}
|
||||
|
||||
|
@ -17,6 +17,7 @@ namespace cppgc {
|
||||
namespace internal {
|
||||
|
||||
class Heap;
|
||||
class HeapObjectHeader;
|
||||
class MutatorThreadMarkingVisitor;
|
||||
|
||||
class V8_EXPORT_PRIVATE Marker {
|
||||
@ -41,31 +42,21 @@ class V8_EXPORT_PRIVATE Marker {
|
||||
Worklist<NotFullyConstructedItem, 16 /* local entries */, kNumMarkers>;
|
||||
using WeakCallbackWorklist =
|
||||
Worklist<WeakCallbackItem, 64 /* local entries */, kNumMarkers>;
|
||||
using WriteBarrierWorklist =
|
||||
Worklist<HeapObjectHeader*, 64 /*local entries */, kNumMarkers>;
|
||||
|
||||
struct MarkingConfig {
|
||||
using StackState = cppgc::Heap::StackState;
|
||||
enum class IncrementalMarking : uint8_t { kDisabled };
|
||||
enum class ConcurrentMarking : uint8_t { kDisabled };
|
||||
enum MarkingType : uint8_t {
|
||||
kAtomic,
|
||||
kIncremental,
|
||||
kIncrementalAndConcurrent
|
||||
};
|
||||
|
||||
static MarkingConfig Default() {
|
||||
return {StackState::kMayContainHeapPointers,
|
||||
IncrementalMarking::kDisabled, ConcurrentMarking::kDisabled};
|
||||
}
|
||||
static constexpr MarkingConfig Default() { return {}; }
|
||||
|
||||
explicit MarkingConfig(StackState stack_state)
|
||||
: MarkingConfig(stack_state, IncrementalMarking::kDisabled,
|
||||
ConcurrentMarking::kDisabled) {}
|
||||
|
||||
MarkingConfig(StackState stack_state,
|
||||
IncrementalMarking incremental_marking_state,
|
||||
ConcurrentMarking concurrent_marking_state)
|
||||
: stack_state_(stack_state),
|
||||
incremental_marking_state_(incremental_marking_state),
|
||||
concurrent_marking_state_(concurrent_marking_state) {}
|
||||
|
||||
StackState stack_state_;
|
||||
IncrementalMarking incremental_marking_state_;
|
||||
ConcurrentMarking concurrent_marking_state_;
|
||||
StackState stack_state = StackState::kMayContainHeapPointers;
|
||||
MarkingType marking_type = MarkingType::kAtomic;
|
||||
};
|
||||
|
||||
explicit Marker(Heap* heap);
|
||||
@ -78,7 +69,7 @@ class V8_EXPORT_PRIVATE Marker {
|
||||
// trigger incremental/concurrent marking if needed.
|
||||
void StartMarking(MarkingConfig config);
|
||||
// Finalize marking. This method stops incremental/concurrent marking
|
||||
// if exsists and performs atomic pause marking. FinishMarking may
|
||||
// if exists and performs atomic pause marking. FinishMarking may
|
||||
// update the MarkingConfig, e.g. if the stack state has changed.
|
||||
void FinishMarking(MarkingConfig config);
|
||||
|
||||
@ -89,6 +80,9 @@ class V8_EXPORT_PRIVATE Marker {
|
||||
NotFullyConstructedWorklist* not_fully_constructed_worklist() {
|
||||
return ¬_fully_constructed_worklist_;
|
||||
}
|
||||
WriteBarrierWorklist* write_barrier_worklist() {
|
||||
return &write_barrier_worklist_;
|
||||
}
|
||||
WeakCallbackWorklist* weak_callback_worklist() {
|
||||
return &weak_callback_worklist_;
|
||||
}
|
||||
@ -118,6 +112,7 @@ class V8_EXPORT_PRIVATE Marker {
|
||||
MarkingWorklist marking_worklist_;
|
||||
NotFullyConstructedWorklist not_fully_constructed_worklist_;
|
||||
NotFullyConstructedWorklist previously_not_fully_constructed_worklist_;
|
||||
WriteBarrierWorklist write_barrier_worklist_;
|
||||
WeakCallbackWorklist weak_callback_worklist_;
|
||||
};
|
||||
|
||||
|
13
src/heap/cppgc/process-heap.cc
Normal file
13
src/heap/cppgc/process-heap.cc
Normal file
@ -0,0 +1,13 @@
|
||||
// Copyright 2020 the V8 project authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
|
||||
#include "include/cppgc/internal/process-heap.h"
|
||||
|
||||
namespace cppgc {
|
||||
namespace internal {
|
||||
|
||||
AtomicEntryFlag ProcessHeap::concurrent_marking_flag_;
|
||||
|
||||
} // namespace internal
|
||||
} // namespace cppgc
|
58
src/heap/cppgc/write-barrier.cc
Normal file
58
src/heap/cppgc/write-barrier.cc
Normal file
@ -0,0 +1,58 @@
|
||||
// Copyright 2020 the V8 project authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
|
||||
#include "include/cppgc/internal/write-barrier.h"
|
||||
|
||||
#include "include/cppgc/internal/pointer-policies.h"
|
||||
#include "src/heap/cppgc/heap-object-header-inl.h"
|
||||
#include "src/heap/cppgc/heap-object-header.h"
|
||||
#include "src/heap/cppgc/heap-page.h"
|
||||
#include "src/heap/cppgc/heap.h"
|
||||
#include "src/heap/cppgc/marker.h"
|
||||
#include "src/heap/cppgc/marking-visitor.h"
|
||||
|
||||
namespace cppgc {
|
||||
namespace internal {
|
||||
|
||||
namespace {
|
||||
|
||||
void MarkValue(const BasePage* page, Marker* marker, const void* value) {
|
||||
auto& header =
|
||||
const_cast<HeapObjectHeader&>(page->ObjectHeaderFromInnerAddress(value));
|
||||
if (!header.TryMarkAtomic()) return;
|
||||
|
||||
DCHECK(marker);
|
||||
|
||||
if (V8_UNLIKELY(MutatorThreadMarkingVisitor::IsInConstruction(header))) {
|
||||
// It is assumed that objects on not_fully_constructed_worklist_ are not
|
||||
// marked.
|
||||
header.Unmark();
|
||||
Marker::NotFullyConstructedWorklist::View not_fully_constructed_worklist(
|
||||
marker->not_fully_constructed_worklist(), Marker::kMutatorThreadId);
|
||||
not_fully_constructed_worklist.Push(header.Payload());
|
||||
return;
|
||||
}
|
||||
|
||||
Marker::WriteBarrierWorklist::View write_barrier_worklist(
|
||||
marker->write_barrier_worklist(), Marker::kMutatorThreadId);
|
||||
write_barrier_worklist.Push(&header);
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
void WriteBarrier::MarkingBarrierSlow(const void*, const void* value) {
|
||||
if (!value || value == kSentinelPointer) return;
|
||||
|
||||
const BasePage* page = BasePage::FromPayload(value);
|
||||
const Heap* heap = page->heap();
|
||||
|
||||
// Marker being not set up means that no incremental/concurrent marking is in
|
||||
// progress.
|
||||
if (!heap->marker_) return;
|
||||
|
||||
MarkValue(page, heap->marker_.get(), value);
|
||||
}
|
||||
|
||||
} // namespace internal
|
||||
} // namespace cppgc
|
@ -70,6 +70,7 @@ v8_source_set("cppgc_unittests_sources") {
|
||||
"heap/cppgc/tests.h",
|
||||
"heap/cppgc/visitor-unittest.cc",
|
||||
"heap/cppgc/worklist-unittest.cc",
|
||||
"heap/cppgc/write-barrier-unittest.cc",
|
||||
]
|
||||
|
||||
configs = [
|
||||
|
@ -37,9 +37,8 @@ class TestWithHeapWithCustomSpaces : public testing::TestWithPlatform {
|
||||
}
|
||||
|
||||
void PreciseGC() {
|
||||
heap_->ForceGarbageCollectionSlow(
|
||||
"TestWithHeapWithCustomSpaces", "Testing",
|
||||
Heap::GCConfig::StackState::kNoHeapPointers);
|
||||
heap_->ForceGarbageCollectionSlow("TestWithHeapWithCustomSpaces", "Testing",
|
||||
cppgc::Heap::StackState::kNoHeapPointers);
|
||||
}
|
||||
|
||||
cppgc::Heap* GetHeap() const { return heap_.get(); }
|
||||
|
@ -22,11 +22,11 @@ class GCHeapTest : public testing::TestWithHeap {
|
||||
public:
|
||||
void ConservativeGC() {
|
||||
internal::Heap::From(GetHeap())->CollectGarbage(
|
||||
{Heap::GCConfig::StackState::kMayContainHeapPointers});
|
||||
{Heap::StackState::kMayContainHeapPointers});
|
||||
}
|
||||
void PreciseGC() {
|
||||
internal::Heap::From(GetHeap())->CollectGarbage(
|
||||
{Heap::GCConfig::StackState::kNoHeapPointers});
|
||||
{Heap::StackState::kNoHeapPointers});
|
||||
}
|
||||
};
|
||||
|
||||
@ -66,7 +66,7 @@ namespace {
|
||||
const void* ConservativeGCReturningObject(cppgc::Heap* heap,
|
||||
const void* volatile object) {
|
||||
internal::Heap::From(heap)->CollectGarbage(
|
||||
{Heap::GCConfig::StackState::kMayContainHeapPointers});
|
||||
{Heap::StackState::kMayContainHeapPointers});
|
||||
return object;
|
||||
}
|
||||
|
||||
|
@ -56,7 +56,7 @@ TEST_F(MarkerTest, PersistentIsMarked) {
|
||||
Persistent<GCed> object = MakeGarbageCollected<GCed>(GetHeap());
|
||||
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
|
||||
EXPECT_FALSE(header.IsMarked());
|
||||
DoMarking(MarkingConfig(MarkingConfig::StackState::kNoHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kNoHeapPointers});
|
||||
EXPECT_TRUE(header.IsMarked());
|
||||
}
|
||||
|
||||
@ -65,7 +65,7 @@ TEST_F(MarkerTest, ReachableMemberIsMarked) {
|
||||
parent->SetChild(MakeGarbageCollected<GCed>(GetHeap()));
|
||||
HeapObjectHeader& header = HeapObjectHeader::FromPayload(parent->child());
|
||||
EXPECT_FALSE(header.IsMarked());
|
||||
DoMarking(MarkingConfig(MarkingConfig::StackState::kNoHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kNoHeapPointers});
|
||||
EXPECT_TRUE(header.IsMarked());
|
||||
}
|
||||
|
||||
@ -73,14 +73,14 @@ TEST_F(MarkerTest, UnreachableMemberIsNotMarked) {
|
||||
Member<GCed> object = MakeGarbageCollected<GCed>(GetHeap());
|
||||
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
|
||||
EXPECT_FALSE(header.IsMarked());
|
||||
DoMarking(MarkingConfig(MarkingConfig::StackState::kNoHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kNoHeapPointers});
|
||||
EXPECT_FALSE(header.IsMarked());
|
||||
}
|
||||
|
||||
TEST_F(MarkerTest, ObjectReachableFromStackIsMarked) {
|
||||
GCed* object = MakeGarbageCollected<GCed>(GetHeap());
|
||||
EXPECT_FALSE(HeapObjectHeader::FromPayload(object).IsMarked());
|
||||
DoMarking(MarkingConfig(MarkingConfig::StackState::kMayContainHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kMayContainHeapPointers});
|
||||
EXPECT_TRUE(HeapObjectHeader::FromPayload(object).IsMarked());
|
||||
access(object);
|
||||
}
|
||||
@ -89,7 +89,7 @@ TEST_F(MarkerTest, ObjectReachableOnlyFromStackIsNotMarkedIfStackIsEmpty) {
|
||||
GCed* object = MakeGarbageCollected<GCed>(GetHeap());
|
||||
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
|
||||
EXPECT_FALSE(header.IsMarked());
|
||||
DoMarking(MarkingConfig(MarkingConfig::StackState::kNoHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kNoHeapPointers});
|
||||
EXPECT_FALSE(header.IsMarked());
|
||||
access(object);
|
||||
}
|
||||
@ -98,14 +98,14 @@ TEST_F(MarkerTest, WeakReferenceToUnreachableObjectIsCleared) {
|
||||
{
|
||||
WeakPersistent<GCed> weak_object = MakeGarbageCollected<GCed>(GetHeap());
|
||||
EXPECT_TRUE(weak_object);
|
||||
DoMarking(MarkingConfig(MarkingConfig::StackState::kNoHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kNoHeapPointers});
|
||||
EXPECT_FALSE(weak_object);
|
||||
}
|
||||
{
|
||||
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetHeap());
|
||||
parent->SetWeakChild(MakeGarbageCollected<GCed>(GetHeap()));
|
||||
EXPECT_TRUE(parent->weak_child());
|
||||
DoMarking(MarkingConfig(MarkingConfig::StackState::kNoHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kNoHeapPointers});
|
||||
EXPECT_FALSE(parent->weak_child());
|
||||
}
|
||||
}
|
||||
@ -116,7 +116,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
|
||||
Persistent<GCed> object = MakeGarbageCollected<GCed>(GetHeap());
|
||||
WeakPersistent<GCed> weak_object(object);
|
||||
EXPECT_TRUE(weak_object);
|
||||
DoMarking(MarkingConfig(MarkingConfig::StackState::kNoHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kNoHeapPointers});
|
||||
EXPECT_TRUE(weak_object);
|
||||
}
|
||||
{
|
||||
@ -124,7 +124,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
|
||||
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetHeap());
|
||||
parent->SetWeakChild(object);
|
||||
EXPECT_TRUE(parent->weak_child());
|
||||
DoMarking(MarkingConfig(MarkingConfig::StackState::kNoHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kNoHeapPointers});
|
||||
EXPECT_TRUE(parent->weak_child());
|
||||
}
|
||||
// Reachable from Member
|
||||
@ -133,7 +133,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
|
||||
WeakPersistent<GCed> weak_object(MakeGarbageCollected<GCed>(GetHeap()));
|
||||
parent->SetChild(weak_object);
|
||||
EXPECT_TRUE(weak_object);
|
||||
DoMarking(MarkingConfig(MarkingConfig::StackState::kNoHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kNoHeapPointers});
|
||||
EXPECT_TRUE(weak_object);
|
||||
}
|
||||
{
|
||||
@ -141,7 +141,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
|
||||
parent->SetChild(MakeGarbageCollected<GCed>(GetHeap()));
|
||||
parent->SetWeakChild(parent->child());
|
||||
EXPECT_TRUE(parent->weak_child());
|
||||
DoMarking(MarkingConfig(MarkingConfig::StackState::kNoHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kNoHeapPointers});
|
||||
EXPECT_TRUE(parent->weak_child());
|
||||
}
|
||||
// Reachable from stack
|
||||
@ -149,8 +149,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
|
||||
GCed* object = MakeGarbageCollected<GCed>(GetHeap());
|
||||
WeakPersistent<GCed> weak_object(object);
|
||||
EXPECT_TRUE(weak_object);
|
||||
DoMarking(
|
||||
MarkingConfig(MarkingConfig::StackState::kMayContainHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kMayContainHeapPointers});
|
||||
EXPECT_TRUE(weak_object);
|
||||
access(object);
|
||||
}
|
||||
@ -159,8 +158,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
|
||||
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetHeap());
|
||||
parent->SetWeakChild(object);
|
||||
EXPECT_TRUE(parent->weak_child());
|
||||
DoMarking(
|
||||
MarkingConfig(MarkingConfig::StackState::kMayContainHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kMayContainHeapPointers});
|
||||
EXPECT_TRUE(parent->weak_child());
|
||||
access(object);
|
||||
}
|
||||
@ -175,7 +173,7 @@ TEST_F(MarkerTest, DeepHierarchyIsMarked) {
|
||||
parent->SetWeakChild(parent->child());
|
||||
parent = parent->child();
|
||||
}
|
||||
DoMarking(MarkingConfig(MarkingConfig::StackState::kNoHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kNoHeapPointers});
|
||||
EXPECT_TRUE(HeapObjectHeader::FromPayload(root).IsMarked());
|
||||
parent = root;
|
||||
for (int i = 0; i < kHierarchyDepth; ++i) {
|
||||
@ -189,7 +187,7 @@ TEST_F(MarkerTest, NestedObjectsOnStackAreMarked) {
|
||||
GCed* root = MakeGarbageCollected<GCed>(GetHeap());
|
||||
root->SetChild(MakeGarbageCollected<GCed>(GetHeap()));
|
||||
root->child()->SetChild(MakeGarbageCollected<GCed>(GetHeap()));
|
||||
DoMarking(MarkingConfig(MarkingConfig::StackState::kMayContainHeapPointers));
|
||||
DoMarking({MarkingConfig::StackState::kMayContainHeapPointers});
|
||||
EXPECT_TRUE(HeapObjectHeader::FromPayload(root).IsMarked());
|
||||
EXPECT_TRUE(HeapObjectHeader::FromPayload(root->child()).IsMarked());
|
||||
EXPECT_TRUE(HeapObjectHeader::FromPayload(root->child()->child()).IsMarked());
|
||||
@ -209,30 +207,26 @@ class GCedWithCallback : public GarbageCollected<GCedWithCallback> {
|
||||
|
||||
TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
|
||||
Marker marker(Heap::From(GetHeap()));
|
||||
marker.StartMarking(
|
||||
MarkingConfig(MarkingConfig::StackState::kMayContainHeapPointers));
|
||||
marker.StartMarking({MarkingConfig::StackState::kMayContainHeapPointers});
|
||||
GCedWithCallback* object = MakeGarbageCollected<GCedWithCallback>(
|
||||
GetHeap(), [&marker](GCedWithCallback* obj) {
|
||||
Member<GCedWithCallback> member(obj);
|
||||
marker.GetMarkingVisitorForTesting()->Trace(member);
|
||||
});
|
||||
EXPECT_FALSE(HeapObjectHeader::FromPayload(object).IsMarked());
|
||||
marker.FinishMarking(
|
||||
MarkingConfig(MarkingConfig::StackState::kNoHeapPointers));
|
||||
marker.FinishMarking({MarkingConfig::StackState::kNoHeapPointers});
|
||||
EXPECT_TRUE(HeapObjectHeader::FromPayload(object).IsMarked());
|
||||
}
|
||||
|
||||
TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) {
|
||||
Marker marker(Heap::From(GetHeap()));
|
||||
marker.StartMarking(
|
||||
MarkingConfig(MarkingConfig::StackState::kMayContainHeapPointers));
|
||||
MakeGarbageCollected<GCedWithCallback>(
|
||||
GetHeap(), [&marker](GCedWithCallback* obj) {
|
||||
marker.StartMarking({MarkingConfig::StackState::kMayContainHeapPointers});
|
||||
MakeGarbageCollected<GCedWithCallback>(GetHeap(), [&marker](
|
||||
GCedWithCallback* obj) {
|
||||
Member<GCedWithCallback> member(obj);
|
||||
marker.GetMarkingVisitorForTesting()->Trace(member);
|
||||
EXPECT_FALSE(HeapObjectHeader::FromPayload(obj).IsMarked());
|
||||
marker.FinishMarking(
|
||||
MarkingConfig(MarkingConfig::StackState::kMayContainHeapPointers));
|
||||
marker.FinishMarking({MarkingConfig::StackState::kMayContainHeapPointers});
|
||||
EXPECT_TRUE(HeapObjectHeader::FromPayload(obj).IsMarked());
|
||||
});
|
||||
}
|
||||
|
@ -31,12 +31,16 @@ class TestWithHeap : public TestWithPlatform {
|
||||
TestWithHeap();
|
||||
|
||||
void PreciseGC() {
|
||||
heap_->ForceGarbageCollectionSlow(
|
||||
"TestWithHeap", "Testing", Heap::GCConfig::StackState::kNoHeapPointers);
|
||||
heap_->ForceGarbageCollectionSlow("TestWithHeap", "Testing",
|
||||
cppgc::Heap::StackState::kNoHeapPointers);
|
||||
}
|
||||
|
||||
cppgc::Heap* GetHeap() const { return heap_.get(); }
|
||||
|
||||
std::unique_ptr<Marker>& GetMarkerRef() {
|
||||
return Heap::From(GetHeap())->marker_;
|
||||
}
|
||||
|
||||
private:
|
||||
std::unique_ptr<cppgc::Heap> heap_;
|
||||
};
|
||||
|
317
test/unittests/heap/cppgc/write-barrier-unittest.cc
Normal file
317
test/unittests/heap/cppgc/write-barrier-unittest.cc
Normal file
@ -0,0 +1,317 @@
|
||||
// Copyright 2020 the V8 project authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
|
||||
#include "include/cppgc/internal/write-barrier.h"
|
||||
|
||||
#include <initializer_list>
|
||||
#include <vector>
|
||||
|
||||
#include "include/cppgc/internal/pointer-policies.h"
|
||||
#include "src/heap/cppgc/heap-object-header-inl.h"
|
||||
#include "src/heap/cppgc/heap-object-header.h"
|
||||
#include "src/heap/cppgc/marker.h"
|
||||
#include "test/unittests/heap/cppgc/tests.h"
|
||||
#include "testing/gtest/include/gtest/gtest.h"
|
||||
|
||||
namespace cppgc {
|
||||
namespace internal {
|
||||
|
||||
namespace {
|
||||
|
||||
class IncrementalMarkingScope {
|
||||
public:
|
||||
explicit IncrementalMarkingScope(Marker* marker) : marker_(marker) {
|
||||
marker_->StartMarking(kIncrementalConfig);
|
||||
}
|
||||
|
||||
~IncrementalMarkingScope() V8_NOEXCEPT {
|
||||
marker_->FinishMarking(kIncrementalConfig);
|
||||
}
|
||||
|
||||
private:
|
||||
static constexpr Marker::MarkingConfig kIncrementalConfig{
|
||||
Marker::MarkingConfig::StackState::kNoHeapPointers,
|
||||
Marker::MarkingConfig::MarkingType::kIncremental};
|
||||
|
||||
Marker* marker_;
|
||||
};
|
||||
|
||||
constexpr Marker::MarkingConfig IncrementalMarkingScope::kIncrementalConfig;
|
||||
|
||||
// Scope that asserts the write barrier fired for every object in `objects`:
// on destruction each object must be found on the marking worklist or the
// write barrier worklist, and its header must be marked.
class ExpectWriteBarrierFires final : private IncrementalMarkingScope {
 public:
  ExpectWriteBarrierFires(Marker* marker, std::initializer_list<void*> objects)
      : IncrementalMarkingScope(marker),
        marking_worklist_(marker->marking_worklist(), Marker::kMutatorThreadId),
        write_barrier_worklist_(marker->write_barrier_worklist(),
                                Marker::kMutatorThreadId),
        objects_(objects) {
    // Both worklists must start out empty so that anything found later was
    // pushed by the barrier under test.
    EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
    EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
    for (void* object : objects) {
      headers_.push_back(&HeapObjectHeader::FromPayload(object));
      // Objects must be unmarked on entry; the barrier is what marks them.
      EXPECT_FALSE(headers_.back()->IsMarked());
    }
  }

  ~ExpectWriteBarrierFires() V8_NOEXCEPT {
    // Drain the marking worklist, ticking off each expected payload found.
    {
      Marker::MarkingItem item;
      while (marking_worklist_.Pop(&item)) {
        auto pos = std::find(objects_.begin(), objects_.end(),
                             item.base_object_payload);
        if (pos != objects_.end()) objects_.erase(pos);
      }
    }
    // Same for the write barrier worklist, which stores headers rather than
    // marking items.
    {
      HeapObjectHeader* item;
      while (write_barrier_worklist_.Pop(&item)) {
        auto pos = std::find(objects_.begin(), objects_.end(), item->Payload());
        if (pos != objects_.end()) objects_.erase(pos);
      }
    }
    // Every expected object must have shown up on one of the worklists.
    EXPECT_TRUE(objects_.empty());
    for (auto* header : headers_) {
      EXPECT_TRUE(header->IsMarked());
      // Unmark so subsequent scopes in the same test start from clean state.
      header->Unmark();
    }
    EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
    EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
  }

 private:
  Marker::MarkingWorklist::View marking_worklist_;
  Marker::WriteBarrierWorklist::View write_barrier_worklist_;
  // Payloads still expected on a worklist; shrinks as they are found.
  std::vector<void*> objects_;
  std::vector<HeapObjectHeader*> headers_;
};
|
||||
|
||||
class ExpectNoWriteBarrierFires final : private IncrementalMarkingScope {
|
||||
public:
|
||||
ExpectNoWriteBarrierFires(Marker* marker,
|
||||
std::initializer_list<void*> objects)
|
||||
: IncrementalMarkingScope(marker),
|
||||
marking_worklist_(marker->marking_worklist(), Marker::kMutatorThreadId),
|
||||
write_barrier_worklist_(marker->write_barrier_worklist(),
|
||||
Marker::kMutatorThreadId) {
|
||||
EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
|
||||
EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
|
||||
for (void* object : objects) {
|
||||
auto* header = &HeapObjectHeader::FromPayload(object);
|
||||
headers_.emplace_back(header, header->IsMarked());
|
||||
}
|
||||
}
|
||||
|
||||
~ExpectNoWriteBarrierFires() {
|
||||
EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
|
||||
EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
|
||||
for (const auto& pair : headers_) {
|
||||
EXPECT_EQ(pair.second, pair.first->IsMarked());
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
Marker::MarkingWorklist::View marking_worklist_;
|
||||
Marker::WriteBarrierWorklist::View write_barrier_worklist_;
|
||||
std::vector<std::pair<HeapObjectHeader*, bool /* was marked */>> headers_;
|
||||
};
|
||||
|
||||
// Minimal garbage-collected node holding a single traced Member<> edge,
// used to trigger and observe the Member write barrier.
class GCed : public GarbageCollected<GCed> {
 public:
  GCed() = default;
  explicit GCed(GCed* successor) : next_(successor) {}

  void Trace(cppgc::Visitor* v) const { v->Trace(next_); }

  // Reads the mark bit straight off the object's heap header.
  bool IsMarked() const {
    return HeapObjectHeader::FromPayload(this).IsMarked();
  }

  void set_next(GCed* successor) { next_ = successor; }
  GCed* next() const { return next_; }
  // Exposes the raw Member<> slot so tests can assign through it directly.
  Member<GCed>& next_ref() { return next_; }

 private:
  Member<GCed> next_ = nullptr;
};
|
||||
|
||||
} // namespace
|
||||
|
||||
class WriteBarrierTest : public testing::TestWithHeap {
|
||||
public:
|
||||
WriteBarrierTest() : iheap_(Heap::From(GetHeap())) {
|
||||
GetMarkerRef() = std::make_unique<Marker>(iheap_);
|
||||
marker_ = GetMarkerRef().get();
|
||||
}
|
||||
|
||||
~WriteBarrierTest() override {
|
||||
marker_->ClearAllWorklistsForTesting();
|
||||
GetMarkerRef().reset();
|
||||
}
|
||||
|
||||
Marker* marker() const { return marker_; }
|
||||
|
||||
private:
|
||||
Heap* iheap_;
|
||||
Marker* marker_;
|
||||
};
|
||||
|
||||
// =============================================================================
|
||||
// Basic support. ==============================================================
|
||||
// =============================================================================
|
||||
|
||||
TEST_F(WriteBarrierTest, EnableDisableIncrementalMarking) {
  // The process-wide flag backing the barrier fast path must be raised
  // exactly while an incremental marking scope is active.
  {
    IncrementalMarkingScope marking_scope(marker());
    EXPECT_TRUE(ProcessHeap::IsAnyIncrementalOrConcurrentMarking());
  }
}
|
||||
|
||||
TEST_F(WriteBarrierTest, TriggersWhenMarkingIsOn) {
  auto* target = MakeGarbageCollected<GCed>(GetHeap());
  auto* source = MakeGarbageCollected<GCed>(GetHeap());
  {
    // With incremental marking active, a Member store must mark the
    // pointed-to object and push it onto a worklist.
    ExpectWriteBarrierFires scope(marker(), {target});
    EXPECT_FALSE(target->IsMarked());
    source->set_next(target);
    EXPECT_TRUE(target->IsMarked());
  }
}
|
||||
|
||||
TEST_F(WriteBarrierTest, BailoutWhenMarkingIsOff) {
  auto* target = MakeGarbageCollected<GCed>(GetHeap());
  auto* source = MakeGarbageCollected<GCed>(GetHeap());
  // No marking scope: the store must leave the target unmarked.
  EXPECT_FALSE(target->IsMarked());
  source->set_next(target);
  EXPECT_FALSE(target->IsMarked());
}
|
||||
|
||||
TEST_F(WriteBarrierTest, BailoutIfMarked) {
  auto* target = MakeGarbageCollected<GCed>(GetHeap());
  auto* source = MakeGarbageCollected<GCed>(GetHeap());
  // Pre-mark the target: the barrier must not re-push marked objects.
  EXPECT_TRUE(HeapObjectHeader::FromPayload(target).TryMarkAtomic());
  {
    ExpectNoWriteBarrierFires scope(marker(), {target});
    source->set_next(target);
  }
}
|
||||
|
||||
TEST_F(WriteBarrierTest, MemberInitializingStoreNoBarrier) {
  auto* referee = MakeGarbageCollected<GCed>(GetHeap());
  {
    ExpectNoWriteBarrierFires scope(marker(), {referee});
    // The Member store happens inside GCed's constructor; initializing
    // stores must not trigger the barrier.
    auto* holder = MakeGarbageCollected<GCed>(GetHeap(), referee);
    HeapObjectHeader& holder_header = HeapObjectHeader::FromPayload(holder);
    EXPECT_FALSE(holder_header.IsMarked());
  }
}
|
||||
|
||||
TEST_F(WriteBarrierTest, MemberReferenceAssignMember) {
  auto* pointee = MakeGarbageCollected<GCed>(GetHeap());
  auto* holder = MakeGarbageCollected<GCed>(GetHeap());
  Member<GCed>& heap_slot = holder->next_ref();
  Member<GCed> stack_member(pointee);
  {
    // Member-to-member assignment into a heap slot must fire the barrier
    // for the transferred pointee.
    ExpectWriteBarrierFires scope(marker(), {pointee});
    heap_slot = stack_member;
  }
}
|
||||
|
||||
TEST_F(WriteBarrierTest, MemberSetSentinelValueNoBarrier) {
  auto* holder = MakeGarbageCollected<GCed>(GetHeap());
  Member<GCed>& slot = holder->next_ref();
  {
    // The sentinel is not a real object pointer; storing it must be a
    // barrier no-op.
    ExpectNoWriteBarrierFires scope(marker(), {});
    slot = kSentinelPointer;
  }
}
|
||||
|
||||
TEST_F(WriteBarrierTest, MemberCopySentinelValueNoBarrier) {
  auto* first = MakeGarbageCollected<GCed>(GetHeap());
  Member<GCed>& sentinel_slot = first->next_ref();
  sentinel_slot = kSentinelPointer;
  {
    // Copying a sentinel-valued Member must also skip the barrier.
    ExpectNoWriteBarrierFires scope(marker(), {});
    auto* second = MakeGarbageCollected<GCed>(GetHeap());
    second->next_ref() = sentinel_slot;
  }
}
|
||||
|
||||
// =============================================================================
|
||||
// Mixin support. ==============================================================
|
||||
// =============================================================================
|
||||
|
||||
namespace {
|
||||
|
||||
// GarbageCollectedMixin with a traced Member<> field, used to exercise the
// write barrier through mixin (inner) pointers.
class Mixin : public GarbageCollectedMixin {
 public:
  void Trace(cppgc::Visitor* visitor) const override { visitor->Trace(next_); }

  virtual void Bar() {}

 protected:
  Member<GCed> next_;
};
|
||||
|
||||
// Non-GC base carrying a vtable. Listed before Mixin in Child's base list;
// the tests below rely on the Mixin subobject ending up at a different
// address than the Child object (verified via EXPECT_NE).
class ClassWithVirtual {
 protected:
  virtual void Foo() {}
};
|
||||
|
||||
// Concrete mixin application. Base order is significant: ClassWithVirtual
// before Mixin means the Mixin* differs from the Child* — the mixin
// write-barrier tests assert this before exercising the barrier.
class Child : public GarbageCollected<Child>,
              public ClassWithVirtual,
              public Mixin {
  USING_GARBAGE_COLLECTED_MIXIN();

 public:
  Child() : ClassWithVirtual(), Mixin() {}
  ~Child() = default;

  void Trace(cppgc::Visitor* visitor) const override { Mixin::Trace(visitor); }

  void Foo() override {}
  void Bar() override {}
};
|
||||
|
||||
// Holds a Member<Mixin>; the set_mixin() store is the write the mixin
// write-barrier tests observe.
class ParentWithMixinPointer : public GarbageCollected<ParentWithMixinPointer> {
 public:
  ParentWithMixinPointer() = default;

  void set_mixin(Mixin* mixin) { mixin_ = mixin; }

  virtual void Trace(cppgc::Visitor* visitor) const { visitor->Trace(mixin_); }

 protected:
  Member<Mixin> mixin_;
};
|
||||
|
||||
} // namespace
|
||||
|
||||
TEST_F(WriteBarrierTest, WriteBarrierOnUnmarkedMixinApplication) {
  ParentWithMixinPointer* holder =
      MakeGarbageCollected<ParentWithMixinPointer>(GetHeap());
  auto* full_object = MakeGarbageCollected<Child>(GetHeap());
  Mixin* inner = static_cast<Mixin*>(full_object);
  // Sanity check: the mixin pointer really is an inner pointer.
  EXPECT_NE(static_cast<void*>(full_object), static_cast<void*>(inner));
  {
    // The barrier must resolve the inner pointer back to the full object.
    ExpectWriteBarrierFires scope(marker(), {full_object});
    holder->set_mixin(inner);
  }
}
|
||||
|
||||
TEST_F(WriteBarrierTest, NoWriteBarrierOnMarkedMixinApplication) {
  ParentWithMixinPointer* holder =
      MakeGarbageCollected<ParentWithMixinPointer>(GetHeap());
  auto* full_object = MakeGarbageCollected<Child>(GetHeap());
  // Pre-mark the full object: the barrier must bail out even when reached
  // through an inner (mixin) pointer.
  EXPECT_TRUE(HeapObjectHeader::FromPayload(full_object).TryMarkAtomic());
  Mixin* inner = static_cast<Mixin*>(full_object);
  EXPECT_NE(static_cast<void*>(full_object), static_cast<void*>(inner));
  {
    ExpectNoWriteBarrierFires scope(marker(), {full_object});
    holder->set_mixin(inner);
  }
}
|
||||
|
||||
} // namespace internal
|
||||
} // namespace cppgc
|
Loading…
Reference in New Issue
Block a user