cppgc: young-gen: Support young generation with stack

Before this CL Oilpan young generation didn't support running with
stack. The problem was with initializing stores, which don't emit a
write barrier. If a GC happens during object initialization, the
subsequent pointer stores can be missed:

struct GCed: GarbageCollected<GCed> {
  GCed():
    m1(MakeGarbageCollected<>()),  // calls GC
    m2(MakeGarbageCollected<>())   // old-to-young ref missing barrier
  {}
  ...
};

The CL solves it by recording in-construction objects in a dedicated
remembered-set; these objects are retraced on subsequent GCs.

Bug: chromium:1029379
Change-Id: I17975e2e2253b2792f71fb64a639e5bdb2ef4935
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3990829
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#84009}
This commit is contained in:
Anton Bikineev 2022-11-02 13:00:15 +01:00 committed by V8 LUCI CQ
parent 07864a633e
commit 2792a669e4
11 changed files with 275 additions and 41 deletions

View File

@ -781,7 +781,8 @@ void CppHeap::EnterFinalPause(cppgc::EmbedderStackState stack_state) {
in_atomic_pause_ = true;
auto& marker = marker_.get()->To<UnifiedHeapMarker>();
// Scan global handles conservatively in case we are attached to an Isolate.
if (isolate_) {
// TODO(1029379): Support global handle marking visitors with minor GC.
if (isolate_ && !generational_gc_supported()) {
auto& heap = *isolate()->heap();
marker.conservative_visitor().SetGlobalHandlesMarkingVisitor(
std::make_unique<GlobalHandleMarkingVisitor>(
@ -869,13 +870,11 @@ void CppHeap::TraceEpilogue() {
sweeper().NotifyDoneIfNeeded();
}
void CppHeap::RunMinorGCIfNeeded(StackState stack_state) {
void CppHeap::RunMinorGCIfNeeded() {
if (!generational_gc_supported()) return;
if (in_no_gc_scope()) return;
// Minor GC does not support nesting in full GCs.
if (IsMarking()) return;
// Minor GCs with the stack are currently not supported.
if (stack_state == StackState::kMayContainHeapPointers) return;
// Run only when the limit is reached.
if (!minor_gc_heap_growing_->LimitReached()) return;

View File

@ -147,7 +147,7 @@ class V8_EXPORT_PRIVATE CppHeap final
void EnterFinalPause(cppgc::EmbedderStackState stack_state);
bool FinishConcurrentMarkingIfNeeded();
void RunMinorGCIfNeeded(StackState);
void RunMinorGCIfNeeded();
// StatsCollector::AllocationObserver interface.
void AllocatedObjectSizeIncreased(size_t) final;

View File

@ -63,9 +63,6 @@ namespace {
void CheckConfig(GCConfig config, HeapBase::MarkingType marking_support,
HeapBase::SweepingType sweeping_support) {
CHECK_WITH_MSG((config.collection_type != CollectionType::kMinor) ||
(config.stack_state == StackState::kNoHeapPointers),
"Minor GCs with stack is currently not supported");
CHECK_LE(static_cast<int>(config.marking_type),
static_cast<int>(marking_support));
CHECK_LE(static_cast<int>(config.sweeping_type),

View File

@ -443,11 +443,13 @@ void MarkerBase::VisitRoots(StackState stack_state) {
heap().stats_collector(), StatsCollector::kMarkVisitStack);
heap().stack()->IteratePointers(&stack_visitor());
}
#if defined(CPPGC_YOUNG_GENERATION)
if (config_.collection_type == CollectionType::kMinor) {
StatsCollector::EnabledScope stats_scope(
heap().stats_collector(), StatsCollector::kMarkVisitRememberedSets);
heap().remembered_set().Visit(visitor(), mutator_marking_state_);
heap().remembered_set().Visit(visitor(), conservative_visitor(),
mutator_marking_state_);
}
#endif // defined(CPPGC_YOUNG_GENERATION)
}

View File

@ -70,6 +70,14 @@ void ConservativeMarkingVisitor::VisitInConstructionConservatively(
// hold a reference to themselves.
if (!marking_state_.MarkNoPush(header)) return;
marking_state_.AccountMarkedBytes(header);
#if defined(CPPGC_YOUNG_GENERATION)
// An in-construction object can add a reference to a young object that may
// miss the write-barrier on an initializing store. Remember object in the
// root-set to be retraced on the next GC.
if (heap_.generational_gc_supported()) {
heap_.remembered_set().AddInConstructionObjectToBeRetraced(header);
}
#endif // defined(CPPGC_YOUNG_GENERATION)
callback(this, header);
}

View File

@ -93,11 +93,6 @@ void VisitSlot(const HeapBase& heap, const BasePage& page, Address slot,
// may have mixed young/old objects. Check here precisely if the object is
// old.
if (slot_header.IsYoung()) return;
// The design of young generation requires collections to be executed at the
// top level (with the guarantee that no objects are currently being in
// construction). This can be ensured by running young GCs from safe points
// or by reintroducing nested allocation scopes that avoid finalization.
DCHECK(!slot_header.template IsInConstruction<AccessMode::kNonAtomic>());
#if defined(CPPGC_POINTER_COMPRESSION)
void* value = nullptr;
@ -241,11 +236,6 @@ void VisitRememberedSourceObjects(
// may have mixed young/old objects. Check here precisely if the object is
// old.
if (source_hoh->IsYoung()) continue;
// The design of young generation requires collections to be executed at the
// top level (with the guarantee that no objects are currently being in
// construction). This can be ensured by running young GCs from safe points
// or by reintroducing nested allocation scopes that avoid finalization.
DCHECK(!source_hoh->template IsInConstruction<AccessMode::kNonAtomic>());
const TraceCallback trace_callback =
GlobalGCInfoTable::GCInfoFromIndex(source_hoh->GetGCInfoIndex()).trace;
@ -255,6 +245,28 @@ void VisitRememberedSourceObjects(
}
}
// Revisit in-construction objects from previous GCs. We must do it to make
// sure that we don't miss any initializing pointer writes if a previous GC
// happened while an object was in-construction.
void RevisitInConstructionObjects(
std::set<HeapObjectHeader*>& remembered_in_construction_objects,
Visitor& visitor, ConservativeTracingVisitor& conservative_visitor) {
for (HeapObjectHeader* hoh : remembered_in_construction_objects) {
DCHECK(hoh);
// The object must have been marked during a previous GC.
DCHECK(hoh->IsMarked());
if (hoh->template IsInConstruction<AccessMode::kNonAtomic>()) {
conservative_visitor.TraceConservatively(*hoh);
} else {
// If the object is fully constructed, trace precisely.
const TraceCallback trace_callback =
GlobalGCInfoTable::GCInfoFromIndex(hoh->GetGCInfoIndex()).trace;
trace_callback(&visitor, hoh->ObjectStart());
}
}
}
} // namespace
void OldToNewRememberedSet::AddSlot(void* slot) {
@ -297,6 +309,12 @@ void OldToNewRememberedSet::AddWeakCallback(WeakCallbackItem item) {
remembered_weak_callbacks_.insert(item);
}
void OldToNewRememberedSet::AddInConstructionObjectToBeRetraced(
HeapObjectHeader& hoh) {
DCHECK(heap_.generational_gc_supported());
remembered_in_construction_objects_.current.insert(&hoh);
}
void OldToNewRememberedSet::InvalidateRememberedSlotsInRange(void* begin,
void* end) {
DCHECK(heap_.generational_gc_supported());
@ -313,12 +331,15 @@ void OldToNewRememberedSet::InvalidateRememberedSourceObject(
remembered_source_objects_.erase(&header);
}
void OldToNewRememberedSet::Visit(Visitor& visitor,
MutatorMarkingState& marking_state) {
void OldToNewRememberedSet::Visit(
Visitor& visitor, ConservativeTracingVisitor& conservative_visitor,
MutatorMarkingState& marking_state) {
DCHECK(heap_.generational_gc_supported());
VisitRememberedSlots(heap_, marking_state, remembered_uncompressed_slots_,
remembered_slots_for_verification_);
VisitRememberedSourceObjects(remembered_source_objects_, visitor);
RevisitInConstructionObjects(remembered_in_construction_objects_.previous,
visitor, conservative_visitor);
}
void OldToNewRememberedSet::ExecuteCustomCallbacks(LivenessBroker broker) {
@ -342,6 +363,8 @@ void OldToNewRememberedSet::Reset() {
#if DEBUG
remembered_slots_for_verification_.clear();
#endif // DEBUG
remembered_in_construction_objects_.Reset();
// Custom weak callbacks are alive across GCs.
}
bool OldToNewRememberedSet::IsEmpty() const {
@ -351,6 +374,18 @@ bool OldToNewRememberedSet::IsEmpty() const {
remembered_weak_callbacks_.empty();
}
void OldToNewRememberedSet::RememberedInConstructionObjects::Reset() {
// Make sure to keep the still-in-construction objects in the remembered set;
// they are already marked, so the marker would otherwise not observe them.
std::copy_if(previous.begin(), previous.end(),
std::inserter(current, current.begin()),
[](const HeapObjectHeader* h) {
return h->template IsInConstruction<AccessMode::kNonAtomic>();
});
previous = std::move(current);
current.clear();
}
} // namespace internal
} // namespace cppgc

View File

@ -42,10 +42,13 @@ class V8_EXPORT_PRIVATE OldToNewRememberedSet final {
void AddSourceObject(HeapObjectHeader& source_hoh);
void AddWeakCallback(WeakCallbackItem);
// Remembers an in-construction object to be retraced on the next minor GC.
void AddInConstructionObjectToBeRetraced(HeapObjectHeader&);
void InvalidateRememberedSlotsInRange(void* begin, void* end);
void InvalidateRememberedSourceObject(HeapObjectHeader& source_hoh);
void Visit(Visitor&, MutatorMarkingState&);
void Visit(Visitor&, ConservativeTracingVisitor&, MutatorMarkingState&);
void ExecuteCustomCallbacks(LivenessBroker);
void ReleaseCustomCallbacks();
@ -57,6 +60,14 @@ class V8_EXPORT_PRIVATE OldToNewRememberedSet final {
private:
friend class MinorGCTest;
// The class keeps track of in-construction objects that should be revisited.
struct RememberedInConstructionObjects final {
void Reset();
std::set<HeapObjectHeader*> previous;
std::set<HeapObjectHeader*> current;
};
static constexpr struct {
bool operator()(const WeakCallbackItem& lhs,
const WeakCallbackItem& rhs) const {
@ -72,6 +83,7 @@ class V8_EXPORT_PRIVATE OldToNewRememberedSet final {
// whereas uncompressed are stored in std::set.
std::set<void*> remembered_uncompressed_slots_;
std::set<void*> remembered_slots_for_verification_;
RememberedInConstructionObjects remembered_in_construction_objects_;
};
} // namespace internal

View File

@ -31,10 +31,8 @@ ConservativeTracingVisitor::ConservativeTracingVisitor(
HeapBase& heap, PageBackend& page_backend, cppgc::Visitor& visitor)
: heap_(heap), page_backend_(page_backend), visitor_(visitor) {}
namespace {
void TraceConservatively(ConservativeTracingVisitor* conservative_visitor,
const HeapObjectHeader& header) {
void ConservativeTracingVisitor::TraceConservatively(
const HeapObjectHeader& header) {
const auto object_view = ObjectView<>(header);
uintptr_t* word = reinterpret_cast<uintptr_t*>(object_view.Start());
for (size_t i = 0; i < (object_view.Size() / sizeof(uintptr_t)); ++i) {
@ -47,7 +45,7 @@ void TraceConservatively(ConservativeTracingVisitor* conservative_visitor,
#endif
// First, check the full pointer.
if (maybe_full_ptr > SentinelPointer::kSentinelValue)
conservative_visitor->TraceConservativelyIfNeeded(
this->TraceConservativelyIfNeeded(
reinterpret_cast<Address>(maybe_full_ptr));
#if defined(CPPGC_POINTER_COMPRESSION)
// Then, check for compressed pointers.
@ -55,19 +53,17 @@ void TraceConservatively(ConservativeTracingVisitor* conservative_visitor,
CompressedPointer::Decompress(static_cast<uint32_t>(maybe_full_ptr)));
if (decompressed_low >
reinterpret_cast<void*>(SentinelPointer::kSentinelValue))
conservative_visitor->TraceConservativelyIfNeeded(decompressed_low);
this->TraceConservativelyIfNeeded(decompressed_low);
auto decompressed_high = reinterpret_cast<Address>(
CompressedPointer::Decompress(static_cast<uint32_t>(
maybe_full_ptr >> (sizeof(uint32_t) * CHAR_BIT))));
if (decompressed_high >
reinterpret_cast<void*>(SentinelPointer::kSentinelValue))
conservative_visitor->TraceConservativelyIfNeeded(decompressed_high);
this->TraceConservativelyIfNeeded(decompressed_high);
#endif // !defined(CPPGC_POINTER_COMPRESSION)
}
}
} // namespace
void ConservativeTracingVisitor::TryTracePointerConservatively(
Address pointer) {
#if defined(CPPGC_CAGED_HEAP)
@ -130,7 +126,11 @@ void ConservativeTracingVisitor::TraceConservativelyIfNeeded(
if (!header.IsInConstruction<AccessMode::kNonAtomic>()) {
VisitFullyConstructedConservatively(header);
} else {
VisitInConstructionConservatively(header, TraceConservatively);
VisitInConstructionConservatively(
header,
[](ConservativeTracingVisitor* v, const HeapObjectHeader& header) {
v->TraceConservatively(header);
});
}
}

View File

@ -57,6 +57,7 @@ class V8_EXPORT_PRIVATE ConservativeTracingVisitor {
virtual void TraceConservativelyIfNeeded(const void*);
void TraceConservativelyIfNeeded(HeapObjectHeader&);
void TraceConservatively(const HeapObjectHeader&);
protected:
using TraceConservativelyCallback = void(ConservativeTracingVisitor*,

View File

@ -2254,11 +2254,7 @@ size_t Heap::PerformGarbageCollection(
// stack scanning, do it only when Scavenger runs from task, which is
// non-nestable.
if (cpp_heap() && IsYoungGenerationCollector(collector)) {
const bool with_stack = (gc_reason != GarbageCollectionReason::kTask);
CppHeap::From(cpp_heap())
->RunMinorGCIfNeeded(with_stack
? CppHeap::StackState::kMayContainHeapPointers
: CppHeap::StackState::kNoHeapPointers);
CppHeap::From(cpp_heap())->RunMinorGCIfNeeded();
}
#endif // defined(CPPGC_YOUNG_GENERATION)

View File

@ -151,13 +151,28 @@ class MinorGCTest : public testing::TestWithHeap {
Heap::From(GetHeap())->CollectGarbage(GCConfig::MinorPreciseAtomicConfig());
}
void CollectMinorWithStack() {
Heap::From(GetHeap())->CollectGarbage(
GCConfig::MinorConservativeAtomicConfig());
}
void CollectMajor() {
Heap::From(GetHeap())->CollectGarbage(GCConfig::PreciseAtomicConfig());
}
void CollectMajorWithStack() {
Heap::From(GetHeap())->CollectGarbage(GCConfig::ConservativeAtomicConfig());
}
const auto& RememberedSourceObjects() const {
return Heap::From(GetHeap())->remembered_set().remembered_source_objects_;
}
const auto& RememberedInConstructionObjects() const {
return Heap::From(GetHeap())
->remembered_set()
.remembered_in_construction_objects_.previous;
}
};
template <typename SmallOrLarge>
@ -170,10 +185,33 @@ using ObjectTypes = ::testing::Types<Small, Large>;
TYPED_TEST_SUITE(MinorGCTestForType, ObjectTypes);
namespace {
template <typename... Args>
void RunMinorGCAndExpectObjectsPromoted(MinorGCTest& test, Args*... args) {
enum class GCType {
kMinor,
kMajor,
};
enum class StackType {
kWithout,
kWith,
};
template <GCType gc_type, StackType stack_type, typename... Args>
void RunGCAndExpectObjectsPromoted(MinorGCTest& test, Args*... args) {
EXPECT_TRUE((IsHeapObjectYoung(args) && ...));
test.CollectMinor();
if constexpr (gc_type == GCType::kMajor) {
if constexpr (stack_type == StackType::kWithout) {
test.CollectMajor();
} else {
test.CollectMajorWithStack();
}
} else {
if constexpr (stack_type == StackType::kWithout) {
test.CollectMinor();
} else {
test.CollectMinorWithStack();
}
}
EXPECT_TRUE((IsHeapObjectOld(args) && ...));
}
@ -595,7 +633,7 @@ TYPED_TEST(MinorGCTestForType, GenerationalBarrierDeferredTracing) {
&HeapObjectHeader::FromObject(array->objects)));
}
RunMinorGCAndExpectObjectsPromoted(
RunGCAndExpectObjectsPromoted<GCType::kMinor, StackType::kWithout>(
*this, array->objects[2].ref.Get(), array->objects[2].inner.ref.Get(),
array->objects[3].ref.Get(), array->objects[3].inner.ref.Get());
@ -692,6 +730,152 @@ TEST_F(MinorGCTest, AgeTableIsReset) {
ExpectPageOld(*BasePage::FromPayload(p3.Get()));
}
namespace {
template <GCType type>
struct GCOnConstruction {
explicit GCOnConstruction(MinorGCTest& test, size_t depth) {
if constexpr (type == GCType::kMajor) {
test.CollectMajorWithStack();
} else {
test.CollectMinorWithStack();
}
EXPECT_EQ(depth, test.RememberedInConstructionObjects().size());
}
};
template <GCType type>
struct InConstructionWithYoungRef
: GarbageCollected<InConstructionWithYoungRef<type>> {
using ValueType = SimpleGCed<64>;
explicit InConstructionWithYoungRef(MinorGCTest& test)
: call_gc(test, 1u),
m(MakeGarbageCollected<ValueType>(test.GetAllocationHandle())) {}
void Trace(Visitor* v) const { v->Trace(m); }
GCOnConstruction<type> call_gc;
Member<ValueType> m;
};
} // namespace
TEST_F(MinorGCTest, RevisitInConstructionObjectsMinorMinorWithStack) {
static constexpr auto kFirstGCType = GCType::kMinor;
auto* gced = MakeGarbageCollected<InConstructionWithYoungRef<kFirstGCType>>(
GetAllocationHandle(), *this);
RunGCAndExpectObjectsPromoted<GCType::kMinor, StackType::kWith>(
*this, gced->m.Get());
EXPECT_EQ(0u, RememberedInConstructionObjects().size());
}
TEST_F(MinorGCTest, RevisitInConstructionObjectsMinorMinorWithoutStack) {
static constexpr auto kFirstGCType = GCType::kMinor;
Persistent<InConstructionWithYoungRef<kFirstGCType>> gced =
MakeGarbageCollected<InConstructionWithYoungRef<kFirstGCType>>(
GetAllocationHandle(), *this);
RunGCAndExpectObjectsPromoted<GCType::kMinor, StackType::kWithout>(
*this, gced->m.Get());
EXPECT_EQ(0u, RememberedInConstructionObjects().size());
}
TEST_F(MinorGCTest, RevisitInConstructionObjectsMajorMinorWithStack) {
static constexpr auto kFirstGCType = GCType::kMajor;
auto* gced = MakeGarbageCollected<InConstructionWithYoungRef<kFirstGCType>>(
GetAllocationHandle(), *this);
RunGCAndExpectObjectsPromoted<GCType::kMinor, StackType::kWith>(
*this, gced->m.Get());
EXPECT_EQ(0u, RememberedInConstructionObjects().size());
}
TEST_F(MinorGCTest, RevisitInConstructionObjectsMajorMinorWithoutStack) {
static constexpr auto kFirstGCType = GCType::kMajor;
Persistent<InConstructionWithYoungRef<kFirstGCType>> gced =
MakeGarbageCollected<InConstructionWithYoungRef<kFirstGCType>>(
GetAllocationHandle(), *this);
RunGCAndExpectObjectsPromoted<GCType::kMinor, StackType::kWithout>(
*this, gced->m.Get());
EXPECT_EQ(0u, RememberedInConstructionObjects().size());
}
TEST_F(MinorGCTest, PreviousInConstructionObjectsAreDroppedAfterFullGC) {
MakeGarbageCollected<InConstructionWithYoungRef<GCType::kMinor>>(
GetAllocationHandle(), *this);
EXPECT_EQ(1u, RememberedInConstructionObjects().size());
CollectMajor();
EXPECT_EQ(0u, RememberedInConstructionObjects().size());
}
namespace {
template <GCType type>
struct NestedInConstructionWithYoungRef
: GarbageCollected<NestedInConstructionWithYoungRef<type>> {
using ValueType = SimpleGCed<64>;
NestedInConstructionWithYoungRef(MinorGCTest& test, size_t depth)
: NestedInConstructionWithYoungRef(test, 1, depth) {}
NestedInConstructionWithYoungRef(MinorGCTest& test, size_t current_depth,
size_t max_depth)
: current_depth(current_depth),
max_depth(max_depth),
next(current_depth != max_depth
? MakeGarbageCollected<NestedInConstructionWithYoungRef<type>>(
test.GetAllocationHandle(), test, current_depth + 1,
max_depth)
: nullptr),
call_gc(test, current_depth),
m(MakeGarbageCollected<ValueType>(test.GetAllocationHandle())) {}
void Trace(Visitor* v) const {
v->Trace(next);
v->Trace(m);
}
size_t current_depth = 0;
size_t max_depth = 0;
Member<NestedInConstructionWithYoungRef<type>> next;
GCOnConstruction<type> call_gc;
Member<ValueType> m;
};
} // namespace
TEST_F(MinorGCTest, RevisitNestedInConstructionObjects) {
static constexpr auto kFirstGCType = GCType::kMinor;
Persistent<NestedInConstructionWithYoungRef<kFirstGCType>> gced =
MakeGarbageCollected<NestedInConstructionWithYoungRef<kFirstGCType>>(
GetAllocationHandle(), *this, 10);
CollectMinor();
for (auto* p = gced.Get(); p; p = p->next.Get()) {
EXPECT_TRUE(IsHeapObjectOld(p));
EXPECT_TRUE(IsHeapObjectOld(p->m));
}
EXPECT_EQ(0u, RememberedInConstructionObjects().size());
}
} // namespace internal
} // namespace cppgc