cppgc: young-gen: Implement remembered set invalidation

This CL adds invalidation of remembered-set entries for slots that reside
in promptly freed or shrunk storage (see the sketch below).

Bug: chromium:1029379
Change-Id: I05e0ede55c202c952b26f452053b8777d1a2ffae
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3431488
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78912}
Author: Anton Bikineev, 2022-02-03 00:45:20 +01:00 (committed by V8 LUCI CQ)
Parent: 73a1c635fe
Commit: a687e9fade
6 changed files with 141 additions and 18 deletions
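For orientation before the diffs: cppgc's remembered set is kept as a sorted set of slot addresses, so invalidating the slots inside a freed or shrunk range reduces to a range erase. A minimal standalone sketch of the operation performed by InvalidateRememberedSlots() below; the plain std::set<void*> container and the InvalidateRange name are assumptions of the sketch, standing in for heap.remembered_slots():

#include <set>

// Drop every recorded slot whose address lies in the range [begin, end).
// std::set keeps its elements in address order, so two lower_bound lookups
// delimit the affected slots and a single range erase removes them.
void InvalidateRange(std::set<void*>& remembered_slots, void* begin,
                     void* end) {
  auto from = remembered_slots.lower_bound(begin);
  auto to = remembered_slots.lower_bound(end);
  remembered_slots.erase(from, to);
}

The same erase is applied on both paths touched by this CL, FreeUnreferencedObject() and Shrink(), which is exactly what the new tests at the bottom exercise.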


@@ -4,6 +4,7 @@
#include "include/cppgc/explicit-management.h"
#include <algorithm>
#include <tuple>
#include "src/heap/cppgc/heap-base.h"
@@ -24,12 +25,29 @@ bool InGC(HeapHandle& heap_handle) {
heap.sweeper().IsSweepingInProgress();
}
void InvalidateRememberedSlots(HeapBase& heap, void* begin, void* end) {
#if defined(CPPGC_YOUNG_GENERATION)
// Invalidate slots that reside within [begin, end).
auto& remembered_slots = heap.remembered_slots();
// TODO(bikineev): The 2 binary walks can be optimized with a custom
// algorithm.
auto from = remembered_slots.lower_bound(begin),
to = remembered_slots.lower_bound(end);
remembered_slots.erase(from, to);
#ifdef ENABLE_SLOW_DCHECKS
// Check that no remembered slots are referring to the freed area.
DCHECK(std::none_of(remembered_slots.begin(), remembered_slots.end(),
[begin, end](void* slot) {
void* value = *reinterpret_cast<void**>(slot);
return begin <= value && value < end;
}));
#endif // ENABLE_SLOW_DCHECKS
#endif  // defined(CPPGC_YOUNG_GENERATION)
}
} // namespace
void FreeUnreferencedObject(HeapHandle& heap_handle, void* object) {
// TODO(bikineev): Invalidate slots that reside within |object| and handle free
// values in VisitRememberedSlots.
#if !defined(CPPGC_YOUNG_GENERATION)
if (InGC(heap_handle)) {
return;
}
@@ -37,16 +55,21 @@ void FreeUnreferencedObject(HeapHandle& heap_handle, void* object) {
auto& header = HeapObjectHeader::FromObject(object);
header.Finalize();
size_t object_size = 0;
USE(object_size);
// `object` is guaranteed to be of type GarbageCollected, so getting the
// BasePage is okay for regular and large objects.
BasePage* base_page = BasePage::FromPayload(object);
if (base_page->is_large()) { // Large object.
object_size = LargePage::From(base_page)->ObjectSize();
base_page->space().RemovePage(base_page);
base_page->heap().stats_collector()->NotifyExplicitFree(
LargePage::From(base_page)->PayloadSize());
LargePage::Destroy(LargePage::From(base_page));
} else { // Regular object.
const size_t header_size = header.AllocatedSize();
object_size = header.ObjectSize();
auto* normal_page = NormalPage::From(base_page);
auto& normal_space = *static_cast<NormalPageSpace*>(&base_page->space());
auto& lab = normal_space.linear_allocation_buffer();
@@ -62,7 +85,8 @@ void FreeUnreferencedObject(HeapHandle& heap_handle, void* object) {
// list entry.
}
}
#endif // !defined(CPPGC_YOUNG_GENERATION)
InvalidateRememberedSlots(HeapBase::From(heap_handle), object,
reinterpret_cast<uint8_t*>(object) + object_size);
}
namespace {
@@ -102,17 +126,17 @@ bool Shrink(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
lab.Set(free_start, lab.size() + size_delta);
SetMemoryInaccessible(lab.start(), size_delta);
header.SetAllocatedSize(new_size);
return true;
}
// Heuristic: Only return memory to the free list if the block is larger than
// the smallest size class.
if (size_delta >= ObjectAllocator::kSmallestSpaceSize) {
} else if (size_delta >= ObjectAllocator::kSmallestSpaceSize) {
// Heuristic: Only return memory to the free list if the block is larger
// than the smallest size class.
SetMemoryInaccessible(free_start, size_delta);
base_page.heap().stats_collector()->NotifyExplicitFree(size_delta);
normal_space.free_list().Add({free_start, size_delta});
NormalPage::From(&base_page)->object_start_bitmap().SetBit(free_start);
header.SetAllocatedSize(new_size);
}
InvalidateRememberedSlots(base_page.heap(), free_start,
free_start + size_delta);
// Return success in any case, as we want to avoid embedders copying memory
// because of small deltas.
return true;


@@ -55,6 +55,11 @@ class GarbageCollector {
MarkingType::kAtomic, SweepingType::kAtomic};
}
static constexpr Config MinorConservativeAtomicConfig() {
return {CollectionType::kMinor, StackState::kMayContainHeapPointers,
MarkingType::kAtomic, SweepingType::kAtomic};
}
CollectionType collection_type = CollectionType::kMajor;
StackState stack_state = StackState::kMayContainHeapPointers;
MarkingType marking_type = MarkingType::kAtomic;


@@ -125,18 +125,17 @@ class HeapObjectHeader {
using GCInfoIndexField = UnusedField1::Next<GCInfoIndex, 14>;
// Used in |encoded_low_|.
using MarkBitField = v8::base::BitField16<bool, 0, 1>;
using SizeField = void; // Use EncodeSize/DecodeSize instead.
using SizeField =
MarkBitField::Next<size_t, 15>; // Use EncodeSize/DecodeSize instead.
static constexpr size_t DecodeSize(uint16_t encoded) {
// Essentially, gets optimized to << 1.
using SizeFieldImpl = MarkBitField::Next<size_t, 15>;
return SizeFieldImpl::decode(encoded) * kAllocationGranularity;
return SizeField::decode(encoded) * kAllocationGranularity;
}
static constexpr uint16_t EncodeSize(size_t size) {
// Essentially, gets optimized to >> 1.
using SizeFieldImpl = MarkBitField::Next<size_t, 15>;
return SizeFieldImpl::encode(size / kAllocationGranularity);
return SizeField::encode(size / kAllocationGranularity);
}
V8_EXPORT_PRIVATE void CheckApiConstants();
@@ -236,7 +235,10 @@ void HeapObjectHeader::SetAllocatedSize(size_t size) {
// resized.
DCHECK(!IsMarked());
#endif
encoded_low_ = EncodeSize(size);
// The object may be marked (i.e. old, in case young generation is enabled).
// Make sure to not overwrite the mark bit.
encoded_low_ &= ~SizeField::encode(SizeField::kMax);
encoded_low_ |= EncodeSize(size);
}
template <AccessMode mode>


@@ -67,6 +67,7 @@ void VisitRememberedSlots(HeapBase& heap,
StatsCollector::EnabledScope stats_scope(
heap.stats_collector(), StatsCollector::kMarkVisitRememberedSets);
for (void* slot : heap.remembered_slots()) {
// A slot must always point to a valid, not yet freed object.
auto& slot_header = BasePage::FromInnerAddress(&heap, slot)
->ObjectHeaderFromInnerAddress(slot);
if (slot_header.IsYoung()) continue;
@@ -80,6 +81,13 @@
// Slot could be updated to nullptr or kSentinelPointer by the mutator.
if (value == kSentinelPointer || value == nullptr) continue;
#if DEBUG
// Check that the slot does not point to a freed object.
HeapObjectHeader& header =
BasePage::FromPayload(value)->ObjectHeaderFromInnerAddress(value);
DCHECK(!header.IsFree());
#endif
mutator_marking_state.DynamicallyMarkAddress(static_cast<Address>(value));
}
#endif


@@ -5,6 +5,7 @@
#if defined(CPPGC_YOUNG_GENERATION)
#include "include/cppgc/allocation.h"
#include "include/cppgc/explicit-management.h"
#include "include/cppgc/heap-consistency.h"
#include "include/cppgc/internal/caged-heap-local-data.h"
#include "include/cppgc/persistent.h"
@@ -32,7 +33,7 @@ class SimpleGCedBase : public GarbageCollected<SimpleGCedBase> {
size_t SimpleGCedBase::destructed_objects;
template <size_t Size>
class SimpleGCed final : public SimpleGCedBase {
class SimpleGCed : public SimpleGCedBase {
char array[Size];
};
@@ -65,6 +66,7 @@ class MinorGCTest : public testing::TestWithHeap {
Heap::From(GetHeap())->CollectGarbage(
Heap::Config::MinorPreciseAtomicConfig());
}
void CollectMajor() {
Heap::From(GetHeap())->CollectGarbage(Heap::Config::PreciseAtomicConfig());
}
@@ -237,6 +239,88 @@ TYPED_TEST(MinorGCTestForType, OmitGenerationalBarrierForSentinels) {
old->next = static_cast<Type*>(kSentinelPointer);
EXPECT_EQ(set_size_before_barrier, set.size());
}
template <typename From, typename To>
void TestRememberedSetInvalidation(MinorGCTest& test) {
Persistent<From> old = MakeGarbageCollected<From>(test.GetAllocationHandle());
test.CollectMinor();
auto* young = MakeGarbageCollected<To>(test.GetAllocationHandle());
const auto& set = Heap::From(test.GetHeap())->remembered_slots();
const size_t set_size_before_barrier = set.size();
// Issue the generational barrier.
old->next = young;
EXPECT_EQ(set_size_before_barrier + 1, set.size());
// Release the persistent and free the old object.
auto* old_raw = old.Release();
subtle::FreeUnreferencedObject(test.GetHeapHandle(), *old_raw);
// Check that the reference was invalidated.
EXPECT_EQ(set_size_before_barrier, set.size());
// Visiting remembered slots must not fail.
test.CollectMinor();
}
TYPED_TEST(MinorGCTestForType, RememberedSetInvalidationOnPromptlyFree) {
using Type1 = typename TestFixture::Type;
using Type2 = typename OtherType<Type1>::Type;
TestRememberedSetInvalidation<Type1, Type1>(*this);
TestRememberedSetInvalidation<Type1, Type2>(*this);
}
TEST_F(MinorGCTest, RememberedSetInvalidationOnShrink) {
using Member = Member<Small>;
static constexpr size_t kTrailingMembers = 64;
static constexpr size_t kBytesToAllocate = kTrailingMembers * sizeof(Member);
static constexpr size_t kFirstMemberToInvalidate = 63;
static constexpr size_t kLastMemberToInvalidate = kTrailingMembers;
// Create an object with additional kBytesToAllocate bytes.
Persistent<Small> old = MakeGarbageCollected<Small>(
this->GetAllocationHandle(), AdditionalBytes(kBytesToAllocate));
auto get_member = [&old](size_t i) -> Member& {
return *reinterpret_cast<Member*>(reinterpret_cast<uint8_t*>(old.Get()) +
sizeof(Small) + i * sizeof(Member));
};
CollectMinor();
auto* young = MakeGarbageCollected<Small>(GetAllocationHandle());
const auto& set = Heap::From(GetHeap())->remembered_slots();
const size_t set_size_before_barrier = set.size();
// Issue the generational barriers.
for (size_t i = kFirstMemberToInvalidate; i < kLastMemberToInvalidate; ++i) {
// Construct the member.
new (&get_member(i)) Member;
// Issue the barrier.
get_member(i) = young;
}
// Check that barriers hit (kLastMemberToInvalidate -
// kFirstMemberToInvalidate) times.
EXPECT_EQ(set_size_before_barrier +
(kLastMemberToInvalidate - kFirstMemberToInvalidate),
set.size());
// Shrink the buffer of the old object.
subtle::Resize(*old, AdditionalBytes(kBytesToAllocate / 2));
// Check that the reference was invalidated.
EXPECT_EQ(set_size_before_barrier, set.size());
// Visiting remembered slots must not fail.
CollectMinor();
}
} // namespace internal
} // namespace cppgc


@@ -48,7 +48,7 @@ class DelegatingTracingController : public TracingController {
};
class TestWithPlatform : public ::testing::Test {
protected:
public:
static void SetUpTestSuite();
static void TearDownTestSuite();
@@ -67,7 +67,7 @@ class TestWithPlatform : public ::testing::Test {
};
class TestWithHeap : public TestWithPlatform {
protected:
public:
TestWithHeap();
void PreciseGC() {