diff --git a/src/heap/cppgc/explicit-management.cc b/src/heap/cppgc/explicit-management.cc index cb8781a36b..dd77fe7ea9 100644 --- a/src/heap/cppgc/explicit-management.cc +++ b/src/heap/cppgc/explicit-management.cc @@ -4,6 +4,7 @@ #include "include/cppgc/explicit-management.h" +#include <algorithm> #include <tuple> #include "src/heap/cppgc/heap-base.h" @@ -24,12 +25,29 @@ bool InGC(HeapHandle& heap_handle) { heap.sweeper().IsSweepingInProgress(); } +void InvalidateRememberedSlots(HeapBase& heap, void* begin, void* end) { +#if defined(CPPGC_YOUNG_GENERATION) + // Invalidate slots that reside within |object|. + auto& remembered_slots = heap.remembered_slots(); + // TODO(bikineev): The 2 binary walks can be optimized with a custom + // algorithm. + auto from = remembered_slots.lower_bound(begin), + to = remembered_slots.lower_bound(end); + remembered_slots.erase(from, to); +#ifdef ENABLE_SLOW_DCHECKS + // Check that no remembered slots are referring to the freed area. + DCHECK(std::none_of(remembered_slots.begin(), remembered_slots.end(), + [begin, end](void* slot) { + void* value = *reinterpret_cast<void**>(slot); + return begin <= value && value < end; + })); +#endif // ENABLE_SLOW_DCHECKS +#endif // defined(CPPGC_YOUNG_GENERATION) +} + } // namespace void FreeUnreferencedObject(HeapHandle& heap_handle, void* object) { -// TODO(bikineev): Invalidate slots that reside within |object| and handle free -// values in VisitRememberedSlots. -#if !defined(CPPGC_YOUNG_GENERATION) if (InGC(heap_handle)) { return; } @@ -37,16 +55,21 @@ void FreeUnreferencedObject(HeapHandle& heap_handle, void* object) { auto& header = HeapObjectHeader::FromObject(object); header.Finalize(); + size_t object_size = 0; + USE(object_size); + // `object` is guaranteed to be of type GarbageCollected, so getting the + // BasePage is okay for regular and large objects. BasePage* base_page = BasePage::FromPayload(object); if (base_page->is_large()) { // Large object.
+ object_size = LargePage::From(base_page)->ObjectSize(); base_page->space().RemovePage(base_page); base_page->heap().stats_collector()->NotifyExplicitFree( LargePage::From(base_page)->PayloadSize()); LargePage::Destroy(LargePage::From(base_page)); } else { // Regular object. const size_t header_size = header.AllocatedSize(); + object_size = header.ObjectSize(); auto* normal_page = NormalPage::From(base_page); auto& normal_space = *static_cast<NormalPageSpace*>(&base_page->space()); auto& lab = normal_space.linear_allocation_buffer(); @@ -62,7 +85,8 @@ void FreeUnreferencedObject(HeapHandle& heap_handle, void* object) { // list entry. } } -#endif // !defined(CPPGC_YOUNG_GENERATION) + InvalidateRememberedSlots(HeapBase::From(heap_handle), object, + reinterpret_cast<uint8_t*>(object) + object_size); } namespace { @@ -102,17 +126,17 @@ bool Shrink(HeapObjectHeader& header, BasePage& base_page, size_t new_size, lab.Set(free_start, lab.size() + size_delta); SetMemoryInaccessible(lab.start(), size_delta); header.SetAllocatedSize(new_size); - return true; - } - // Heuristic: Only return memory to the free list if the block is larger than - // the smallest size class. - if (size_delta >= ObjectAllocator::kSmallestSpaceSize) { + } else if (size_delta >= ObjectAllocator::kSmallestSpaceSize) { + // Heuristic: Only return memory to the free list if the block is larger + // than the smallest size class. SetMemoryInaccessible(free_start, size_delta); base_page.heap().stats_collector()->NotifyExplicitFree(size_delta); normal_space.free_list().Add({free_start, size_delta}); NormalPage::From(&base_page)->object_start_bitmap().SetBit(free_start); header.SetAllocatedSize(new_size); } + InvalidateRememberedSlots(base_page.heap(), free_start, + free_start + size_delta); // Return success in any case, as we want to avoid that embedders start // copying memory because of small deltas.
return true; diff --git a/src/heap/cppgc/garbage-collector.h b/src/heap/cppgc/garbage-collector.h index b8e52452ee..91cfdc6ea7 100644 --- a/src/heap/cppgc/garbage-collector.h +++ b/src/heap/cppgc/garbage-collector.h @@ -55,6 +55,11 @@ class GarbageCollector { MarkingType::kAtomic, SweepingType::kAtomic}; } + static constexpr Config MinorConservativeAtomicConfig() { + return {CollectionType::kMinor, StackState::kMayContainHeapPointers, + MarkingType::kAtomic, SweepingType::kAtomic}; + } + CollectionType collection_type = CollectionType::kMajor; StackState stack_state = StackState::kMayContainHeapPointers; MarkingType marking_type = MarkingType::kAtomic; diff --git a/src/heap/cppgc/heap-object-header.h b/src/heap/cppgc/heap-object-header.h index 485000a60f..a6efb8defd 100644 --- a/src/heap/cppgc/heap-object-header.h +++ b/src/heap/cppgc/heap-object-header.h @@ -125,18 +125,17 @@ class HeapObjectHeader { using GCInfoIndexField = UnusedField1::Next<GCInfoIndex, 14>; // Used in |encoded_low_|. using MarkBitField = v8::base::BitField16<bool, 0, 1>; - using SizeField = void; // Use EncodeSize/DecodeSize instead. + using SizeField = + MarkBitField::Next<size_t, 15>; // Use EncodeSize/DecodeSize instead. static constexpr size_t DecodeSize(uint16_t encoded) { // Essentially, gets optimized to << 1. - using SizeFieldImpl = MarkBitField::Next<size_t, 15>; - return SizeFieldImpl::decode(encoded) * kAllocationGranularity; + return SizeField::decode(encoded) * kAllocationGranularity; } static constexpr uint16_t EncodeSize(size_t size) { // Essentially, gets optimized to >> 1. - using SizeFieldImpl = MarkBitField::Next<size_t, 15>; - return SizeFieldImpl::encode(size / kAllocationGranularity); + return SizeField::encode(size / kAllocationGranularity); } V8_EXPORT_PRIVATE void CheckApiConstants(); @@ -236,7 +235,10 @@ void HeapObjectHeader::SetAllocatedSize(size_t size) { // resized. DCHECK(!IsMarked()); #endif - encoded_low_ = EncodeSize(size); + // The object may be marked (i.e. old, in case young generation is enabled).
+ // Make sure to not overwrite the mark bit. + encoded_low_ &= ~SizeField::encode(SizeField::kMax); + encoded_low_ |= EncodeSize(size); } template <AccessMode mode> diff --git a/src/heap/cppgc/marker.cc b/src/heap/cppgc/marker.cc index 81a53302f8..fc8891a909 100644 --- a/src/heap/cppgc/marker.cc +++ b/src/heap/cppgc/marker.cc @@ -67,6 +67,7 @@ void VisitRememberedSlots(HeapBase& heap, StatsCollector::EnabledScope stats_scope( heap.stats_collector(), StatsCollector::kMarkVisitRememberedSets); for (void* slot : heap.remembered_slots()) { + // Slot must always point to a valid, not freed object. auto& slot_header = BasePage::FromInnerAddress(&heap, slot) ->ObjectHeaderFromInnerAddress(slot); if (slot_header.IsYoung()) continue; @@ -80,6 +81,13 @@ void VisitRememberedSlots(HeapBase& heap, // Slot could be updated to nullptr or kSentinelPointer by the mutator. if (value == kSentinelPointer || value == nullptr) continue; +#if DEBUG + // Check that the slot can not point to a freed object. + HeapObjectHeader& header = + BasePage::FromPayload(value)->ObjectHeaderFromInnerAddress(value); + DCHECK(!header.IsFree()); +#endif + mutator_marking_state.DynamicallyMarkAddress(static_cast<ConstAddress>
(value)); } #endif diff --git a/test/unittests/heap/cppgc/minor-gc-unittest.cc b/test/unittests/heap/cppgc/minor-gc-unittest.cc index 41af4774cf..785063e86d 100644 --- a/test/unittests/heap/cppgc/minor-gc-unittest.cc +++ b/test/unittests/heap/cppgc/minor-gc-unittest.cc @@ -5,6 +5,7 @@ #if defined(CPPGC_YOUNG_GENERATION) #include "include/cppgc/allocation.h" +#include "include/cppgc/explicit-management.h" #include "include/cppgc/heap-consistency.h" #include "include/cppgc/internal/caged-heap-local-data.h" #include "include/cppgc/persistent.h" @@ -32,7 +33,7 @@ class SimpleGCedBase : public GarbageCollected<SimpleGCedBase> { size_t SimpleGCedBase::destructed_objects; template <size_t Size> -class SimpleGCed final : public SimpleGCedBase { +class SimpleGCed : public SimpleGCedBase { char array[Size]; }; @@ -65,6 +66,7 @@ class MinorGCTest : public testing::TestWithHeap { Heap::From(GetHeap())->CollectGarbage( Heap::Config::MinorPreciseAtomicConfig()); } + void CollectMajor() { Heap::From(GetHeap())->CollectGarbage(Heap::Config::PreciseAtomicConfig()); } @@ -237,6 +239,88 @@ TYPED_TEST(MinorGCTestForType, OmitGenerationalBarrierForSentinels) { old->next = static_cast<Type*>(kSentinelPointer); EXPECT_EQ(set_size_before_barrier, set.size()); } + +template <typename Type1, typename Type2> +void TestRememberedSetInvalidation(MinorGCTest& test) { + Persistent<Type1> old = MakeGarbageCollected<Type1>(test.GetAllocationHandle()); + + test.CollectMinor(); + + auto* young = MakeGarbageCollected<Type2>(test.GetAllocationHandle()); + + const auto& set = Heap::From(test.GetHeap())->remembered_slots(); + const size_t set_size_before_barrier = set.size(); + + // Issue the generational barrier. + old->next = young; + EXPECT_EQ(set_size_before_barrier + 1, set.size()); + + // Release the persistent and free the old object. + auto* old_raw = old.Release(); + subtle::FreeUnreferencedObject(test.GetHeapHandle(), *old_raw); + // Check that the reference was invalidated. + EXPECT_EQ(set_size_before_barrier, set.size()); + + // Visiting remembered slots must not fail.
+ test.CollectMinor(); +} + +TYPED_TEST(MinorGCTestForType, RememberedSetInvalidationOnPromptlyFree) { + using Type1 = typename TestFixture::Type; + using Type2 = typename OtherType<Type1>::Type; + TestRememberedSetInvalidation<Type1, Type2>(*this); + TestRememberedSetInvalidation<Type2, Type1>(*this); +} + +TEST_F(MinorGCTest, RememberedSetInvalidationOnShrink) { + using Member = Member<Small>; + + static constexpr size_t kTrailingMembers = 64; + static constexpr size_t kBytesToAllocate = kTrailingMembers * sizeof(Member); + + static constexpr size_t kFirstMemberToInvalidate = 63; + static constexpr size_t kLastMemberToInvalidate = kTrailingMembers; + + // Create an object with additional kBytesToAllocate bytes. + Persistent<Small> old = MakeGarbageCollected<Small>( + this->GetAllocationHandle(), AdditionalBytes(kBytesToAllocate)); + + auto get_member = [&old](size_t i) -> Member& { + return *reinterpret_cast<Member*>(reinterpret_cast<uint8_t*>(old.Get()) + + sizeof(Small) + i * sizeof(Member)); + }; + + CollectMinor(); + + auto* young = MakeGarbageCollected<Small>(GetAllocationHandle()); + + const auto& set = Heap::From(GetHeap())->remembered_slots(); + const size_t set_size_before_barrier = set.size(); + + // Issue the generational barriers. + for (size_t i = kFirstMemberToInvalidate; i < kLastMemberToInvalidate; ++i) { + // Construct the member. + new (&get_member(i)) Member; + // Issue the barrier. + get_member(i) = young; + } + + // Check that barriers hit (kLastMemberToInvalidate - + // kFirstMemberToInvalidate) times. + EXPECT_EQ(set_size_before_barrier + + (kLastMemberToInvalidate - kFirstMemberToInvalidate), + set.size()); + + // Shrink the buffer for old object. + subtle::Resize(*old, AdditionalBytes(kBytesToAllocate / 2)); + + // Check that the reference was invalidated. + EXPECT_EQ(set_size_before_barrier, set.size()); + + // Visiting remembered slots must not fail.
+ CollectMinor(); +} + } // namespace internal } // namespace cppgc diff --git a/test/unittests/heap/cppgc/tests.h b/test/unittests/heap/cppgc/tests.h index 39c92b95e9..7fe91397fc 100644 --- a/test/unittests/heap/cppgc/tests.h +++ b/test/unittests/heap/cppgc/tests.h @@ -48,7 +48,7 @@ class DelegatingTracingController : public TracingController { }; class TestWithPlatform : public ::testing::Test { - protected: + public: static void SetUpTestSuite(); static void TearDownTestSuite(); @@ -67,7 +67,7 @@ class TestWithPlatform : public ::testing::Test { }; class TestWithHeap : public TestWithPlatform { - protected: + public: TestWithHeap(); void PreciseGC() {