cppgc: young-gen: Switch from std::set to slot-set.

The CL makes Oilpan use the same data structure (a two-level bitmap) that V8
uses for its remembered set.

Bug: chromium:1029379
Change-Id: I1213d3ae06da5e85466430875378e3b8dfdee16e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3701592
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83594}
Anton Bikineev 2022-10-07 18:58:26 +02:00 committed by V8 LUCI CQ
parent 283791d250
commit cd552e12bb
8 changed files with 411 additions and 114 deletions
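For orientation, below is a minimal sketch of the two-level bitmap idea that the CL adopts. It is not V8's actual heap::base::BasicSlotSet (the real class differs in bucket geometry, naming, and atomic access modes); the class name ToySlotSet and all constants in it are illustrative assumptions. The point it demonstrates: slots are recorded as bits keyed by page-relative offset, the second-level buckets are allocated lazily, and iteration skips absent buckets wholesale, which is what replaces a std::set<void*> of full slot addresses.

// A toy two-level slot set, for illustration only; geometry and names are
// made up and do not match heap::base::BasicSlotSet.
#include <array>
#include <cstddef>
#include <cstdint>
#include <memory>
#include <vector>

class ToySlotSet {
 public:
  static constexpr size_t kSlotSize = sizeof(uint32_t);  // compressed slots
  static constexpr size_t kBitsPerCell = 32;
  static constexpr size_t kCellsPerBucket = 16;
  static constexpr size_t kSlotsPerBucket = kBitsPerCell * kCellsPerBucket;

  // First level: one (possibly null) bucket per kSlotsPerBucket page slots.
  explicit ToySlotSet(size_t page_size)
      : buckets_((page_size / kSlotSize + kSlotsPerBucket - 1) /
                 kSlotsPerBucket) {}

  // Records a page-relative byte offset, analogous to what AddSlot in the
  // diff computes as `slot - page`.
  void Insert(size_t byte_offset) {
    const size_t slot = byte_offset / kSlotSize;
    auto& bucket = buckets_[slot / kSlotsPerBucket];
    if (!bucket) bucket = std::make_unique<Bucket>();  // lazy second level
    (*bucket)[(slot % kSlotsPerBucket) / kBitsPerCell] |=
        uint32_t{1} << (slot % kBitsPerCell);
  }

  // Visits every recorded offset; unallocated buckets are skipped wholesale,
  // which keeps iteration over sparse pages cheap.
  template <typename Callback>
  void Iterate(Callback callback) const {
    for (size_t b = 0; b < buckets_.size(); ++b) {
      if (!buckets_[b]) continue;
      for (size_t c = 0; c < kCellsPerBucket; ++c) {
        const uint32_t cell = (*buckets_[b])[c];
        for (size_t bit = 0; bit < kBitsPerCell; ++bit) {
          if (cell & (uint32_t{1} << bit)) {
            callback((b * kSlotsPerBucket + c * kBitsPerCell + bit) *
                     kSlotSize);
          }
        }
      }
    }
  }

 private:
  using Bucket = std::array<uint32_t, kCellsPerBucket>;
  std::vector<std::unique_ptr<Bucket>> buckets_;  // first level
};

Read against this sketch, the diff below follows naturally: AddSlot inserts a page-relative offset, invalidation of a freed range becomes RemoveRange over the same offsets, and the FREE_EMPTY_BUCKETS mode returns the memory of emptied buckets, none of which a flat std::set of slot addresses supports as cheaply.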

src/heap/cppgc/explicit-management.cc

@@ -11,6 +11,7 @@
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/memory.h"
#include "src/heap/cppgc/object-view.h"
namespace cppgc {
namespace internal {
@@ -36,21 +37,30 @@ void ExplicitManagementImpl::FreeUnreferencedObject(HeapHandle& heap_handle,
auto& header = HeapObjectHeader::FromObject(object);
header.Finalize();
size_t object_size = 0;
USE(object_size);
// `object` is guaranteed to be of type GarbageCollected, so getting the
// BasePage is okay for regular and large objects.
BasePage* base_page = BasePage::FromPayload(object);
#if defined(CPPGC_YOUNG_GENERATION)
const size_t object_size = ObjectView<>(header).Size();
if (auto& heap_base = HeapBase::From(heap_handle);
heap_base.generational_gc_supported()) {
heap_base.remembered_set().InvalidateRememberedSlotsInRange(
object, reinterpret_cast<uint8_t*>(object) + object_size);
// If this object was registered as remembered, remove it. Do that before
// the page gets destroyed.
heap_base.remembered_set().InvalidateRememberedSourceObject(header);
}
#endif // defined(CPPGC_YOUNG_GENERATION)
if (base_page->is_large()) { // Large object.
object_size = LargePage::From(base_page)->ObjectSize();
base_page->space().RemovePage(base_page);
base_page->heap().stats_collector()->NotifyExplicitFree(
LargePage::From(base_page)->PayloadSize());
LargePage::Destroy(LargePage::From(base_page));
} else { // Regular object.
const size_t header_size = header.AllocatedSize();
object_size = header.ObjectSize();
auto* normal_page = NormalPage::From(base_page);
auto& normal_space = *static_cast<NormalPageSpace*>(&base_page->space());
auto& lab = normal_space.linear_allocation_buffer();
@@ -66,15 +76,6 @@ void ExplicitManagementImpl::FreeUnreferencedObject(HeapHandle& heap_handle,
// list entry.
}
}
#if defined(CPPGC_YOUNG_GENERATION)
auto& heap_base = HeapBase::From(heap_handle);
if (heap_base.generational_gc_supported()) {
heap_base.remembered_set().InvalidateRememberedSlotsInRange(
object, reinterpret_cast<uint8_t*>(object) + object_size);
// If this object was registered as remembered, remove it.
heap_base.remembered_set().InvalidateRememberedSourceObject(header);
}
#endif // defined(CPPGC_YOUNG_GENERATION)
}
namespace {

src/heap/cppgc/globals.h

@@ -80,6 +80,12 @@ constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(4) * kGB;
#endif // !defined(CPPGC_2GB_CAGE)
constexpr size_t kCagedHeapReservationAlignment = kCagedHeapReservationSize;
#if defined(CPPGC_POINTER_COMPRESSION)
constexpr size_t kSlotSize = sizeof(uint32_t);
#else // !defined(CPPGC_POINTER_COMPRESSION)
constexpr size_t kSlotSize = sizeof(uintptr_t);
#endif // !defined(CPPGC_POINTER_COMPRESSION)
} // namespace internal
} // namespace cppgc

src/heap/cppgc/heap-page.cc

@@ -18,6 +18,7 @@
#include "src/heap/cppgc/object-start-bitmap.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/remembered-set.h"
#include "src/heap/cppgc/stats-collector.h"
namespace cppgc {
@@ -85,6 +86,13 @@ ConstAddress BasePage::PayloadEnd() const {
return const_cast<BasePage*>(this)->PayloadEnd();
}
size_t BasePage::AllocatedSize() const {
return is_large() ? LargePage::PageHeaderSize() +
LargePage::From(this)->PayloadSize()
: NormalPage::From(this)->PayloadSize() +
RoundUp(sizeof(NormalPage), kAllocationGranularity);
}
size_t BasePage::AllocatedBytesAtLastGC() const {
return is_large() ? LargePage::From(this)->AllocatedBytesAtLastGC()
: NormalPage::From(this)->AllocatedBytesAtLastGC();
@@ -120,8 +128,32 @@ const HeapObjectHeader* BasePage::TryObjectHeaderFromInnerAddress(
return header;
}
#if defined(CPPGC_YOUNG_GENERATION)
void BasePage::AllocateSlotSet() {
DCHECK_NULL(slot_set_);
slot_set_ = decltype(slot_set_)(
static_cast<SlotSet*>(
SlotSet::Allocate(SlotSet::BucketsForSize(AllocatedSize()))),
SlotSetDeleter{AllocatedSize()});
}
void BasePage::SlotSetDeleter::operator()(SlotSet* slot_set) const {
DCHECK_NOT_NULL(slot_set);
SlotSet::Delete(slot_set, SlotSet::BucketsForSize(page_size_));
}
void BasePage::ResetSlotSet() { slot_set_.reset(); }
#endif // defined(CPPGC_YOUNG_GENERATION)
BasePage::BasePage(HeapBase& heap, BaseSpace& space, PageType type)
: BasePageHandle(heap), space_(space), type_(type) {
: BasePageHandle(heap),
space_(space),
type_(type)
#if defined(CPPGC_YOUNG_GENERATION)
,
slot_set_(nullptr, SlotSetDeleter{})
#endif // defined(CPPGC_YOUNG_GENERATION)
{
DCHECK_EQ(0u, (reinterpret_cast<uintptr_t>(this) - kGuardPageSize) &
kPageOffsetMask);
DCHECK_EQ(&heap.raw_heap(), space_.raw_heap());

src/heap/cppgc/heap-page.h

@@ -8,6 +8,7 @@
#include "include/cppgc/internal/base-page-handle.h"
#include "src/base/iterator.h"
#include "src/base/macros.h"
#include "src/heap/base/basic-slot-set.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/object-start-bitmap.h"
@@ -20,6 +21,7 @@ class NormalPageSpace;
class LargePageSpace;
class HeapBase;
class PageBackend;
class SlotSet;
class V8_EXPORT_PRIVATE BasePage : public BasePageHandle {
public:
@@ -45,6 +47,9 @@ class V8_EXPORT_PRIVATE BasePage : public BasePageHandle {
Address PayloadEnd();
ConstAddress PayloadEnd() const;
// Returns the size of the payload including the page header.
size_t AllocatedSize() const;
// Returns the size of live objects on the page at the last GC.
// The counter is updated after sweeping.
size_t AllocatedBytesAtLastGC() const;
@@ -92,14 +97,29 @@ class V8_EXPORT_PRIVATE BasePage : public BasePageHandle {
contains_young_objects_ = value;
}
#if defined(CPPGC_YOUNG_GENERATION)
V8_INLINE SlotSet* slot_set() const { return slot_set_.get(); }
V8_INLINE SlotSet& GetOrAllocateSlotSet();
void ResetSlotSet();
#endif // defined(CPPGC_YOUNG_GENERATION)
protected:
enum class PageType : uint8_t { kNormal, kLarge };
BasePage(HeapBase&, BaseSpace&, PageType);
private:
struct SlotSetDeleter {
void operator()(SlotSet*) const;
size_t page_size_ = 0;
};
void AllocateSlotSet();
BaseSpace& space_;
PageType type_;
bool contains_young_objects_ = false;
#if defined(CPPGC_YOUNG_GENERATION)
std::unique_ptr<SlotSet, SlotSetDeleter> slot_set_;
#endif // defined(CPPGC_YOUNG_GENERATION)
size_t discarded_memory_ = 0;
};
@@ -311,6 +331,13 @@ const HeapObjectHeader& BasePage::ObjectHeaderFromInnerAddress(
return *header;
}
#if defined(CPPGC_YOUNG_GENERATION)
SlotSet& BasePage::GetOrAllocateSlotSet() {
if (!slot_set_) AllocateSlotSet();
return *slot_set_;
}
#endif // defined(CPPGC_YOUNG_GENERATION)
} // namespace internal
} // namespace cppgc

src/heap/cppgc/remembered-set.cc

@@ -2,15 +2,19 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#if defined(CPPGC_YOUNG_GENERATION)
#include "src/heap/cppgc/remembered-set.h"
#include <algorithm>
#include "include/cppgc/member.h"
#include "include/cppgc/visitor.h"
#include "src/heap/base/basic-slot-set.h"
#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/marking-state.h"
namespace cppgc {
@@ -20,23 +24,54 @@ namespace {
enum class SlotType { kCompressed, kUncompressed };
template <SlotType slot_type>
void InvalidateRememberedSlots(std::set<void*>& slots, void* begin, void* end) {
void EraseFromSet(std::set<void*>& set, void* begin, void* end) {
// TODO(1029379): The 2 binary walks can be optimized with a custom algorithm.
auto from = slots.lower_bound(begin), to = slots.lower_bound(end);
slots.erase(from, to);
auto from = set.lower_bound(begin), to = set.lower_bound(end);
set.erase(from, to);
}
// TODO(1029379): Make the implementation functions private functions of
// OldToNewRememberedSet to avoid parameter passing.
void InvalidateCompressedRememberedSlots(
const HeapBase& heap, void* begin, void* end,
std::set<void*>& remembered_slots_for_verification) {
DCHECK_LT(begin, end);
BasePage* page = BasePage::FromInnerAddress(&heap, begin);
DCHECK_NOT_NULL(page);
// The input range must reside within the same page.
DCHECK_EQ(page, BasePage::FromInnerAddress(
&heap, reinterpret_cast<void*>(
reinterpret_cast<uintptr_t>(end) - 1)));
auto* slot_set = page->slot_set();
if (!slot_set) return;
const size_t buckets_size = SlotSet::BucketsForSize(page->AllocatedSize());
const uintptr_t page_start = reinterpret_cast<uintptr_t>(page);
const uintptr_t ubegin = reinterpret_cast<uintptr_t>(begin);
const uintptr_t uend = reinterpret_cast<uintptr_t>(end);
slot_set->RemoveRange(ubegin - page_start, uend - page_start, buckets_size,
SlotSet::EmptyBucketMode::FREE_EMPTY_BUCKETS);
#if DEBUG
EraseFromSet(remembered_slots_for_verification, begin, end);
#endif // DEBUG
}
void InvalidateUncompressedRememberedSlots(
std::set<void*>& slots, void* begin, void* end,
std::set<void*>& remembered_slots_for_verification) {
EraseFromSet(slots, begin, end);
#if DEBUG
EraseFromSet(remembered_slots_for_verification, begin, end);
#endif // DEBUG
#if defined(ENABLE_SLOW_DCHECKS)
// Check that no remembered slots are referring to the freed area.
DCHECK(std::none_of(slots.begin(), slots.end(), [begin, end](void* slot) {
void* value = nullptr;
#if defined(CPPGC_POINTER_COMPRESSION)
if constexpr (slot_type == SlotType::kCompressed)
value = CompressedPointer::Decompress(*reinterpret_cast<uint32_t*>(slot));
else
value = *reinterpret_cast<void**>(slot);
#else // !defined(CPPGC_POINTER_COMPRESSION)
value = *reinterpret_cast<void**>(slot);
#endif // !defined(CPPGC_POINTER_COMPRESSION)
return begin <= value && value < end;
}));
#endif // defined(ENABLE_SLOW_DCHECKS)
@@ -44,45 +79,155 @@ void InvalidateRememberedSlots(std::set<void*>& slots, void* begin, void* end) {
// Visit remembered set that was recorded in the generational barrier.
template <SlotType slot_type>
void VisitRememberedSlots(const std::set<void*>& slots, const HeapBase& heap,
MutatorMarkingState& mutator_marking_state) {
for (void* slot : slots) {
// Slot must always point to a valid, not freed object.
auto& slot_header = BasePage::FromInnerAddress(&heap, slot)
->ObjectHeaderFromInnerAddress(slot);
// The age checking in the generational barrier is imprecise, since a card
// may have mixed young/old objects. Check here precisely if the object is
// old.
if (slot_header.IsYoung()) continue;
// The design of young generation requires collections to be executed at the
top level (with the guarantee that no objects are currently in
// construction). This can be ensured by running young GCs from safe points
// or by reintroducing nested allocation scopes that avoid finalization.
DCHECK(!slot_header.template IsInConstruction<AccessMode::kNonAtomic>());
void VisitSlot(const HeapBase& heap, const BasePage& page, Address slot,
MutatorMarkingState& marking_state,
const std::set<void*>& slots_for_verification) {
#if defined(DEBUG)
DCHECK_EQ(BasePage::FromInnerAddress(&heap, slot), &page);
DCHECK_NE(slots_for_verification.end(), slots_for_verification.find(slot));
#endif // defined(DEBUG)
// Slot must always point to a valid, not freed object.
auto& slot_header = page.ObjectHeaderFromInnerAddress(slot);
// The age checking in the generational barrier is imprecise, since a card
// may have mixed young/old objects. Check here precisely if the object is
// old.
if (slot_header.IsYoung()) return;
// The design of young generation requires collections to be executed at the
top level (with the guarantee that no objects are currently in
// construction). This can be ensured by running young GCs from safe points
// or by reintroducing nested allocation scopes that avoid finalization.
DCHECK(!slot_header.template IsInConstruction<AccessMode::kNonAtomic>());
#if defined(CPPGC_POINTER_COMPRESSION)
void* value = nullptr;
if constexpr (slot_type == SlotType::kCompressed) {
value = CompressedPointer::Decompress(*reinterpret_cast<uint32_t*>(slot));
} else {
value = *reinterpret_cast<void**>(slot);
}
void* value = nullptr;
if constexpr (slot_type == SlotType::kCompressed) {
value = CompressedPointer::Decompress(*reinterpret_cast<uint32_t*>(slot));
} else {
value = *reinterpret_cast<void**>(slot);
}
#else // !defined(CPPGC_POINTER_COMPRESSION)
void* value = *reinterpret_cast<void**>(slot);
void* value = *reinterpret_cast<void**>(slot);
#endif // !defined(CPPGC_POINTER_COMPRESSION)
// Slot could be updated to nullptr or kSentinelPointer by the mutator.
if (value == kSentinelPointer || value == nullptr) continue;
// Slot could be updated to nullptr or kSentinelPointer by the mutator.
if (value == kSentinelPointer || value == nullptr) return;
#if DEBUG
// Check that the slot does not point to a freed object.
HeapObjectHeader& header =
BasePage::FromPayload(value)->ObjectHeaderFromInnerAddress(value);
DCHECK(!header.IsFree());
#endif
#if defined(DEBUG)
// Check that the slot does not point to a freed object.
HeapObjectHeader& header =
BasePage::FromPayload(value)->ObjectHeaderFromInnerAddress(value);
DCHECK(!header.IsFree());
#endif // defined(DEBUG)
mutator_marking_state.DynamicallyMarkAddress(static_cast<Address>(value));
marking_state.DynamicallyMarkAddress(static_cast<Address>(value));
}
class CompressedSlotVisitor : HeapVisitor<CompressedSlotVisitor> {
friend class HeapVisitor<CompressedSlotVisitor>;
public:
CompressedSlotVisitor(HeapBase& heap, MutatorMarkingState& marking_state,
const std::set<void*>& slots_for_verification)
: heap_(heap),
marking_state_(marking_state),
remembered_slots_for_verification_(slots_for_verification) {}
size_t Run() {
Traverse(heap_.raw_heap());
return objects_visited_;
}
private:
heap::base::SlotCallbackResult VisitCompressedSlot(Address slot) {
DCHECK(current_page_);
VisitSlot<SlotType::kCompressed>(heap_, *current_page_, slot,
marking_state_,
remembered_slots_for_verification_);
++objects_visited_;
return heap::base::KEEP_SLOT;
}
void VisitSlotSet(SlotSet* slot_set) {
DCHECK(current_page_);
if (!slot_set) return;
const uintptr_t page_start = reinterpret_cast<uintptr_t>(current_page_);
const size_t buckets_size =
SlotSet::BucketsForSize(current_page_->AllocatedSize());
slot_set->Iterate(
page_start, 0, buckets_size,
[this](SlotSet::Address slot) {
return VisitCompressedSlot(reinterpret_cast<Address>(slot));
},
SlotSet::EmptyBucketMode::FREE_EMPTY_BUCKETS);
}
bool VisitNormalPage(NormalPage& page) {
current_page_ = &page;
VisitSlotSet(page.slot_set());
return true;
}
bool VisitLargePage(LargePage& page) {
current_page_ = &page;
VisitSlotSet(page.slot_set());
return true;
}
HeapBase& heap_;
MutatorMarkingState& marking_state_;
BasePage* current_page_ = nullptr;
const std::set<void*>& remembered_slots_for_verification_;
size_t objects_visited_ = 0u;
};
class SlotRemover : HeapVisitor<SlotRemover> {
friend class HeapVisitor<SlotRemover>;
public:
explicit SlotRemover(HeapBase& heap) : heap_(heap) {}
void Run() { Traverse(heap_.raw_heap()); }
private:
bool VisitNormalPage(NormalPage& page) {
page.ResetSlotSet();
return true;
}
bool VisitLargePage(LargePage& page) {
page.ResetSlotSet();
return true;
}
HeapBase& heap_;
};
// Visit remembered set that was recorded in the generational barrier.
void VisitRememberedSlots(
HeapBase& heap, MutatorMarkingState& mutator_marking_state,
const std::set<void*>& remembered_uncompressed_slots,
const std::set<void*>& remembered_slots_for_verification) {
size_t objects_visited = 0;
{
CompressedSlotVisitor slot_visitor(heap, mutator_marking_state,
remembered_slots_for_verification);
objects_visited += slot_visitor.Run();
}
for (void* uncompressed_slot : remembered_uncompressed_slots) {
auto* page = BasePage::FromInnerAddress(&heap, uncompressed_slot);
DCHECK(page);
VisitSlot<SlotType::kUncompressed>(
heap, *page, static_cast<Address>(uncompressed_slot),
mutator_marking_state, remembered_slots_for_verification);
++objects_visited;
}
DCHECK_EQ(remembered_slots_for_verification.size(), objects_visited);
USE(objects_visited);
}
// Visits source objects that were recorded in the generational barrier for
@@ -114,12 +259,29 @@ void VisitRememberedSourceObjects(
void OldToNewRememberedSet::AddSlot(void* slot) {
DCHECK(heap_.generational_gc_supported());
remembered_slots_.insert(slot);
BasePage* source_page = BasePage::FromInnerAddress(&heap_, slot);
DCHECK(source_page);
auto& slot_set = source_page->GetOrAllocateSlotSet();
const uintptr_t slot_offset = reinterpret_cast<uintptr_t>(slot) -
reinterpret_cast<uintptr_t>(source_page);
slot_set.Insert<SlotSet::AccessMode::NON_ATOMIC>(
static_cast<size_t>(slot_offset));
#if defined(DEBUG)
remembered_slots_for_verification_.insert(slot);
#endif // defined(DEBUG)
}
void OldToNewRememberedSet::AddUncompressedSlot(void* uncompressed_slot) {
DCHECK(heap_.generational_gc_supported());
remembered_uncompressed_slots_.insert(uncompressed_slot);
#if defined(DEBUG)
remembered_slots_for_verification_.insert(uncompressed_slot);
#endif // defined(DEBUG)
}
void OldToNewRememberedSet::AddSourceObject(HeapObjectHeader& hoh) {
@@ -138,10 +300,11 @@ void OldToNewRememberedSet::AddWeakCallback(WeakCallbackItem item) {
void OldToNewRememberedSet::InvalidateRememberedSlotsInRange(void* begin,
void* end) {
DCHECK(heap_.generational_gc_supported());
InvalidateRememberedSlots<SlotType::kCompressed>(remembered_slots_, begin,
end);
InvalidateRememberedSlots<SlotType::kUncompressed>(
remembered_uncompressed_slots_, begin, end);
InvalidateCompressedRememberedSlots(heap_, begin, end,
remembered_slots_for_verification_);
InvalidateUncompressedRememberedSlots(remembered_uncompressed_slots_, begin,
end,
remembered_slots_for_verification_);
}
void OldToNewRememberedSet::InvalidateRememberedSourceObject(
@@ -153,10 +316,8 @@ void OldToNewRememberedSet::InvalidateRememberedSourceObject(
void OldToNewRememberedSet::Visit(Visitor& visitor,
MutatorMarkingState& marking_state) {
DCHECK(heap_.generational_gc_supported());
VisitRememberedSlots<SlotType::kCompressed>(remembered_slots_, heap_,
marking_state);
VisitRememberedSlots<SlotType::kUncompressed>(remembered_uncompressed_slots_,
heap_, marking_state);
VisitRememberedSlots(heap_, marking_state, remembered_uncompressed_slots_,
remembered_slots_for_verification_);
VisitRememberedSourceObjects(remembered_source_objects_, visitor);
}
@@ -174,16 +335,23 @@ void OldToNewRememberedSet::ReleaseCustomCallbacks() {
void OldToNewRememberedSet::Reset() {
DCHECK(heap_.generational_gc_supported());
remembered_slots_.clear();
SlotRemover slot_remover(heap_);
slot_remover.Run();
remembered_uncompressed_slots_.clear();
remembered_source_objects_.clear();
#if DEBUG
remembered_slots_for_verification_.clear();
#endif // DEBUG
}
bool OldToNewRememberedSet::IsEmpty() const {
return remembered_slots_.empty() && remembered_uncompressed_slots_.empty() &&
// TODO(1029379): Add visitor to check if empty.
return remembered_uncompressed_slots_.empty() &&
remembered_source_objects_.empty() &&
remembered_weak_callbacks_.empty();
}
} // namespace internal
} // namespace cppgc
#endif // defined(CPPGC_YOUNG_GENERATION)

src/heap/cppgc/remembered-set.h

@@ -5,9 +5,12 @@
#ifndef V8_HEAP_CPPGC_REMEMBERED_SET_H_
#define V8_HEAP_CPPGC_REMEMBERED_SET_H_
#if defined(CPPGC_YOUNG_GENERATION)
#include <set>
#include "src/base/macros.h"
#include "src/heap/base/basic-slot-set.h"
#include "src/heap/cppgc/marking-worklists.h"
namespace cppgc {
@@ -21,11 +24,14 @@ class HeapBase;
class HeapObjectHeader;
class MutatorMarkingState;
class SlotSet : public ::heap::base::BasicSlotSet<kSlotSize> {};
// OldToNewRememberedSet represents a per-heap set of old-to-new references.
class V8_EXPORT_PRIVATE OldToNewRememberedSet final {
public:
using WeakCallbackItem = MarkingWorklists::WeakCallbackItem;
explicit OldToNewRememberedSet(const HeapBase& heap)
explicit OldToNewRememberedSet(HeapBase& heap)
: heap_(heap), remembered_weak_callbacks_(compare_parameter) {}
OldToNewRememberedSet(const OldToNewRememberedSet&) = delete;
@@ -58,15 +64,19 @@ class V8_EXPORT_PRIVATE OldToNewRememberedSet final {
}
} compare_parameter{};
const HeapBase& heap_;
std::set<void*> remembered_slots_;
std::set<void*> remembered_uncompressed_slots_;
HeapBase& heap_;
std::set<HeapObjectHeader*> remembered_source_objects_;
std::set<WeakCallbackItem, decltype(compare_parameter)>
remembered_weak_callbacks_;
// Compressed slots are stored in slot-sets (per-page two-level bitmaps),
// whereas uncompressed slots are stored in a std::set.
std::set<void*> remembered_uncompressed_slots_;
std::set<void*> remembered_slots_for_verification_;
};
} // namespace internal
} // namespace cppgc
#endif // defined(CPPGC_YOUNG_GENERATION)
#endif // V8_HEAP_CPPGC_REMEMBERED_SET_H_

test/unittests/heap/cppgc/minor-gc-unittest.cc

@@ -13,6 +13,7 @@
#include "include/cppgc/internal/caged-heap-local-data.h"
#include "include/cppgc/persistent.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/heap.h"
#include "test/unittests/heap/cppgc/tests.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -87,6 +88,46 @@ void ExpectPageOld(BasePage& page) {
CagedHeap::OffsetFromAddress(page.PayloadEnd())));
}
class RememberedSetExtractor : HeapVisitor<RememberedSetExtractor> {
friend class HeapVisitor<RememberedSetExtractor>;
public:
static std::set<void*> Extract(cppgc::Heap* heap) {
RememberedSetExtractor extractor;
extractor.Traverse(Heap::From(heap)->raw_heap());
return std::move(extractor.slots_);
}
private:
void VisitPage(BasePage& page) {
auto* slot_set = page.slot_set();
if (!slot_set) return;
const uintptr_t page_start = reinterpret_cast<uintptr_t>(&page);
const size_t buckets_size = SlotSet::BucketsForSize(page.AllocatedSize());
slot_set->Iterate(
page_start, 0, buckets_size,
[this](SlotSet::Address slot) {
slots_.insert(reinterpret_cast<void*>(slot));
return heap::base::KEEP_SLOT;
},
SlotSet::EmptyBucketMode::FREE_EMPTY_BUCKETS);
}
bool VisitNormalPage(NormalPage& page) {
VisitPage(page);
return true;
}
bool VisitLargePage(LargePage& page) {
VisitPage(page);
return true;
}
std::set<void*> slots_;
};
} // namespace
class MinorGCTest : public testing::TestWithHeap {
@@ -114,10 +155,6 @@ class MinorGCTest : public testing::TestWithHeap {
Heap::From(GetHeap())->CollectGarbage(GCConfig::PreciseAtomicConfig());
}
const auto& RememberedSlots() const {
return Heap::From(GetHeap())->remembered_set().remembered_slots_;
}
const auto& RememberedSourceObjects() const {
return Heap::From(GetHeap())->remembered_set().remembered_source_objects_;
}
@@ -144,75 +181,72 @@ struct ExpectRememberedSlotsAdded final {
ExpectRememberedSlotsAdded(
const MinorGCTest& test,
std::initializer_list<void*> slots_expected_to_be_remembered)
: remembered_slots_(test.RememberedSlots()),
: test_(test),
slots_expected_to_be_remembered_(slots_expected_to_be_remembered),
initial_number_of_slots_(remembered_slots_.size()) {
initial_slots_(RememberedSetExtractor::Extract(test.GetHeap())) {
// Check that the remembered set doesn't contain specified slots.
EXPECT_FALSE(std::includes(remembered_slots_.begin(),
remembered_slots_.end(),
EXPECT_FALSE(std::includes(initial_slots_.begin(), initial_slots_.end(),
slots_expected_to_be_remembered_.begin(),
slots_expected_to_be_remembered_.end()));
}
~ExpectRememberedSlotsAdded() {
const size_t current_number_of_slots = remembered_slots_.size();
EXPECT_EQ(
initial_number_of_slots_ + slots_expected_to_be_remembered_.size(),
current_number_of_slots);
EXPECT_TRUE(std::includes(remembered_slots_.begin(),
remembered_slots_.end(),
const auto current_slots = RememberedSetExtractor::Extract(test_.GetHeap());
EXPECT_EQ(initial_slots_.size() + slots_expected_to_be_remembered_.size(),
current_slots.size());
EXPECT_TRUE(std::includes(current_slots.begin(), current_slots.end(),
slots_expected_to_be_remembered_.begin(),
slots_expected_to_be_remembered_.end()));
}
private:
const std::set<void*>& remembered_slots_;
const MinorGCTest& test_;
std::set<void*> slots_expected_to_be_remembered_;
const size_t initial_number_of_slots_ = 0;
std::set<void*> initial_slots_;
};
struct ExpectRememberedSlotsRemoved final {
ExpectRememberedSlotsRemoved(
const MinorGCTest& test,
std::initializer_list<void*> slots_expected_to_be_removed)
: remembered_slots_(test.RememberedSlots()),
: test_(test),
slots_expected_to_be_removed_(slots_expected_to_be_removed),
initial_number_of_slots_(remembered_slots_.size()) {
DCHECK_GE(initial_number_of_slots_, slots_expected_to_be_removed_.size());
initial_slots_(RememberedSetExtractor::Extract(test.GetHeap())) {
DCHECK_GE(initial_slots_.size(), slots_expected_to_be_removed_.size());
// Check that the remembered set does contain specified slots to be removed.
EXPECT_TRUE(std::includes(remembered_slots_.begin(),
remembered_slots_.end(),
EXPECT_TRUE(std::includes(initial_slots_.begin(), initial_slots_.end(),
slots_expected_to_be_removed_.begin(),
slots_expected_to_be_removed_.end()));
}
~ExpectRememberedSlotsRemoved() {
const size_t current_number_of_slots = remembered_slots_.size();
EXPECT_EQ(initial_number_of_slots_ - slots_expected_to_be_removed_.size(),
current_number_of_slots);
EXPECT_FALSE(std::includes(remembered_slots_.begin(),
remembered_slots_.end(),
const auto current_slots = RememberedSetExtractor::Extract(test_.GetHeap());
EXPECT_EQ(initial_slots_.size() - slots_expected_to_be_removed_.size(),
current_slots.size());
EXPECT_FALSE(std::includes(current_slots.begin(), current_slots.end(),
slots_expected_to_be_removed_.begin(),
slots_expected_to_be_removed_.end()));
}
private:
const std::set<void*>& remembered_slots_;
const MinorGCTest& test_;
std::set<void*> slots_expected_to_be_removed_;
const size_t initial_number_of_slots_ = 0;
std::set<void*> initial_slots_;
};
struct ExpectNoRememberedSlotsAdded final {
explicit ExpectNoRememberedSlotsAdded(const MinorGCTest& test)
: remembered_slots_(test.RememberedSlots()),
initial_remembered_slots_(remembered_slots_) {}
: test_(test),
initial_remembered_slots_(
RememberedSetExtractor::Extract(test.GetHeap())) {}
~ExpectNoRememberedSlotsAdded() {
EXPECT_EQ(initial_remembered_slots_, remembered_slots_);
EXPECT_EQ(initial_remembered_slots_,
RememberedSetExtractor::Extract(test_.GetHeap()));
}
private:
const std::set<void*>& remembered_slots_;
const MinorGCTest& test_;
std::set<void*> initial_remembered_slots_;
};
@@ -297,19 +331,23 @@ void InterGenerationalPointerTest(MinorGCTest* test, cppgc::Heap* heap) {
}
}
const auto& set = test->RememberedSlots();
auto set_size_before = set.size();
auto remembered_set_size_before_barrier =
RememberedSetExtractor::Extract(test->GetHeap()).size();
// Issue generational barrier.
old->next = young;
EXPECT_EQ(set_size_before + 1u, set.size());
auto remembered_set_size_after_barrier =
RememberedSetExtractor::Extract(test->GetHeap()).size();
EXPECT_EQ(remembered_set_size_before_barrier + 1u,
remembered_set_size_after_barrier);
// Check that the remembered set is visited.
test->CollectMinor();
EXPECT_EQ(0u, MinorGCTest::DestructedObjects());
EXPECT_TRUE(set.empty());
EXPECT_TRUE(RememberedSetExtractor::Extract(test->GetHeap()).empty());
for (size_t i = 0; i < 64; ++i) {
EXPECT_FALSE(HeapObjectHeader::FromObject(young).IsFree());
@@ -427,8 +465,8 @@ TEST_F(MinorGCTest, RememberedSetInvalidationOnShrink) {
auto* young = MakeGarbageCollected<Small>(GetAllocationHandle());
const auto& set = RememberedSlots();
const size_t set_size_before_barrier = set.size();
const size_t remembered_set_size_before_barrier =
RememberedSetExtractor::Extract(GetHeap()).size();
// Issue the generational barriers.
for (size_t i = kFirstMemberToInvalidate; i < kLastMemberToInvalidate; ++i) {
@@ -438,17 +476,23 @@ TEST_F(MinorGCTest, RememberedSetInvalidationOnShrink) {
get_member(i) = young;
}
const auto remembered_set_size_after_barrier =
RememberedSetExtractor::Extract(GetHeap()).size();
// Check that barriers hit (kLastMemberToInvalidate -
// kFirstMemberToInvalidate) times.
EXPECT_EQ(set_size_before_barrier +
EXPECT_EQ(remembered_set_size_before_barrier +
(kLastMemberToInvalidate - kFirstMemberToInvalidate),
set.size());
remembered_set_size_after_barrier);
// Shrink the buffer for old object.
subtle::Resize(*old, AdditionalBytes(kBytesToAllocate / 2));
const auto remembered_set_after_shrink =
RememberedSetExtractor::Extract(GetHeap()).size();
// Check that the reference was invalidated.
EXPECT_EQ(set_size_before_barrier, set.size());
EXPECT_EQ(remembered_set_size_before_barrier, remembered_set_after_shrink);
// Visiting remembered slots must not fail.
CollectMinor();

test/unittests/heap/cppgc/sweeper-unittest.cc

@@ -271,7 +271,16 @@ TEST_F(SweeperTest, UnmarkObjects) {
}
TEST_F(SweeperTest, LazySweepingDuringAllocation) {
using GCedObject = GCed<256>;
// The test allocates objects such that the object size including its header
// is a power of two. This makes sure that any padding at the end of the page
// goes to a different freelist bucket. To get that, the vptr and
// object-header sizes are subtracted from a power of two.
static constexpr size_t kGCObjectSize =
256 - sizeof(void*) - sizeof(HeapObjectHeader);
using GCedObject = GCed<kGCObjectSize>;
static_assert(v8::base::bits::IsPowerOfTwo(sizeof(GCedObject) +
sizeof(HeapObjectHeader)));
static const size_t kObjectsPerPage =
NormalPage::PayloadSize() /
(sizeof(GCedObject) + sizeof(HeapObjectHeader));