cppgc: Use references instead of pointers in HeapVisitor
Bug: v8:11822
Change-Id: I35f3b5ce71ab5f86a5d9991bb9d729a2fe56f6dd
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2919955
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74840}
Parent: 52d65418e8
Commit: f19e2e68c6
@@ -477,9 +477,9 @@ class LiveObjectsForVisibilityIterator final
       : graph_builder_(graph_builder) {}

  private:
-  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
-    if (header->IsFree()) return true;
-    graph_builder_.VisitForVisibility(nullptr, *header);
+  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
+    if (header.IsFree()) return true;
+    graph_builder_.VisitForVisibility(nullptr, header);
     graph_builder_.ProcessPendingObjects();
     return true;
   }
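This first hunk shows the mechanical pattern that repeats throughout the change: the visitor callback takes HeapObjectHeader& instead of HeapObjectHeader*, and every -> becomes a plain member access. Traverse never passes null here, so the reference encodes that invariant in the signature. A standalone micro-example of the before/after shape, using a toy header type rather than the real cppgc class:

struct HeapObjectHeader {
  bool IsFree() const { return free_; }
  bool free_ = false;
};

// Before: a pointer parameter; nothing in the signature rules out null.
bool VisitBefore(HeapObjectHeader* header) { return header->IsFree(); }

// After: a reference parameter; the non-null invariant lives in the type.
bool VisitAfter(HeapObjectHeader& header) { return header.IsFree(); }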
@@ -686,7 +686,7 @@ void CppGraphBuilderImpl::Run() {
   // First pass: Figure out which objects should be included in the graph -- see
   // class-level comment on CppGraphBuilder.
   LiveObjectsForVisibilityIterator visitor(*this);
-  visitor.Traverse(&cpp_heap_.raw_heap());
+  visitor.Traverse(cpp_heap_.raw_heap());
   // Second pass: Add graph nodes for objects that must be shown.
   states_.ForAllVisibleStates([this](StateBase* state) {
     ParentScope parent_scope(*state);
@@ -374,7 +374,7 @@ void CompactSpace(NormalPageSpace* space,
   using Pages = NormalPageSpace::Pages;

 #ifdef V8_USE_ADDRESS_SANITIZER
-  UnmarkedObjectsPoisoner().Traverse(space);
+  UnmarkedObjectsPoisoner().Traverse(*space);
 #endif  // V8_USE_ADDRESS_SANITIZER

   DCHECK(space->is_compactable());
@@ -29,18 +29,18 @@ class ObjectSizeCounter : private HeapVisitor<ObjectSizeCounter> {
   friend class HeapVisitor<ObjectSizeCounter>;

  public:
-  size_t GetSize(RawHeap* heap) {
+  size_t GetSize(RawHeap& heap) {
     Traverse(heap);
     return accumulated_size_;
   }

  private:
-  static size_t ObjectSize(const HeapObjectHeader* header) {
-    return ObjectView(*header).Size();
+  static size_t ObjectSize(const HeapObjectHeader& header) {
+    return ObjectView(header).Size();
   }

-  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
-    if (header->IsFree()) return true;
+  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
+    if (header.IsFree()) return true;
     accumulated_size_ += ObjectSize(header);
     return true;
   }
@@ -90,7 +90,7 @@ PageAllocator* HeapBase::page_allocator() const {
 }

 size_t HeapBase::ObjectPayloadSize() const {
-  return ObjectSizeCounter().GetSize(const_cast<RawHeap*>(&raw_heap()));
+  return ObjectSizeCounter().GetSize(const_cast<RawHeap&>(raw_heap()));
 }

 void HeapBase::AdvanceIncrementalGarbageCollectionOnAllocationIfNeeded() {
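The lone const_cast survives the refactor in reference form: ObjectPayloadSize() is const, so raw_heap() yields a const RawHeap&, while HeapVisitor::Traverse needs a mutable reference because the visitor carries state. A minimal sketch of the same pattern, with toy stand-ins for the real classes:

#include <cstddef>

struct RawHeap {};  // toy stand-in for cppgc's RawHeap

struct ObjectSizeCounter {
  // Non-const: the visitor mutates its accumulator while traversing.
  std::size_t GetSize(RawHeap&) { return 0; }
};

class HeapBase {
 public:
  const RawHeap& raw_heap() const { return raw_heap_; }

  std::size_t ObjectPayloadSize() const {
    // Logically const: the walk only reads the heap, but the visitor
    // entry point is non-const, hence the cast to RawHeap&.
    return ObjectSizeCounter().GetSize(const_cast<RawHeap&>(raw_heap()));
  }

 private:
  RawHeap raw_heap_;
};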
@@ -90,7 +90,7 @@ HeapStatistics HeapStatisticsCollector::CollectStatistics(HeapBase* heap) {
   stats.detail_level = HeapStatistics::DetailLevel::kDetailed;
   current_stats_ = &stats;

-  Traverse(&heap->raw_heap());
+  Traverse(heap->raw_heap());
   FinalizeSpace(current_stats_, &current_space_stats_, &current_page_stats_);

   DCHECK_EQ(heap->stats_collector()->allocated_memory_size(),
@@ -98,20 +98,20 @@ HeapStatistics HeapStatisticsCollector::CollectStatistics(HeapBase* heap) {
   return stats;
 }

-bool HeapStatisticsCollector::VisitNormalPageSpace(NormalPageSpace* space) {
-  DCHECK_EQ(0u, space->linear_allocation_buffer().size());
+bool HeapStatisticsCollector::VisitNormalPageSpace(NormalPageSpace& space) {
+  DCHECK_EQ(0u, space.linear_allocation_buffer().size());

   FinalizeSpace(current_stats_, &current_space_stats_, &current_page_stats_);

   current_space_stats_ =
-      InitializeSpace(current_stats_, GetNormalPageSpaceName(space->index()));
+      InitializeSpace(current_stats_, GetNormalPageSpaceName(space.index()));

-  space->free_list().CollectStatistics(current_space_stats_->free_list_stats);
+  space.free_list().CollectStatistics(current_space_stats_->free_list_stats);

   return false;
 }

-bool HeapStatisticsCollector::VisitLargePageSpace(LargePageSpace* space) {
+bool HeapStatisticsCollector::VisitLargePageSpace(LargePageSpace& space) {
   FinalizeSpace(current_stats_, &current_space_stats_, &current_page_stats_);

   current_space_stats_ = InitializeSpace(current_stats_, "LargePageSpace");
@@ -119,7 +119,7 @@ bool HeapStatisticsCollector::VisitLargePageSpace(LargePageSpace* space) {
   return false;
 }

-bool HeapStatisticsCollector::VisitNormalPage(NormalPage* page) {
+bool HeapStatisticsCollector::VisitNormalPage(NormalPage& page) {
   DCHECK_NOT_NULL(current_space_stats_);
   FinalizePage(current_space_stats_, &current_page_stats_);
   current_space_stats_->page_stats.emplace_back(
@@ -128,11 +128,11 @@ bool HeapStatisticsCollector::VisitNormalPage(NormalPage* page) {
   return false;
 }

-bool HeapStatisticsCollector::VisitLargePage(LargePage* page) {
+bool HeapStatisticsCollector::VisitLargePage(LargePage& page) {
   DCHECK_NOT_NULL(current_space_stats_);
   FinalizePage(current_space_stats_, &current_page_stats_);
-  HeapObjectHeader* object_header = page->ObjectHeader();
-  size_t object_size = page->PayloadSize();
+  HeapObjectHeader* object_header = page.ObjectHeader();
+  size_t object_size = page.PayloadSize();
   RecordObjectType(current_space_stats_, object_header, object_size);
   size_t allocated_size = LargePage::AllocationSize(object_size);
   current_space_stats_->physical_size_bytes += allocated_size;
@@ -143,13 +143,13 @@ bool HeapStatisticsCollector::VisitLargePage(LargePage* page) {
   return true;
 }

-bool HeapStatisticsCollector::VisitHeapObjectHeader(HeapObjectHeader* header) {
-  DCHECK(!header->IsLargeObject());
+bool HeapStatisticsCollector::VisitHeapObjectHeader(HeapObjectHeader& header) {
+  DCHECK(!header.IsLargeObject());
   DCHECK_NOT_NULL(current_space_stats_);
   DCHECK_NOT_NULL(current_page_stats_);
-  if (header->IsFree()) return true;
-  size_t object_size = header->AllocatedSize();
-  RecordObjectType(current_space_stats_, header, object_size);
+  if (header.IsFree()) return true;
+  size_t object_size = header.AllocatedSize();
+  RecordObjectType(current_space_stats_, &header, object_size);
   current_page_stats_->used_size_bytes += object_size;
   return true;
 }
@@ -18,11 +18,11 @@ class HeapStatisticsCollector : private HeapVisitor<HeapStatisticsCollector> {
   HeapStatistics CollectStatistics(HeapBase*);

  private:
-  bool VisitNormalPageSpace(NormalPageSpace*);
-  bool VisitLargePageSpace(LargePageSpace*);
-  bool VisitNormalPage(NormalPage*);
-  bool VisitLargePage(LargePage*);
-  bool VisitHeapObjectHeader(HeapObjectHeader*);
+  bool VisitNormalPageSpace(NormalPageSpace&);
+  bool VisitLargePageSpace(LargePageSpace&);
+  bool VisitNormalPage(NormalPage&);
+  bool VisitLargePage(LargePage&);
+  bool VisitHeapObjectHeader(HeapObjectHeader&);

   HeapStatistics* current_stats_;
   HeapStatistics::SpaceStatistics* current_space_stats_ = nullptr;
@@ -19,34 +19,34 @@ namespace internal {
 template <typename Derived>
 class HeapVisitor {
  public:
-  void Traverse(RawHeap* heap) {
+  void Traverse(RawHeap& heap) {
     if (VisitHeapImpl(heap)) return;
-    for (auto& space : *heap) {
-      Traverse(space.get());
+    for (auto& space : heap) {
+      Traverse(*space.get());
     }
   }

-  void Traverse(BaseSpace* space) {
+  void Traverse(BaseSpace& space) {
     const bool is_stopped =
-        space->is_large()
-            ? VisitLargePageSpaceImpl(&LargePageSpace::From(*space))
-            : VisitNormalPageSpaceImpl(&NormalPageSpace::From(*space));
+        space.is_large()
+            ? VisitLargePageSpaceImpl(LargePageSpace::From(space))
+            : VisitNormalPageSpaceImpl(NormalPageSpace::From(space));
     if (is_stopped) return;
-    for (auto* page : *space) {
-      Traverse(page);
+    for (auto* page : space) {
+      Traverse(*page);
     }
   }

-  void Traverse(BasePage* page) {
-    if (page->is_large()) {
-      auto* large_page = LargePage::From(page);
-      if (VisitLargePageImpl(large_page)) return;
-      VisitHeapObjectHeaderImpl(large_page->ObjectHeader());
+  void Traverse(BasePage& page) {
+    if (page.is_large()) {
+      auto* large_page = LargePage::From(&page);
+      if (VisitLargePageImpl(*large_page)) return;
+      VisitHeapObjectHeaderImpl(*large_page->ObjectHeader());
     } else {
-      auto* normal_page = NormalPage::From(page);
-      if (VisitNormalPageImpl(normal_page)) return;
+      auto* normal_page = NormalPage::From(&page);
+      if (VisitNormalPageImpl(*normal_page)) return;
       for (auto& header : *normal_page) {
-        VisitHeapObjectHeaderImpl(&header);
+        VisitHeapObjectHeaderImpl(header);
       }
     }
   }
@@ -54,31 +54,31 @@ class HeapVisitor {
  protected:
   // Visitor functions return true if no deeper processing is required.
   // Users are supposed to override functions that need special treatment.
-  bool VisitHeap(RawHeap*) { return false; }
-  bool VisitNormalPageSpace(NormalPageSpace*) { return false; }
-  bool VisitLargePageSpace(LargePageSpace*) { return false; }
-  bool VisitNormalPage(NormalPage*) { return false; }
-  bool VisitLargePage(LargePage*) { return false; }
-  bool VisitHeapObjectHeader(HeapObjectHeader*) { return false; }
+  bool VisitHeap(RawHeap&) { return false; }
+  bool VisitNormalPageSpace(NormalPageSpace&) { return false; }
+  bool VisitLargePageSpace(LargePageSpace&) { return false; }
+  bool VisitNormalPage(NormalPage&) { return false; }
+  bool VisitLargePage(LargePage&) { return false; }
+  bool VisitHeapObjectHeader(HeapObjectHeader&) { return false; }

  private:
   Derived& ToDerived() { return static_cast<Derived&>(*this); }

-  bool VisitHeapImpl(RawHeap* heap) { return ToDerived().VisitHeap(heap); }
-  bool VisitNormalPageSpaceImpl(NormalPageSpace* space) {
+  bool VisitHeapImpl(RawHeap& heap) { return ToDerived().VisitHeap(heap); }
+  bool VisitNormalPageSpaceImpl(NormalPageSpace& space) {
     return ToDerived().VisitNormalPageSpace(space);
   }
-  bool VisitLargePageSpaceImpl(LargePageSpace* space) {
+  bool VisitLargePageSpaceImpl(LargePageSpace& space) {
     return ToDerived().VisitLargePageSpace(space);
   }
-  bool VisitNormalPageImpl(NormalPage* page) {
+  bool VisitNormalPageImpl(NormalPage& page) {
     return ToDerived().VisitNormalPage(page);
   }
-  bool VisitLargePageImpl(LargePage* page) {
+  bool VisitLargePageImpl(LargePage& page) {
     return ToDerived().VisitLargePage(page);
   }
-  bool VisitHeapObjectHeaderImpl(HeapObjectHeader* hoh) {
-    return ToDerived().VisitHeapObjectHeader(hoh);
+  bool VisitHeapObjectHeaderImpl(HeapObjectHeader& header) {
+    return ToDerived().VisitHeapObjectHeader(header);
   }
 };
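HeapVisitor is a CRTP base: Traverse walks heap → spaces → pages → object headers, statically dispatching each step to the derived class's Visit* hooks, and a hook returning true cuts the traversal short. The following self-contained sketch reproduces that shape with the new reference-based signatures; the container types are toys and the inheritance is public for brevity (the real visitors inherit privately and befriend the base):

#include <cstddef>
#include <iostream>
#include <memory>
#include <vector>

// Toy stand-ins for RawHeap/BasePage/HeapObjectHeader; only the
// traversal and dispatch structure is modeled.
struct HeapObjectHeader { bool is_free = false; };
struct BasePage { std::vector<HeapObjectHeader> headers; };
struct RawHeap { std::vector<std::unique_ptr<BasePage>> pages; };

template <typename Derived>
class HeapVisitor {
 public:
  void Traverse(RawHeap& heap) {
    if (ToDerived().VisitHeap(heap)) return;
    for (auto& page : heap.pages) Traverse(*page);
  }
  void Traverse(BasePage& page) {
    if (ToDerived().VisitPage(page)) return;
    for (auto& header : page.headers) ToDerived().VisitHeapObjectHeader(header);
  }

 protected:
  // Defaults continue the traversal; derived classes override as needed.
  bool VisitHeap(RawHeap&) { return false; }
  bool VisitPage(BasePage&) { return false; }
  bool VisitHeapObjectHeader(HeapObjectHeader&) { return false; }

 private:
  Derived& ToDerived() { return static_cast<Derived&>(*this); }
};

// A derived visitor in the style of ObjectSizeCounter/Unmarker.
class LiveObjectCounter : public HeapVisitor<LiveObjectCounter> {
 public:
  std::size_t count = 0;
  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
    if (!header.is_free) ++count;
    return true;  // no deeper processing needed
  }
};

int main() {
  RawHeap heap;
  heap.pages.push_back(std::make_unique<BasePage>());
  heap.pages[0]->headers = {{false}, {true}, {false}};
  LiveObjectCounter counter;
  counter.Traverse(heap);
  std::cout << counter.count << "\n";  // prints 2
}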
@@ -62,11 +62,11 @@ class Unmarker final : private HeapVisitor<Unmarker> {
   friend class HeapVisitor<Unmarker>;

  public:
-  explicit Unmarker(RawHeap* heap) { Traverse(heap); }
+  explicit Unmarker(RawHeap& heap) { Traverse(heap); }

  private:
-  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
-    if (header->IsMarked()) header->Unmark();
+  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
+    if (header.IsMarked()) header.Unmark();
     return true;
   }
 };
@@ -157,7 +157,7 @@ void Heap::StartGarbageCollection(Config config) {

 #if defined(CPPGC_YOUNG_GENERATION)
   if (config.collection_type == Config::CollectionType::kMajor)
-    Unmarker unmarker(&raw_heap());
+    Unmarker unmarker(raw_heap());
 #endif

   const Marker::MarkingConfig marking_config{
@@ -24,7 +24,7 @@ MarkingVerifierBase::MarkingVerifierBase(
 void MarkingVerifierBase::Run(Heap::Config::StackState stack_state,
                               uintptr_t stack_end,
                               size_t expected_marked_bytes) {
-  Traverse(&heap_.raw_heap());
+  Traverse(heap_.raw_heap());
   if (stack_state == Heap::Config::StackState::kMayContainHeapPointers) {
     in_construction_objects_ = &in_construction_objects_stack_;
     heap_.stack()->IteratePointersUnsafe(this, stack_end);
@@ -87,22 +87,22 @@ void MarkingVerifierBase::VisitPointer(const void* address) {
   TraceConservativelyIfNeeded(address);
 }

-bool MarkingVerifierBase::VisitHeapObjectHeader(HeapObjectHeader* header) {
+bool MarkingVerifierBase::VisitHeapObjectHeader(HeapObjectHeader& header) {
   // Verify only non-free marked objects.
-  if (!header->IsMarked()) return true;
+  if (!header.IsMarked()) return true;

-  DCHECK(!header->IsFree());
+  DCHECK(!header.IsFree());

-  verification_state_.SetCurrentParent(header);
+  verification_state_.SetCurrentParent(&header);

-  if (!header->IsInConstruction()) {
-    header->Trace(visitor_.get());
+  if (!header.IsInConstruction()) {
+    header.Trace(visitor_.get());
   } else {
     // Dispatches to conservative tracing implementation.
-    TraceConservativelyIfNeeded(*header);
+    TraceConservativelyIfNeeded(header);
   }

-  found_marked_bytes_ += ObjectView(*header).Size() + sizeof(HeapObjectHeader);
+  found_marked_bytes_ += ObjectView(header).Size() + sizeof(HeapObjectHeader);

   verification_state_.SetCurrentParent(nullptr);
@@ -51,7 +51,7 @@ class V8_EXPORT_PRIVATE MarkingVerifierBase
                            TraceConservativelyCallback) final;
   void VisitPointer(const void*) final;

-  bool VisitHeapObjectHeader(HeapObjectHeader*);
+  bool VisitHeapObjectHeader(HeapObjectHeader&);

   VerificationState& verification_state_;
   std::unique_ptr<cppgc::Visitor> visitor_;
@@ -188,10 +188,10 @@ void ObjectAllocator::ResetLinearAllocationBuffers() {
    public:
     explicit Resetter(StatsCollector* stats) : stats_collector_(stats) {}

-    bool VisitLargePageSpace(LargePageSpace*) { return true; }
+    bool VisitLargePageSpace(LargePageSpace&) { return true; }

-    bool VisitNormalPageSpace(NormalPageSpace* space) {
-      ReplaceLinearAllocationBuffer(*space, *stats_collector_, nullptr, 0);
+    bool VisitNormalPageSpace(NormalPageSpace& space) {
+      ReplaceLinearAllocationBuffer(space, *stats_collector_, nullptr, 0);
       return true;
     }
@@ -199,7 +199,7 @@ void ObjectAllocator::ResetLinearAllocationBuffers() {
     StatsCollector* stats_collector_;
   } visitor(stats_collector_);

-  visitor.Traverse(raw_heap_);
+  visitor.Traverse(*raw_heap_);
 }

 void ObjectAllocator::Terminate() {
@@ -9,6 +9,7 @@
 #include "src/heap/cppgc/heap-object-header.h"
 #include "src/heap/cppgc/heap-page.h"
 #include "src/heap/cppgc/heap-visitor.h"
+#include "src/heap/cppgc/object-view.h"

 namespace cppgc {
 namespace internal {
@@ -20,14 +21,10 @@ class UnmarkedObjectsPoisoner : public HeapVisitor<UnmarkedObjectsPoisoner> {
   friend class HeapVisitor<UnmarkedObjectsPoisoner>;

  private:
-  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
-    if (header->IsFree() || header->IsMarked()) return true;
+  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
+    if (header.IsFree() || header.IsMarked()) return true;

-    const size_t size =
-        header->IsLargeObject()
-            ? LargePage::From(BasePage::FromPayload(header))->ObjectSize()
-            : header->ObjectSize();
-    ASAN_POISON_MEMORY_REGION(header->ObjectStart(), size);
+    ASAN_POISON_MEMORY_REGION(header.ObjectStart(), ObjectView(header).Size());
     return true;
   }
 };
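Besides the pointer-to-reference switch, this hunk replaces the open-coded size computation with ObjectView, which centralizes the "large page payload vs. in-header object size" distinction (hence the object-view.h include added above). A toy version of such a helper, as an illustration only rather than the real cppgc ObjectView:

#include <cstddef>

// Toy header: the real one derives the large-object size from its page.
struct HeapObjectHeader {
  bool is_large = false;
  std::size_t object_size = 0;         // meaningful for normal objects
  std::size_t large_payload_size = 0;  // stand-in for LargePage::ObjectSize()
};

// One place that knows how to size both normal and large objects.
class ObjectView {
 public:
  explicit ObjectView(const HeapObjectHeader& header) : header_(header) {}
  std::size_t Size() const {
    return header_.is_large ? header_.large_payload_size : header_.object_size;
  }

 private:
  const HeapObjectHeader& header_;
};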
@@ -37,25 +37,25 @@ class ObjectStartBitmapVerifier
   friend class HeapVisitor<ObjectStartBitmapVerifier>;

  public:
-  void Verify(RawHeap* heap) { Traverse(heap); }
+  void Verify(RawHeap& heap) { Traverse(heap); }

  private:
-  bool VisitNormalPage(NormalPage* page) {
+  bool VisitNormalPage(NormalPage& page) {
     // Remember bitmap and reset previous pointer.
-    bitmap_ = &page->object_start_bitmap();
+    bitmap_ = &page.object_start_bitmap();
     prev_ = nullptr;
     return false;
   }

-  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
-    if (header->IsLargeObject()) return true;
+  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
+    if (header.IsLargeObject()) return true;

-    auto* raw_header = reinterpret_cast<ConstAddress>(header);
+    auto* raw_header = reinterpret_cast<ConstAddress>(&header);
     CHECK(bitmap_->CheckBit(raw_header));
     if (prev_) {
       CHECK_EQ(prev_, bitmap_->FindHeader(raw_header - 1));
     }
-    prev_ = header;
+    prev_ = &header;
     return true;
   }
@@ -337,12 +337,12 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
   void Sweep() {
     for (SpaceState& state : *states_) {
       while (auto page = state.unswept_pages.Pop()) {
-        SweepPage(*page);
+        SweepPage(**page);
       }
     }
   }

-  void SweepPage(BasePage* page) { Traverse(page); }
+  void SweepPage(BasePage& page) { Traverse(page); }

   bool SweepWithDeadline(double deadline_in_seconds) {
     DCHECK(platform_);
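The new double dereference SweepPage(**page) falls out of the Pop() return type: one * unwraps the optional-like wrapper to a BasePage*, the second turns that pointer into the BasePage& the reference-taking SweepPage now expects. A sketch of the call shape, assuming an std::optional-style Pop(); the real container is a cppgc-internal work list:

#include <optional>
#include <vector>

struct BasePage {};

// Stand-in for the sweeper's unswept-page list: Pop() returns an
// optional pointer that is empty once the list is drained.
class PageQueue {
 public:
  void Push(BasePage* page) { pages_.push_back(page); }
  std::optional<BasePage*> Pop() {
    if (pages_.empty()) return std::nullopt;
    BasePage* page = pages_.back();
    pages_.pop_back();
    return page;
  }

 private:
  std::vector<BasePage*> pages_;
};

void SweepPage(BasePage&) { /* Traverse(page); */ }

void Sweep(PageQueue& queue) {
  while (auto page = queue.Pop()) {  // loop ends when Pop() is empty
    // *page unwraps the optional to BasePage*; the second * yields the
    // BasePage& that the reference-taking SweepPage expects.
    SweepPage(**page);
  }
}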
@@ -378,7 +378,7 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
     static constexpr size_t kDeadlineCheckInterval = 8;
     size_t page_count = 1;
     while (auto page = state->unswept_pages.Pop()) {
-      Traverse(*page);
+      Traverse(**page);
       if (page_count % kDeadlineCheckInterval == 0 &&
           deadline_in_seconds <= platform_->MonotonicallyIncreasingTime()) {
         return false;
@@ -389,27 +389,27 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
     return true;
   }

-  bool VisitNormalPage(NormalPage* page) {
+  bool VisitNormalPage(NormalPage& page) {
     const InlinedFinalizationBuilder::ResultType result =
-        SweepNormalPage<InlinedFinalizationBuilder>(page);
+        SweepNormalPage<InlinedFinalizationBuilder>(&page);
     if (result.is_empty) {
-      NormalPage::Destroy(page);
+      NormalPage::Destroy(&page);
     } else {
-      page->space().AddPage(page);
+      page.space().AddPage(&page);
       largest_new_free_list_entry_ = std::max(
           result.largest_new_free_list_entry, largest_new_free_list_entry_);
     }
     return true;
   }

-  bool VisitLargePage(LargePage* page) {
-    HeapObjectHeader* header = page->ObjectHeader();
+  bool VisitLargePage(LargePage& page) {
+    HeapObjectHeader* header = page.ObjectHeader();
     if (header->IsMarked()) {
       StickyUnmark(header);
-      page->space().AddPage(page);
+      page.space().AddPage(&page);
     } else {
       header->Finalize();
-      LargePage::Destroy(page);
+      LargePage::Destroy(&page);
     }
     return true;
   }
@@ -433,7 +433,7 @@ class ConcurrentSweepTask final : public cppgc::JobTask,

     for (SpaceState& state : *states_) {
       while (auto page = state.unswept_pages.Pop()) {
-        Traverse(*page);
+        Traverse(**page);
         if (delegate->ShouldYield()) return;
       }
     }
@@ -445,32 +445,32 @@ class ConcurrentSweepTask final : public cppgc::JobTask,
   }

  private:
-  bool VisitNormalPage(NormalPage* page) {
+  bool VisitNormalPage(NormalPage& page) {
     SpaceState::SweptPageState sweep_result =
-        SweepNormalPage<DeferredFinalizationBuilder>(page);
-    const size_t space_index = page->space().index();
+        SweepNormalPage<DeferredFinalizationBuilder>(&page);
+    const size_t space_index = page.space().index();
     DCHECK_GT(states_->size(), space_index);
     SpaceState& space_state = (*states_)[space_index];
     space_state.swept_unfinalized_pages.Push(std::move(sweep_result));
     return true;
   }

-  bool VisitLargePage(LargePage* page) {
-    HeapObjectHeader* header = page->ObjectHeader();
+  bool VisitLargePage(LargePage& page) {
+    HeapObjectHeader* header = page.ObjectHeader();
     if (header->IsMarked()) {
       StickyUnmark(header);
-      page->space().AddPage(page);
+      page.space().AddPage(&page);
       return true;
     }
     if (!header->IsFinalizable()) {
-      LargePage::Destroy(page);
+      LargePage::Destroy(&page);
       return true;
     }
-    const size_t space_index = page->space().index();
+    const size_t space_index = page.space().index();
     DCHECK_GT(states_->size(), space_index);
     SpaceState& state = (*states_)[space_index];
     state.swept_unfinalized_pages.Push(
-        {page, {page->ObjectHeader()}, {}, {}, true});
+        {&page, {page.ObjectHeader()}, {}, {}, true});
     return true;
   }
@@ -493,12 +493,12 @@ class PrepareForSweepVisitor final
       : states_(states),
         compactable_space_handling_(compactable_space_handling) {}

-  bool VisitNormalPageSpace(NormalPageSpace* space) {
+  bool VisitNormalPageSpace(NormalPageSpace& space) {
     if ((compactable_space_handling_ == CompactableSpaceHandling::kIgnore) &&
-        space->is_compactable())
+        space.is_compactable())
       return true;
-    DCHECK(!space->linear_allocation_buffer().size());
-    space->free_list().Clear();
+    DCHECK(!space.linear_allocation_buffer().size());
+    space.free_list().Clear();
 #ifdef V8_USE_ADDRESS_SANITIZER
     UnmarkedObjectsPoisoner().Traverse(space);
 #endif  // V8_USE_ADDRESS_SANITIZER
@@ -506,7 +506,7 @@ class PrepareForSweepVisitor final
     return true;
   }

-  bool VisitLargePageSpace(LargePageSpace* space) {
+  bool VisitLargePageSpace(LargePageSpace& space) {
 #ifdef V8_USE_ADDRESS_SANITIZER
     UnmarkedObjectsPoisoner().Traverse(space);
 #endif  // V8_USE_ADDRESS_SANITIZER
@@ -515,10 +515,10 @@ class PrepareForSweepVisitor final
   }

  private:
-  void ExtractPages(BaseSpace* space) {
-    BaseSpace::Pages space_pages = space->RemoveAllPages();
-    (*states_)[space->index()].unswept_pages.Insert(space_pages.begin(),
-                                                    space_pages.end());
+  void ExtractPages(BaseSpace& space) {
+    BaseSpace::Pages space_pages = space.RemoveAllPages();
+    (*states_)[space.index()].unswept_pages.Insert(space_pages.begin(),
+                                                   space_pages.end());
   }

   SpaceStates* states_;
@@ -543,10 +543,10 @@ class Sweeper::SweeperImpl final {
     platform_ = platform;
 #if DEBUG
     // Verify bitmap for all spaces regardless of |compactable_space_handling|.
-    ObjectStartBitmapVerifier().Verify(&heap_);
+    ObjectStartBitmapVerifier().Verify(heap_);
 #endif
     PrepareForSweepVisitor(&space_states_, config.compactable_space_handling)
-        .Traverse(&heap_);
+        .Traverse(heap_);

     if (config.sweeping_type == SweepingConfig::SweepingType::kAtomic) {
       Finish();
@@ -587,7 +587,7 @@ class Sweeper::SweeperImpl final {
     // unswept page. This also helps out the concurrent sweeper.
     MutatorThreadSweeper sweeper(&space_states_, platform_);
     while (auto page = space_state.unswept_pages.Pop()) {
-      sweeper.SweepPage(*page);
+      sweeper.SweepPage(**page);
       if (size <= sweeper.largest_new_free_list_entry()) return true;
     }
   }
@@ -200,18 +200,18 @@ class ObjectSizeCounter final : private HeapVisitor<ObjectSizeCounter> {
   friend class HeapVisitor<ObjectSizeCounter>;

  public:
-  size_t GetSize(RawHeap* heap) {
+  size_t GetSize(RawHeap& heap) {
     Traverse(heap);
     return accumulated_size_;
   }

  private:
-  static size_t ObjectSize(const HeapObjectHeader* header) {
-    return ObjectView(*header).Size();
+  static size_t ObjectSize(const HeapObjectHeader& header) {
+    return ObjectView(header).Size();
   }

-  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
-    if (header->IsFree()) return true;
+  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
+    if (header.IsFree()) return true;
     accumulated_size_ += ObjectSize(header);
     return true;
   }
@@ -226,7 +226,7 @@ TEST_F(WorkloadsTest, BasicFunctionality) {
                 "Allocation granularity is expected to be a multiple of 4");
   Heap* heap = internal::Heap::From(GetHeap());
   size_t initial_object_payload_size =
-      ObjectSizeCounter().GetSize(&heap->raw_heap());
+      ObjectSizeCounter().GetSize(heap->raw_heap());
   {
     // When the test starts there may already have been leaked some memory
     // on the heap, so we establish a base line.
@@ -248,7 +248,7 @@ TEST_F(WorkloadsTest, BasicFunctionality) {
   size_t total = 96;

   EXPECT_EQ(base_level + total,
-            ObjectSizeCounter().GetSize(&heap->raw_heap()));
+            ObjectSizeCounter().GetSize(heap->raw_heap()));
   if (test_pages_allocated) {
     EXPECT_EQ(kPageSize * 2,
               heap->stats_collector()->allocated_memory_size());
@@ -269,7 +269,7 @@ TEST_F(WorkloadsTest, BasicFunctionality) {

   PreciseGC();
   size_t total = 0;
-  size_t base_level = ObjectSizeCounter().GetSize(&heap->raw_heap());
+  size_t base_level = ObjectSizeCounter().GetSize(heap->raw_heap());
   bool test_pages_allocated = !base_level;
   if (test_pages_allocated) {
     EXPECT_EQ(0ul, heap->stats_collector()->allocated_memory_size());
@@ -292,7 +292,7 @@ TEST_F(WorkloadsTest, BasicFunctionality) {
   // The allocations in the loop may trigger GC with lazy sweeping.
   heap->sweeper().FinishIfRunning();
   EXPECT_EQ(base_level + total,
-            ObjectSizeCounter().GetSize(&heap->raw_heap()));
+            ObjectSizeCounter().GetSize(heap->raw_heap()));
   if (test_pages_allocated) {
     EXPECT_EQ(0ul, heap->stats_collector()->allocated_memory_size() &
                        (kPageSize - 1));
@@ -310,7 +310,7 @@ TEST_F(WorkloadsTest, BasicFunctionality) {

   total += 96;
   EXPECT_EQ(base_level + total,
-            ObjectSizeCounter().GetSize(&heap->raw_heap()));
+            ObjectSizeCounter().GetSize(heap->raw_heap()));
   if (test_pages_allocated) {
     EXPECT_EQ(0ul, heap->stats_collector()->allocated_memory_size() &
                        (kPageSize - 1));
@@ -329,13 +329,13 @@ TEST_F(WorkloadsTest, BasicFunctionality) {
   PreciseGC();

   total -= big;
-  EXPECT_EQ(base_level + total, ObjectSizeCounter().GetSize(&heap->raw_heap()));
+  EXPECT_EQ(base_level + total, ObjectSizeCounter().GetSize(heap->raw_heap()));
   if (test_pages_allocated) {
     EXPECT_EQ(0ul, heap->stats_collector()->allocated_memory_size() &
                        (kPageSize - 1));
   }

-  EXPECT_EQ(base_level + total, ObjectSizeCounter().GetSize(&heap->raw_heap()));
+  EXPECT_EQ(base_level + total, ObjectSizeCounter().GetSize(heap->raw_heap()));
   if (test_pages_allocated) {
     EXPECT_EQ(0ul, heap->stats_collector()->allocated_memory_size() &
                        (kPageSize - 1));