From b0c70710a4fc95838c80c3f7827b09ed58f9e899 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Dominik=20Inf=C3=BChr?=
Date: Tue, 1 Jun 2021 20:46:23 +0200
Subject: [PATCH] [heap] Remove unused LocalSpace class
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

LocalSpace was introduced for off-heap spaces with concurrent bytecode
compilation finalization. However, finalization ended up using LocalHeap
for concurrent allocations. LocalSpace is therefore unused and can be
removed.

This CL removes LocalSpace and renames all mentions of local space to
compaction space. Compaction space was the only local space left.

Change-Id: I12a8a2724f777a77ddb9957fe2d8e89febfebbaf
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2930169
Reviewed-by: Michael Lippautz
Commit-Queue: Dominik Inführ
Cr-Commit-Position: refs/heads/master@{#74914}
---
 src/common/globals.h                   |  5 +--
 src/heap/local-allocator.h             | 10 +++--
 src/heap/mark-compact.cc               |  7 +--
 src/heap/paged-spaces.cc               | 19 ++++----
 src/heap/paged-spaces.h                | 62 ++++++++++----------------
 src/heap/scavenger.cc                  |  2 +-
 test/unittests/heap/spaces-unittest.cc |  4 +-
 7 files changed, 45 insertions(+), 64 deletions(-)

diff --git a/src/common/globals.h b/src/common/globals.h
index 1d1f317be2..59e82add97 100644
--- a/src/common/globals.h
+++ b/src/common/globals.h
@@ -848,14 +848,11 @@ enum MinimumCapacity {
 
 enum GarbageCollector { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };
 
-enum class LocalSpaceKind {
+enum class CompactionSpaceKind {
   kNone,
   kCompactionSpaceForScavenge,
   kCompactionSpaceForMarkCompact,
   kCompactionSpaceForMinorMarkCompact,
-
-  kFirstCompactionSpace = kCompactionSpaceForScavenge,
-  kLastCompactionSpace = kCompactionSpaceForMinorMarkCompact,
 };
 
 enum Executability { NOT_EXECUTABLE, EXECUTABLE };
diff --git a/src/heap/local-allocator.h b/src/heap/local-allocator.h
index e64932b9fe..2b6841ecd6 100644
--- a/src/heap/local-allocator.h
+++ b/src/heap/local-allocator.h
@@ -21,18 +21,20 @@ class EvacuationAllocator {
   static const int kLabSize = 32 * KB;
   static const int kMaxLabObjectSize = 8 * KB;
 
-  explicit EvacuationAllocator(Heap* heap, LocalSpaceKind local_space_kind)
+  explicit EvacuationAllocator(Heap* heap,
+                               CompactionSpaceKind compaction_space_kind)
       : heap_(heap),
         new_space_(heap->new_space()),
-        compaction_spaces_(heap, local_space_kind),
+        compaction_spaces_(heap, compaction_space_kind),
         new_space_lab_(LocalAllocationBuffer::InvalidBuffer()),
         lab_allocation_will_fail_(false) {}
 
   // Needs to be called from the main thread to finalize this
   // EvacuationAllocator.
   void Finalize() {
-    heap_->old_space()->MergeLocalSpace(compaction_spaces_.Get(OLD_SPACE));
-    heap_->code_space()->MergeLocalSpace(compaction_spaces_.Get(CODE_SPACE));
+    heap_->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE));
+    heap_->code_space()->MergeCompactionSpace(
+        compaction_spaces_.Get(CODE_SPACE));
     // Give back remaining LAB space if this EvacuationAllocator's new space LAB
     // sits right next to new space allocation top.
     const LinearAllocationArea info = new_space_lab_.CloseAndMakeIterable();
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index a44b180850..9605c9b2c4 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -3091,7 +3091,8 @@ class FullEvacuator : public Evacuator {
       : Evacuator(collector->heap(), &record_visitor_, &local_allocator_,
                   FLAG_always_promote_young_mc),
         record_visitor_(collector, &ephemeron_remembered_set_),
-        local_allocator_(heap_, LocalSpaceKind::kCompactionSpaceForMarkCompact),
+        local_allocator_(heap_,
+                         CompactionSpaceKind::kCompactionSpaceForMarkCompact),
         collector_(collector) {}
 
   GCTracer::Scope::ScopeId GetBackgroundTracingScope() override {
@@ -5163,8 +5164,8 @@ class YoungGenerationEvacuator : public Evacuator {
       : Evacuator(collector->heap(), &record_visitor_, &local_allocator_,
                   false),
         record_visitor_(collector->heap()->mark_compact_collector()),
-        local_allocator_(heap_,
-                         LocalSpaceKind::kCompactionSpaceForMinorMarkCompact),
+        local_allocator_(
+            heap_, CompactionSpaceKind::kCompactionSpaceForMinorMarkCompact),
         collector_(collector) {}
 
   GCTracer::Scope::ScopeId GetBackgroundTracingScope() override {
diff --git a/src/heap/paged-spaces.cc b/src/heap/paged-spaces.cc
index 1546d1f4c2..a869862b5c 100644
--- a/src/heap/paged-spaces.cc
+++ b/src/heap/paged-spaces.cc
@@ -81,10 +81,10 @@ Page* PagedSpace::InitializePage(MemoryChunk* chunk) {
 
 PagedSpace::PagedSpace(Heap* heap, AllocationSpace space,
                        Executability executable, FreeList* free_list,
-                       LocalSpaceKind local_space_kind)
+                       CompactionSpaceKind compaction_space_kind)
     : SpaceWithLinearArea(heap, space, free_list),
      executable_(executable),
-      local_space_kind_(local_space_kind) {
+      compaction_space_kind_(compaction_space_kind) {
   area_size_ = MemoryChunkLayout::AllocatableMemoryInMemoryChunk(space);
   accounting_stats_.Clear();
 }
@@ -105,7 +105,6 @@ void PagedSpace::RefillFreeList() {
       identity() != MAP_SPACE) {
     return;
   }
-  DCHECK_IMPLIES(is_local_space(), is_compaction_space());
   MarkCompactCollector* collector = heap()->mark_compact_collector();
   size_t added = 0;
 
@@ -123,7 +122,8 @@ void PagedSpace::RefillFreeList() {
         // Also merge old-to-new remembered sets if not scavenging because of
         // data races: One thread might iterate remembered set, while another
         // thread merges them.
-        if (local_space_kind() != LocalSpaceKind::kCompactionSpaceForScavenge) {
+        if (compaction_space_kind() !=
+            CompactionSpaceKind::kCompactionSpaceForScavenge) {
           p->MergeOldToNewRememberedSets();
         }
 
@@ -150,7 +150,7 @@ void PagedSpace::RefillFreeList() {
   }
 }
 
-void PagedSpace::MergeLocalSpace(LocalSpace* other) {
+void PagedSpace::MergeCompactionSpace(CompactionSpace* other) {
   base::MutexGuard guard(mutex());
 
   DCHECK(identity() == other->identity());
@@ -555,7 +555,7 @@ bool PagedSpace::TryAllocationFromFreeListMain(size_t size_in_bytes,
 base::Optional<std::pair<Address, size_t>> PagedSpace::RawRefillLabBackground(
     LocalHeap* local_heap, size_t min_size_in_bytes, size_t max_size_in_bytes,
     AllocationAlignment alignment, AllocationOrigin origin) {
-  DCHECK(!is_local_space() && identity() == OLD_SPACE);
+  DCHECK(!is_compaction_space() && identity() == OLD_SPACE);
   DCHECK_EQ(origin, AllocationOrigin::kRuntime);
 
   auto result = TryAllocationFromFreeListBackground(
@@ -840,7 +840,7 @@ bool PagedSpace::RefillLabMain(int size_in_bytes, AllocationOrigin origin) {
   return RawRefillLabMain(size_in_bytes, origin);
 }
 
-Page* LocalSpace::Expand() {
+Page* CompactionSpace::Expand() {
   Page* page = PagedSpace::Expand();
   new_pages_.push_back(page);
   return page;
@@ -864,9 +864,6 @@ bool PagedSpace::TryExpand(int size_in_bytes, AllocationOrigin origin) {
 }
 
 bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
-  // Non-compaction local spaces are not supported.
-  DCHECK_IMPLIES(is_local_space(), is_compaction_space());
-
   // Allocation in this space has failed.
   DCHECK_GE(size_in_bytes, 0);
   const int kMaxPagesToSweep = 1;
@@ -946,7 +943,7 @@ bool PagedSpace::ContributeToSweepingMain(int required_freed_bytes,
 AllocationResult PagedSpace::AllocateRawSlow(int size_in_bytes,
                                              AllocationAlignment alignment,
                                              AllocationOrigin origin) {
-  if (!is_local_space()) {
+  if (!is_compaction_space()) {
     // Start incremental marking before the actual allocation, this allows the
     // allocation function to mark the object black when incremental marking is
     // running.
diff --git a/src/heap/paged-spaces.h b/src/heap/paged-spaces.h
index 329954f1c9..4e5d80454d 100644
--- a/src/heap/paged-spaces.h
+++ b/src/heap/paged-spaces.h
@@ -21,10 +21,10 @@ namespace v8 {
 namespace internal {
 
+class CompactionSpace;
 class Heap;
 class HeapObject;
 class Isolate;
-class LocalSpace;
 class ObjectVisitor;
 
 // -----------------------------------------------------------------------------
@@ -71,9 +71,10 @@ class V8_EXPORT_PRIVATE PagedSpace
   static const size_t kCompactionMemoryWanted = 500 * KB;
 
   // Creates a space with an id.
-  PagedSpace(Heap* heap, AllocationSpace id, Executability executable,
-             FreeList* free_list,
-             LocalSpaceKind local_space_kind = LocalSpaceKind::kNone);
+  PagedSpace(
+      Heap* heap, AllocationSpace id, Executability executable,
+      FreeList* free_list,
+      CompactionSpaceKind compaction_space_kind = CompactionSpaceKind::kNone);
 
   ~PagedSpace() override { TearDown(); }
 
@@ -256,19 +257,15 @@ class V8_EXPORT_PRIVATE PagedSpace
   // Return size of allocatable area on a page in this space.
   inline int AreaSize() { return static_cast<int>(area_size_); }
 
-  bool is_local_space() { return local_space_kind_ != LocalSpaceKind::kNone; }
-
   bool is_compaction_space() {
-    return base::IsInRange(local_space_kind_,
-                           LocalSpaceKind::kFirstCompactionSpace,
-                           LocalSpaceKind::kLastCompactionSpace);
+    return compaction_space_kind_ != CompactionSpaceKind::kNone;
   }
 
-  LocalSpaceKind local_space_kind() { return local_space_kind_; }
+  CompactionSpaceKind compaction_space_kind() { return compaction_space_kind_; }
 
   // Merges {other} into the current space. Note that this modifies {other},
   // e.g., removes its bump pointer area and resets statistics.
-  void MergeLocalSpace(LocalSpace* other);
+  void MergeCompactionSpace(CompactionSpace* other);
 
   // Refills the free list from the corresponding free list filled by the
   // sweeper.
@@ -324,13 +321,13 @@ class V8_EXPORT_PRIVATE PagedSpace
     base::Optional<base::MutexGuard> guard_;
   };
 
-  bool SupportsConcurrentAllocation() { return !is_local_space(); }
+  bool SupportsConcurrentAllocation() { return !is_compaction_space(); }
 
   // Set space linear allocation area.
   void SetTopAndLimit(Address top, Address limit);
   void DecreaseLimit(Address new_limit);
   void UpdateInlineAllocationLimit(size_t min_size) override;
-  bool SupportsAllocationObserver() override { return !is_local_space(); }
+  bool SupportsAllocationObserver() override { return !is_compaction_space(); }
 
   // Slow path of allocation function
   V8_WARN_UNUSED_RESULT AllocationResult
@@ -407,7 +404,7 @@ class V8_EXPORT_PRIVATE PagedSpace
 
   Executability executable_;
 
-  LocalSpaceKind local_space_kind_;
+  CompactionSpaceKind compaction_space_kind_;
 
   size_t area_size_;
 
@@ -430,20 +427,23 @@ class V8_EXPORT_PRIVATE PagedSpace
 };
 
 // -----------------------------------------------------------------------------
-// Base class for compaction space and off-thread space.
+// Compaction space that is used temporarily during compaction.
 
-class V8_EXPORT_PRIVATE LocalSpace : public PagedSpace {
+class V8_EXPORT_PRIVATE CompactionSpace : public PagedSpace {
  public:
-  LocalSpace(Heap* heap, AllocationSpace id, Executability executable,
-             LocalSpaceKind local_space_kind)
+  CompactionSpace(Heap* heap, AllocationSpace id, Executability executable,
+                  CompactionSpaceKind compaction_space_kind)
       : PagedSpace(heap, id, executable, FreeList::CreateFreeList(),
-                   local_space_kind) {
-    DCHECK_NE(local_space_kind, LocalSpaceKind::kNone);
+                   compaction_space_kind) {
+    DCHECK(is_compaction_space());
   }
 
   const std::vector<Page*>& GetNewPages() { return new_pages_; }
 
  protected:
+  V8_WARN_UNUSED_RESULT bool RefillLabMain(int size_in_bytes,
+                                           AllocationOrigin origin) override;
+
   Page* Expand() override;
 
   // The space is temporary and not included in any snapshots.
   bool snapshotable() override { return false; }
@@ -452,31 +452,15 @@ class V8_EXPORT_PRIVATE LocalSpace : public PagedSpace {
   std::vector<Page*> new_pages_;
 };
 
-// -----------------------------------------------------------------------------
-// Compaction space that is used temporarily during compaction.
-
-class V8_EXPORT_PRIVATE CompactionSpace : public LocalSpace {
- public:
-  CompactionSpace(Heap* heap, AllocationSpace id, Executability executable,
-                  LocalSpaceKind local_space_kind)
-      : LocalSpace(heap, id, executable, local_space_kind) {
-    DCHECK(is_compaction_space());
-  }
-
- protected:
-  V8_WARN_UNUSED_RESULT bool RefillLabMain(int size_in_bytes,
-                                           AllocationOrigin origin) override;
-};
-
 // A collection of |CompactionSpace|s used by a single compaction task.
 class CompactionSpaceCollection : public Malloced {
  public:
   explicit CompactionSpaceCollection(Heap* heap,
-                                     LocalSpaceKind local_space_kind)
+                                     CompactionSpaceKind compaction_space_kind)
       : old_space_(heap, OLD_SPACE, Executability::NOT_EXECUTABLE,
-                   local_space_kind),
+                   compaction_space_kind),
         code_space_(heap, CODE_SPACE, Executability::EXECUTABLE,
-                    local_space_kind) {}
+                    compaction_space_kind) {}
 
   CompactionSpace* Get(AllocationSpace space) {
     switch (space) {
diff --git a/src/heap/scavenger.cc b/src/heap/scavenger.cc
index efa3ed2f61..953a245603 100644
--- a/src/heap/scavenger.cc
+++ b/src/heap/scavenger.cc
@@ -529,7 +529,7 @@ Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
       local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
       copied_size_(0),
       promoted_size_(0),
-      allocator_(heap, LocalSpaceKind::kCompactionSpaceForScavenge),
+      allocator_(heap, CompactionSpaceKind::kCompactionSpaceForScavenge),
       is_logging_(is_logging),
       is_incremental_marking_(heap->incremental_marking()->IsMarking()),
       is_compacting_(heap->incremental_marking()->IsCompacting()) {}
diff --git a/test/unittests/heap/spaces-unittest.cc b/test/unittests/heap/spaces-unittest.cc
index ab300619c8..8b732f3ea2 100644
--- a/test/unittests/heap/spaces-unittest.cc
+++ b/test/unittests/heap/spaces-unittest.cc
@@ -28,7 +28,7 @@ TEST_F(SpacesTest, CompactionSpaceMerge) {
   CompactionSpace* compaction_space =
       new CompactionSpace(heap, OLD_SPACE, NOT_EXECUTABLE,
-                          LocalSpaceKind::kCompactionSpaceForMarkCompact);
+                          CompactionSpaceKind::kCompactionSpaceForMarkCompact);
   EXPECT_TRUE(compaction_space != nullptr);
 
   for (Page* p : *old_space) {
@@ -54,7 +54,7 @@ TEST_F(SpacesTest, CompactionSpaceMerge) {
   int pages_in_old_space = old_space->CountTotalPages();
   int pages_in_compaction_space = compaction_space->CountTotalPages();
   EXPECT_EQ(kExpectedPages, pages_in_compaction_space);
-  old_space->MergeLocalSpace(compaction_space);
+  old_space->MergeCompactionSpace(compaction_space);
   EXPECT_EQ(pages_in_old_space + pages_in_compaction_space,
             old_space->CountTotalPages());
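
The hunks above are mechanical renames, but they also document the allocation
pattern that survives this CL: each GC task allocates into its own
CompactionSpace with no locking, and the main thread later folds the task's
freshly allocated pages back into the shared space (EvacuationAllocator::
Finalize() calling PagedSpace::MergeCompactionSpace()). Below is a minimal,
self-contained C++17 sketch of that pattern. It is a hypothetical analogue,
not V8 code: the names Page, SharedSpace, TaskLocalSpace, MergePages, and
TakePages are invented for this illustration.

// Hypothetical analogue of the compaction-space pattern (not V8 code).
// Each task allocates pages into a private space with no locking; the
// main thread merges the pages back into the shared space afterwards.
#include <cstddef>
#include <memory>
#include <mutex>
#include <thread>
#include <utility>
#include <vector>

constexpr std::size_t kTasks = 4;
constexpr std::size_t kPageSize = 32 * 1024;

struct Page {  // Stand-in for a heap page.
  explicit Page(std::size_t size) : bytes(size) {}
  std::vector<std::byte> bytes;
};

// Stand-in for the shared PagedSpace (e.g. old space).
class SharedSpace {
 public:
  // Main-thread merge, mirroring PagedSpace::MergeCompactionSpace().
  void MergePages(std::vector<std::unique_ptr<Page>> pages) {
    std::lock_guard<std::mutex> guard(mutex_);
    for (auto& page : pages) pages_.push_back(std::move(page));
  }
  std::size_t CountTotalPages() const { return pages_.size(); }

 private:
  std::mutex mutex_;
  std::vector<std::unique_ptr<Page>> pages_;
};

// Stand-in for CompactionSpace: owned by exactly one task, so its
// allocation path needs no synchronization.
class TaskLocalSpace {
 public:
  Page* Expand() {  // Like CompactionSpace::Expand(), records new pages.
    new_pages_.push_back(std::make_unique<Page>(kPageSize));
    return new_pages_.back().get();
  }
  std::vector<std::unique_ptr<Page>> TakePages() {
    return std::move(new_pages_);
  }

 private:
  std::vector<std::unique_ptr<Page>> new_pages_;
};

int main() {
  SharedSpace old_space;
  std::vector<TaskLocalSpace> task_spaces(kTasks);

  // Evacuation phase: every task works only on its own space.
  std::vector<std::thread> tasks;
  for (auto& space : task_spaces) {
    tasks.emplace_back([&space] { space.Expand(); });
  }
  for (auto& task : tasks) task.join();

  // Finalization on the main thread, as in EvacuationAllocator::Finalize().
  for (auto& space : task_spaces) {
    old_space.MergePages(space.TakePages());
  }
  return old_space.CountTotalPages() == kTasks ? 0 : 1;
}

Because each TaskLocalSpace has a single owning task, the hot allocation path
takes no lock; the one mutex is only taken during the main-thread merge. That
is the same reasoning behind SupportsConcurrentAllocation() returning false
for compaction spaces in the patch above.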