[heap] Remove unused LocalSpace class

LocalSpace was introduced for off-heap spaces with concurrent bytecode
compilation finalization. However, finalization ended up using
LocalHeap for concurrent allocations. LocalSpace is therefore unused
and can be removed.

This CL removes LocalSpace and renames all mentions of local space to
compaction space. Compaction space was the only local space left.

Change-Id: I12a8a2724f777a77ddb9957fe2d8e89febfebbaf
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2930169
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74914}
This commit is contained in:
Dominik Inführ 2021-06-01 20:46:23 +02:00 committed by V8 LUCI CQ
parent 6ff1129ca3
commit b0c70710a4
7 changed files with 45 additions and 64 deletions

View File

@@ -848,14 +848,11 @@ enum MinimumCapacity {
enum GarbageCollector { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };
enum class LocalSpaceKind {
enum class CompactionSpaceKind {
kNone,
kCompactionSpaceForScavenge,
kCompactionSpaceForMarkCompact,
kCompactionSpaceForMinorMarkCompact,
kFirstCompactionSpace = kCompactionSpaceForScavenge,
kLastCompactionSpace = kCompactionSpaceForMinorMarkCompact,
};
enum Executability { NOT_EXECUTABLE, EXECUTABLE };

View File

@@ -21,18 +21,20 @@ class EvacuationAllocator {
static const int kLabSize = 32 * KB;
static const int kMaxLabObjectSize = 8 * KB;
explicit EvacuationAllocator(Heap* heap, LocalSpaceKind local_space_kind)
explicit EvacuationAllocator(Heap* heap,
CompactionSpaceKind compaction_space_kind)
: heap_(heap),
new_space_(heap->new_space()),
compaction_spaces_(heap, local_space_kind),
compaction_spaces_(heap, compaction_space_kind),
new_space_lab_(LocalAllocationBuffer::InvalidBuffer()),
lab_allocation_will_fail_(false) {}
// Needs to be called from the main thread to finalize this
// EvacuationAllocator.
void Finalize() {
heap_->old_space()->MergeLocalSpace(compaction_spaces_.Get(OLD_SPACE));
heap_->code_space()->MergeLocalSpace(compaction_spaces_.Get(CODE_SPACE));
heap_->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE));
heap_->code_space()->MergeCompactionSpace(
compaction_spaces_.Get(CODE_SPACE));
// Give back remaining LAB space if this EvacuationAllocator's new space LAB
// sits right next to new space allocation top.
const LinearAllocationArea info = new_space_lab_.CloseAndMakeIterable();

View File

@@ -3091,7 +3091,8 @@ class FullEvacuator : public Evacuator {
: Evacuator(collector->heap(), &record_visitor_, &local_allocator_,
FLAG_always_promote_young_mc),
record_visitor_(collector, &ephemeron_remembered_set_),
local_allocator_(heap_, LocalSpaceKind::kCompactionSpaceForMarkCompact),
local_allocator_(heap_,
CompactionSpaceKind::kCompactionSpaceForMarkCompact),
collector_(collector) {}
GCTracer::Scope::ScopeId GetBackgroundTracingScope() override {
@@ -5163,8 +5164,8 @@ class YoungGenerationEvacuator : public Evacuator {
: Evacuator(collector->heap(), &record_visitor_, &local_allocator_,
false),
record_visitor_(collector->heap()->mark_compact_collector()),
local_allocator_(heap_,
LocalSpaceKind::kCompactionSpaceForMinorMarkCompact),
local_allocator_(
heap_, CompactionSpaceKind::kCompactionSpaceForMinorMarkCompact),
collector_(collector) {}
GCTracer::Scope::ScopeId GetBackgroundTracingScope() override {

View File

@@ -81,10 +81,10 @@ Page* PagedSpace::InitializePage(MemoryChunk* chunk) {
PagedSpace::PagedSpace(Heap* heap, AllocationSpace space,
Executability executable, FreeList* free_list,
LocalSpaceKind local_space_kind)
CompactionSpaceKind compaction_space_kind)
: SpaceWithLinearArea(heap, space, free_list),
executable_(executable),
local_space_kind_(local_space_kind) {
compaction_space_kind_(compaction_space_kind) {
area_size_ = MemoryChunkLayout::AllocatableMemoryInMemoryChunk(space);
accounting_stats_.Clear();
}
@@ -105,7 +105,6 @@ void PagedSpace::RefillFreeList() {
identity() != MAP_SPACE) {
return;
}
DCHECK_IMPLIES(is_local_space(), is_compaction_space());
MarkCompactCollector* collector = heap()->mark_compact_collector();
size_t added = 0;
@@ -123,7 +122,8 @@ void PagedSpace::RefillFreeList() {
// Also merge old-to-new remembered sets if not scavenging because of
// data races: One thread might iterate remembered set, while another
// thread merges them.
if (local_space_kind() != LocalSpaceKind::kCompactionSpaceForScavenge) {
if (compaction_space_kind() !=
CompactionSpaceKind::kCompactionSpaceForScavenge) {
p->MergeOldToNewRememberedSets();
}
@@ -150,7 +150,7 @@ void PagedSpace::RefillFreeList() {
}
}
void PagedSpace::MergeLocalSpace(LocalSpace* other) {
void PagedSpace::MergeCompactionSpace(CompactionSpace* other) {
base::MutexGuard guard(mutex());
DCHECK(identity() == other->identity());
@@ -555,7 +555,7 @@ bool PagedSpace::TryAllocationFromFreeListMain(size_t size_in_bytes,
base::Optional<std::pair<Address, size_t>> PagedSpace::RawRefillLabBackground(
LocalHeap* local_heap, size_t min_size_in_bytes, size_t max_size_in_bytes,
AllocationAlignment alignment, AllocationOrigin origin) {
DCHECK(!is_local_space() && identity() == OLD_SPACE);
DCHECK(!is_compaction_space() && identity() == OLD_SPACE);
DCHECK_EQ(origin, AllocationOrigin::kRuntime);
auto result = TryAllocationFromFreeListBackground(
@@ -840,7 +840,7 @@ bool PagedSpace::RefillLabMain(int size_in_bytes, AllocationOrigin origin) {
return RawRefillLabMain(size_in_bytes, origin);
}
Page* LocalSpace::Expand() {
Page* CompactionSpace::Expand() {
Page* page = PagedSpace::Expand();
new_pages_.push_back(page);
return page;
@@ -864,9 +864,6 @@ bool PagedSpace::TryExpand(int size_in_bytes, AllocationOrigin origin) {
}
bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
// Non-compaction local spaces are not supported.
DCHECK_IMPLIES(is_local_space(), is_compaction_space());
// Allocation in this space has failed.
DCHECK_GE(size_in_bytes, 0);
const int kMaxPagesToSweep = 1;
@@ -946,7 +943,7 @@ bool PagedSpace::ContributeToSweepingMain(int required_freed_bytes,
AllocationResult PagedSpace::AllocateRawSlow(int size_in_bytes,
AllocationAlignment alignment,
AllocationOrigin origin) {
if (!is_local_space()) {
if (!is_compaction_space()) {
// Start incremental marking before the actual allocation, this allows the
// allocation function to mark the object black when incremental marking is
// running.

View File

@@ -21,10 +21,10 @@
namespace v8 {
namespace internal {
class CompactionSpace;
class Heap;
class HeapObject;
class Isolate;
class LocalSpace;
class ObjectVisitor;
// -----------------------------------------------------------------------------
@@ -71,9 +71,10 @@ class V8_EXPORT_PRIVATE PagedSpace
static const size_t kCompactionMemoryWanted = 500 * KB;
// Creates a space with an id.
PagedSpace(Heap* heap, AllocationSpace id, Executability executable,
FreeList* free_list,
LocalSpaceKind local_space_kind = LocalSpaceKind::kNone);
PagedSpace(
Heap* heap, AllocationSpace id, Executability executable,
FreeList* free_list,
CompactionSpaceKind compaction_space_kind = CompactionSpaceKind::kNone);
~PagedSpace() override { TearDown(); }
@@ -256,19 +257,15 @@ class V8_EXPORT_PRIVATE PagedSpace
// Return size of allocatable area on a page in this space.
inline int AreaSize() { return static_cast<int>(area_size_); }
bool is_local_space() { return local_space_kind_ != LocalSpaceKind::kNone; }
bool is_compaction_space() {
return base::IsInRange(local_space_kind_,
LocalSpaceKind::kFirstCompactionSpace,
LocalSpaceKind::kLastCompactionSpace);
return compaction_space_kind_ != CompactionSpaceKind::kNone;
}
LocalSpaceKind local_space_kind() { return local_space_kind_; }
CompactionSpaceKind compaction_space_kind() { return compaction_space_kind_; }
// Merges {other} into the current space. Note that this modifies {other},
// e.g., removes its bump pointer area and resets statistics.
void MergeLocalSpace(LocalSpace* other);
void MergeCompactionSpace(CompactionSpace* other);
// Refills the free list from the corresponding free list filled by the
// sweeper.
@@ -324,13 +321,13 @@ class V8_EXPORT_PRIVATE PagedSpace
base::Optional<base::MutexGuard> guard_;
};
bool SupportsConcurrentAllocation() { return !is_local_space(); }
bool SupportsConcurrentAllocation() { return !is_compaction_space(); }
// Set space linear allocation area.
void SetTopAndLimit(Address top, Address limit);
void DecreaseLimit(Address new_limit);
void UpdateInlineAllocationLimit(size_t min_size) override;
bool SupportsAllocationObserver() override { return !is_local_space(); }
bool SupportsAllocationObserver() override { return !is_compaction_space(); }
// Slow path of allocation function
V8_WARN_UNUSED_RESULT AllocationResult
@@ -407,7 +404,7 @@ class V8_EXPORT_PRIVATE PagedSpace
Executability executable_;
LocalSpaceKind local_space_kind_;
CompactionSpaceKind compaction_space_kind_;
size_t area_size_;
@@ -430,20 +427,23 @@ class V8_EXPORT_PRIVATE PagedSpace
};
// -----------------------------------------------------------------------------
// Base class for compaction space and off-thread space.
// Compaction space that is used temporarily during compaction.
class V8_EXPORT_PRIVATE LocalSpace : public PagedSpace {
class V8_EXPORT_PRIVATE CompactionSpace : public PagedSpace {
public:
LocalSpace(Heap* heap, AllocationSpace id, Executability executable,
LocalSpaceKind local_space_kind)
CompactionSpace(Heap* heap, AllocationSpace id, Executability executable,
CompactionSpaceKind compaction_space_kind)
: PagedSpace(heap, id, executable, FreeList::CreateFreeList(),
local_space_kind) {
DCHECK_NE(local_space_kind, LocalSpaceKind::kNone);
compaction_space_kind) {
DCHECK(is_compaction_space());
}
const std::vector<Page*>& GetNewPages() { return new_pages_; }
protected:
V8_WARN_UNUSED_RESULT bool RefillLabMain(int size_in_bytes,
AllocationOrigin origin) override;
Page* Expand() override;
// The space is temporary and not included in any snapshots.
bool snapshotable() override { return false; }
@@ -452,31 +452,15 @@ class V8_EXPORT_PRIVATE LocalSpace : public PagedSpace {
std::vector<Page*> new_pages_;
};
// -----------------------------------------------------------------------------
// Compaction space that is used temporarily during compaction.
class V8_EXPORT_PRIVATE CompactionSpace : public LocalSpace {
public:
CompactionSpace(Heap* heap, AllocationSpace id, Executability executable,
LocalSpaceKind local_space_kind)
: LocalSpace(heap, id, executable, local_space_kind) {
DCHECK(is_compaction_space());
}
protected:
V8_WARN_UNUSED_RESULT bool RefillLabMain(int size_in_bytes,
AllocationOrigin origin) override;
};
// A collection of |CompactionSpace|s used by a single compaction task.
class CompactionSpaceCollection : public Malloced {
public:
explicit CompactionSpaceCollection(Heap* heap,
LocalSpaceKind local_space_kind)
CompactionSpaceKind compaction_space_kind)
: old_space_(heap, OLD_SPACE, Executability::NOT_EXECUTABLE,
local_space_kind),
compaction_space_kind),
code_space_(heap, CODE_SPACE, Executability::EXECUTABLE,
local_space_kind) {}
compaction_space_kind) {}
CompactionSpace* Get(AllocationSpace space) {
switch (space) {

View File

@@ -529,7 +529,7 @@ Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
copied_size_(0),
promoted_size_(0),
allocator_(heap, LocalSpaceKind::kCompactionSpaceForScavenge),
allocator_(heap, CompactionSpaceKind::kCompactionSpaceForScavenge),
is_logging_(is_logging),
is_incremental_marking_(heap->incremental_marking()->IsMarking()),
is_compacting_(heap->incremental_marking()->IsCompacting()) {}

View File

@@ -28,7 +28,7 @@ TEST_F(SpacesTest, CompactionSpaceMerge) {
CompactionSpace* compaction_space =
new CompactionSpace(heap, OLD_SPACE, NOT_EXECUTABLE,
LocalSpaceKind::kCompactionSpaceForMarkCompact);
CompactionSpaceKind::kCompactionSpaceForMarkCompact);
EXPECT_TRUE(compaction_space != nullptr);
for (Page* p : *old_space) {
@@ -54,7 +54,7 @@ TEST_F(SpacesTest, CompactionSpaceMerge) {
int pages_in_old_space = old_space->CountTotalPages();
int pages_in_compaction_space = compaction_space->CountTotalPages();
EXPECT_EQ(kExpectedPages, pages_in_compaction_space);
old_space->MergeLocalSpace(compaction_space);
old_space->MergeCompactionSpace(compaction_space);
EXPECT_EQ(pages_in_old_space + pages_in_compaction_space,
old_space->CountTotalPages());