[heap] Set up allocation in shared spaces for --shared-space

This CL unifies the fields for shared spaces across both the
shared-isolate and the shared-space-isolate approach. This makes it
possible to mostly avoid separate code paths for the two
implementations.
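
As a rough sketch (simplified here for illustration; the real
declarations are in the heap.h hunk below), the unified fields look
like this:

  class Heap {
    // Point either to shared spaces owned by this heap (shared space
    // isolate) or to shared spaces owned by another isolate (legacy
    // shared isolate).
    PagedSpace* shared_allocation_space_ = nullptr;
    OldLargeObjectSpace* shared_lo_allocation_space_ = nullptr;
    PagedSpace* shared_map_allocation_space_ = nullptr;

    // Allocation into the shared heap then always goes through the
    // same allocator, regardless of which approach set the fields
    // above.
    std::unique_ptr<ConcurrentAllocator> shared_space_allocator_;
  };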

While this CL already sets up everything needed for allocation with
--shared-space, allocation does not fully work yet due to other
remaining issues.

Bug: v8:13267
Change-Id: Icdb40ed7045e33e6acbb97d3838fa374e6c24a2e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3892786
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83280}
Author: Dominik Inführ <dinfuehr@chromium.org>
Date: 2022-09-16 18:59:53 +02:00
Committer: V8 LUCI CQ
Commit: 89e19b8696 (parent: defa678e8b)

9 changed files with 60 additions and 34 deletions

src/heap/heap-allocator.cc

@@ -27,11 +27,11 @@ void HeapAllocator::Setup() {
           ? static_cast<PagedSpace*>(spaces_[MAP_SPACE])
           : static_cast<PagedSpace*>(spaces_[OLD_SPACE]);
-  shared_old_allocator_ = heap_->shared_old_allocator_.get();
+  shared_old_allocator_ = heap_->shared_space_allocator_.get();
   shared_map_allocator_ = heap_->shared_map_allocator_
                               ? heap_->shared_map_allocator_.get()
                               : shared_old_allocator_;
-  shared_lo_space_ = heap_->shared_isolate_lo_space_;
+  shared_lo_space_ = heap_->shared_lo_allocation_space();
 }

 void HeapAllocator::SetReadOnlySpace(ReadOnlySpace* read_only_space) {

src/heap/heap-inl.h

@@ -123,7 +123,7 @@ PagedSpace* Heap::space_for_maps() {

 ConcurrentAllocator* Heap::concurrent_allocator_for_maps() {
   return V8_LIKELY(shared_map_allocator_) ? shared_map_allocator_.get()
-                                          : shared_old_allocator_.get();
+                                          : shared_space_allocator_.get();
 }

 RootsTable& Heap::roots_table() { return isolate()->roots_table(); }

src/heap/heap.cc

@@ -3684,7 +3684,7 @@ void Heap::FreeSharedLinearAllocationAreas() {

 void Heap::FreeMainThreadSharedLinearAllocationAreas() {
   if (!isolate()->shared_isolate()) return;
-  shared_old_allocator_->FreeLinearAllocationArea();
+  shared_space_allocator_->FreeLinearAllocationArea();
   if (shared_map_allocator_) shared_map_allocator_->FreeLinearAllocationArea();
   main_thread_local_heap()->FreeSharedLinearAllocationArea();
 }
@@ -4353,10 +4353,14 @@ bool Heap::ContainsCode(HeapObject value) const {
 }

 bool Heap::SharedHeapContains(HeapObject value) const {
-  if (shared_isolate_old_space_)
-    return shared_isolate_old_space_->Contains(value) ||
-           (shared_isolate_map_space_ &&
-            shared_isolate_map_space_->Contains(value));
+  if (shared_allocation_space_) {
+    if (shared_allocation_space_->Contains(value)) return true;
+    if (shared_lo_allocation_space_->Contains(value)) return true;
+    if (shared_map_allocation_space_ &&
+        shared_map_allocation_space_->Contains(value))
+      return true;
+  }
+
   return false;
 }
@@ -5492,7 +5496,7 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
     map_space_ = static_cast<MapSpace*>(space_[MAP_SPACE].get());
   }

-  if (v8_flags.shared_space && isolate()->is_shared_space_isolate()) {
+  if (isolate()->is_shared_space_isolate()) {
     space_[SHARED_SPACE] = std::make_unique<SharedSpace>(this);
     shared_space_ = static_cast<SharedSpace*>(space_[SHARED_SPACE].get());
   }
@@ -5504,7 +5508,7 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
   code_lo_space_ =
       static_cast<CodeLargeObjectSpace*>(space_[CODE_LO_SPACE].get());

-  if (v8_flags.shared_space && isolate()->is_shared_space_isolate()) {
+  if (isolate()->is_shared_space_isolate()) {
     space_[SHARED_LO_SPACE] = std::make_unique<SharedLargeObjectSpace>(this);
     shared_lo_space_ =
         static_cast<SharedLargeObjectSpace*>(space_[SHARED_LO_SPACE].get());
@@ -5583,19 +5587,33 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
   }
 #endif  // V8_HEAP_USE_PKU_JIT_WRITE_PROTECT

-  if (isolate()->shared_isolate()) {
+  if (isolate()->shared_space_isolate()) {
+    Heap* heap = isolate()->shared_space_isolate()->heap();
+
+    shared_space_allocator_ = std::make_unique<ConcurrentAllocator>(
+        main_thread_local_heap(), heap->shared_space_);
+
+    DCHECK_NULL(shared_map_allocator_.get());
+
+    shared_allocation_space_ = heap->shared_space_;
+    shared_lo_allocation_space_ = heap->shared_lo_space_;
+    DCHECK(!v8_flags.use_map_space);
+    shared_map_allocation_space_ = heap->shared_space_;
+  } else if (isolate()->shared_isolate()) {
     Heap* shared_heap = isolate()->shared_isolate()->heap();
-    shared_isolate_old_space_ = shared_heap->old_space();
-    shared_isolate_lo_space_ = shared_heap->lo_space();
-    shared_old_allocator_.reset(new ConcurrentAllocator(
-        main_thread_local_heap(), shared_isolate_old_space_));
+
+    shared_space_allocator_ = std::make_unique<ConcurrentAllocator>(
+        main_thread_local_heap(), shared_heap->old_space());

     if (shared_heap->map_space()) {
-      shared_isolate_map_space_ = shared_heap->map_space();
-      shared_map_allocator_.reset(new ConcurrentAllocator(
-          main_thread_local_heap(), shared_isolate_map_space_));
+      shared_map_allocator_ = std::make_unique<ConcurrentAllocator>(
+          main_thread_local_heap(), shared_heap->map_space());
     }
+
+    shared_allocation_space_ = shared_heap->old_space();
+    shared_lo_allocation_space_ = shared_heap->lo_space();
+    shared_map_allocation_space_ = shared_heap->map_space();
   }

   main_thread_local_heap()->SetUpMainThread();
@@ -5903,10 +5921,7 @@ void Heap::TearDown() {

   allocation_sites_to_pretenure_.reset();

-  shared_isolate_old_space_ = nullptr;
-  shared_old_allocator_.reset();
-  shared_isolate_map_space_ = nullptr;
+  shared_space_allocator_.reset();
   shared_map_allocator_.reset();

   {

src/heap/heap.h

@@ -878,7 +878,6 @@ class Heap {
   NewSpace* new_space() const { return new_space_; }
   inline PagedNewSpace* paged_new_space() const;
   OldSpace* old_space() const { return old_space_; }
-  OldSpace* shared_old_space() const { return shared_isolate_old_space_; }
   CodeSpace* code_space() const { return code_space_; }
   MapSpace* map_space() const { return map_space_; }
   inline PagedSpace* space_for_maps();
@@ -887,6 +886,13 @@ class Heap {
   NewLargeObjectSpace* new_lo_space() const { return new_lo_space_; }
   ReadOnlySpace* read_only_space() const { return read_only_space_; }

+  PagedSpace* shared_allocation_space() const {
+    return shared_allocation_space_;
+  }
+  OldLargeObjectSpace* shared_lo_allocation_space() const {
+    return shared_lo_allocation_space_;
+  }
+
   inline PagedSpace* paged_space(int idx);
   inline Space* space(int idx);
@@ -2191,6 +2197,7 @@ class Heap {
   // For keeping track of context disposals.
   int contexts_disposed_ = 0;

+  // Spaces owned by this heap through space_.
   NewSpace* new_space_ = nullptr;
   OldSpace* old_space_ = nullptr;
   CodeSpace* code_space_ = nullptr;
@@ -2202,11 +2209,14 @@ class Heap {
   SharedLargeObjectSpace* shared_lo_space_ = nullptr;
   ReadOnlySpace* read_only_space_ = nullptr;

-  OldSpace* shared_isolate_old_space_ = nullptr;
-  OldLargeObjectSpace* shared_isolate_lo_space_ = nullptr;
-  MapSpace* shared_isolate_map_space_ = nullptr;
+  // Either pointer to owned shared spaces or pointer to unowned shared spaces
+  // in another isolate.
+  PagedSpace* shared_allocation_space_ = nullptr;
+  OldLargeObjectSpace* shared_lo_allocation_space_ = nullptr;
+  PagedSpace* shared_map_allocation_space_ = nullptr;

-  std::unique_ptr<ConcurrentAllocator> shared_old_allocator_;
+  // Allocators for the shared spaces.
+  std::unique_ptr<ConcurrentAllocator> shared_space_allocator_;
   std::unique_ptr<ConcurrentAllocator> shared_map_allocator_;

   // Map from the space id to the space.

src/heap/local-heap-inl.h

@@ -64,7 +64,8 @@ AllocationResult LocalHeap::AllocateRaw(int size_in_bytes, AllocationType type,
     DCHECK_EQ(type, AllocationType::kSharedOld);
     if (large_object) {
-      return heap()->code_lo_space()->AllocateRawBackground(this, size_in_bytes);
+      return heap()->shared_lo_allocation_space()->AllocateRawBackground(
+          this, size_in_bytes);
     } else {
       return shared_old_space_allocator()->AllocateRaw(size_in_bytes, alignment,
                                                        origin);

src/heap/local-heap.cc

@@ -121,8 +121,8 @@ void LocalHeap::SetUp() {
   DCHECK_NULL(shared_old_space_allocator_);
   if (heap_->isolate()->has_shared_heap()) {
-    shared_old_space_allocator_ =
-        std::make_unique<ConcurrentAllocator>(this, heap_->shared_old_space());
+    shared_old_space_allocator_ = std::make_unique<ConcurrentAllocator>(
+        this, heap_->shared_allocation_space());
   }

   DCHECK_NULL(marking_barrier_);

src/heap/mark-compact.cc

@@ -4126,7 +4126,7 @@ void MarkCompactCollector::EvacuateEpilogue() {
 namespace {

 ConcurrentAllocator* CreateSharedOldAllocator(Heap* heap) {
   if (v8_flags.shared_string_table && heap->isolate()->has_shared_heap()) {
-    return new ConcurrentAllocator(nullptr, heap->shared_old_space());
+    return new ConcurrentAllocator(nullptr, heap->shared_allocation_space());
   }

   return nullptr;

src/heap/paged-spaces.cc

@@ -655,7 +655,7 @@ PagedSpaceBase::RawAllocateBackground(LocalHeap* local_heap,
                                       AllocationOrigin origin) {
   DCHECK(!is_compaction_space());
   DCHECK(identity() == OLD_SPACE || identity() == CODE_SPACE ||
-         identity() == MAP_SPACE);
+         identity() == MAP_SPACE || identity() == SHARED_SPACE);
   DCHECK(origin == AllocationOrigin::kRuntime ||
          origin == AllocationOrigin::kGC);
   DCHECK_IMPLIES(!local_heap, origin == AllocationOrigin::kGC);
@@ -725,7 +725,7 @@ PagedSpaceBase::TryAllocationFromFreeListBackground(size_t min_size_in_bytes,
   base::MutexGuard lock(&space_mutex_);
   DCHECK_LE(min_size_in_bytes, max_size_in_bytes);
   DCHECK(identity() == OLD_SPACE || identity() == CODE_SPACE ||
-         identity() == MAP_SPACE);
+         identity() == MAP_SPACE || identity() == SHARED_SPACE);

   size_t new_node_size = 0;
   FreeSpace new_node =

src/heap/scavenger.cc

@@ -599,7 +599,7 @@ Scavenger::PromotionList::Local::Local(Scavenger::PromotionList* promotion_list)
 namespace {

 ConcurrentAllocator* CreateSharedOldAllocator(Heap* heap) {
   if (v8_flags.shared_string_table && heap->isolate()->has_shared_heap()) {
-    return new ConcurrentAllocator(nullptr, heap->shared_old_space());
+    return new ConcurrentAllocator(nullptr, heap->shared_allocation_space());
   }

   return nullptr;
 }
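
Taken together, the shared allocation path after this CL can be
sketched as follows (a simplified restatement of the local-heap-inl.h
hunk above; names follow the diff, but this is not verbatim V8 code):

  // Sketch: how a kSharedOld allocation request is served.
  AllocationResult AllocateSharedOld(LocalHeap* local_heap,
                                     int size_in_bytes,
                                     AllocationAlignment alignment,
                                     AllocationOrigin origin,
                                     bool large_object) {
    if (large_object) {
      // Large shared objects go to the shared LO space, resolved
      // through the unified shared_lo_allocation_space() accessor.
      return local_heap->heap()->shared_lo_allocation_space()
          ->AllocateRawBackground(local_heap, size_in_bytes);
    }
    // Everything else goes through the per-LocalHeap
    // ConcurrentAllocator, which LocalHeap::SetUp() pointed at
    // shared_allocation_space().
    return local_heap->shared_old_space_allocator()->AllocateRaw(
        size_in_bytes, alignment, origin);
  }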