diff --git a/src/common/globals.h b/src/common/globals.h
index 68634d7d1c..34a58f5201 100644
--- a/src/common/globals.h
+++ b/src/common/globals.h
@@ -969,20 +969,22 @@ using WeakSlotCallbackWithHeap = bool (*)(Heap* heap, FullObjectSlot pointer);
 // NOTE: SpaceIterator depends on AllocationSpace enumeration values being
 // consecutive.
 enum AllocationSpace {
-  RO_SPACE,       // Immortal, immovable and immutable objects,
-  OLD_SPACE,      // Old generation regular object space.
-  CODE_SPACE,     // Old generation code object space, marked executable.
-  MAP_SPACE,      // Old generation map object space, non-movable.
-  NEW_SPACE,      // Young generation space for regular objects collected
-                  // with Scavenger/MinorMC.
-  LO_SPACE,       // Old generation large object space.
-  CODE_LO_SPACE,  // Old generation large code object space.
-  NEW_LO_SPACE,   // Young generation large object space.
+  RO_SPACE,         // Immortal, immovable and immutable objects,
+  OLD_SPACE,        // Old generation regular object space.
+  CODE_SPACE,       // Old generation code object space, marked executable.
+  MAP_SPACE,        // Old generation map object space, non-movable.
+  NEW_SPACE,        // Young generation space for regular objects collected
+                    // with Scavenger/MinorMC.
+  SHARED_SPACE,     // Space shared between multiple isolates. Optional.
+  LO_SPACE,         // Old generation large object space.
+  CODE_LO_SPACE,    // Old generation large code object space.
+  NEW_LO_SPACE,     // Young generation large object space.
+  SHARED_LO_SPACE,  // Space shared between multiple isolates. Optional.
 
   FIRST_SPACE = RO_SPACE,
-  LAST_SPACE = NEW_LO_SPACE,
+  LAST_SPACE = SHARED_LO_SPACE,
   FIRST_MUTABLE_SPACE = OLD_SPACE,
-  LAST_MUTABLE_SPACE = NEW_LO_SPACE,
+  LAST_MUTABLE_SPACE = SHARED_LO_SPACE,
   FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE,
   LAST_GROWABLE_PAGED_SPACE = MAP_SPACE,
   FIRST_SWEEPABLE_SPACE = OLD_SPACE,
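The globals.h hunk relies on the invariant spelled out in its NOTE: SpaceIterator assumes the enumerators are consecutive. That is why SHARED_SPACE is inserted before LO_SPACE, SHARED_LO_SPACE goes last, and LAST_SPACE / LAST_MUTABLE_SPACE are bumped to SHARED_LO_SPACE. A minimal standalone sketch of that invariant (plain C++ mirroring the enum above, not buildable V8 source):

```cpp
// Standalone sketch, not V8 source: mirrors the updated AllocationSpace enum
// to show why the values must stay consecutive. With SHARED_SPACE and
// SHARED_LO_SPACE slotted in, FIRST_SPACE..LAST_SPACE is still walkable with
// a plain integer loop, which is what SpaceIterator depends on.
#include <cstdio>

enum AllocationSpace {
  RO_SPACE,
  OLD_SPACE,
  CODE_SPACE,
  MAP_SPACE,
  NEW_SPACE,
  SHARED_SPACE,
  LO_SPACE,
  CODE_LO_SPACE,
  NEW_LO_SPACE,
  SHARED_LO_SPACE,

  FIRST_SPACE = RO_SPACE,
  LAST_SPACE = SHARED_LO_SPACE,
};

int main() {
  // A SpaceIterator-style walk: visits each of the ten spaces exactly once.
  for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
    std::printf("visiting space %d\n", i);
  }
  return 0;
}
```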
diff --git a/src/flags/flag-definitions.h b/src/flags/flag-definitions.h
index 87423fa8b9..fe0996b0ad 100644
--- a/src/flags/flag-definitions.h
+++ b/src/flags/flag-definitions.h
@@ -1220,6 +1220,8 @@ DEFINE_BOOL(global_gc_scheduling, true,
 DEFINE_BOOL(gc_global, false, "always perform global GCs")
 DEFINE_BOOL(shared_space, false,
             "Implement shared heap as shared space on a main isolate.")
+// Don't use a map space with --shared-space in order to avoid a shared map space.
+DEFINE_NEG_IMPLICATION(shared_space, use_map_space)
 
 // TODO(12950): The next two flags only have an effect if
 // V8_ENABLE_ALLOCATION_TIMEOUT is set, so we should only define them in that
diff --git a/src/heap/base-space.cc b/src/heap/base-space.cc
index aabbeaebf5..5f28afc240 100644
--- a/src/heap/base-space.cc
+++ b/src/heap/base-space.cc
@@ -17,12 +17,16 @@ const char* BaseSpace::GetSpaceName(AllocationSpace space) {
       return "map_space";
     case CODE_SPACE:
       return "code_space";
+    case SHARED_SPACE:
+      return "shared_space";
     case LO_SPACE:
       return "large_object_space";
     case NEW_LO_SPACE:
       return "new_large_object_space";
     case CODE_LO_SPACE:
       return "code_large_object_space";
+    case SHARED_LO_SPACE:
+      return "shared_large_object_space";
     case RO_SPACE:
       return "read_only_space";
   }
diff --git a/src/heap/heap-allocator.cc b/src/heap/heap-allocator.cc
index c78098ef28..be23977973 100644
--- a/src/heap/heap-allocator.cc
+++ b/src/heap/heap-allocator.cc
@@ -31,7 +31,7 @@ void HeapAllocator::Setup() {
   shared_map_allocator_ = heap_->shared_map_allocator_
                               ? heap_->shared_map_allocator_.get()
                               : shared_old_allocator_;
-  shared_lo_space_ = heap_->shared_lo_space();
+  shared_lo_space_ = heap_->shared_isolate_lo_space_;
 }
 
 void HeapAllocator::SetReadOnlySpace(ReadOnlySpace* read_only_space) {
diff --git a/src/heap/heap-inl.h b/src/heap/heap-inl.h
index 6991a6dca5..c58cc702ab 100644
--- a/src/heap/heap-inl.h
+++ b/src/heap/heap-inl.h
@@ -485,6 +485,8 @@ bool Heap::IsPendingAllocationInternal(HeapObject object) {
       return addr == large_space->pending_object();
     }
 
+    case SHARED_SPACE:
+    case SHARED_LO_SPACE:
     case RO_SPACE:
       UNREACHABLE();
   }
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index e8b5536acd..7117312f0b 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -4327,9 +4327,10 @@ bool Heap::ContainsCode(HeapObject value) const {
 }
 
 bool Heap::SharedHeapContains(HeapObject value) const {
-  if (shared_old_space_)
-    return shared_old_space_->Contains(value) ||
-           (shared_map_space_ && shared_map_space_->Contains(value));
+  if (shared_isolate_old_space_)
+    return shared_isolate_old_space_->Contains(value) ||
+           (shared_isolate_map_space_ &&
+            shared_isolate_map_space_->Contains(value));
   return false;
 }
 
@@ -4360,12 +4361,16 @@ bool Heap::InSpace(HeapObject value, AllocationSpace space) const {
     case MAP_SPACE:
       DCHECK(map_space_);
       return map_space_->Contains(value);
+    case SHARED_SPACE:
+      return shared_space_->Contains(value);
     case LO_SPACE:
       return lo_space_->Contains(value);
     case CODE_LO_SPACE:
       return code_lo_space_->Contains(value);
     case NEW_LO_SPACE:
       return new_lo_space_->Contains(value);
+    case SHARED_LO_SPACE:
+      return shared_lo_space_->Contains(value);
     case RO_SPACE:
       return ReadOnlyHeap::Contains(value);
   }
@@ -4390,12 +4395,16 @@ bool Heap::InSpaceSlow(Address addr, AllocationSpace space) const {
     case MAP_SPACE:
       DCHECK(map_space_);
       return map_space_->ContainsSlow(addr);
+    case SHARED_SPACE:
+      return shared_space_->ContainsSlow(addr);
     case LO_SPACE:
       return lo_space_->ContainsSlow(addr);
     case CODE_LO_SPACE:
       return code_lo_space_->ContainsSlow(addr);
     case NEW_LO_SPACE:
       return new_lo_space_->ContainsSlow(addr);
+    case SHARED_LO_SPACE:
+      return shared_lo_space_->ContainsSlow(addr);
     case RO_SPACE:
       return read_only_space_->ContainsSlow(addr);
   }
@@ -4408,9 +4417,11 @@ bool Heap::IsValidAllocationSpace(AllocationSpace space) {
     case OLD_SPACE:
     case CODE_SPACE:
     case MAP_SPACE:
+    case SHARED_SPACE:
    case LO_SPACE:
     case NEW_LO_SPACE:
     case CODE_LO_SPACE:
+    case SHARED_LO_SPACE:
     case RO_SPACE:
       return true;
     default:
@@ -5448,8 +5459,15 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
   if (v8_flags.use_map_space) {
     space_[MAP_SPACE] = map_space_ = new MapSpace(this);
   }
+  if (v8_flags.shared_space && isolate()->is_shared_space_isolate()) {
+    space_[SHARED_SPACE] = shared_space_ = new SharedSpace(this);
+  }
   space_[LO_SPACE] = lo_space_ = new OldLargeObjectSpace(this);
   space_[CODE_LO_SPACE] = code_lo_space_ = new CodeLargeObjectSpace(this);
+  if (v8_flags.shared_space && isolate()->is_shared_space_isolate()) {
+    space_[SHARED_LO_SPACE] = shared_lo_space_ =
+        new SharedLargeObjectSpace(this);
+  }
 
   for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
        i++) {
@@ -5527,15 +5545,15 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
 
   if (isolate()->shared_isolate()) {
     Heap* shared_heap = isolate()->shared_isolate()->heap();
 
-    shared_old_space_ = shared_heap->old_space();
-    shared_lo_space_ = shared_heap->lo_space();
-    shared_old_allocator_.reset(
-        new ConcurrentAllocator(main_thread_local_heap(), shared_old_space_));
+    shared_isolate_old_space_ = shared_heap->old_space();
+    shared_isolate_lo_space_ = shared_heap->lo_space();
+    shared_old_allocator_.reset(new ConcurrentAllocator(
+        main_thread_local_heap(), shared_isolate_old_space_));
 
     if (shared_heap->map_space()) {
-      shared_map_space_ = shared_heap->map_space();
-      shared_map_allocator_.reset(
-          new ConcurrentAllocator(main_thread_local_heap(), shared_map_space_));
+      shared_isolate_map_space_ = shared_heap->map_space();
+      shared_map_allocator_.reset(new ConcurrentAllocator(
+          main_thread_local_heap(), shared_isolate_map_space_));
     }
   }
@@ -5844,10 +5862,10 @@ void Heap::TearDown() {
 
   allocation_sites_to_pretenure_.reset();
 
-  shared_old_space_ = nullptr;
+  shared_isolate_old_space_ = nullptr;
   shared_old_allocator_.reset();
 
-  shared_map_space_ = nullptr;
+  shared_isolate_map_space_ = nullptr;
   shared_map_allocator_.reset();
 
   {
@@ -6781,9 +6799,12 @@ bool Heap::AllowedToBeMigrated(Map map, HeapObject obj, AllocationSpace dst) {
       return dst == CODE_SPACE && type == CODE_TYPE;
     case MAP_SPACE:
       return dst == MAP_SPACE && type == MAP_TYPE;
+    case SHARED_SPACE:
+      return dst == SHARED_SPACE;
     case LO_SPACE:
     case CODE_LO_SPACE:
     case NEW_LO_SPACE:
+    case SHARED_LO_SPACE:
     case RO_SPACE:
       return false;
   }
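In the SetUpSpaces hunk above, the two shared spaces are created only for a shared-space isolate running with --shared-space; in every other isolate the corresponding space_ slots stay nullptr, so consumers have to check for presence (as SharedHeapContains and the map-space branch already do). A standalone sketch of this create-under-flag pattern, with a hypothetical Space type and indices rather than V8 internals:

```cpp
// Standalone sketch with hypothetical types, not V8 internals: optional
// spaces are created only under a flag and otherwise remain nullptr in the
// space table, so every consumer has to tolerate a missing entry.
#include <array>
#include <cstdio>
#include <memory>

struct Space {
  explicit Space(const char* name) : name(name) {}
  const char* name;
};

enum { kOldSpace, kSharedSpace, kSpaceCount };

int main() {
  const bool shared_space_enabled = false;  // stands in for v8_flags.shared_space

  std::array<std::unique_ptr<Space>, kSpaceCount> space_{};
  space_[kOldSpace] = std::make_unique<Space>("old_space");
  if (shared_space_enabled) {
    space_[kSharedSpace] = std::make_unique<Space>("shared_space");
  }

  // Presence check, mirroring `if (shared_heap->map_space())` in the patch.
  for (const auto& s : space_) {
    std::printf("%s\n", s ? s->name : "(not created)");
  }
  return 0;
}
```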
diff --git a/src/heap/heap.h b/src/heap/heap.h
index 6e270f246d..b65424d99c 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -127,7 +127,9 @@ class SafepointScope;
 class ScavengeJob;
 class Scavenger;
 class ScavengerCollector;
+class SharedLargeObjectSpace;
 class SharedReadOnlySpace;
+class SharedSpace;
 class Space;
 class StressScavengeObserver;
 class TimedHistogram;
@@ -876,12 +878,11 @@ class Heap {
   NewSpace* new_space() const { return new_space_; }
   inline PagedNewSpace* paged_new_space() const;
   OldSpace* old_space() const { return old_space_; }
-  OldSpace* shared_old_space() const { return shared_old_space_; }
+  OldSpace* shared_old_space() const { return shared_isolate_old_space_; }
   CodeSpace* code_space() const { return code_space_; }
   MapSpace* map_space() const { return map_space_; }
   inline PagedSpace* space_for_maps();
   OldLargeObjectSpace* lo_space() const { return lo_space_; }
-  OldLargeObjectSpace* shared_lo_space() const { return shared_lo_space_; }
   CodeLargeObjectSpace* code_lo_space() const { return code_lo_space_; }
   NewLargeObjectSpace* new_lo_space() const { return new_lo_space_; }
   ReadOnlySpace* read_only_space() const { return read_only_space_; }
@@ -2194,14 +2195,16 @@ class Heap {
   OldSpace* old_space_ = nullptr;
   CodeSpace* code_space_ = nullptr;
   MapSpace* map_space_ = nullptr;
+  SharedSpace* shared_space_ = nullptr;
   OldLargeObjectSpace* lo_space_ = nullptr;
   CodeLargeObjectSpace* code_lo_space_ = nullptr;
   NewLargeObjectSpace* new_lo_space_ = nullptr;
+  SharedLargeObjectSpace* shared_lo_space_ = nullptr;
   ReadOnlySpace* read_only_space_ = nullptr;
 
-  OldSpace* shared_old_space_ = nullptr;
-  OldLargeObjectSpace* shared_lo_space_ = nullptr;
-  MapSpace* shared_map_space_ = nullptr;
+  OldSpace* shared_isolate_old_space_ = nullptr;
+  OldLargeObjectSpace* shared_isolate_lo_space_ = nullptr;
+  MapSpace* shared_isolate_map_space_ = nullptr;
 
   std::unique_ptr<ConcurrentAllocator> shared_old_allocator_;
   std::unique_ptr<ConcurrentAllocator> shared_map_allocator_;
diff --git a/src/heap/large-spaces.cc b/src/heap/large-spaces.cc
index 74c621e81f..2baed404a2 100644
--- a/src/heap/large-spaces.cc
+++ b/src/heap/large-spaces.cc
@@ -582,5 +582,15 @@ void CodeLargeObjectSpace::RemovePage(LargePage* page) {
   OldLargeObjectSpace::RemovePage(page);
 }
 
+SharedLargeObjectSpace::SharedLargeObjectSpace(Heap* heap)
+    : OldLargeObjectSpace(heap, SHARED_LO_SPACE) {}
+
+AllocationResult SharedLargeObjectSpace::AllocateRawBackground(
+    LocalHeap* local_heap, int object_size) {
+  DCHECK(!v8_flags.enable_third_party_heap);
+  return OldLargeObjectSpace::AllocateRawBackground(local_heap, object_size,
+                                                    NOT_EXECUTABLE);
+}
+
 }  // namespace internal
 }  // namespace v8
diff --git a/src/heap/large-spaces.h b/src/heap/large-spaces.h
index 70c55833e1..576c672fff 100644
--- a/src/heap/large-spaces.h
+++ b/src/heap/large-spaces.h
@@ -190,6 +190,14 @@ class OldLargeObjectSpace : public LargeObjectSpace {
       LocalHeap* local_heap, int object_size, Executability executable);
 };
 
+class SharedLargeObjectSpace : public OldLargeObjectSpace {
+ public:
+  explicit SharedLargeObjectSpace(Heap* heap);
+
+  V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT AllocationResult
+  AllocateRawBackground(LocalHeap* local_heap, int object_size);
+};
+
 class NewLargeObjectSpace : public LargeObjectSpace {
  public:
   NewLargeObjectSpace(Heap* heap, size_t capacity);
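SharedLargeObjectSpace is a thin subclass: its constructor pins the space identity to SHARED_LO_SPACE, and its two-argument AllocateRawBackground narrows the base class's three-argument overload by always passing NOT_EXECUTABLE. A standalone sketch of that pattern, with hypothetical names rather than V8 types:

```cpp
// Standalone sketch with hypothetical names, not V8 source: a subclass that
// pins its identity in the base constructor and narrows the allocation API
// so callers can never request executable shared large objects.
#include <cstdio>

enum Executability { NOT_EXECUTABLE, EXECUTABLE };

class OldLargeObjectSpaceLike {
 public:
  explicit OldLargeObjectSpaceLike(int space_id) : space_id_(space_id) {}

 protected:
  bool AllocateRawBackground(int object_size, Executability executable) {
    std::printf("space %d: %d bytes, executable=%d\n", space_id_, object_size,
                executable == EXECUTABLE);
    return true;  // pretend the allocation succeeded
  }

 private:
  int space_id_;
};

class SharedLargeObjectSpaceLike : public OldLargeObjectSpaceLike {
 public:
  SharedLargeObjectSpaceLike() : OldLargeObjectSpaceLike(/*SHARED_LO_SPACE*/ 9) {}

  // Narrowed entry point: the executable bit is fixed at NOT_EXECUTABLE.
  bool AllocateRawBackground(int object_size) {
    return OldLargeObjectSpaceLike::AllocateRawBackground(object_size,
                                                          NOT_EXECUTABLE);
  }
};

int main() {
  SharedLargeObjectSpaceLike space;
  return space.AllocateRawBackground(4096) ? 0 : 1;
}
```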
diff --git a/src/heap/paged-spaces.h b/src/heap/paged-spaces.h
index 7241a29b0e..986aed3a31 100644
--- a/src/heap/paged-spaces.h
+++ b/src/heap/paged-spaces.h
@@ -571,6 +571,32 @@ class MapSpace final : public PagedSpace {
   LinearAllocationArea paged_allocation_info_;
 };
 
+// -----------------------------------------------------------------------------
+// Shared space regular object space.
+
+class SharedSpace final : public PagedSpace {
+ public:
+  // Creates an old space object. The constructor does not allocate pages
+  // from OS.
+  explicit SharedSpace(Heap* heap)
+      : PagedSpace(heap, SHARED_SPACE, NOT_EXECUTABLE,
+                   FreeList::CreateFreeList(), allocation_info_) {}
+
+  static bool IsAtPageStart(Address addr) {
+    return static_cast<intptr_t>(addr & kPageAlignmentMask) ==
+           MemoryChunkLayout::ObjectStartOffsetInDataPage();
+  }
+
+  size_t ExternalBackingStoreBytes(ExternalBackingStoreType type) const final {
+    if (type == ExternalBackingStoreType::kArrayBuffer) return 0;
+    DCHECK_EQ(type, ExternalBackingStoreType::kExternalString);
+    return external_backing_store_bytes_[type];
+  }
+
+ private:
+  LinearAllocationArea allocation_info_;
+};
+
 // Iterates over the chunks (pages and large object pages) that can contain
 // pointers to new space or to evacuation candidates.
 class OldGenerationMemoryChunkIterator {
diff --git a/src/snapshot/serializer.cc b/src/snapshot/serializer.cc
index 4410790f19..e9971705ec 100644
--- a/src/snapshot/serializer.cc
+++ b/src/snapshot/serializer.cc
@@ -788,6 +788,8 @@ SnapshotSpace GetSnapshotSpace(HeapObject object) {
       return SnapshotSpace::kCode;
     case MAP_SPACE:
       return SnapshotSpace::kMap;
+    case SHARED_SPACE:
+    case SHARED_LO_SPACE:
     case CODE_LO_SPACE:
     case RO_SPACE:
       UNREACHABLE();
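For reference, SharedSpace::IsAtPageStart in the paged-spaces.h hunk masks the address with kPageAlignmentMask and compares the in-page offset against MemoryChunkLayout::ObjectStartOffsetInDataPage(), the first usable offset past the page header. A standalone sketch with assumed constants (the real values come from V8's MemoryChunkLayout):

```cpp
// Standalone sketch with assumed constants, not V8 source: the check behind
// SharedSpace::IsAtPageStart. An address is "at page start" when its offset
// inside the page equals the first object offset, i.e. the page header size.
#include <cstdint>
#include <cstdio>

constexpr uintptr_t kPageSize = uintptr_t{1} << 18;  // assumed 256 KiB pages
constexpr uintptr_t kPageAlignmentMask = kPageSize - 1;
constexpr intptr_t kObjectStartOffsetInDataPage = 0x880;  // assumed header size

bool IsAtPageStart(uintptr_t addr) {
  return static_cast<intptr_t>(addr & kPageAlignmentMask) ==
         kObjectStartOffsetInDataPage;
}

int main() {
  const uintptr_t page_base = kPageSize * 4;  // some page-aligned address
  std::printf("%d\n", IsAtPageStart(page_base + kObjectStartOffsetInDataPage));  // 1
  std::printf("%d\n", IsAtPageStart(page_base));                                 // 0
  return 0;
}
```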