[heap] Collect shared spaces in full GC for shared heap isolate

This CL implements collection of garbage in the shared spaces in
the shared heap isolate. GC on such an isolate should now work
correctly without worker isolates. Support for worker isolates will
be implemented in a subsequent CL.

Bug: v8:13267
Change-Id: I30125ce3b791e2faa0504d065f23639d6106e6b6
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3904647
Reviewed-by: Jakob Linke <jgruber@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83371}
Author: Dominik Inführ <dinfuehr@chromium.org>
Committed: 2022-09-21 16:42:29 +02:00 by V8 LUCI CQ
Commit: 361e82457a (parent: 85925fc1e0)
13 changed files with 112 additions and 40 deletions
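
For context, the end-to-end behavior this CL enables, as a minimal sketch assembled from the test helpers changed below (IsolateWrapper, CollectGarbage, kNoCounters and the v8_flags.shared_space flag are all taken from those tests; this exact snippet is illustrative, not code from the CL):

// With --shared-space, a regular isolate owns SHARED_SPACE/SHARED_LO_SPACE.
// A full GC on that isolate now also collects the shared spaces, with no
// worker (client) isolates attached.
IsolateWrapper shared_space_isolate_wrapper(kNoCounters);
Isolate* isolate = shared_space_isolate_wrapper.i_isolate();
CHECK(isolate->is_shared_heap_isolate());
::v8::internal::CollectGarbage(OLD_SPACE,
                               reinterpret_cast<v8::Isolate*>(isolate));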

@@ -978,9 +978,9 @@ enum AllocationSpace {
   OLD_SPACE,      // Old generation regular object space.
   CODE_SPACE,     // Old generation code object space, marked executable.
   MAP_SPACE,      // Old generation map object space, non-movable.
+  SHARED_SPACE,   // Space shared between multiple isolates. Optional.
   NEW_SPACE,      // Young generation space for regular objects collected
                   // with Scavenger/MinorMC.
-  SHARED_SPACE,   // Space shared between multiple isolates. Optional.
   LO_SPACE,       // Old generation large object space.
   CODE_LO_SPACE,  // Old generation large code object space.
   NEW_LO_SPACE,   // Young generation large object space.
@@ -991,7 +991,7 @@ enum AllocationSpace {
   FIRST_MUTABLE_SPACE = OLD_SPACE,
   LAST_MUTABLE_SPACE = SHARED_LO_SPACE,
   FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE,
-  LAST_GROWABLE_PAGED_SPACE = MAP_SPACE,
+  LAST_GROWABLE_PAGED_SPACE = SHARED_SPACE,
   FIRST_SWEEPABLE_SPACE = OLD_SPACE,
   LAST_SWEEPABLE_SPACE = NEW_SPACE
 };
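
Widening LAST_GROWABLE_PAGED_SPACE only has an effect because these range markers are used as plain integer loop bounds, so moving SHARED_SPACE in front of NEW_SPACE is what actually places it inside both the growable-paged range and the OLD_SPACE..NEW_SPACE sweepable range. A sketch of the iteration pattern (it mirrors the SizeOfInitialHeap test further down):

// A space participates in a range-based loop iff its enumerator lies
// between FIRST_* and LAST_*; the enum order is therefore load-bearing.
for (int i = FIRST_GROWABLE_PAGED_SPACE; i <= LAST_GROWABLE_PAGED_SPACE; i++) {
  // Now covers OLD_SPACE, CODE_SPACE, MAP_SPACE and SHARED_SPACE.
  PagedSpace* space = heap->paged_space(i);
  if (!space) continue;  // optional spaces (e.g. SHARED_SPACE) may be absent
  // ... per-space work ...
}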

@@ -2009,7 +2009,7 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
   }
 
   // Returns the isolate that owns the shared spaces.
-  Isolate* shared_heap_isolate() {
+  Isolate* shared_heap_isolate() const {
     DCHECK(has_shared_heap());
     Isolate* isolate =
         shared_isolate() ? shared_isolate() : shared_space_isolate();
@@ -2017,6 +2017,10 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
     return isolate;
   }
 
+  bool is_shared_heap_isolate() const {
+    return is_shared() || is_shared_space_isolate();
+  }
+
   GlobalSafepoint* global_safepoint() const { return global_safepoint_.get(); }
 
   bool owns_shareable_data() { return owns_shareable_data_; }
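
A sketch of how the new predicate reads at a call site, covering both heap-sharing models (DescribeIsolate is a hypothetical function, not code from this CL):

void DescribeIsolate(Isolate* isolate) {
  if (!isolate->has_shared_heap()) return;  // shared heap feature unused
  // The owner is either the legacy shared isolate (is_shared()) or, with
  // v8_flags.shared_space, the main isolate itself.
  if (isolate->is_shared_heap_isolate()) {
    CHECK_EQ(isolate->shared_heap_isolate(), isolate);  // owns the spaces
  } else {
    // Client/worker isolate: shared objects live in the owner's heap.
    CHECK_NE(isolate->shared_heap_isolate(), isolate);
  }
}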

@@ -171,7 +171,8 @@ void Heap::SetPendingOptimizeForTestBytecode(Object hash_table) {
 }
 
 PagedSpace* Heap::paged_space(int idx) {
-  DCHECK(idx == OLD_SPACE || idx == CODE_SPACE || idx == MAP_SPACE);
+  DCHECK(idx == OLD_SPACE || idx == CODE_SPACE || idx == MAP_SPACE ||
+         idx == SHARED_SPACE);
   return static_cast<PagedSpace*>(space_[idx].get());
 }

@@ -3659,6 +3659,9 @@ void Heap::MakeHeapIterable() {
     space->MakeLinearAllocationAreaIterable();
   }
 
+  if (v8_flags.shared_space && shared_space_allocator_) {
+    shared_space_allocator_->MakeLinearAllocationAreaIterable();
+  }
+
   if (new_space()) new_space()->MakeLinearAllocationAreaIterable();
 }
@@ -3672,11 +3675,14 @@ void Heap::FreeLinearAllocationAreas() {
     space->FreeLinearAllocationArea();
   }
 
+  if (v8_flags.shared_space && shared_space_allocator_) {
+    shared_space_allocator_->FreeLinearAllocationArea();
+  }
+
   if (new_space()) new_space()->FreeLinearAllocationArea();
 }
 
 void Heap::FreeSharedLinearAllocationAreas() {
-  if (!isolate()->shared_isolate()) return;
+  if (!isolate()->has_shared_heap()) return;
   safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
     local_heap->FreeSharedLinearAllocationArea();
   });
@@ -3684,7 +3690,7 @@ void Heap::FreeSharedLinearAllocationAreas() {
 }
 
 void Heap::FreeMainThreadSharedLinearAllocationAreas() {
-  if (!isolate()->shared_isolate()) return;
+  if (!isolate()->has_shared_heap()) return;
   shared_space_allocator_->FreeLinearAllocationArea();
   if (shared_map_allocator_) shared_map_allocator_->FreeLinearAllocationArea();
   main_thread_local_heap()->FreeSharedLinearAllocationArea();
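
Background on what these hooks maintain: a linear allocation buffer (LAB) is a [top, limit) range an allocator has reserved on a page, and the unused gap is not a parseable object until it is freed or plugged with a filler. The shared-space allocator's LAB now gets the same pre-GC treatment as the local ones, and the guards switch from shared_isolate() to has_shared_heap() so both sharing models are covered. An illustrative sketch (Lab and MakeIterable are invented names; CreateFillerObjectAt stands in for V8's real filler mechanism, modulo its exact signature):

struct Lab {  // illustrative only
  Address top;
  Address limit;
};

void MakeIterable(Heap* heap, Lab* lab) {
  if (lab->top != lab->limit) {
    // Plug the unused tail so a heap walk sees a valid filler object
    // instead of uninitialized memory.
    heap->CreateFillerObjectAt(lab->top,
                               static_cast<int>(lab->limit - lab->top));
  }
}
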
@@ -4333,12 +4339,16 @@ bool Heap::Contains(HeapObject value) const {
   if (memory_allocator()->IsOutsideAllocatedSpace(value.address())) {
     return false;
   }
-  return HasBeenSetUp() &&
-         ((new_space_ && new_space_->Contains(value)) ||
-          old_space_->Contains(value) || code_space_->Contains(value) ||
-          (map_space_ && map_space_->Contains(value)) ||
-          lo_space_->Contains(value) || code_lo_space_->Contains(value) ||
-          (new_lo_space_ && new_lo_space_->Contains(value)));
+
+  if (!HasBeenSetUp()) return false;
+
+  return (new_space_ && new_space_->Contains(value)) ||
+         old_space_->Contains(value) || code_space_->Contains(value) ||
+         (map_space_ && map_space_->Contains(value)) ||
+         (shared_space_ && shared_space_->Contains(value)) ||
+         lo_space_->Contains(value) || code_lo_space_->Contains(value) ||
+         (new_lo_space_ && new_lo_space_->Contains(value)) ||
+         (shared_lo_space_ && shared_lo_space_->Contains(value));
 }
 
 bool Heap::ContainsCode(HeapObject value) const {
@@ -4743,13 +4753,15 @@ void Heap::IterateRoots(RootVisitor* v, base::EnumSet<SkipRoot> options) {
     SerializerDeserializer::IterateStartupObjectCache(isolate_, v);
     v->Synchronize(VisitorSynchronization::kStartupObjectCache);
 
-    // When shared_isolate() is null, isolate_ is either an unshared (instead of
-    // a client) Isolate or the shared Isolate. In both cases isolate_ owns its
-    // shared heap object cache and should iterate it.
+    // Iterate over the shared heap object cache when the isolate owns this
+    // data structure. Isolates which own the shared heap object cache are:
+    // * Shared isolate
+    // * Shared space/main isolate
+    // * All isolates which do not use the shared heap feature.
     //
-    // When shared_isolate() is not null, isolate_ is a client Isolate, does not
-    // own its shared heap object cache, and should not iterate it.
-    if (isolate_->shared_isolate() == nullptr) {
+    // However, worker/client isolates do not own the shared heap object cache
+    // and should not iterate it.
+    if (isolate_->is_shared_heap_isolate() || !isolate_->has_shared_heap()) {
       SerializerDeserializer::IterateSharedHeapObjectCache(isolate_, v);
       v->Synchronize(VisitorSynchronization::kSharedHeapObjectCache);
     }
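
The ownership rule in the rewritten comment, restated as a standalone predicate (a sketch for clarity, not a function in the CL):

bool OwnsSharedHeapObjectCache(Isolate* isolate) {
  // Owners: the legacy shared isolate, the shared space/main isolate, and
  // every isolate that runs without the shared heap feature. Worker/client
  // isolates return false and must not iterate the cache.
  return isolate->is_shared_heap_isolate() || !isolate->has_shared_heap();
}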

@@ -879,10 +879,12 @@ class Heap {
   inline PagedNewSpace* paged_new_space() const;
   OldSpace* old_space() const { return old_space_; }
   CodeSpace* code_space() const { return code_space_; }
+  SharedSpace* shared_space() const { return shared_space_; }
   MapSpace* map_space() const { return map_space_; }
   inline PagedSpace* space_for_maps();
   OldLargeObjectSpace* lo_space() const { return lo_space_; }
   CodeLargeObjectSpace* code_lo_space() const { return code_lo_space_; }
+  SharedLargeObjectSpace* shared_lo_space() const { return shared_lo_space_; }
   NewLargeObjectSpace* new_lo_space() const { return new_lo_space_; }
   ReadOnlySpace* read_only_space() const { return read_only_space_; }

@@ -585,6 +585,10 @@ bool MarkCompactCollector::StartCompaction(StartCompactionMode mode) {
     CollectEvacuationCandidates(heap()->map_space());
   }
 
+  if (heap()->shared_space()) {
+    CollectEvacuationCandidates(heap()->shared_space());
+  }
+
   if (v8_flags.compact_code_space &&
       (!heap()->IsGCWithStack() || v8_flags.compact_code_space_with_stack)) {
     CollectEvacuationCandidates(heap()->code_space());
@@ -731,6 +735,9 @@ void MarkCompactCollector::EnsureSweepingCompleted(
         "access to Code page headers");
     heap()->code_space()->RefillFreeList(sweeper());
   }
+  if (heap()->shared_space()) {
+    heap()->shared_space()->RefillFreeList(sweeper());
+  }
   if (heap()->map_space()) {
     heap()->map_space()->RefillFreeList(sweeper());
     heap()->map_space()->SortFreeList();
@@ -820,7 +827,7 @@ void MarkCompactCollector::ComputeEvacuationHeuristics(
 
 void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
   DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE ||
-         space->identity() == MAP_SPACE);
+         space->identity() == MAP_SPACE || space->identity() == SHARED_SPACE);
 
   int number_of_pages = space->CountTotalPages();
   size_t area_size = space->AreaSize();
@@ -1081,6 +1088,7 @@ void MarkCompactCollector::VerifyMarking() {
   heap()->old_space()->VerifyLiveBytes();
   if (heap()->map_space()) heap()->map_space()->VerifyLiveBytes();
   heap()->code_space()->VerifyLiveBytes();
+  if (heap()->shared_space()) heap()->shared_space()->VerifyLiveBytes();
   if (v8_flags.minor_mc && heap()->paged_new_space())
     heap()->paged_new_space()->paged_space()->VerifyLiveBytes();
 }
@@ -1177,7 +1185,11 @@ void MarkCompactCollector::SweepArrayBufferExtensions() {
 class MarkCompactCollector::RootMarkingVisitor final : public RootVisitor {
  public:
   explicit RootMarkingVisitor(MarkCompactCollector* collector)
-      : collector_(collector), is_shared_heap_(collector->is_shared_heap()) {}
+      : collector_(collector),
+        uses_shared_heap_(collector->heap()->isolate()->has_shared_heap() ||
+                          collector->heap()->isolate()->is_shared()),
+        is_shared_heap_isolate_(
+            collector->heap()->isolate()->is_shared_heap_isolate()) {}
 
   void VisitRootPointer(Root root, const char* description,
                         FullObjectSlot p) final {
@@ -1233,14 +1245,23 @@ class MarkCompactCollector::RootMarkingVisitor final : public RootVisitor {
     Object object = *p;
     if (!object.IsHeapObject()) return;
     HeapObject heap_object = HeapObject::cast(object);
-    BasicMemoryChunk* target_page =
-        BasicMemoryChunk::FromHeapObject(heap_object);
-    if (is_shared_heap_ != target_page->InSharedHeap()) return;
+    if (!ShouldMarkObject(heap_object)) return;
     collector_->MarkRootObject(root, heap_object);
   }
 
+  bool ShouldMarkObject(HeapObject object) const {
+    if (V8_LIKELY(!uses_shared_heap_)) return true;
+    if (v8_flags.shared_space) {
+      if (is_shared_heap_isolate_) return true;
+      return !object.InSharedHeap();
+    } else {
+      return is_shared_heap_isolate_ == object.InSharedHeap();
+    }
+  }
+
   MarkCompactCollector* const collector_;
-  const bool is_shared_heap_;
+  const bool uses_shared_heap_;
+  const bool is_shared_heap_isolate_;
 };
 
 // This visitor is used to visit the body of special objects held alive by
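
ShouldMarkObject decides which isolate's root marking visits which objects. The same decision table, restated as a standalone truth function (a sketch; parameter names are invented):

bool ShouldMark(bool uses_shared_heap, bool is_owner_isolate,
                bool object_in_shared_heap) {
  if (!uses_shared_heap) return true;  // no sharing: mark everything
  if (v8_flags.shared_space) {
    // The shared space isolate also owns local spaces, so it marks both
    // shared and local objects; clients leave shared objects to the owner.
    return is_owner_isolate || !object_in_shared_heap;
  }
  // Legacy model: the shared isolate marks only shared objects, and client
  // isolates mark only their local ones.
  return is_owner_isolate == object_in_shared_heap;
}
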
@@ -5317,11 +5338,21 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
     CollectRememberedSetUpdatingItems(this, &updating_items,
                                       heap()->code_space(),
                                       RememberedSetUpdatingMode::ALL);
+    if (heap()->shared_space()) {
+      CollectRememberedSetUpdatingItems(this, &updating_items,
+                                        heap()->shared_space(),
+                                        RememberedSetUpdatingMode::ALL);
+    }
     CollectRememberedSetUpdatingItems(this, &updating_items, heap()->lo_space(),
                                       RememberedSetUpdatingMode::ALL);
     CollectRememberedSetUpdatingItems(this, &updating_items,
                                       heap()->code_lo_space(),
                                       RememberedSetUpdatingMode::ALL);
+    if (heap()->shared_lo_space()) {
+      CollectRememberedSetUpdatingItems(this, &updating_items,
+                                        heap()->shared_lo_space(),
+                                        RememberedSetUpdatingMode::ALL);
+    }
     if (heap()->map_space()) {
       CollectRememberedSetUpdatingItems(this, &updating_items,
                                         heap()->map_space(),
@@ -5649,6 +5680,11 @@ void MarkCompactCollector::Sweep() {
         heap()->tracer(), GCTracer::Scope::MC_SWEEP_MAP, ThreadKind::kMain);
     StartSweepSpace(heap()->map_space());
   }
+  if (heap()->shared_space()) {
+    GCTracer::Scope sweep_scope(
+        heap()->tracer(), GCTracer::Scope::MC_SWEEP_SHARED, ThreadKind::kMain);
+    StartSweepSpace(heap()->shared_space());
+  }
   if (v8_flags.minor_mc && heap()->new_space()) {
     GCTracer::Scope sweep_scope(
         heap()->tracer(), GCTracer::Scope::MC_SWEEP_NEW, ThreadKind::kMain);

@@ -143,7 +143,8 @@ void PagedSpaceBase::RefillFreeList(Sweeper* sweeper) {
   // Any PagedSpace might invoke RefillFreeList. We filter all but our old
   // generation spaces out.
   DCHECK(identity() == OLD_SPACE || identity() == CODE_SPACE ||
-         identity() == MAP_SPACE || identity() == NEW_SPACE);
+         identity() == MAP_SPACE || identity() == NEW_SPACE ||
+         identity() == SHARED_SPACE);
 
   size_t added = 0;

@@ -355,7 +355,7 @@ int Sweeper::RawSweep(
   Space* space = p->owner();
   DCHECK_NOT_NULL(space);
   DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE ||
-         space->identity() == MAP_SPACE ||
+         space->identity() == MAP_SPACE || space->identity() == SHARED_SPACE ||
          (space->identity() == NEW_SPACE && v8_flags.minor_mc));
   DCHECK_IMPLIES(space->identity() == NEW_SPACE,
                  sweeping_mode == SweepingMode::kEagerDuringGC);

@@ -129,6 +129,7 @@ class Sweeper {
     callback(OLD_SPACE);
     callback(CODE_SPACE);
     callback(MAP_SPACE);
+    callback(SHARED_SPACE);
   }
 
   // Helper function for RawSweep. Depending on the FreeListRebuildingMode and
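
ForAllSweepingSpaces now hands SHARED_SPACE to the callback as well, so every generic per-space sweeping loop picks it up automatically. An illustrative caller (the lambda body and its use of ParallelSweepSpace are hypothetical, not taken from this CL):

ForAllSweepingSpaces([this](AllocationSpace space) {
  // Each sweepable identity, SHARED_SPACE included, gets its pending
  // pages processed.
  ParallelSweepSpace(space, SweepingMode::kLazyOrConcurrent, 0);
});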

@@ -604,12 +604,13 @@
   F(MC_MARK_WEAK_CLOSURE_EPHEMERON_LINEAR)  \
   F(MC_SWEEP_CODE)                          \
   F(MC_SWEEP_CODE_LO)                       \
+  F(MC_SWEEP_FINISH_NEW_LO)                 \
+  F(MC_SWEEP_FINISH_NEW)                    \
   F(MC_SWEEP_LO)                            \
   F(MC_SWEEP_MAP)                           \
   F(MC_SWEEP_NEW)                           \
   F(MC_SWEEP_OLD)                           \
-  F(MC_SWEEP_FINISH_NEW_LO)                 \
-  F(MC_SWEEP_FINISH_NEW)                    \
+  F(MC_SWEEP_SHARED)                        \
   F(MINOR_MARK_COMPACTOR)                   \
   F(MINOR_MC)                               \
   TOP_MINOR_MC_SCOPES(F)                    \

@@ -448,8 +448,7 @@ TEST(SizeOfInitialHeap) {
   Heap* heap = isolate->heap();
   for (int i = FIRST_GROWABLE_PAGED_SPACE; i <= LAST_GROWABLE_PAGED_SPACE;
        i++) {
-    // Map space might be disabled.
-    if (i == MAP_SPACE && !heap->paged_space(i)) continue;
+    if (!heap->paged_space(i)) continue;
 
     // Debug code can be very large, so skip CODE_SPACE if we are generating it.
     if (i == CODE_SPACE && i::v8_flags.debug_code) continue;

@@ -19,23 +19,33 @@ using SharedHeapTest = TestJSSharedMemoryWithIsolate;
 class SharedHeapNoClientsTest : public TestJSSharedMemoryWithPlatform {
  public:
   SharedHeapNoClientsTest() {
-    bool created;
-    shared_isolate_ = Isolate::GetProcessWideSharedIsolate(&created);
-    CHECK(created);
+    if (v8_flags.shared_space) {
+      shared_space_isolate_wrapper.emplace(kNoCounters);
+      shared_isolate_ = shared_space_isolate_wrapper->i_isolate();
+    } else {
+      bool created;
+      shared_isolate_ = Isolate::GetProcessWideSharedIsolate(&created);
+      CHECK(created);
+    }
   }
 
   ~SharedHeapNoClientsTest() override {
-    Isolate::DeleteProcessWideSharedIsolate();
+    if (!v8_flags.shared_space) {
+      Isolate::DeleteProcessWideSharedIsolate();
+    }
+
     shared_isolate_ = nullptr;
   }
 
-  v8::Isolate* shared_isolate() {
-    return reinterpret_cast<v8::Isolate*>(i_shared_isolate());
+  v8::Isolate* shared_heap_isolate() {
+    return reinterpret_cast<v8::Isolate*>(i_shared_heap_isolate());
   }
 
-  Isolate* i_shared_isolate() { return shared_isolate_; }
+  Isolate* i_shared_heap_isolate() { return shared_isolate_; }
 
  private:
   Isolate* shared_isolate_;
+  base::Optional<IsolateWrapper> shared_space_isolate_wrapper;
 };
 
 namespace {
@@ -182,10 +192,12 @@ TEST_F(SharedHeapTest, ConcurrentAllocationInSharedMapSpace) {
 }
 
 TEST_F(SharedHeapNoClientsTest, SharedCollectionWithoutClients) {
-  DCHECK_NULL(i_shared_isolate()->heap()->new_space());
-  DCHECK_NULL(i_shared_isolate()->heap()->new_lo_space());
+  if (!v8_flags.shared_space) {
+    DCHECK_NULL(i_shared_heap_isolate()->heap()->new_space());
+    DCHECK_NULL(i_shared_heap_isolate()->heap()->new_lo_space());
+  }
 
-  ::v8::internal::CollectGarbage(OLD_SPACE, shared_isolate());
+  ::v8::internal::CollectGarbage(OLD_SPACE, shared_heap_isolate());
 }
 
 void AllocateInSharedHeap(int iterations = 100) {
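
The same test now exercises two setups: without --shared-space, a dedicated process-wide shared isolate that has no young generation (hence the DCHECK_NULLs); with --shared-space, a regular isolate that owns the shared spaces and does have a new space, so the checks are skipped. A hypothetical companion test, sketched here only to show that repeated shared collections also work without clients:

TEST_F(SharedHeapNoClientsTest, SharedCollectionWithoutClientsTwice) {
  // Back-to-back full GCs on the shared heap isolate; sketch only.
  ::v8::internal::CollectGarbage(OLD_SPACE, shared_heap_isolate());
  ::v8::internal::CollectGarbage(OLD_SPACE, shared_heap_isolate());
}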

@@ -80,6 +80,9 @@ class IsolateWrapper final {
   IsolateWrapper& operator=(const IsolateWrapper&) = delete;
 
   v8::Isolate* isolate() const { return isolate_; }
+  i::Isolate* i_isolate() const {
+    return reinterpret_cast<i::Isolate*>(isolate_);
+  }
 
  private:
   std::unique_ptr<v8::ArrayBuffer::Allocator> array_buffer_allocator_;