[heap] Move Sweeper from collectors to heap

This is needed to simplify concurrent sweeping for MinorMC.
Also: move evacuation verifiers to a separate file so that they can be
used from heap.cc as well.

Bug: v8:12612
Change-Id: I2a738a31e83a357f4fdded8a30ccb2ff6ba70553
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3932720
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83557}

parent ca3d846d40
commit 096fefc03a
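
For context, the practical effect of this CL at call sites is that sweeping is now queried and finalized on Heap instead of on MarkCompactCollector. A minimal, illustrative sketch of that migration (the calls mirror the ones updated in this diff; the surrounding heap/object variables are assumed to be in scope):

    // Before: sweeping was owned and finalized by the mark-compact collector.
    heap->mark_compact_collector()->EnsureSweepingCompleted(
        MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);

    // After: Heap owns the Sweeper, so callers go through the heap directly.
    if (heap->sweeping_in_progress()) {
      heap->EnsureSweepingCompleted(
          Heap::SweepingForcedFinalizationMode::kV8Only);
    }

    // The per-object variant is renamed to avoid clashing with the new overload.
    heap->EnsureSweepingCompletedForObject(object);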
@@ -1437,6 +1437,9 @@ filegroup(
    "src/heap/embedder-tracing.cc",
    "src/heap/embedder-tracing.h",
    "src/heap/embedder-tracing-inl.h",
    "src/heap/evacuation-verifier.cc",
    "src/heap/evacuation-verifier.h",
    "src/heap/evacuation-verifier-inl.h",
    "src/heap/factory-base.cc",
    "src/heap/factory-base.h",
    "src/heap/factory-base-inl.h",
3 BUILD.gn
@@ -3042,6 +3042,8 @@ v8_header_set("v8_internal_headers") {
    "src/heap/embedder-tracing.h",
    "src/heap/evacuation-allocator-inl.h",
    "src/heap/evacuation-allocator.h",
    "src/heap/evacuation-verifier-inl.h",
    "src/heap/evacuation-verifier.h",
    "src/heap/factory-base-inl.h",
    "src/heap/factory-base.h",
    "src/heap/factory-inl.h",
@@ -4432,6 +4434,7 @@ v8_source_set("v8_base_without_compiler") {
    "src/heap/cppgc-js/unified-heap-marking-verifier.cc",
    "src/heap/cppgc-js/unified-heap-marking-visitor.cc",
    "src/heap/embedder-tracing.cc",
    "src/heap/evacuation-verifier.cc",
    "src/heap/factory-base.cc",
    "src/heap/factory.cc",
    "src/heap/finalization-registry-cleanup-task.cc",
@@ -566,8 +566,8 @@ Handle<Object> TranslatedValue::GetValue() {
  //    headers.
  // TODO(hpayer): Find a cleaner way to support a group of
  //    non-fully-initialized objects.
  isolate()->heap()->mark_compact_collector()->EnsureSweepingCompleted(
      MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
  isolate()->heap()->EnsureSweepingCompleted(
      Heap::SweepingForcedFinalizationMode::kV8Only);

  // 2. Initialize the objects. If we have allocated only byte arrays
  //    for some objects, we now overwrite the byte arrays with the
@@ -109,7 +109,6 @@ ArrayBufferSweeper::~ArrayBufferSweeper() {
void ArrayBufferSweeper::EnsureFinished() {
  if (!sweeping_in_progress()) return;

  TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_COMPLETE_SWEEP_ARRAY_BUFFERS);
  TryAbortResult abort_result =
      heap_->isolate()->cancelable_task_manager()->TryAbort(job_->id_);
64 src/heap/evacuation-verifier-inl.h Normal file
@@ -0,0 +1,64 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_EVACUATION_VERIFIER_INL_H_
#define V8_HEAP_EVACUATION_VERIFIER_INL_H_

#include "src/heap/evacuation-verifier.h"
#include "src/heap/heap-inl.h"
#include "src/heap/mark-compact.h"

namespace v8 {
namespace internal {

#ifdef VERIFY_HEAP

void FullEvacuationVerifier::VerifyHeapObjectImpl(HeapObject heap_object) {
  if (!ShouldVerifyObject(heap_object)) return;
  CHECK_IMPLIES(Heap::InYoungGeneration(heap_object),
                Heap::InToPage(heap_object));
  CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(heap_object));
}

bool FullEvacuationVerifier::ShouldVerifyObject(HeapObject heap_object) {
  const bool in_shared_heap = heap_object.InSharedWritableHeap();
  return heap_->isolate()->is_shared_heap_isolate() ? in_shared_heap
                                                    : !in_shared_heap;
}

template <typename TSlot>
void FullEvacuationVerifier::VerifyPointersImpl(TSlot start, TSlot end) {
  for (TSlot current = start; current < end; ++current) {
    typename TSlot::TObject object = current.load(cage_base());
    HeapObject heap_object;
    if (object.GetHeapObjectIfStrong(&heap_object)) {
      VerifyHeapObjectImpl(heap_object);
    }
  }
}

void YoungGenerationEvacuationVerifier::VerifyHeapObjectImpl(
    HeapObject heap_object) {
  CHECK_IMPLIES(Heap::InYoungGeneration(heap_object),
                Heap::InToPage(heap_object));
}

template <typename TSlot>
void YoungGenerationEvacuationVerifier::VerifyPointersImpl(TSlot start,
                                                           TSlot end) {
  for (TSlot current = start; current < end; ++current) {
    typename TSlot::TObject object = current.load(cage_base());
    HeapObject heap_object;
    if (object.GetHeapObject(&heap_object)) {
      VerifyHeapObjectImpl(heap_object);
    }
  }
}

#endif  // VERIFY_HEAP

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_EVACUATION_VERIFIER_INL_H_
179 src/heap/evacuation-verifier.cc Normal file
@@ -0,0 +1,179 @@
|
||||
// Copyright 2022 the V8 project authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
|
||||
#include "src/codegen/assembler-inl.h"
|
||||
#include "src/codegen/reloc-info.h"
|
||||
#include "src/heap/evacuation-verifier-inl.h"
|
||||
#include "src/objects/map-inl.h"
|
||||
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
#ifdef VERIFY_HEAP
|
||||
|
||||
EvacuationVerifier::EvacuationVerifier(Heap* heap)
|
||||
: ObjectVisitorWithCageBases(heap), heap_(heap) {}
|
||||
|
||||
void EvacuationVerifier::VisitPointers(HeapObject host, ObjectSlot start,
|
||||
ObjectSlot end) {
|
||||
VerifyPointers(start, end);
|
||||
}
|
||||
|
||||
void EvacuationVerifier::VisitPointers(HeapObject host, MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) {
|
||||
VerifyPointers(start, end);
|
||||
}
|
||||
|
||||
void EvacuationVerifier::VisitCodePointer(HeapObject host,
|
||||
CodeObjectSlot slot) {
|
||||
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
|
||||
VerifyCodePointer(slot);
|
||||
}
|
||||
|
||||
void EvacuationVerifier::VisitRootPointers(Root root, const char* description,
|
||||
FullObjectSlot start,
|
||||
FullObjectSlot end) {
|
||||
VerifyRootPointers(start, end);
|
||||
}
|
||||
|
||||
void EvacuationVerifier::VisitMapPointer(HeapObject object) {
|
||||
VerifyMap(object.map(cage_base()));
|
||||
}
|
||||
void EvacuationVerifier::VerifyRoots() {
|
||||
heap_->IterateRootsIncludingClients(this,
|
||||
base::EnumSet<SkipRoot>{SkipRoot::kWeak});
|
||||
}
|
||||
|
||||
void EvacuationVerifier::VerifyEvacuationOnPage(Address start, Address end) {
|
||||
Address current = start;
|
||||
while (current < end) {
|
||||
HeapObject object = HeapObject::FromAddress(current);
|
||||
if (!object.IsFreeSpaceOrFiller(cage_base())) {
|
||||
object.Iterate(cage_base(), this);
|
||||
}
|
||||
current += ALIGN_TO_ALLOCATION_ALIGNMENT(object.Size(cage_base()));
|
||||
}
|
||||
}
|
||||
|
||||
void EvacuationVerifier::VerifyEvacuation(NewSpace* space) {
|
||||
if (!space) return;
|
||||
if (v8_flags.minor_mc) {
|
||||
VerifyEvacuation(PagedNewSpace::From(space)->paged_space());
|
||||
return;
|
||||
}
|
||||
PageRange range(space->first_allocatable_address(), space->top());
|
||||
for (auto it = range.begin(); it != range.end();) {
|
||||
Page* page = *(it++);
|
||||
Address current = page->area_start();
|
||||
Address limit = it != range.end() ? page->area_end() : space->top();
|
||||
CHECK(limit == space->top() || !page->Contains(space->top()));
|
||||
VerifyEvacuationOnPage(current, limit);
|
||||
}
|
||||
}
|
||||
|
||||
void EvacuationVerifier::VerifyEvacuation(PagedSpaceBase* space) {
|
||||
for (Page* p : *space) {
|
||||
if (p->IsEvacuationCandidate()) continue;
|
||||
if (p->Contains(space->top())) {
|
||||
CodePageMemoryModificationScope memory_modification_scope(p);
|
||||
heap_->CreateFillerObjectAt(
|
||||
space->top(), static_cast<int>(space->limit() - space->top()));
|
||||
}
|
||||
VerifyEvacuationOnPage(p->area_start(), p->area_end());
|
||||
}
|
||||
}
|
||||
|
||||
FullEvacuationVerifier::FullEvacuationVerifier(Heap* heap)
|
||||
: EvacuationVerifier(heap) {}
|
||||
|
||||
void FullEvacuationVerifier::Run() {
|
||||
DCHECK(!heap_->sweeping_in_progress());
|
||||
VerifyRoots();
|
||||
VerifyEvacuation(heap_->new_space());
|
||||
VerifyEvacuation(heap_->old_space());
|
||||
VerifyEvacuation(heap_->code_space());
|
||||
if (heap_->shared_space()) VerifyEvacuation(heap_->shared_space());
|
||||
if (heap_->map_space()) VerifyEvacuation(heap_->map_space());
|
||||
}
|
||||
|
||||
void FullEvacuationVerifier::VerifyMap(Map map) { VerifyHeapObjectImpl(map); }
|
||||
void FullEvacuationVerifier::VerifyPointers(ObjectSlot start, ObjectSlot end) {
|
||||
VerifyPointersImpl(start, end);
|
||||
}
|
||||
void FullEvacuationVerifier::VerifyPointers(MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) {
|
||||
VerifyPointersImpl(start, end);
|
||||
}
|
||||
void FullEvacuationVerifier::VerifyCodePointer(CodeObjectSlot slot) {
|
||||
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
|
||||
Object maybe_code = slot.load(code_cage_base());
|
||||
HeapObject code;
|
||||
// The slot might contain smi during CodeDataContainer creation, so skip it.
|
||||
if (maybe_code.GetHeapObject(&code)) {
|
||||
VerifyHeapObjectImpl(code);
|
||||
}
|
||||
}
|
||||
void FullEvacuationVerifier::VisitCodeTarget(Code host, RelocInfo* rinfo) {
|
||||
Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
|
||||
VerifyHeapObjectImpl(target);
|
||||
}
|
||||
void FullEvacuationVerifier::VisitEmbeddedPointer(Code host, RelocInfo* rinfo) {
|
||||
VerifyHeapObjectImpl(rinfo->target_object(cage_base()));
|
||||
}
|
||||
void FullEvacuationVerifier::VerifyRootPointers(FullObjectSlot start,
|
||||
FullObjectSlot end) {
|
||||
VerifyPointersImpl(start, end);
|
||||
}
|
||||
|
||||
YoungGenerationEvacuationVerifier::YoungGenerationEvacuationVerifier(Heap* heap)
|
||||
: EvacuationVerifier(heap) {}
|
||||
|
||||
void YoungGenerationEvacuationVerifier::YoungGenerationEvacuationVerifier::
|
||||
Run() {
|
||||
DCHECK(!heap_->sweeping_in_progress());
|
||||
VerifyRoots();
|
||||
VerifyEvacuation(heap_->new_space());
|
||||
VerifyEvacuation(heap_->old_space());
|
||||
VerifyEvacuation(heap_->code_space());
|
||||
if (heap_->map_space()) VerifyEvacuation(heap_->map_space());
|
||||
}
|
||||
|
||||
void YoungGenerationEvacuationVerifier::VerifyMap(Map map) {
|
||||
VerifyHeapObjectImpl(map);
|
||||
}
|
||||
void YoungGenerationEvacuationVerifier::VerifyPointers(ObjectSlot start,
|
||||
ObjectSlot end) {
|
||||
VerifyPointersImpl(start, end);
|
||||
}
|
||||
void YoungGenerationEvacuationVerifier::VerifyPointers(MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) {
|
||||
VerifyPointersImpl(start, end);
|
||||
}
|
||||
void YoungGenerationEvacuationVerifier::VerifyCodePointer(CodeObjectSlot slot) {
|
||||
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
|
||||
Object maybe_code = slot.load(code_cage_base());
|
||||
HeapObject code;
|
||||
// The slot might contain smi during CodeDataContainer creation, so skip it.
|
||||
if (maybe_code.GetHeapObject(&code)) {
|
||||
VerifyHeapObjectImpl(code);
|
||||
}
|
||||
}
|
||||
void YoungGenerationEvacuationVerifier::VisitCodeTarget(Code host,
|
||||
RelocInfo* rinfo) {
|
||||
Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
|
||||
VerifyHeapObjectImpl(target);
|
||||
}
|
||||
void YoungGenerationEvacuationVerifier::VisitEmbeddedPointer(Code host,
|
||||
RelocInfo* rinfo) {
|
||||
VerifyHeapObjectImpl(rinfo->target_object(cage_base()));
|
||||
}
|
||||
void YoungGenerationEvacuationVerifier::VerifyRootPointers(FullObjectSlot start,
|
||||
FullObjectSlot end) {
|
||||
VerifyPointersImpl(start, end);
|
||||
}
|
||||
|
||||
#endif // VERIFY_HEAP
|
||||
|
||||
} // namespace internal
|
||||
} // namespace v8
104 src/heap/evacuation-verifier.h Normal file
@@ -0,0 +1,104 @@
|
||||
// Copyright 2022 the V8 project authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
|
||||
#ifndef V8_HEAP_EVACUATION_VERIFIER_H_
|
||||
#define V8_HEAP_EVACUATION_VERIFIER_H_
|
||||
|
||||
#include "src/heap/new-spaces.h"
|
||||
#include "src/heap/paged-spaces.h"
|
||||
#include "src/objects/map.h"
|
||||
#include "src/objects/visitors.h"
|
||||
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
#ifdef VERIFY_HEAP
|
||||
|
||||
class EvacuationVerifier : public ObjectVisitorWithCageBases,
|
||||
public RootVisitor {
|
||||
public:
|
||||
virtual void Run() = 0;
|
||||
|
||||
void VisitPointers(HeapObject host, ObjectSlot start,
|
||||
ObjectSlot end) override;
|
||||
|
||||
void VisitPointers(HeapObject host, MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) override;
|
||||
|
||||
void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override;
|
||||
|
||||
void VisitRootPointers(Root root, const char* description,
|
||||
FullObjectSlot start, FullObjectSlot end) override;
|
||||
|
||||
void VisitMapPointer(HeapObject object) override;
|
||||
|
||||
protected:
|
||||
explicit EvacuationVerifier(Heap* heap);
|
||||
|
||||
inline Heap* heap() { return heap_; }
|
||||
|
||||
virtual void VerifyMap(Map map) = 0;
|
||||
virtual void VerifyPointers(ObjectSlot start, ObjectSlot end) = 0;
|
||||
virtual void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) = 0;
|
||||
virtual void VerifyCodePointer(CodeObjectSlot slot) = 0;
|
||||
virtual void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) = 0;
|
||||
|
||||
void VerifyRoots();
|
||||
void VerifyEvacuationOnPage(Address start, Address end);
|
||||
void VerifyEvacuation(NewSpace* new_space);
|
||||
void VerifyEvacuation(PagedSpaceBase* paged_space);
|
||||
|
||||
Heap* heap_;
|
||||
};
|
||||
|
||||
class FullEvacuationVerifier : public EvacuationVerifier {
|
||||
public:
|
||||
explicit FullEvacuationVerifier(Heap* heap);
|
||||
|
||||
void Run() override;
|
||||
|
||||
protected:
|
||||
V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object);
|
||||
|
||||
V8_INLINE bool ShouldVerifyObject(HeapObject heap_object);
|
||||
|
||||
template <typename TSlot>
|
||||
void VerifyPointersImpl(TSlot start, TSlot end);
|
||||
|
||||
void VerifyMap(Map map) override;
|
||||
void VerifyPointers(ObjectSlot start, ObjectSlot end) override;
|
||||
void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override;
|
||||
void VerifyCodePointer(CodeObjectSlot slot) override;
|
||||
void VisitCodeTarget(Code host, RelocInfo* rinfo) override;
|
||||
void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override;
|
||||
void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override;
|
||||
};
|
||||
|
||||
class YoungGenerationEvacuationVerifier : public EvacuationVerifier {
|
||||
public:
|
||||
explicit YoungGenerationEvacuationVerifier(Heap* heap);
|
||||
|
||||
void Run() override;
|
||||
|
||||
protected:
|
||||
V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object);
|
||||
|
||||
template <typename TSlot>
|
||||
void VerifyPointersImpl(TSlot start, TSlot end);
|
||||
|
||||
void VerifyMap(Map map) override;
|
||||
void VerifyPointers(ObjectSlot start, ObjectSlot end) override;
|
||||
void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override;
|
||||
void VerifyCodePointer(CodeObjectSlot slot) override;
|
||||
void VisitCodeTarget(Code host, RelocInfo* rinfo) override;
|
||||
void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override;
|
||||
void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override;
|
||||
};
|
||||
|
||||
#endif // VERIFY_HEAP
|
||||
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
||||
#endif // V8_HEAP_EVACUATION_VERIFIER_H_
105 src/heap/heap.cc
@@ -34,6 +34,7 @@
|
||||
#include "src/execution/microtask-queue.h"
|
||||
#include "src/execution/v8threads.h"
|
||||
#include "src/execution/vm-state-inl.h"
|
||||
#include "src/flags/flags.h"
|
||||
#include "src/handles/global-handles-inl.h"
|
||||
#include "src/heap/array-buffer-sweeper.h"
|
||||
#include "src/heap/base/stack.h"
|
||||
@ -47,6 +48,7 @@
|
||||
#include "src/heap/concurrent-marking.h"
|
||||
#include "src/heap/cppgc-js/cpp-heap.h"
|
||||
#include "src/heap/embedder-tracing.h"
|
||||
#include "src/heap/evacuation-verifier-inl.h"
|
||||
#include "src/heap/finalization-registry-cleanup-task.h"
|
||||
#include "src/heap/gc-idle-time-handler.h"
|
||||
#include "src/heap/gc-tracer-inl.h"
|
||||
@ -1861,11 +1863,13 @@ void Heap::StartIncrementalMarking(int gc_flags,
|
||||
}
|
||||
|
||||
void Heap::CompleteSweepingFull() {
|
||||
array_buffer_sweeper()->EnsureFinished();
|
||||
mark_compact_collector()->EnsureSweepingCompleted(
|
||||
MarkCompactCollector::SweepingForcedFinalizationMode::kUnifiedHeap);
|
||||
{
|
||||
TRACE_GC(tracer(), GCTracer::Scope::MC_COMPLETE_SWEEP_ARRAY_BUFFERS);
|
||||
array_buffer_sweeper()->EnsureFinished();
|
||||
}
|
||||
EnsureSweepingCompleted(SweepingForcedFinalizationMode::kUnifiedHeap);
|
||||
|
||||
DCHECK(!mark_compact_collector()->sweeping_in_progress());
|
||||
DCHECK(!sweeping_in_progress());
|
||||
DCHECK_IMPLIES(cpp_heap(),
|
||||
!CppHeap::From(cpp_heap())->sweeper().IsSweepingInProgress());
|
||||
DCHECK(!tracer()->IsSweepingInProgress());
|
||||
@ -2355,10 +2359,16 @@ void Heap::CompleteSweepingYoung(GarbageCollector collector) {
|
||||
array_buffer_sweeper()->EnsureFinished();
|
||||
}
|
||||
|
||||
// If sweeping is in progress and there are no sweeper tasks running, finish
|
||||
// the sweeping here, to avoid having to pause and resume during the young
|
||||
// generation GC.
|
||||
mark_compact_collector()->FinishSweepingIfOutOfWork();
|
||||
if (v8_flags.minor_mc) {
|
||||
DCHECK(v8_flags.separate_gc_phases);
|
||||
// Do not interleave sweeping.
|
||||
EnsureSweepingCompleted(SweepingForcedFinalizationMode::kV8Only);
|
||||
} else {
|
||||
// If sweeping is in progress and there are no sweeper tasks running, finish
|
||||
// the sweeping here, to avoid having to pause and resume during the young
|
||||
// generation GC.
|
||||
FinishSweepingIfOutOfWork();
|
||||
}
|
||||
|
||||
#if defined(CPPGC_YOUNG_GENERATION)
|
||||
// Always complete sweeping if young generation is enabled.
|
||||
@ -2370,8 +2380,8 @@ void Heap::CompleteSweepingYoung(GarbageCollector collector) {
|
||||
#endif // defined(CPPGC_YOUNG_GENERATION)
|
||||
}
|
||||
|
||||
void Heap::EnsureSweepingCompleted(HeapObject object) {
|
||||
if (!mark_compact_collector()->sweeping_in_progress()) return;
|
||||
void Heap::EnsureSweepingCompletedForObject(HeapObject object) {
|
||||
if (!sweeping_in_progress()) return;
|
||||
|
||||
BasicMemoryChunk* basic_chunk = BasicMemoryChunk::FromHeapObject(object);
|
||||
if (basic_chunk->InReadOnlySpace()) return;
|
||||
@ -2383,7 +2393,7 @@ void Heap::EnsureSweepingCompleted(HeapObject object) {
|
||||
DCHECK(!chunk->IsLargePage());
|
||||
|
||||
Page* page = Page::cast(chunk);
|
||||
mark_compact_collector()->EnsurePageIsSwept(page);
|
||||
sweeper()->EnsurePageIsSwept(page);
|
||||
}
|
||||
|
||||
void Heap::RecomputeLimits(GarbageCollector collector) {
|
||||
@ -3497,8 +3507,7 @@ void Heap::CreateFillerForArray(T object, int elements_to_trim,
|
||||
}
|
||||
|
||||
void Heap::MakeHeapIterable() {
|
||||
mark_compact_collector()->EnsureSweepingCompleted(
|
||||
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
|
||||
EnsureSweepingCompleted(SweepingForcedFinalizationMode::kV8Only);
|
||||
|
||||
safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
|
||||
local_heap->MakeLinearAllocationAreaIterable();
|
||||
@ -4349,7 +4358,7 @@ void Heap::VerifyCountersBeforeConcurrentSweeping() {
|
||||
// are just an over approximation.
|
||||
{
|
||||
TRACE_GC_EPOCH(tracer(), GCTracer::Scope::MC_SWEEP, ThreadKind::kMain);
|
||||
space->RefillFreeList(mark_compact_collector()->sweeper());
|
||||
space->RefillFreeList();
|
||||
}
|
||||
|
||||
space->VerifyCountersBeforeConcurrentSweeping();
|
||||
@ -5271,6 +5280,8 @@ void Heap::SetUp(LocalHeap* main_thread_local_heap) {
|
||||
memory_allocator_.reset(
|
||||
new MemoryAllocator(isolate_, code_page_allocator, MaxReserved()));
|
||||
|
||||
sweeper_.reset(new Sweeper(this));
|
||||
|
||||
mark_compact_collector_.reset(new MarkCompactCollector(this));
|
||||
|
||||
scavenger_collector_.reset(new ScavengerCollector(this));
|
||||
@ -5780,6 +5791,9 @@ void Heap::TearDown() {
|
||||
minor_mark_compact_collector_.reset();
|
||||
}
|
||||
|
||||
sweeper_->TearDown();
|
||||
sweeper_.reset();
|
||||
|
||||
scavenger_collector_.reset();
|
||||
array_buffer_sweeper_.reset();
|
||||
incremental_marking_.reset();
|
||||
@@ -7191,6 +7205,69 @@ void Heap::set_allocation_timeout(int allocation_timeout) {
|
||||
}
|
||||
#endif // V8_ENABLE_ALLOCATION_TIMEOUT
|
||||
|
||||
void Heap::FinishSweepingIfOutOfWork() {
|
||||
if (sweeper()->sweeping_in_progress() && v8_flags.concurrent_sweeping &&
|
||||
!sweeper()->AreSweeperTasksRunning()) {
|
||||
// At this point we know that all concurrent sweeping tasks have run
|
||||
// out of work and quit: all pages are swept. The main thread still needs
|
||||
// to complete sweeping though.
|
||||
EnsureSweepingCompleted(SweepingForcedFinalizationMode::kV8Only);
|
||||
}
|
||||
if (cpp_heap()) {
|
||||
// Ensure that sweeping is also completed for the C++ managed heap, if one
|
||||
// exists and it's out of work.
|
||||
CppHeap::From(cpp_heap())->FinishSweepingIfOutOfWork();
|
||||
}
|
||||
}
|
||||
|
||||
void Heap::EnsureSweepingCompleted(SweepingForcedFinalizationMode mode) {
|
||||
if (sweeper()->sweeping_in_progress()) {
|
||||
TRACE_GC_EPOCH(tracer(), GCTracer::Scope::MC_COMPLETE_SWEEPING,
|
||||
ThreadKind::kMain);
|
||||
|
||||
sweeper()->EnsureCompleted();
|
||||
old_space()->RefillFreeList();
|
||||
{
|
||||
CodePageHeaderModificationScope rwx_write_scope(
|
||||
"Updating per-page stats stored in page headers requires write "
|
||||
"access to Code page headers");
|
||||
code_space()->RefillFreeList();
|
||||
}
|
||||
if (shared_space()) {
|
||||
shared_space()->RefillFreeList();
|
||||
}
|
||||
if (map_space()) {
|
||||
map_space()->RefillFreeList();
|
||||
map_space()->SortFreeList();
|
||||
}
|
||||
|
||||
tracer()->NotifySweepingCompleted();
|
||||
|
||||
#ifdef VERIFY_HEAP
|
||||
if (v8_flags.verify_heap && !evacuation()) {
|
||||
FullEvacuationVerifier verifier(this);
|
||||
verifier.Run();
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
if (mode == SweepingForcedFinalizationMode::kUnifiedHeap && cpp_heap()) {
|
||||
// Ensure that sweeping is also completed for the C++ managed heap, if one
|
||||
// exists.
|
||||
CppHeap::From(cpp_heap())->FinishSweepingIfRunning();
|
||||
DCHECK(!CppHeap::From(cpp_heap())->sweeper().IsSweepingInProgress());
|
||||
}
|
||||
|
||||
DCHECK_IMPLIES(
|
||||
mode == SweepingForcedFinalizationMode::kUnifiedHeap || !cpp_heap(),
|
||||
!tracer()->IsSweepingInProgress());
|
||||
}
|
||||
|
||||
void Heap::DrainSweepingWorklistForSpace(AllocationSpace space) {
|
||||
if (!sweeper()->sweeping_in_progress()) return;
|
||||
sweeper()->DrainSweepingWorklistForSpace(space);
|
||||
}
|
||||
|
||||
EmbedderStackStateScope::EmbedderStackStateScope(Heap* heap, Origin origin,
|
||||
StackState stack_state)
|
||||
: local_tracer_(heap->local_embedder_heap_tracer()),
|
||||
|
@@ -33,6 +33,7 @@
|
||||
#include "src/heap/heap-allocator.h"
|
||||
#include "src/heap/marking-state.h"
|
||||
#include "src/heap/pretenuring-handler.h"
|
||||
#include "src/heap/sweeper.h"
|
||||
#include "src/init/heap-symbols.h"
|
||||
#include "src/objects/allocation-site.h"
|
||||
#include "src/objects/fixed-array.h"
|
||||
@ -921,6 +922,8 @@ class Heap {
|
||||
return minor_mark_compact_collector_.get();
|
||||
}
|
||||
|
||||
Sweeper* sweeper() { return sweeper_.get(); }
|
||||
|
||||
ArrayBufferSweeper* array_buffer_sweeper() {
|
||||
return array_buffer_sweeper_.get();
|
||||
}
|
||||
@ -1132,7 +1135,7 @@ class Heap {
|
||||
void CompleteSweepingYoung(GarbageCollector collector);
|
||||
|
||||
// Ensures that sweeping is finished for that object's page.
|
||||
void EnsureSweepingCompleted(HeapObject object);
|
||||
void EnsureSweepingCompletedForObject(HeapObject object);
|
||||
|
||||
IncrementalMarking* incremental_marking() const {
|
||||
return incremental_marking_.get();
|
||||
@@ -1611,6 +1614,28 @@ class Heap {
|
||||
// it supports a forwarded map. Fails if the map is not the code map.
|
||||
Map GcSafeMapOfCodeSpaceObject(HeapObject object);
|
||||
|
||||
// ===========================================================================
|
||||
// Sweeping. =================================================================
|
||||
// ===========================================================================
|
||||
|
||||
bool sweeping_in_progress() const { return sweeper_->sweeping_in_progress(); }
|
||||
|
||||
void FinishSweepingIfOutOfWork();
|
||||
|
||||
enum class SweepingForcedFinalizationMode { kUnifiedHeap, kV8Only };
|
||||
|
||||
// Ensures that sweeping is finished.
|
||||
//
|
||||
// Note: Can only be called safely from main thread.
|
||||
V8_EXPORT_PRIVATE void EnsureSweepingCompleted(
|
||||
SweepingForcedFinalizationMode mode);
|
||||
|
||||
void DrainSweepingWorklistForSpace(AllocationSpace space);
|
||||
|
||||
void set_evacuation(bool evacuation) { evacuation_ = evacuation; }
|
||||
|
||||
bool evacuation() const { return evacuation_; }
|
||||
|
||||
// =============================================================================
|
||||
|
||||
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
|
||||
@ -2287,6 +2312,7 @@ class Heap {
|
||||
double last_gc_time_ = 0.0;
|
||||
|
||||
std::unique_ptr<GCTracer> tracer_;
|
||||
std::unique_ptr<Sweeper> sweeper_;
|
||||
std::unique_ptr<MarkCompactCollector> mark_compact_collector_;
|
||||
std::unique_ptr<MinorMarkCompactCollector> minor_mark_compact_collector_;
|
||||
std::unique_ptr<ScavengerCollector> scavenger_collector_;
|
||||
@ -2406,6 +2432,8 @@ class Heap {
|
||||
|
||||
bool is_finalization_registry_cleanup_task_posted_ = false;
|
||||
|
||||
bool evacuation_ = false;
|
||||
|
||||
std::unique_ptr<third_party_heap::Heap> tp_heap_;
|
||||
|
||||
MarkingState marking_state_;
|
||||
@ -2867,6 +2895,18 @@ class V8_NODISCARD CppClassNamesAsHeapObjectNameScope final {
|
||||
std::unique_ptr<cppgc::internal::ClassNameAsHeapObjectNameScope> scope_;
|
||||
};
|
||||
|
||||
class V8_NODISCARD EvacuationScope {
|
||||
public:
|
||||
explicit EvacuationScope(Heap* heap) : heap_(heap) {
|
||||
heap_->set_evacuation(true);
|
||||
}
|
||||
|
||||
~EvacuationScope() { heap_->set_evacuation(false); }
|
||||
|
||||
private:
|
||||
Heap* const heap_;
|
||||
};
|
||||
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
||||
|
@@ -130,7 +130,7 @@ bool IncrementalMarking::IsBelowActivationThresholds() const {
|
||||
|
||||
void IncrementalMarking::Start(GarbageCollector garbage_collector,
|
||||
GarbageCollectionReason gc_reason) {
|
||||
DCHECK(!major_collector_->sweeping_in_progress());
|
||||
DCHECK(!heap_->sweeping_in_progress());
|
||||
DCHECK(!heap_->IsShared());
|
||||
|
||||
if (v8_flags.trace_incremental_marking) {
|
||||
|
@@ -25,6 +25,7 @@
|
||||
#include "src/heap/code-object-registry.h"
|
||||
#include "src/heap/concurrent-allocator.h"
|
||||
#include "src/heap/evacuation-allocator-inl.h"
|
||||
#include "src/heap/evacuation-verifier-inl.h"
|
||||
#include "src/heap/gc-tracer-inl.h"
|
||||
#include "src/heap/gc-tracer.h"
|
||||
#include "src/heap/global-handle-marking-visitor.h"
|
||||
@ -314,165 +315,6 @@ class FullMarkingVerifier : public MarkingVerifier {
|
||||
NonAtomicMarkingState* const marking_state_;
|
||||
};
|
||||
|
||||
class EvacuationVerifier : public ObjectVisitorWithCageBases,
|
||||
public RootVisitor {
|
||||
public:
|
||||
virtual void Run() = 0;
|
||||
|
||||
void VisitPointers(HeapObject host, ObjectSlot start,
|
||||
ObjectSlot end) override {
|
||||
VerifyPointers(start, end);
|
||||
}
|
||||
|
||||
void VisitPointers(HeapObject host, MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) override {
|
||||
VerifyPointers(start, end);
|
||||
}
|
||||
|
||||
void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override {
|
||||
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
|
||||
VerifyCodePointer(slot);
|
||||
}
|
||||
|
||||
void VisitRootPointers(Root root, const char* description,
|
||||
FullObjectSlot start, FullObjectSlot end) override {
|
||||
VerifyRootPointers(start, end);
|
||||
}
|
||||
|
||||
void VisitMapPointer(HeapObject object) override {
|
||||
VerifyMap(object.map(cage_base()));
|
||||
}
|
||||
|
||||
protected:
|
||||
explicit EvacuationVerifier(Heap* heap)
|
||||
: ObjectVisitorWithCageBases(heap), heap_(heap) {}
|
||||
|
||||
inline Heap* heap() { return heap_; }
|
||||
|
||||
virtual void VerifyMap(Map map) = 0;
|
||||
virtual void VerifyPointers(ObjectSlot start, ObjectSlot end) = 0;
|
||||
virtual void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) = 0;
|
||||
virtual void VerifyCodePointer(CodeObjectSlot slot) = 0;
|
||||
virtual void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) = 0;
|
||||
|
||||
void VerifyRoots();
|
||||
void VerifyEvacuationOnPage(Address start, Address end);
|
||||
void VerifyEvacuation(NewSpace* new_space);
|
||||
void VerifyEvacuation(PagedSpaceBase* paged_space);
|
||||
|
||||
Heap* heap_;
|
||||
};
|
||||
|
||||
void EvacuationVerifier::VerifyRoots() {
|
||||
heap_->IterateRootsIncludingClients(this,
|
||||
base::EnumSet<SkipRoot>{SkipRoot::kWeak});
|
||||
}
|
||||
|
||||
void EvacuationVerifier::VerifyEvacuationOnPage(Address start, Address end) {
|
||||
Address current = start;
|
||||
while (current < end) {
|
||||
HeapObject object = HeapObject::FromAddress(current);
|
||||
if (!object.IsFreeSpaceOrFiller(cage_base())) {
|
||||
object.Iterate(cage_base(), this);
|
||||
}
|
||||
current += ALIGN_TO_ALLOCATION_ALIGNMENT(object.Size(cage_base()));
|
||||
}
|
||||
}
|
||||
|
||||
void EvacuationVerifier::VerifyEvacuation(NewSpace* space) {
|
||||
if (!space) return;
|
||||
if (v8_flags.minor_mc) {
|
||||
VerifyEvacuation(PagedNewSpace::From(space)->paged_space());
|
||||
return;
|
||||
}
|
||||
PageRange range(space->first_allocatable_address(), space->top());
|
||||
for (auto it = range.begin(); it != range.end();) {
|
||||
Page* page = *(it++);
|
||||
Address current = page->area_start();
|
||||
Address limit = it != range.end() ? page->area_end() : space->top();
|
||||
CHECK(limit == space->top() || !page->Contains(space->top()));
|
||||
VerifyEvacuationOnPage(current, limit);
|
||||
}
|
||||
}
|
||||
|
||||
void EvacuationVerifier::VerifyEvacuation(PagedSpaceBase* space) {
|
||||
for (Page* p : *space) {
|
||||
if (p->IsEvacuationCandidate()) continue;
|
||||
if (p->Contains(space->top())) {
|
||||
CodePageMemoryModificationScope memory_modification_scope(p);
|
||||
heap_->CreateFillerObjectAt(
|
||||
space->top(), static_cast<int>(space->limit() - space->top()));
|
||||
}
|
||||
VerifyEvacuationOnPage(p->area_start(), p->area_end());
|
||||
}
|
||||
}
|
||||
|
||||
class FullEvacuationVerifier : public EvacuationVerifier {
|
||||
public:
|
||||
explicit FullEvacuationVerifier(Heap* heap) : EvacuationVerifier(heap) {}
|
||||
|
||||
void Run() override {
|
||||
DCHECK(!heap_->mark_compact_collector()->sweeping_in_progress());
|
||||
VerifyRoots();
|
||||
VerifyEvacuation(heap_->new_space());
|
||||
VerifyEvacuation(heap_->old_space());
|
||||
VerifyEvacuation(heap_->code_space());
|
||||
if (heap_->shared_space()) VerifyEvacuation(heap_->shared_space());
|
||||
if (heap_->map_space()) VerifyEvacuation(heap_->map_space());
|
||||
}
|
||||
|
||||
protected:
|
||||
V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) {
|
||||
if (!ShouldVerifyObject(heap_object)) return;
|
||||
CHECK_IMPLIES(Heap::InYoungGeneration(heap_object),
|
||||
Heap::InToPage(heap_object));
|
||||
CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(heap_object));
|
||||
}
|
||||
|
||||
V8_INLINE bool ShouldVerifyObject(HeapObject heap_object) {
|
||||
const bool in_shared_heap = heap_object.InSharedWritableHeap();
|
||||
return heap_->isolate()->is_shared_heap_isolate() ? in_shared_heap
|
||||
: !in_shared_heap;
|
||||
}
|
||||
|
||||
template <typename TSlot>
|
||||
void VerifyPointersImpl(TSlot start, TSlot end) {
|
||||
for (TSlot current = start; current < end; ++current) {
|
||||
typename TSlot::TObject object = current.load(cage_base());
|
||||
HeapObject heap_object;
|
||||
if (object.GetHeapObjectIfStrong(&heap_object)) {
|
||||
VerifyHeapObjectImpl(heap_object);
|
||||
}
|
||||
}
|
||||
}
|
||||
void VerifyMap(Map map) override { VerifyHeapObjectImpl(map); }
|
||||
void VerifyPointers(ObjectSlot start, ObjectSlot end) override {
|
||||
VerifyPointersImpl(start, end);
|
||||
}
|
||||
void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override {
|
||||
VerifyPointersImpl(start, end);
|
||||
}
|
||||
void VerifyCodePointer(CodeObjectSlot slot) override {
|
||||
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
|
||||
Object maybe_code = slot.load(code_cage_base());
|
||||
HeapObject code;
|
||||
// The slot might contain smi during CodeDataContainer creation, so skip it.
|
||||
if (maybe_code.GetHeapObject(&code)) {
|
||||
VerifyHeapObjectImpl(code);
|
||||
}
|
||||
}
|
||||
void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
|
||||
Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
|
||||
VerifyHeapObjectImpl(target);
|
||||
}
|
||||
void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
|
||||
VerifyHeapObjectImpl(rinfo->target_object(cage_base()));
|
||||
}
|
||||
void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override {
|
||||
VerifyPointersImpl(start, end);
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace
|
||||
#endif // VERIFY_HEAP
|
||||
|
||||
@ -513,12 +355,14 @@ bool CollectorBase::IsMajorMC() {
|
||||
return !heap_->IsYoungGenerationCollector(garbage_collector_);
|
||||
}
|
||||
|
||||
void CollectorBase::StartSweepSpace(Sweeper* sweeper, PagedSpaceBase* space) {
|
||||
void CollectorBase::StartSweepSpace(PagedSpaceBase* space) {
|
||||
space->ClearAllocatorState();
|
||||
|
||||
int will_be_swept = 0;
|
||||
bool unused_page_present = false;
|
||||
|
||||
Sweeper* sweeper = heap()->sweeper();
|
||||
|
||||
// Loop needs to support deletion if live bytes == 0 for a page.
|
||||
for (auto it = space->begin(); it != space->end();) {
|
||||
Page* p = *(it++);
|
||||
@ -567,10 +411,10 @@ MarkCompactCollector::MarkCompactCollector(Heap* heap)
|
||||
#endif
|
||||
uses_shared_heap_(isolate()->has_shared_heap() || isolate()->is_shared()),
|
||||
is_shared_heap_isolate_(isolate()->is_shared_heap_isolate()),
|
||||
sweeper_(new Sweeper(heap)) {
|
||||
sweeper_(heap_->sweeper()) {
|
||||
}
|
||||
|
||||
MarkCompactCollector::~MarkCompactCollector() { delete sweeper_; }
|
||||
MarkCompactCollector::~MarkCompactCollector() = default;
|
||||
|
||||
void MarkCompactCollector::SetUp() {
|
||||
DCHECK_EQ(0, strcmp(Marking::kWhiteBitPattern, "00"));
|
||||
@ -589,7 +433,6 @@ void MarkCompactCollector::TearDown() {
|
||||
local_weak_objects()->Publish();
|
||||
weak_objects()->Clear();
|
||||
}
|
||||
sweeper()->TearDown();
|
||||
}
|
||||
|
||||
// static
|
||||
@ -766,77 +609,6 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
|
||||
|
||||
#endif // VERIFY_HEAP
|
||||
|
||||
void MarkCompactCollector::FinishSweepingIfOutOfWork() {
|
||||
if (sweeper()->sweeping_in_progress() && v8_flags.concurrent_sweeping &&
|
||||
!sweeper()->AreSweeperTasksRunning()) {
|
||||
// At this point we know that all concurrent sweeping tasks have run
|
||||
// out of work and quit: all pages are swept. The main thread still needs
|
||||
// to complete sweeping though.
|
||||
EnsureSweepingCompleted(SweepingForcedFinalizationMode::kV8Only);
|
||||
}
|
||||
if (heap()->cpp_heap()) {
|
||||
// Ensure that sweeping is also completed for the C++ managed heap, if one
|
||||
// exists and it's out of work.
|
||||
CppHeap::From(heap()->cpp_heap())->FinishSweepingIfOutOfWork();
|
||||
}
|
||||
}
|
||||
|
||||
void MarkCompactCollector::EnsureSweepingCompleted(
|
||||
SweepingForcedFinalizationMode mode) {
|
||||
if (sweeper()->sweeping_in_progress()) {
|
||||
TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_COMPLETE_SWEEPING,
|
||||
ThreadKind::kMain);
|
||||
|
||||
sweeper()->EnsureCompleted();
|
||||
heap()->old_space()->RefillFreeList(sweeper());
|
||||
{
|
||||
CodePageHeaderModificationScope rwx_write_scope(
|
||||
"Updating per-page stats stored in page headers requires write "
|
||||
"access to Code page headers");
|
||||
heap()->code_space()->RefillFreeList(sweeper());
|
||||
}
|
||||
if (heap()->shared_space()) {
|
||||
heap()->shared_space()->RefillFreeList(sweeper());
|
||||
}
|
||||
if (heap()->map_space()) {
|
||||
heap()->map_space()->RefillFreeList(sweeper());
|
||||
heap()->map_space()->SortFreeList();
|
||||
}
|
||||
|
||||
heap()->tracer()->NotifySweepingCompleted();
|
||||
|
||||
#ifdef VERIFY_HEAP
|
||||
if (v8_flags.verify_heap && !evacuation()) {
|
||||
FullEvacuationVerifier verifier(heap());
|
||||
verifier.Run();
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
if (mode == SweepingForcedFinalizationMode::kUnifiedHeap &&
|
||||
heap()->cpp_heap()) {
|
||||
// Ensure that sweeping is also completed for the C++ managed heap, if one
|
||||
// exists.
|
||||
CppHeap::From(heap()->cpp_heap())->FinishSweepingIfRunning();
|
||||
DCHECK(
|
||||
!CppHeap::From(heap()->cpp_heap())->sweeper().IsSweepingInProgress());
|
||||
}
|
||||
|
||||
DCHECK_IMPLIES(mode == SweepingForcedFinalizationMode::kUnifiedHeap ||
|
||||
!heap()->cpp_heap(),
|
||||
!heap()->tracer()->IsSweepingInProgress());
|
||||
}
|
||||
|
||||
void MarkCompactCollector::EnsurePageIsSwept(Page* page) {
|
||||
sweeper()->EnsurePageIsSwept(page);
|
||||
}
|
||||
|
||||
void MarkCompactCollector::DrainSweepingWorklistForSpace(
|
||||
AllocationSpace space) {
|
||||
if (!sweeper()->sweeping_in_progress()) return;
|
||||
sweeper()->DrainSweepingWorklistForSpace(space);
|
||||
}
|
||||
|
||||
void MarkCompactCollector::ComputeEvacuationHeuristics(
|
||||
size_t area_size, int* target_fragmentation_percent,
|
||||
size_t* max_evacuated_bytes) {
|
||||
@ -921,7 +693,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
|
||||
CodePageHeaderModificationScope rwx_write_scope(
|
||||
"Modification of Code page header flags requires write access");
|
||||
|
||||
DCHECK(!sweeping_in_progress());
|
||||
DCHECK(!sweeper()->sweeping_in_progress());
|
||||
Page* owner_of_linear_allocation_area =
|
||||
space->top() == space->limit()
|
||||
? nullptr
|
||||
@ -1069,7 +841,7 @@ void MarkCompactCollector::Prepare() {
|
||||
state_ = PREPARE_GC;
|
||||
#endif
|
||||
|
||||
DCHECK(!sweeping_in_progress());
|
||||
DCHECK(!sweeper()->sweeping_in_progress());
|
||||
|
||||
// Unmapper tasks needs to be stopped during the GC, otherwise pages queued
|
||||
// for freeing might get unmapped during the GC.
|
||||
@ -1189,7 +961,7 @@ void MarkCompactCollector::Finish() {
|
||||
ThreadKind::kMain);
|
||||
sweeper()->ParallelSweepSpace(NEW_SPACE,
|
||||
Sweeper::SweepingMode::kEagerDuringGC, 0);
|
||||
heap()->paged_new_space()->paged_space()->RefillFreeList(sweeper());
|
||||
heap()->paged_new_space()->paged_space()->RefillFreeList();
|
||||
}
|
||||
}
|
||||
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_FINISH);
|
||||
@ -4730,7 +4502,7 @@ void MarkCompactCollector::Evacuate() {
|
||||
|
||||
{
|
||||
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_COPY);
|
||||
EvacuationScope evacuation_scope(this);
|
||||
EvacuationScope evacuation_scope(heap());
|
||||
EvacuatePagesInParallel();
|
||||
}
|
||||
|
||||
@ -5591,27 +5363,27 @@ void MarkCompactCollector::Sweep() {
|
||||
{
|
||||
GCTracer::Scope sweep_scope(
|
||||
heap()->tracer(), GCTracer::Scope::MC_SWEEP_OLD, ThreadKind::kMain);
|
||||
StartSweepSpace(sweeper(), heap()->old_space());
|
||||
StartSweepSpace(heap()->old_space());
|
||||
}
|
||||
{
|
||||
GCTracer::Scope sweep_scope(
|
||||
heap()->tracer(), GCTracer::Scope::MC_SWEEP_CODE, ThreadKind::kMain);
|
||||
StartSweepSpace(sweeper(), heap()->code_space());
|
||||
StartSweepSpace(heap()->code_space());
|
||||
}
|
||||
if (heap()->map_space()) {
|
||||
GCTracer::Scope sweep_scope(
|
||||
heap()->tracer(), GCTracer::Scope::MC_SWEEP_MAP, ThreadKind::kMain);
|
||||
StartSweepSpace(sweeper(), heap()->map_space());
|
||||
StartSweepSpace(heap()->map_space());
|
||||
}
|
||||
if (heap()->shared_space()) {
|
||||
GCTracer::Scope sweep_scope(
|
||||
heap()->tracer(), GCTracer::Scope::MC_SWEEP_MAP, ThreadKind::kMain);
|
||||
StartSweepSpace(sweeper(), heap()->shared_space());
|
||||
StartSweepSpace(heap()->shared_space());
|
||||
}
|
||||
if (v8_flags.minor_mc && heap()->new_space()) {
|
||||
GCTracer::Scope sweep_scope(
|
||||
heap()->tracer(), GCTracer::Scope::MC_SWEEP_NEW, ThreadKind::kMain);
|
||||
StartSweepSpace(sweeper(), heap()->paged_new_space()->paged_space());
|
||||
StartSweepSpace(heap()->paged_new_space()->paged_space());
|
||||
}
|
||||
sweeper()->StartSweeping(garbage_collector_);
|
||||
}
|
||||
@ -5692,65 +5464,6 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier {
|
||||
NonAtomicMarkingState* const marking_state_;
|
||||
};
|
||||
|
||||
class YoungGenerationEvacuationVerifier : public EvacuationVerifier {
|
||||
public:
|
||||
explicit YoungGenerationEvacuationVerifier(Heap* heap)
|
||||
: EvacuationVerifier(heap) {}
|
||||
|
||||
void Run() override {
|
||||
DCHECK(!heap_->mark_compact_collector()->sweeping_in_progress());
|
||||
DCHECK(!heap_->minor_mark_compact_collector()->sweeping_in_progress());
|
||||
VerifyRoots();
|
||||
VerifyEvacuation(heap_->new_space());
|
||||
VerifyEvacuation(heap_->old_space());
|
||||
VerifyEvacuation(heap_->code_space());
|
||||
if (heap_->map_space()) VerifyEvacuation(heap_->map_space());
|
||||
}
|
||||
|
||||
protected:
|
||||
V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) {
|
||||
CHECK_IMPLIES(Heap::InYoungGeneration(heap_object),
|
||||
Heap::InToPage(heap_object));
|
||||
}
|
||||
|
||||
template <typename TSlot>
|
||||
void VerifyPointersImpl(TSlot start, TSlot end) {
|
||||
for (TSlot current = start; current < end; ++current) {
|
||||
typename TSlot::TObject object = current.load(cage_base());
|
||||
HeapObject heap_object;
|
||||
if (object.GetHeapObject(&heap_object)) {
|
||||
VerifyHeapObjectImpl(heap_object);
|
||||
}
|
||||
}
|
||||
}
|
||||
void VerifyMap(Map map) override { VerifyHeapObjectImpl(map); }
|
||||
void VerifyPointers(ObjectSlot start, ObjectSlot end) override {
|
||||
VerifyPointersImpl(start, end);
|
||||
}
|
||||
void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override {
|
||||
VerifyPointersImpl(start, end);
|
||||
}
|
||||
void VerifyCodePointer(CodeObjectSlot slot) override {
|
||||
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
|
||||
Object maybe_code = slot.load(code_cage_base());
|
||||
HeapObject code;
|
||||
// The slot might contain smi during CodeDataContainer creation, so skip it.
|
||||
if (maybe_code.GetHeapObject(&code)) {
|
||||
VerifyHeapObjectImpl(code);
|
||||
}
|
||||
}
|
||||
void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
|
||||
Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
|
||||
VerifyHeapObjectImpl(target);
|
||||
}
|
||||
void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
|
||||
VerifyHeapObjectImpl(rinfo->target_object(cage_base()));
|
||||
}
|
||||
void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override {
|
||||
VerifyPointersImpl(start, end);
|
||||
}
|
||||
};
|
||||
|
||||
#endif // VERIFY_HEAP
|
||||
|
||||
bool IsUnmarkedObjectForYoungGeneration(Heap* heap, FullObjectSlot p) {
|
||||
@ -5801,7 +5514,7 @@ constexpr size_t MinorMarkCompactCollector::kMaxParallelTasks;
|
||||
MinorMarkCompactCollector::MinorMarkCompactCollector(Heap* heap)
|
||||
: CollectorBase(heap, GarbageCollector::MINOR_MARK_COMPACTOR),
|
||||
page_parallel_job_semaphore_(0),
|
||||
sweeper_(std::make_unique<Sweeper>(heap_)) {}
|
||||
sweeper_(heap_->sweeper()) {}
|
||||
|
||||
std::pair<size_t, size_t> MinorMarkCompactCollector::ProcessMarkingWorklist(
|
||||
size_t bytes_to_process) {
|
||||
@ -6019,7 +5732,7 @@ void MinorMarkCompactCollector::Finish() {
|
||||
GCTracer::Scope::MINOR_MC_SWEEP_FINISH_NEW,
|
||||
ThreadKind::kMain);
|
||||
sweeper_->EnsureCompleted(Sweeper::SweepingMode::kEagerDuringGC);
|
||||
heap()->paged_new_space()->paged_space()->RefillFreeList(sweeper());
|
||||
heap()->paged_new_space()->paged_space()->RefillFreeList();
|
||||
}
|
||||
|
||||
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_FINISH);
|
||||
@ -6544,6 +6257,7 @@ void MinorMarkCompactCollector::Evacuate() {
|
||||
|
||||
{
|
||||
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_EVACUATE_COPY);
|
||||
EvacuationScope evacuation_scope(heap());
|
||||
EvacuatePagesInParallel();
|
||||
}
|
||||
|
||||
@ -6687,7 +6401,7 @@ void MinorMarkCompactCollector::Sweep() {
|
||||
GCTracer::Scope sweep_scope(heap()->tracer(),
|
||||
GCTracer::Scope::MINOR_MC_SWEEP_NEW,
|
||||
ThreadKind::kMain);
|
||||
StartSweepSpace(sweeper(), heap()->paged_new_space()->paged_space());
|
||||
StartSweepSpace(heap()->paged_new_space()->paged_space());
|
||||
}
|
||||
sweeper_->StartSweeping(garbage_collector_);
|
||||
}
|
||||
|
@@ -276,8 +276,6 @@ class CollectorBase {
|
||||
// Used by incremental marking for object that change their layout.
|
||||
virtual void VisitObject(HeapObject obj) = 0;
|
||||
|
||||
virtual bool sweeping_in_progress() const = 0;
|
||||
|
||||
virtual void Finish() = 0;
|
||||
|
||||
bool IsMajorMC();
|
||||
@ -296,7 +294,7 @@ class CollectorBase {
|
||||
return non_atomic_marking_state_;
|
||||
}
|
||||
|
||||
void StartSweepSpace(Sweeper* sweeper, PagedSpaceBase* space);
|
||||
void StartSweepSpace(PagedSpaceBase* space);
|
||||
|
||||
Heap* heap_;
|
||||
GarbageCollector garbage_collector_;
|
||||
@ -395,33 +393,8 @@ class MarkCompactCollector final : public CollectorBase {
|
||||
|
||||
bool is_compacting() const { return compacting_; }
|
||||
|
||||
void FinishSweepingIfOutOfWork();
|
||||
|
||||
enum class SweepingForcedFinalizationMode { kUnifiedHeap, kV8Only };
|
||||
|
||||
// Ensures that sweeping is finished.
|
||||
//
|
||||
// Note: Can only be called safely from main thread.
|
||||
V8_EXPORT_PRIVATE void EnsureSweepingCompleted(
|
||||
SweepingForcedFinalizationMode mode);
|
||||
|
||||
void EnsurePageIsSwept(Page* page);
|
||||
|
||||
void DrainSweepingWorklistForSpace(AllocationSpace space);
|
||||
|
||||
// Checks if sweeping is in progress right now on any space.
|
||||
bool sweeping_in_progress() const final {
|
||||
return sweeper_->sweeping_in_progress();
|
||||
}
|
||||
|
||||
void set_evacuation(bool evacuation) { evacuation_ = evacuation; }
|
||||
|
||||
bool evacuation() const { return evacuation_; }
|
||||
|
||||
inline void AddTransitionArray(TransitionArray array);
|
||||
|
||||
Sweeper* sweeper() { return sweeper_; }
|
||||
|
||||
#ifdef DEBUG
|
||||
// Checks whether performing mark-compact collection.
|
||||
bool in_use() { return state_ > PREPARE_GC; }
|
||||
@ -482,6 +455,8 @@ class MarkCompactCollector final : public CollectorBase {
|
||||
#endif // V8_ENABLE_INNER_POINTER_RESOLUTION_MB
|
||||
|
||||
private:
|
||||
Sweeper* sweeper() { return sweeper_; }
|
||||
|
||||
void ComputeEvacuationHeuristics(size_t area_size,
|
||||
int* target_fragmentation_percent,
|
||||
size_t* max_evacuated_bytes);
|
||||
@ -494,9 +469,6 @@ class MarkCompactCollector final : public CollectorBase {
|
||||
// Free unmarked ArrayBufferExtensions.
|
||||
void SweepArrayBufferExtensions();
|
||||
|
||||
// Free unmarked entries in the ExternalPointerTable.
|
||||
void SweepExternalPointerTable();
|
||||
|
||||
void MarkLiveObjects();
|
||||
|
||||
// Marks the object grey and adds it to the marking work list.
|
||||
@ -654,7 +626,6 @@ class MarkCompactCollector final : public CollectorBase {
|
||||
const bool uses_shared_heap_;
|
||||
const bool is_shared_heap_isolate_;
|
||||
|
||||
bool evacuation_ = false;
|
||||
// True if we are collecting slots to perform evacuation from evacuation
|
||||
// candidates.
|
||||
bool compacting_ = false;
|
||||
@ -681,7 +652,7 @@ class MarkCompactCollector final : public CollectorBase {
|
||||
aborted_evacuation_candidates_due_to_flags_;
|
||||
std::vector<LargePage*> promoted_large_pages_;
|
||||
|
||||
Sweeper* sweeper_;
|
||||
Sweeper* const sweeper_;
|
||||
|
||||
// Counts the number of major mark-compact collections. The counter is
|
||||
// incremented right after marking. This is used for:
|
||||
@ -700,19 +671,6 @@ class MarkCompactCollector final : public CollectorBase {
|
||||
friend class RecordMigratedSlotVisitor;
|
||||
};
|
||||
|
||||
class V8_NODISCARD EvacuationScope {
|
||||
public:
|
||||
explicit EvacuationScope(MarkCompactCollector* collector)
|
||||
: collector_(collector) {
|
||||
collector_->set_evacuation(true);
|
||||
}
|
||||
|
||||
~EvacuationScope() { collector_->set_evacuation(false); }
|
||||
|
||||
private:
|
||||
MarkCompactCollector* collector_;
|
||||
};
|
||||
|
||||
// Collector for young-generation only.
|
||||
class MinorMarkCompactCollector final : public CollectorBase {
|
||||
public:
|
||||
@ -742,9 +700,6 @@ class MinorMarkCompactCollector final : public CollectorBase {
|
||||
|
||||
void Finish() final;
|
||||
|
||||
Sweeper* sweeper() { return sweeper_.get(); }
|
||||
bool sweeping_in_progress() const { return sweeper_->sweeping_in_progress(); }
|
||||
|
||||
void VisitObject(HeapObject obj) final;
|
||||
|
||||
private:
|
||||
@ -753,6 +708,8 @@ class MinorMarkCompactCollector final : public CollectorBase {
|
||||
static const int kNumMarkers = 8;
|
||||
static const int kMainMarker = 0;
|
||||
|
||||
Sweeper* sweeper() { return sweeper_; }
|
||||
|
||||
void MarkLiveObjects();
|
||||
void MarkRootSetInParallel(RootMarkingVisitor* root_visitor,
|
||||
bool was_marked_incrementally);
|
||||
@ -779,7 +736,7 @@ class MinorMarkCompactCollector final : public CollectorBase {
|
||||
std::vector<Page*> promoted_pages_;
|
||||
std::vector<LargePage*> promoted_large_pages_;
|
||||
|
||||
std::unique_ptr<Sweeper> sweeper_;
|
||||
Sweeper* const sweeper_;
|
||||
|
||||
friend class YoungGenerationMarkingTask;
|
||||
friend class YoungGenerationMarkingJob;
|
||||
|
@@ -142,13 +142,14 @@ void PagedSpaceBase::TearDown() {
|
||||
accounting_stats_.Clear();
|
||||
}
|
||||
|
||||
void PagedSpaceBase::RefillFreeList(Sweeper* sweeper) {
|
||||
void PagedSpaceBase::RefillFreeList() {
|
||||
// Any PagedSpace might invoke RefillFreeList. We filter all but our old
|
||||
// generation spaces out.
|
||||
DCHECK(identity() == OLD_SPACE || identity() == CODE_SPACE ||
|
||||
identity() == MAP_SPACE || identity() == NEW_SPACE ||
|
||||
identity() == SHARED_SPACE);
|
||||
|
||||
Sweeper* sweeper = heap()->sweeper();
|
||||
size_t added = 0;
|
||||
|
||||
{
|
||||
@ -671,15 +672,14 @@ PagedSpaceBase::RawAllocateBackground(LocalHeap* local_heap,
|
||||
origin);
|
||||
if (result) return result;
|
||||
|
||||
MarkCompactCollector* collector = heap()->mark_compact_collector();
|
||||
// Sweeping is still in progress.
|
||||
if (collector->sweeping_in_progress()) {
|
||||
if (heap()->sweeping_in_progress()) {
|
||||
// First try to refill the free-list, concurrent sweeper threads
|
||||
// may have freed some objects in the meantime.
|
||||
{
|
||||
TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_BACKGROUND_SWEEPING,
|
||||
ThreadKind::kBackground);
|
||||
RefillFreeList(collector->sweeper());
|
||||
RefillFreeList();
|
||||
}
|
||||
|
||||
// Retry the free list allocation.
|
||||
@ -694,10 +694,10 @@ PagedSpaceBase::RawAllocateBackground(LocalHeap* local_heap,
|
||||
TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_BACKGROUND_SWEEPING,
|
||||
ThreadKind::kBackground);
|
||||
const int kMaxPagesToSweep = 1;
|
||||
max_freed = collector->sweeper()->ParallelSweepSpace(
|
||||
max_freed = heap()->sweeper()->ParallelSweepSpace(
|
||||
identity(), Sweeper::SweepingMode::kLazyOrConcurrent,
|
||||
static_cast<int>(min_size_in_bytes), kMaxPagesToSweep);
|
||||
RefillFreeList(collector->sweeper());
|
||||
RefillFreeList();
|
||||
}
|
||||
|
||||
if (static_cast<size_t>(max_freed) >= min_size_in_bytes) {
|
||||
@ -713,13 +713,13 @@ PagedSpaceBase::RawAllocateBackground(LocalHeap* local_heap,
|
||||
if (result) return result;
|
||||
}
|
||||
|
||||
if (collector->sweeping_in_progress()) {
|
||||
if (heap()->sweeping_in_progress()) {
|
||||
// Complete sweeping for this space.
|
||||
TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_BACKGROUND_SWEEPING,
|
||||
ThreadKind::kBackground);
|
||||
collector->DrainSweepingWorklistForSpace(identity());
|
||||
heap()->DrainSweepingWorklistForSpace(identity());
|
||||
|
||||
RefillFreeList(collector->sweeper());
|
||||
RefillFreeList();
|
||||
|
||||
// Last try to acquire memory from free list.
|
||||
return TryAllocationFromFreeListBackground(min_size_in_bytes,
|
||||
@ -991,7 +991,6 @@ bool PagedSpaceBase::RawRefillLabMain(int size_in_bytes,
|
||||
return TryExpand(size_in_bytes, origin);
|
||||
}
|
||||
|
||||
MarkCompactCollector* collector = heap()->mark_compact_collector();
|
||||
const bool is_main_thread =
|
||||
heap()->IsMainThread() || heap()->IsSharedMainThread();
|
||||
const auto sweeping_scope_id = is_main_thread
|
||||
@ -1000,12 +999,12 @@ bool PagedSpaceBase::RawRefillLabMain(int size_in_bytes,
|
||||
const auto sweeping_scope_kind =
|
||||
is_main_thread ? ThreadKind::kMain : ThreadKind::kBackground;
|
||||
// Sweeping is still in progress.
|
||||
if (collector->sweeping_in_progress()) {
|
||||
if (heap()->sweeping_in_progress()) {
|
||||
// First try to refill the free-list, concurrent sweeper threads
|
||||
// may have freed some objects in the meantime.
|
||||
{
|
||||
TRACE_GC_EPOCH(heap()->tracer(), sweeping_scope_id, sweeping_scope_kind);
|
||||
RefillFreeList(collector->sweeper());
|
||||
RefillFreeList();
|
||||
}
|
||||
|
||||
// Retry the free list allocation.
|
||||
@ -1070,11 +1069,10 @@ bool PagedSpaceBase::ContributeToSweepingMain(int required_freed_bytes,
|
||||
is_compaction_space() ? Sweeper::SweepingMode::kEagerDuringGC
|
||||
: Sweeper::SweepingMode::kLazyOrConcurrent;
|
||||
|
||||
MarkCompactCollector* collector = heap()->mark_compact_collector();
|
||||
if (collector->sweeping_in_progress()) {
|
||||
collector->sweeper()->ParallelSweepSpace(identity(), sweeping_mode,
|
||||
required_freed_bytes, max_pages);
|
||||
RefillFreeList(collector->sweeper());
|
||||
if (heap()->sweeping_in_progress()) {
|
||||
heap()->sweeper()->ParallelSweepSpace(identity(), sweeping_mode,
|
||||
required_freed_bytes, max_pages);
|
||||
RefillFreeList();
|
||||
return TryAllocationFromFreeListMain(size_in_bytes, origin);
|
||||
}
|
||||
return false;
|
||||
|
@@ -280,7 +280,7 @@ class V8_EXPORT_PRIVATE PagedSpaceBase
|
||||
|
||||
// Refills the free list from the corresponding free list filled by the
|
||||
// sweeper.
|
||||
void RefillFreeList(Sweeper* sweeper);
|
||||
void RefillFreeList();
|
||||
|
||||
base::Mutex* mutex() { return &space_mutex_; }
|
||||
|
||||
|
@@ -331,7 +331,7 @@ void ScavengerCollector::CollectGarbage() {
EphemeronTableList ephemeron_table_list;

{
Sweeper* sweeper = heap_->mark_compact_collector()->sweeper();
Sweeper* sweeper = heap_->sweeper();

// Pause the concurrent sweeper.
Sweeper::PauseScope pause_scope(sweeper);
@ -665,9 +665,8 @@ void Scavenger::RememberPromotedEphemeron(EphemeronHashTable table, int entry) {
void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {
AllocationSpace space = page->owner_identity();
if ((space == OLD_SPACE) && !page->SweepingDone()) {
heap()->mark_compact_collector()->sweeper()->AddPage(
space, reinterpret_cast<Page*>(page),
Sweeper::READD_TEMPORARY_REMOVED_PAGE);
heap()->sweeper()->AddPage(space, reinterpret_cast<Page*>(page),
Sweeper::READD_TEMPORARY_REMOVED_PAGE);
}
}

@ -725,7 +725,8 @@ Handle<Object> JsonParser<Char>::BuildJsonObject(
// must ensure that the sweeper is not running or has already swept the
// object's page. Otherwise the GC can add the contents of
// mutable_double_buffer to the free list.
isolate()->heap()->EnsureSweepingCompleted(*mutable_double_buffer);
isolate()->heap()->EnsureSweepingCompletedForObject(
*mutable_double_buffer);
mutable_double_buffer->set_length(0);
}
}

@ -37,8 +37,7 @@ void SealCurrentObjects(Heap* heap) {
CHECK(!v8_flags.stress_concurrent_allocation);
CcTest::CollectAllGarbage();
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(Heap::SweepingForcedFinalizationMode::kV8Only);
heap->old_space()->FreeLinearAllocationArea();
for (Page* page : *heap->old_space()) {
page->MarkNeverAllocateForTesting();
@ -260,12 +259,11 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
const double kStepSizeInMs = 100;
CHECK(v8_flags.incremental_marking);
i::IncrementalMarking* marking = heap->incremental_marking();
i::MarkCompactCollector* collector = heap->mark_compact_collector();

if (collector->sweeping_in_progress()) {
if (heap->sweeping_in_progress()) {
SafepointScope scope(heap);
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}

if (marking->IsMinorMarking()) {
@ -297,10 +295,9 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) {
// Background thread allocating concurrently interferes with this function.
CHECK(!v8_flags.stress_concurrent_allocation);
CodePageCollectionMemoryModificationScopeForTesting code_scope(space->heap());
i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (space->heap()->sweeping_in_progress()) {
space->heap()->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
space->FreeLinearAllocationArea();
space->ResetFreeList();
@ -315,10 +312,10 @@ void AbandonCurrentlyFreeMemory(PagedSpace* space) {

void GcAndSweep(Heap* heap, AllocationSpace space) {
heap->CollectGarbage(space, GarbageCollectionReason::kTesting);
if (heap->mark_compact_collector()->sweeping_in_progress()) {
if (heap->sweeping_in_progress()) {
SafepointScope scope(heap);
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
}

@ -76,8 +76,8 @@ HEAP_TEST(CompactionFullAbortedPage) {

heap->set_force_oom(true);
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);

// Check that all handles still point to the same page, i.e., compaction
// has been aborted on the page.
@ -158,8 +158,8 @@ HEAP_TEST(CompactionPartiallyAbortedPage) {

heap->set_force_oom(true);
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);

bool migration_aborted = false;
for (Handle<FixedArray> object : compaction_page_handles) {
@ -257,8 +257,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithInvalidatedSlots) {

heap->set_force_oom(true);
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);

CHECK_EQ(Page::FromHeapObject(*compaction_page_handles.front()),
page_to_fill);
@ -336,8 +336,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {

heap->set_force_oom(true);
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);

// The following check makes sure that we compacted "some" objects, while
// leaving others in place.
@ -438,8 +438,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithRememberedSetEntries) {

heap->set_force_oom(true);
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);

// The following check makes sure that we compacted "some" objects, while
// leaving others in place.

@ -32,15 +32,15 @@ TEST(ConcurrentMarking) {
Heap* heap = CcTest::heap();
CcTest::CollectAllGarbage();
if (!heap->incremental_marking()->IsStopped()) return;
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}

WeakObjects weak_objects;
ConcurrentMarking* concurrent_marking =
new ConcurrentMarking(heap, &weak_objects);
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
PublishSegment(*collector->marking_worklists()->shared(),
ReadOnlyRoots(heap).undefined_value());
concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
@ -54,15 +54,15 @@ TEST(ConcurrentMarkingReschedule) {
Heap* heap = CcTest::heap();
CcTest::CollectAllGarbage();
if (!heap->incremental_marking()->IsStopped()) return;
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}

WeakObjects weak_objects;
ConcurrentMarking* concurrent_marking =
new ConcurrentMarking(heap, &weak_objects);
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
PublishSegment(*collector->marking_worklists()->shared(),
ReadOnlyRoots(heap).undefined_value());
concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
@ -80,15 +80,15 @@ TEST(ConcurrentMarkingPreemptAndReschedule) {
Heap* heap = CcTest::heap();
CcTest::CollectAllGarbage();
if (!heap->incremental_marking()->IsStopped()) return;
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}

WeakObjects weak_objects;
ConcurrentMarking* concurrent_marking =
new ConcurrentMarking(heap, &weak_objects);
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
for (int i = 0; i < 5000; i++)
PublishSegment(*collector->marking_worklists()->shared(),
ReadOnlyRoots(heap).undefined_value());

@ -1875,6 +1875,7 @@ TEST(TestSizeOfRegExpCode) {
v8_flags.stress_concurrent_allocation = false;

Isolate* isolate = CcTest::i_isolate();
Heap* heap = CcTest::heap();
HandleScope scope(isolate);

LocalContext context;
@ -1897,21 +1898,19 @@ TEST(TestSizeOfRegExpCode) {
// Get initial heap size after several full GCs, which will stabilize
// the heap size and return with sweeping finished completely.
CcTest::CollectAllAvailableGarbage();
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
int initial_size = static_cast<int>(heap->SizeOfObjects());

CompileRun("'foo'.match(reg_exp_source);");
CcTest::CollectAllAvailableGarbage();
int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
int size_with_regexp = static_cast<int>(heap->SizeOfObjects());

CompileRun("'foo'.match(half_size_reg_exp);");
CcTest::CollectAllAvailableGarbage();
int size_with_optimized_regexp =
static_cast<int>(CcTest::heap()->SizeOfObjects());
int size_with_optimized_regexp = static_cast<int>(heap->SizeOfObjects());

int size_of_regexp_code = size_with_regexp - initial_size;

@ -1935,14 +1934,13 @@ HEAP_TEST(TestSizeOfObjects) {
// Disable LAB, such that calculations with SizeOfObjects() and object size
// are correct.
heap->DisableInlineAllocation();
MarkCompactCollector* collector = heap->mark_compact_collector();

// Get initial heap size after several full GCs, which will stabilize
// the heap size and return with sweeping finished completely.
CcTest::CollectAllAvailableGarbage();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
int initial_size = static_cast<int>(heap->SizeOfObjects());

@ -1966,9 +1964,9 @@ HEAP_TEST(TestSizeOfObjects) {
// Normally sweeping would not be complete here, but no guarantees.
CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
// Waiting for sweeper threads should not change heap size.
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
}
@ -2518,10 +2516,9 @@ HEAP_TEST(GCFlags) {
GarbageCollectionReason::kTesting);
CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}

IncrementalMarking* marking = heap->incremental_marking();
@ -5577,8 +5574,7 @@ HEAP_TEST(Regress587004) {
CcTest::CollectGarbage(OLD_SPACE);
heap::SimulateFullSpace(heap->old_space());
heap->RightTrimFixedArray(*array, N - 1);
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(Heap::SweepingForcedFinalizationMode::kV8Only);
ByteArray byte_array;
const int M = 256;
// Don't allow old space expansion. The test works without this flag too,
@ -5749,10 +5745,9 @@ TEST(Regress598319) {

// GC to cleanup state
CcTest::CollectGarbage(OLD_SPACE);
MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}

CHECK(heap->lo_space()->Contains(arr.get()));
@ -5821,8 +5816,7 @@ Handle<FixedArray> ShrinkArrayAndCheckSize(Heap* heap, int length) {
for (int i = 0; i < 5; i++) {
CcTest::CollectAllGarbage();
}
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(Heap::SweepingForcedFinalizationMode::kV8Only);
// Disable LAB, such that calculations with SizeOfObjects() and object size
// are correct.
heap->DisableInlineAllocation();
@ -5837,8 +5831,7 @@ Handle<FixedArray> ShrinkArrayAndCheckSize(Heap* heap, int length) {
CHECK_EQ(size_after_allocation, size_after_shrinking);
// GC and sweeping updates the size to acccount for shrinking.
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(Heap::SweepingForcedFinalizationMode::kV8Only);
intptr_t size_after_gc = heap->SizeOfObjects();
CHECK_EQ(size_after_gc, size_before_allocation + array->Size());
return array;
@ -5872,11 +5865,10 @@ TEST(Regress615489) {
Isolate* isolate = heap->isolate();
CcTest::CollectAllGarbage();

i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
@ -5972,11 +5964,10 @@ TEST(LeftTrimFixedArrayInBlackArea) {
Isolate* isolate = heap->isolate();
CcTest::CollectAllGarbage();

i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
@ -6014,11 +6005,10 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
Isolate* isolate = heap->isolate();
CcTest::CollectAllGarbage();

i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
@ -6083,11 +6073,10 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
Isolate* isolate = CcTest::i_isolate();
CcTest::CollectAllGarbage();

i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
@ -6501,12 +6490,11 @@ HEAP_TEST(Regress670675) {
v8::HandleScope scope(CcTest::isolate());
Heap* heap = CcTest::heap();
Isolate* isolate = heap->isolate();
i::MarkCompactCollector* collector = heap->mark_compact_collector();
CcTest::CollectAllGarbage();

if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
heap->tracer()->StopFullCycleIfNeeded();
i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
@ -6558,10 +6546,9 @@ HEAP_TEST(RegressMissingWriteBarrierInAllocate) {
// then the map is white and will be freed prematurely.
heap::SimulateIncrementalMarking(heap, true);
CcTest::CollectAllGarbage();
MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(object->map().IsMap());
}

@ -205,8 +205,7 @@ HEAP_TEST(DoNotEvacuatePinnedPages) {
page->SetFlag(MemoryChunk::PINNED);

CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(Heap::SweepingForcedFinalizationMode::kV8Only);

// The pinned flag should prevent the page from moving.
for (Handle<FixedArray> object : handles) {
@ -216,8 +215,7 @@ HEAP_TEST(DoNotEvacuatePinnedPages) {
page->ClearFlag(MemoryChunk::PINNED);

CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(Heap::SweepingForcedFinalizationMode::kV8Only);

// `compact_on_every_full_gc` ensures that this page is an evacuation
// candidate, so with the pin flag cleared compaction should now move it.
@ -450,11 +448,10 @@ TEST(Regress5829) {
v8::HandleScope sc(CcTest::isolate());
Heap* heap = isolate->heap();
heap::SealCurrentObjects(heap);
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {

@ -16511,9 +16511,9 @@ TEST(TestIdleNotification) {
(v8::base::TimeTicks::Now().ToInternalValue() /
static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
IdlePauseInSeconds);
if (CcTest::heap()->mark_compact_collector()->sweeping_in_progress()) {
CcTest::heap()->mark_compact_collector()->EnsureSweepingCompleted(
i::MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (CcTest::heap()->sweeping_in_progress()) {
CcTest::heap()->EnsureSweepingCompleted(
i::Heap::SweepingForcedFinalizationMode::kV8Only);
}
}
intptr_t final_size = CcTest::heap()->SizeOfObjects();

@ -457,9 +457,9 @@ TEST_F(EmbedderTracingTest, FinalizeTracingWhenMarking) {

// Finalize a potentially running garbage collection.
CollectGarbage(OLD_SPACE);
if (heap->mark_compact_collector()->sweeping_in_progress()) {
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
heap->tracer()->StopFullCycleIfNeeded();
EXPECT_TRUE(heap->incremental_marking()->IsStopped());

@ -22,12 +22,11 @@ void HeapInternalsBase::SimulateIncrementalMarking(Heap* heap,
constexpr double kStepSizeInMs = 100;
CHECK(v8_flags.incremental_marking);
i::IncrementalMarking* marking = heap->incremental_marking();
i::MarkCompactCollector* collector = heap->mark_compact_collector();

if (collector->sweeping_in_progress()) {
if (heap->sweeping_in_progress()) {
SafepointScope scope(heap);
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}

if (marking->IsStopped()) {
@ -155,11 +154,11 @@ void HeapInternalsBase::SimulateFullSpace(v8::internal::PagedSpace* space) {
// v8_flags.stress_concurrent_allocation = false;
// Background thread allocating concurrently interferes with this function.
CHECK(!v8_flags.stress_concurrent_allocation);
CodePageCollectionMemoryModificationScopeForTesting code_scope(space->heap());
i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
Heap* heap = space->heap();
CodePageCollectionMemoryModificationScopeForTesting code_scope(heap);
if (heap->sweeping_in_progress()) {
heap->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
space->FreeLinearAllocationArea();
space->ResetFreeList();

@ -78,8 +78,8 @@ class WithHeapInternals : public TMixin, HeapInternalsBase {
CHECK(!v8_flags.stress_concurrent_allocation);
FullGC();
FullGC();
heap()->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap()->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
heap()->old_space()->FreeLinearAllocationArea();
for (Page* page : *heap()->old_space()) {
page->MarkNeverAllocateForTesting();
@ -88,10 +88,10 @@ class WithHeapInternals : public TMixin, HeapInternalsBase {

void GcAndSweep(i::AllocationSpace space) {
heap()->CollectGarbage(space, GarbageCollectionReason::kTesting);
if (heap()->mark_compact_collector()->sweeping_in_progress()) {
if (heap()->sweeping_in_progress()) {
SafepointScope scope(heap());
heap()->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap()->EnsureSweepingCompleted(
Heap::SweepingForcedFinalizationMode::kV8Only);
}
}
};