[heap] YoungGenerationConcurrentMarkingVisitor
Similar to the full-GC marking hierarchy (MarkingVisitorBase, with derived classes MainMarkingVisitor and ConcurrentMarkingVisitor), this CL introduces YoungGenerationMarkingVisitorBase + YoungGenerationConcurrentMarkingVisitor, and refactors YoungGenerationMarkingVisitor to inherit from YoungGenerationMarkingVisitorBase. YoungGenerationConcurrentMarkingVisitor dispatches to the functions refactored into ConcurrentMarkingVisitorUtility by the previous CL. Bug: v8:13012 Change-Id: I0e827eb95732ed9ddf027fe68e25a0839cdda773 Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3780524 Reviewed-by: Omer Katz <omerkatz@chromium.org> Reviewed-by: Dominik Inführ <dinfuehr@chromium.org> Commit-Queue: Leon Bettscheider <bettscheider@google.com> Cr-Commit-Position: refs/heads/main@{#81976}
This commit is contained in:
parent
eb89d2c92f
commit
55a497d933
@ -204,6 +204,151 @@ class ConcurrentMarkingVisitorUtility {
|
||||
};
|
||||
};
|
||||
|
||||
// Marking visitor for the young generation (minor GC) used on concurrent
// marker threads. Inherits the slot-visiting machinery from
// YoungGenerationMarkingVisitorBase and adds the concurrent-specific pieces:
// a ConcurrentMarkingState backed by per-chunk data, TSAN page
// synchronization, and the slot-snapshotting protocol (dispatching to
// ConcurrentMarkingVisitorUtility) for objects that may be mutated while
// being visited.
class YoungGenerationConcurrentMarkingVisitor final
    : public YoungGenerationMarkingVisitorBase<
          YoungGenerationConcurrentMarkingVisitor, ConcurrentMarkingState> {
 public:
  // `memory_chunk_data` collects per-chunk live-byte accounting for this
  // concurrent marker; the base class receives the local marking worklist.
  YoungGenerationConcurrentMarkingVisitor(
      Heap* heap, MarkingWorklists::Local* worklists_local,
      MemoryChunkDataMap* memory_chunk_data)
      : YoungGenerationMarkingVisitorBase<
            YoungGenerationConcurrentMarkingVisitor, ConcurrentMarkingState>(
            heap->isolate(), worklists_local),
        marking_state_(heap->isolate(), memory_chunk_data) {}

  bool is_shared_heap() { return false; }

  // Makes a page's initialization visible to this thread under TSAN before
  // the object on it is inspected.
  void SynchronizePageAccess(HeapObject heap_object) {
#ifdef THREAD_SANITIZER
    // This is needed because TSAN does not process the memory fence
    // emitted after page initialization.
    BasicMemoryChunk::FromHeapObject(heap_object)->SynchronizedHeapLoad();
#endif
  }

  template <typename T>
  static V8_INLINE T Cast(HeapObject object) {
    return T::cast(object);
  }

  // Used by utility functions
  void MarkObject(HeapObject host, HeapObject object) {
    SynchronizePageAccess(object);
    MarkObjectViaMarkingWorklist(object);
  }

  // HeapVisitor overrides to implement the snapshotting protocol.

  bool AllowDefaultJSObjectVisit() { return false; }

  int VisitJSObject(Map map, JSObject object) {
    return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclass(this, map,
                                                                  object);
  }

  int VisitJSObjectFast(Map map, JSObject object) {
    return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclassFast(this, map,
                                                                      object);
  }

  int VisitJSExternalObject(Map map, JSExternalObject object) {
    return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclass(this, map,
                                                                  object);
  }

#if V8_ENABLE_WEBASSEMBLY
  int VisitWasmInstanceObject(Map map, WasmInstanceObject object) {
    return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclass(this, map,
                                                                  object);
  }
  int VisitWasmSuspenderObject(Map map, WasmSuspenderObject object) {
    return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclass(this, map,
                                                                  object);
  }
#endif  // V8_ENABLE_WEBASSEMBLY

  int VisitJSWeakCollection(Map map, JSWeakCollection object) {
    return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclass(this, map,
                                                                  object);
  }

  int VisitJSFinalizationRegistry(Map map, JSFinalizationRegistry object) {
    return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclass(this, map,
                                                                  object);
  }

  // Strings whose slots can be mutated concurrently (flattening, trimming)
  // are visited via a snapshot of their slots.
  int VisitConsString(Map map, ConsString object) {
    return ConcurrentMarkingVisitorUtility::VisitFullyWithSnapshot(this, map,
                                                                   object);
  }

  int VisitSlicedString(Map map, SlicedString object) {
    return ConcurrentMarkingVisitorUtility::VisitFullyWithSnapshot(this, map,
                                                                   object);
  }

  int VisitThinString(Map map, ThinString object) {
    return ConcurrentMarkingVisitorUtility::VisitFullyWithSnapshot(this, map,
                                                                   object);
  }

  // Sequential strings contain no pointers; only their size is accounted,
  // with the length read with acquire semantics.
  int VisitSeqOneByteString(Map map, SeqOneByteString object) {
    if (!ShouldVisit(object)) return 0;
    return SeqOneByteString::SizeFor(object.length(kAcquireLoad));
  }

  int VisitSeqTwoByteString(Map map, SeqTwoByteString object) {
    if (!ShouldVisit(object)) return 0;
    return SeqTwoByteString::SizeFor(object.length(kAcquireLoad));
  }

  void VisitMapPointer(HeapObject host) {
    // ShouldVisitMapPointer(): Implemented by NewSpaceVisitor (return false).
    // VisitMapPointer(): Should never be called, because HeapVisitor bails out
    // if !ShouldVisitMapPointer().
    UNREACHABLE();
  }

  // HeapVisitor override.

  // Returns true only if this visitor won the grey->black transition, i.e.
  // the object has not been visited by anyone else.
  bool ShouldVisit(HeapObject object) {
    return marking_state_.GreyToBlack(object);
  }

  // Same as ShouldVisit() but without live-byte accounting.
  bool ShouldVisitUnaccounted(HeapObject object) {
    return marking_state_.GreyToBlackUnaccounted(object);
  }

  // Slot recording is a no-op for young-generation marking.
  template <typename TSlot>
  void RecordSlot(HeapObject object, TSlot slot, HeapObject target) {}

  SlotSnapshot* slot_snapshot() { return &slot_snapshot_; }

  ConcurrentMarkingState* marking_state() { return &marking_state_; }

 private:
  // Visits arrays (e.g. FixedArray) that may be left-trimmed concurrently,
  // reading the length once with acquire semantics and skipping live-byte
  // accounting until the size is known.
  template <typename T>
  int VisitLeftTrimmableArray(Map map, T object) {
    // The length() function checks that the length is a Smi.
    // This is not necessarily the case if the array is being left-trimmed.
    Object length = object.unchecked_length(kAcquireLoad);
    // No accounting here to avoid re-reading the length which could already
    // contain a non-SMI value when left-trimming happens concurrently.
    if (!ShouldVisitUnaccounted(object)) return 0;
    // The cached length must be the actual length as the array is not black.
    // Left trimming marks the array black before over-writing the length.
    DCHECK(length.IsSmi());
    int size = T::SizeFor(Smi::ToInt(length));
    marking_state_.IncrementLiveBytes(MemoryChunk::FromHeapObject(object),
                                      size);
    T::BodyDescriptor::IterateBody(map, object, size, this);
    return size;
  }

  ConcurrentMarkingState marking_state_;
  SlotSnapshot slot_snapshot_;
};
|
||||
|
||||
class ConcurrentMarkingVisitor final
|
||||
: public MarkingVisitorBase<ConcurrentMarkingVisitor,
|
||||
ConcurrentMarkingState> {
|
||||
|
@ -5383,87 +5383,12 @@ bool IsUnmarkedObjectForYoungGeneration(Heap* heap, FullObjectSlot p) {
|
||||
|
||||
} // namespace
|
||||
|
||||
// Main-thread marking visitor for the young generation: visits the slots of
// an object and pushes any still-white young-generation targets onto the
// local marking worklist after marking them black.
//
// NOTE(review): an out-of-line constructor definition for
// YoungGenerationMarkingVisitor (deriving from
// YoungGenerationMarkingVisitorBase) also appears later in this file; the two
// definitions conflict — confirm which one is current after the
// YoungGenerationMarkingVisitorBase refactoring.
class YoungGenerationMarkingVisitor final
    : public NewSpaceVisitor<YoungGenerationMarkingVisitor> {
 public:
  YoungGenerationMarkingVisitor(Isolate* isolate, MarkingState* marking_state,
                                MarkingWorklists::Local* worklists_local)
      : NewSpaceVisitor(isolate),
        worklists_local_(worklists_local),
        marking_state_(marking_state) {}

  V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
                               ObjectSlot end) final {
    VisitPointersImpl(host, start, end);
  }

  V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
                               MaybeObjectSlot end) final {
    VisitPointersImpl(host, start, end);
  }

  V8_INLINE void VisitCodePointer(HeapObject host,
                                  CodeObjectSlot slot) override {
    CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
    // Code slots never appear in new space because CodeDataContainers, the
    // only object that can contain code pointers, are always allocated in
    // the old space.
    UNREACHABLE();
  }

  V8_INLINE void VisitPointer(HeapObject host, ObjectSlot slot) final {
    VisitPointerImpl(host, slot);
  }

  V8_INLINE void VisitPointer(HeapObject host, MaybeObjectSlot slot) final {
    VisitPointerImpl(host, slot);
  }

  V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final {
    // Code objects are not expected in new space.
    UNREACHABLE();
  }

  V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
    // Code objects are not expected in new space.
    UNREACHABLE();
  }

  // JSArrayBuffers additionally mark their backing-store extension before
  // their body is iterated.
  V8_INLINE int VisitJSArrayBuffer(Map map, JSArrayBuffer object) {
    object.YoungMarkExtension();
    int size = JSArrayBuffer::BodyDescriptor::SizeOf(map, object);
    JSArrayBuffer::BodyDescriptor::IterateBody(map, object, size, this);
    return size;
  }

 private:
  template <typename TSlot>
  V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) {
    for (TSlot slot = start; slot < end; ++slot) {
      VisitPointer(host, slot);
    }
  }

  // Marks the slot's target if it lives in the young generation.
  template <typename TSlot>
  V8_INLINE void VisitPointerImpl(HeapObject host, TSlot slot) {
    typename TSlot::TObject target = *slot;
    if (Heap::InYoungGeneration(target)) {
      // Treat weak references as strong.
      // TODO(marja): Proper weakness handling for minor-mcs.
      HeapObject target_object = target.GetHeapObject();
      MarkObjectViaMarkingWorklist(target_object);
    }
  }

  // Pushes the object onto the worklist only if this call performed the
  // white->black transition, so each object is pushed at most once.
  inline void MarkObjectViaMarkingWorklist(HeapObject object) {
    if (marking_state_->WhiteToBlack(object)) {
      worklists_local_->Push(object);
    }
  }

  MarkingWorklists::Local* worklists_local_;
  MarkingState* marking_state_;
};
|
||||
// Out-of-line constructor for the main-thread young-generation visitor:
// forwards the isolate and local worklist to YoungGenerationMarkingVisitorBase
// and keeps a pointer to the (non-atomic) main-thread MarkingState.
YoungGenerationMarkingVisitor::YoungGenerationMarkingVisitor(
    Isolate* isolate, MarkingState* marking_state,
    MarkingWorklists::Local* worklists_local)
    : YoungGenerationMarkingVisitorBase<YoungGenerationMarkingVisitor,
                                        MarkingState>(isolate, worklists_local),
      marking_state_(marking_state) {}
|
||||
|
||||
// Defaulted out-of-line; no explicit teardown is required here.
MinorMarkCompactCollector::~MinorMarkCompactCollector() = default;
|
||||
|
||||
@ -5690,8 +5615,7 @@ void MinorMarkCompactCollector::StartMarking() {
|
||||
}
|
||||
|
||||
// Finalizes a minor mark-compact cycle by releasing the per-cycle marking
// infrastructure (local worklists and the main marking visitor).
void MinorMarkCompactCollector::Finish() {
  // Record the finish phase exactly once in the GC tracer. The previous code
  // opened two nested TRACE_GC scopes with the same MINOR_MC_FINISH id (one
  // split across two lines plus a one-line duplicate), which double-counts
  // the phase; a single scope is sufficient.
  TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_FINISH);
  local_marking_worklists_.reset();
  main_marking_visitor_.reset();
}
|
||||
|
@ -32,7 +32,6 @@ class MigrationObserver;
|
||||
class ReadOnlySpace;
|
||||
class RecordMigratedSlotVisitor;
|
||||
class UpdatingItem;
|
||||
class YoungGenerationMarkingVisitor;
|
||||
|
||||
class MarkBitCellIterator {
|
||||
public:
|
||||
@ -325,6 +324,21 @@ class MainMarkingVisitor final
|
||||
bool revisiting_object_;
|
||||
};
|
||||
|
||||
// Main-thread young-generation marking visitor. The slot-visiting logic lives
// in YoungGenerationMarkingVisitorBase; this class supplies the (non-atomic)
// main-thread MarkingState the base class accesses via marking_state().
class YoungGenerationMarkingVisitor final
    : public YoungGenerationMarkingVisitorBase<YoungGenerationMarkingVisitor,
                                               MarkingState> {
 public:
  YoungGenerationMarkingVisitor(Isolate* isolate, MarkingState* marking_state,
                                MarkingWorklists::Local* worklists_local);

 private:
  // Accessor used by the CRTP base via concrete_visitor()->marking_state().
  MarkingState* marking_state() { return marking_state_; }
  MarkingState* const marking_state_;

  // The base class needs access to the private marking_state() accessor.
  friend class YoungGenerationMarkingVisitorBase<YoungGenerationMarkingVisitor,
                                                 MarkingState>;
};
|
||||
|
||||
class CollectorBase {
|
||||
public:
|
||||
GarbageCollector garbage_collector() { return garbage_collector_; }
|
||||
|
@ -572,6 +572,44 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitTransitionArray(
|
||||
return size;
|
||||
}
|
||||
|
||||
// Shared constructor: forwards the isolate to NewSpaceVisitor and stores the
// local marking worklist that newly-marked objects are pushed onto.
template <typename ConcreteVisitor, typename MarkingState>
YoungGenerationMarkingVisitorBase<ConcreteVisitor, MarkingState>::
    YoungGenerationMarkingVisitorBase(Isolate* isolate,
                                      MarkingWorklists::Local* worklists_local)
    : NewSpaceVisitor<ConcreteVisitor>(isolate),
      worklists_local_(worklists_local) {}
|
||||
|
||||
// Visits a JSArrayBuffer: marks its backing-store extension for the young
// generation, then iterates the object body. Returns the object size.
template <typename ConcreteVisitor, typename MarkingState>
int YoungGenerationMarkingVisitorBase<
    ConcreteVisitor, MarkingState>::VisitJSArrayBuffer(Map map,
                                                       JSArrayBuffer object) {
  object.YoungMarkExtension();
  int size = JSArrayBuffer::BodyDescriptor::SizeOf(map, object);
  JSArrayBuffer::BodyDescriptor::IterateBody(map, object, size, this);
  return size;
}
|
||||
|
||||
// Pushes the object onto the local worklist only when this call performs the
// white->black transition (via the concrete visitor's marking state), so each
// object is enqueued at most once.
template <typename ConcreteVisitor, typename MarkingState>
void YoungGenerationMarkingVisitorBase<ConcreteVisitor, MarkingState>::
    MarkObjectViaMarkingWorklist(HeapObject object) {
  if (concrete_visitor()->marking_state()->WhiteToBlack(object)) {
    worklists_local_->Push(object);
  }
}
|
||||
|
||||
// Visits a single slot: if its target lives in the young generation, marks
// the target (weak references are treated as strong for minor GC).
template <typename ConcreteVisitor, typename MarkingState>
template <typename TSlot>
void YoungGenerationMarkingVisitorBase<
    ConcreteVisitor, MarkingState>::VisitPointerImpl(HeapObject host,
                                                     TSlot slot) {
  typename TSlot::TObject target = *slot;
  if (Heap::InYoungGeneration(target)) {
    // Treat weak references as strong.
    HeapObject target_object = target.GetHeapObject();
    MarkObjectViaMarkingWorklist(target_object);
  }
}
|
||||
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
||||
|
@ -296,6 +296,73 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
|
||||
#endif // V8_ENABLE_SANDBOX
|
||||
};
|
||||
|
||||
// CRTP base for young-generation (minor GC) marking visitors, shared by the
// main-thread YoungGenerationMarkingVisitor and the concurrent
// YoungGenerationConcurrentMarkingVisitor. Routes all pointer slots through
// VisitPointerImpl(), which marks young-generation targets via the concrete
// visitor's marking state and pushes them onto worklists_local_.
template <typename ConcreteVisitor, typename MarkingState>
class YoungGenerationMarkingVisitorBase
    : public NewSpaceVisitor<ConcreteVisitor> {
 public:
  YoungGenerationMarkingVisitorBase(Isolate* isolate,
                                    MarkingWorklists::Local* worklists_local);

  V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
                               ObjectSlot end) final {
    VisitPointersImpl(host, start, end);
  }

  V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
                               MaybeObjectSlot end) final {
    VisitPointersImpl(host, start, end);
  }

  V8_INLINE void VisitCodePointer(HeapObject host,
                                  CodeObjectSlot slot) override {
    CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
    // Code slots never appear in new space because CodeDataContainers, the
    // only object that can contain code pointers, are always allocated in
    // the old space.
    UNREACHABLE();
  }

  V8_INLINE void VisitPointer(HeapObject host, ObjectSlot slot) final {
    VisitPointerImpl(host, slot);
  }

  V8_INLINE void VisitPointer(HeapObject host, MaybeObjectSlot slot) final {
    VisitPointerImpl(host, slot);
  }

  V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final {
    // Code objects are not expected in new space.
    UNREACHABLE();
  }

  V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
    // Code objects are not expected in new space.
    UNREACHABLE();
  }

  // Defined out-of-line; marks the buffer's extension then iterates the body.
  V8_INLINE int VisitJSArrayBuffer(Map map, JSArrayBuffer object);

 protected:
  // CRTP downcast to the concrete visitor (provides marking_state()).
  ConcreteVisitor* concrete_visitor() {
    return static_cast<ConcreteVisitor*>(this);
  }

  // Marks `object` (white->black) and pushes it onto the local worklist if
  // this call performed the transition.
  inline void MarkObjectViaMarkingWorklist(HeapObject object);

 private:
  template <typename TSlot>
  V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) {
    for (TSlot slot = start; slot < end; ++slot) {
      VisitPointer(host, slot);
    }
  }

  template <typename TSlot>
  V8_INLINE void VisitPointerImpl(HeapObject host, TSlot slot);

  MarkingWorklists::Local* worklists_local_;
};
|
||||
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user