[heap] Switch main thread marking visitors to MarkingVisitorBase

The incremental marker and the stop-the-world marker now use the same
visitor, which is derived from MarkingVisitorBase. This removes code
duplication and should also reduce binary size.
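For illustration, a minimal, self-contained sketch (simplified stand-ins, not actual V8 code) of the CRTP arrangement this change settles on: the base template implements the visitation logic once and defers the coloring decision to the concrete visitor's ShouldVisit, so the main-thread and concurrent visitors share one body-visiting implementation.

```cpp
#include <cstdio>

// Stand-in tri-color mark; objects start out grey (discovered, not visited).
enum class Color { kWhite, kGrey, kBlack };
struct HeapObject { Color color = Color::kGrey; };

// Shared base: the body-visiting logic lives here exactly once and is reused
// by both the main-thread and the concurrent visitor (CRTP dispatch).
template <typename ConcreteVisitor>
class MarkingVisitorBase {
 public:
  int Visit(HeapObject& object) {
    // Defer the coloring decision to the concrete visitor.
    if (!concrete_visitor()->ShouldVisit(object)) return 0;
    // ... iterate the object's body, mark referenced objects grey, etc. ...
    return 1;  // stand-in for "bytes visited"
  }
 protected:
  ConcreteVisitor* concrete_visitor() {
    return static_cast<ConcreteVisitor*>(this);
  }
};

// Main-thread flavor (cf. MainMarkingVisitor in the patch): colors the object
// black in ShouldVisit and additionally admits black objects while revisiting.
class MainVisitor : public MarkingVisitorBase<MainVisitor> {
 public:
  bool ShouldVisit(HeapObject& object) {
    bool grey_to_black = (object.color == Color::kGrey);
    if (grey_to_black) object.color = Color::kBlack;
    return grey_to_black || revisiting_object_;
  }
  bool revisiting_object_ = false;
};

// Concurrent flavor (cf. ConcurrentMarkingVisitor): same contract, but the
// real visitor performs GreyToBlack atomically on its own marking state.
class ConcurrentVisitor : public MarkingVisitorBase<ConcurrentVisitor> {
 public:
  bool ShouldVisit(HeapObject& object) {
    bool grey_to_black = (object.color == Color::kGrey);
    if (grey_to_black) object.color = Color::kBlack;
    return grey_to_black;
  }
};

int main() {
  HeapObject a, b;
  MainVisitor main_visitor;
  ConcurrentVisitor concurrent_visitor;
  std::printf("%d\n", main_visitor.Visit(a));        // 1: grey -> black
  std::printf("%d\n", main_visitor.Visit(a));        // 0: already black
  std::printf("%d\n", concurrent_visitor.Visit(b));  // 1: shared Visit reused
}
```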

The marking worklist processing code also changes: it no longer colors
the object black before visiting it. Instead, the visitor colors the
object black in its ShouldVisit method.
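A hedged sketch of what that means for the drain loop (again with simplified stand-in types, not the V8 classes): the loop no longer performs GreyToBlack itself, so an object that is already black, for example one that landed on the worklist twice, is skipped inside ShouldVisit instead of being special-cased by the caller.

```cpp
#include <cstdio>
#include <vector>

// Stand-in tri-color mark; objects start out grey (discovered, not visited).
enum class Color { kWhite, kGrey, kBlack };
struct HeapObject { Color color = Color::kGrey; };

struct Visitor {
  // New protocol: the visitor, not the drain loop, transitions grey -> black.
  bool ShouldVisit(HeapObject& object) {
    bool grey_to_black = (object.color == Color::kGrey);
    if (grey_to_black) object.color = Color::kBlack;
    return grey_to_black;
  }
  int Visit(HeapObject& object) {
    if (!ShouldVisit(object)) return 0;  // already black: nothing to do
    // ... iterate the object's body here ...
    return 1;  // stand-in for "bytes processed"
  }
};

int main() {
  HeapObject o;
  // The same object can land on the worklist more than once (e.g. via a
  // write barrier); the second visit is now a cheap no-op in ShouldVisit.
  std::vector<HeapObject*> worklist = {&o, &o};
  Visitor visitor;
  int bytes_processed = 0;
  while (!worklist.empty()) {
    HeapObject* object = worklist.back();
    worklist.pop_back();
    bytes_processed += visitor.Visit(*object);
  }
  std::printf("bytes processed: %d\n", bytes_processed);  // prints 1, not 2
}
```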

Bug: chromium:1019218
Change-Id: I57971122f3c77ad2770b6754d696b79d802ef1a4
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1901271
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#64848}
Author: Ulan Degenbaev <ulan@chromium.org>
Date: 2019-11-07 17:37:00 +01:00 (committed by Commit Bot)
Commit: fb7676769b (parent: f3b9d9e55c)
12 changed files with 251 additions and 650 deletions

View File

@@ -1615,6 +1615,8 @@ V8_INLINE bool operator!(ExceptionStatus status) {
return !static_cast<bool>(status);
}
enum class TraceRetainingPathMode { kEnabled, kDisabled };
} // namespace internal
} // namespace v8

View File

@@ -81,13 +81,13 @@ class ConcurrentMarkingVisitor final
public:
ConcurrentMarkingVisitor(int task_id, MarkingWorklist* marking_worklist,
EmbedderTracingWorklist* embedder_worklist,
WeakObjects* weak_objects,
WeakObjects* weak_objects, Heap* heap,
unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode,
bool embedder_tracing_enabled, bool is_forced_gc,
MemoryChunkDataMap* memory_chunk_data)
: MarkingVisitorBase(task_id, marking_worklist, embedder_worklist,
weak_objects, mark_compact_epoch,
weak_objects, heap, mark_compact_epoch,
bytecode_flush_mode, embedder_tracing_enabled,
is_forced_gc),
marking_state_(memory_chunk_data),
@@ -157,6 +157,11 @@ class ConcurrentMarkingVisitor final
return false;
}
// HeapVisitor override.
bool ShouldVisit(HeapObject object) {
return marking_state_.GreyToBlack(object);
}
private:
// Helper class for collecting in-object slot addresses and values.
class SlotSnapshottingVisitor final : public ObjectVisitor {
@@ -298,6 +303,10 @@ class ConcurrentMarkingVisitor final
ConcurrentMarkingState* marking_state() { return &marking_state_; }
TraceRetainingPathMode retaining_path_mode() {
return TraceRetainingPathMode::kDisabled;
}
ConcurrentMarkingState marking_state_;
MemoryChunkDataMap* memory_chunk_data_;
SlotSnapshot slot_snapshot_;
@@ -384,7 +393,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
size_t kBytesUntilInterruptCheck = 64 * KB;
int kObjectsUntilInterrupCheck = 1000;
ConcurrentMarkingVisitor visitor(
task_id, marking_worklist_, embedder_worklist_, weak_objects_,
task_id, marking_worklist_, embedder_worklist_, weak_objects_, heap_,
task_state->mark_compact_epoch, Heap::GetBytecodeFlushMode(),
heap_->local_embedder_heap_tracer()->InUse(), task_state->is_forced_gc,
&task_state->memory_chunk_data);

View File

@@ -6351,8 +6351,8 @@ void Heap::MarkingBarrierForDescriptorArraySlow(Heap* heap, HeapObject host,
if (NumberOfMarkedDescriptors::decode(heap->mark_compact_collector()->epoch(),
raw_marked) <
number_of_own_descriptors) {
heap->incremental_marking()->VisitDescriptors(host, descriptor_array,
number_of_own_descriptors);
heap->incremental_marking()->MarkDescriptorArrayFromWriteBarrier(
host, descriptor_array, number_of_own_descriptors);
}
}

View File

@@ -97,10 +97,6 @@ enum class ClearFreedMemoryMode { kClearFreedMemory, kDontClearFreedMemory };
enum ExternalBackingStoreType { kArrayBuffer, kExternalString, kNumTypes };
enum class FixedArrayVisitationMode { kRegular, kIncremental };
enum class TraceRetainingPathMode { kEnabled, kDisabled };
enum class RetainingPathOption { kDefault, kTrackEphemeronPath };
enum class AllocationOrigin {
@@ -2099,9 +2095,8 @@ class Heap {
friend class IncrementalMarking;
friend class IncrementalMarkingJob;
friend class OldLargeObjectSpace;
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
friend class MarkingVisitor;
template <typename ConcreteVisitor, typename MarkingState>
friend class MarkingVisitorBase;
friend class MarkCompactCollector;
friend class MarkCompactCollectorBase;
friend class MinorMarkCompactCollector;

View File

@@ -13,6 +13,8 @@
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/marking-visitor-inl.h"
#include "src/heap/marking-visitor.h"
#include "src/heap/object-stats.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/objects-visiting.h"
@@ -30,11 +32,6 @@
namespace v8 {
namespace internal {
using IncrementalMarkingMarkingVisitor =
MarkingVisitor<FixedArrayVisitationMode::kIncremental,
TraceRetainingPathMode::kDisabled,
IncrementalMarking::MarkingState>;
void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
size_t size) {
Heap* heap = incremental_marking_->heap();
@@ -70,6 +67,11 @@ IncrementalMarking::IncrementalMarking(
SetState(STOPPED);
}
IncrementalMarking::~IncrementalMarking() {
// Avoid the default destructor, which would be inlined in the header file
// and cause compile errors due to marking_visitor_ not being fully defined.
}
void IncrementalMarking::RecordWriteSlow(HeapObject obj, HeapObjectSlot slot,
HeapObject value) {
if (BaseRecordWrite(obj, value) && slot.address() != kNullAddress) {
@@ -102,9 +104,7 @@ void IncrementalMarking::MarkBlackAndVisitObjectDueToLayoutChange(
TRACE_EVENT0("v8", "V8.GCIncrementalMarkingLayoutChange");
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_LAYOUT_CHANGE);
marking_state()->WhiteToGrey(obj);
if (marking_state()->GreyToBlack(obj)) {
RevisitObject(obj);
}
marking_visitor_->Visit(obj.map(), obj);
}
void IncrementalMarking::NotifyLeftTrimming(HeapObject from, HeapObject to) {
@@ -339,6 +339,15 @@ void IncrementalMarking::StartMarking() {
ActivateIncrementalWriteBarrier();
MarkCompactCollector* collector = heap_->mark_compact_collector();
marking_visitor_ = std::make_unique<MarkCompactCollector::MarkingVisitor>(
collector->marking_state(), collector->marking_worklist()->shared(),
collector->marking_worklist()->embedder(), collector->weak_objects(),
heap_, collector->epoch(), Heap::GetBytecodeFlushMode(),
heap_->local_embedder_heap_tracer()->InUse(),
heap_->is_current_gc_forced());
// Marking bits are cleared by the sweeper.
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
@@ -680,29 +689,6 @@ void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
bytes_marked_ -= Min(bytes_marked_, dead_bytes_in_new_space);
}
int IncrementalMarking::VisitObject(Map map, HeapObject obj) {
DCHECK(marking_state()->IsGrey(obj) || marking_state()->IsBlack(obj));
if (!marking_state()->GreyToBlack(obj)) {
// The object can already be black in these cases:
// 1. The object is a fixed array with the progress bar.
// 2. The object is a JSObject that was colored black before
// unsafe layout change.
// 3. The object is a string that was colored black before
// unsafe layout change.
// 4. The object is materialized by the deoptimizer.
// 5. The object is a descriptor array marked black by
// the descriptor array marking barrier.
DCHECK(obj.IsHashTable() || obj.IsPropertyArray() || obj.IsFixedArray() ||
obj.IsContext() || obj.IsJSObject() || obj.IsString() ||
obj.IsDescriptorArray());
}
DCHECK(marking_state()->IsBlack(obj));
WhiteToGreyAndPush(map);
IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
marking_state());
return visitor.Visit(map, obj);
}
void IncrementalMarking::ProcessBlackAllocatedObject(HeapObject obj) {
if (IsMarking() && marking_state()->IsBlack(obj)) {
RevisitObject(obj);
@@ -715,23 +701,16 @@ void IncrementalMarking::RevisitObject(HeapObject obj) {
DCHECK_IMPLIES(MemoryChunk::FromHeapObject(obj)->IsFlagSet(
MemoryChunk::HAS_PROGRESS_BAR),
0u == MemoryChunk::FromHeapObject(obj)->ProgressBar());
Map map = obj.map();
WhiteToGreyAndPush(map);
IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
marking_state());
visitor.Visit(map, obj);
MarkCompactCollector::MarkingVisitor::RevisitScope revisit(
marking_visitor_.get());
marking_visitor_->Visit(obj.map(), obj);
}
void IncrementalMarking::VisitDescriptors(HeapObject host,
DescriptorArray descriptors,
int number_of_own_descriptors) {
IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
marking_state());
// This is necessary because the Scavenger records slots only for the
// promoted black objects and the marking visitor of DescriptorArray skips
// the descriptors marked by the visitor.VisitDescriptors() below.
visitor.MarkDescriptorArrayBlack(host, descriptors);
visitor.VisitDescriptors(descriptors, number_of_own_descriptors);
void IncrementalMarking::MarkDescriptorArrayFromWriteBarrier(
HeapObject host, DescriptorArray descriptors,
int number_of_own_descriptors) {
marking_visitor_->MarkDescriptorArrayFromWriteBarrier(
host, descriptors, number_of_own_descriptors);
}
intptr_t IncrementalMarking::ProcessMarkingWorklist(
@@ -755,7 +734,7 @@ intptr_t IncrementalMarking::ProcessMarkingWorklist(
marking_state()->IsBlackOrGrey(obj));
continue;
}
bytes_processed += VisitObject(obj.map(), obj);
bytes_processed += marking_visitor_->Visit(obj.map(), obj);
}
return bytes_processed;
}

View File

@@ -36,13 +36,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
enum GCRequestType { NONE, COMPLETE_MARKING, FINALIZATION };
#ifdef V8_CONCURRENT_MARKING
using MarkingState = IncrementalMarkingState;
#else
using MarkingState = MajorNonAtomicMarkingState;
#endif // V8_CONCURRENT_MARKING
using AtomicMarkingState = MajorAtomicMarkingState;
using NonAtomicMarkingState = MajorNonAtomicMarkingState;
using MarkingState = MarkCompactCollector::MarkingState;
using AtomicMarkingState = MarkCompactCollector::AtomicMarkingState;
using NonAtomicMarkingState = MarkCompactCollector::NonAtomicMarkingState;
class PauseBlackAllocationScope {
public:
@@ -95,6 +91,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
IncrementalMarking(Heap* heap,
MarkCompactCollector::MarkingWorklist* marking_worklist,
WeakObjects* weak_objects);
~IncrementalMarking();
MarkingState* marking_state() { return &marking_state_; }
@@ -207,8 +204,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
void RevisitObject(HeapObject obj);
// Ensures that all descriptors in the range [0, number_of_own_descriptors)
// are visited.
void VisitDescriptors(HeapObject host, DescriptorArray array,
int number_of_own_descriptors);
void MarkDescriptorArrayFromWriteBarrier(HeapObject host,
DescriptorArray array,
int number_of_own_descriptors);
void RecordWriteSlow(HeapObject obj, HeapObjectSlot slot, HeapObject value);
void RecordWriteIntoCode(Code host, RelocInfo* rinfo, HeapObject value);
@@ -289,9 +287,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
intptr_t bytes_to_process,
ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION);
// Visits the object and returns its size.
V8_INLINE int VisitObject(Map map, HeapObject obj);
// Updates scheduled_bytes_to_mark_ to ensure marking progress based on
// time.
void ScheduleBytesToMarkBasedOnTime(double time_ms);
@@ -326,6 +321,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
MarkCompactCollector::MarkingWorklist* const marking_worklist_;
WeakObjects* weak_objects_;
std::unique_ptr<MarkCompactCollector::MarkingVisitor> marking_visitor_;
double start_time_ms_;
size_t initial_old_generation_size_;
size_t old_generation_allocation_counter_;

View File

@@ -21,397 +21,6 @@
namespace v8 {
namespace internal {
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::MarkingVisitor(MarkCompactCollector* collector,
MarkingState* marking_state)
: heap_(collector->heap()),
collector_(collector),
marking_state_(marking_state),
mark_compact_epoch_(collector->epoch()) {}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitBytecodeArray(Map map,
BytecodeArray array) {
int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
BytecodeArray::BodyDescriptor::IterateBody(map, array, size, this);
if (!heap_->is_current_gc_forced()) {
array.MakeOlder();
}
return size;
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitDescriptorArray(Map map,
DescriptorArray array) {
int size = DescriptorArray::BodyDescriptor::SizeOf(map, array);
VisitPointers(array, array.GetFirstPointerSlot(), array.GetDescriptorSlot(0));
VisitDescriptors(array, array.number_of_descriptors());
return size;
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
VisitSharedFunctionInfo(Map map, SharedFunctionInfo shared_info) {
int size = SharedFunctionInfo::BodyDescriptor::SizeOf(map, shared_info);
SharedFunctionInfo::BodyDescriptor::IterateBody(map, shared_info, size, this);
// If the SharedFunctionInfo has old bytecode, mark it as flushable,
// otherwise visit the function data field strongly.
if (shared_info.ShouldFlushBytecode(Heap::GetBytecodeFlushMode())) {
collector_->AddBytecodeFlushingCandidate(shared_info);
} else {
VisitPointer(shared_info,
shared_info.RawField(SharedFunctionInfo::kFunctionDataOffset));
}
return size;
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitJSFunction(Map map, JSFunction object) {
int size = Parent::VisitJSFunction(map, object);
// Check if the JSFunction needs reset due to bytecode being flushed.
if (FLAG_flush_bytecode && object.NeedsResetDueToFlushedBytecode()) {
collector_->AddFlushedJSFunction(object);
}
return size;
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitFixedArray(Map map, FixedArray object) {
return VisitFixedArrayIncremental(map, object);
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
template <typename T>
V8_INLINE int
MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitEmbedderTracingSubclass(Map map, T object) {
if (heap_->local_embedder_heap_tracer()->InUse()) {
marking_worklist()->embedder()->Push(MarkCompactCollectorBase::kMainThread,
object);
}
int size = T::BodyDescriptor::SizeOf(map, object);
T::BodyDescriptor::IterateBody(map, object, size, this);
return size;
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitJSApiObject(Map map, JSObject object) {
return VisitEmbedderTracingSubclass(map, object);
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitJSArrayBuffer(Map map,
JSArrayBuffer object) {
return VisitEmbedderTracingSubclass(map, object);
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitJSDataView(Map map, JSDataView object) {
return VisitEmbedderTracingSubclass(map, object);
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitJSTypedArray(Map map,
JSTypedArray object) {
return VisitEmbedderTracingSubclass(map, object);
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
VisitEphemeronHashTable(Map map, EphemeronHashTable table) {
collector_->AddEphemeronHashTable(table);
for (InternalIndex i : table.IterateEntries()) {
ObjectSlot key_slot =
table.RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
HeapObject key = HeapObject::cast(table.KeyAt(i));
collector_->RecordSlot(table, key_slot, key);
ObjectSlot value_slot =
table.RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));
if (marking_state()->IsBlackOrGrey(key)) {
VisitPointer(table, value_slot);
} else {
Object value_obj = *value_slot;
if (value_obj.IsHeapObject()) {
HeapObject value = HeapObject::cast(value_obj);
collector_->RecordSlot(table, value_slot, value);
// Revisit ephemerons with both key and value unreachable at end
// of concurrent marking cycle.
if (marking_state()->IsWhite(value)) {
collector_->AddEphemeron(key, value);
}
}
}
}
return table.SizeFromMap(map);
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitMap(Map meta_map, Map map) {
int size = Map::BodyDescriptor::SizeOf(meta_map, map);
if (map.CanTransition()) {
// Maps that can transition share their descriptor arrays and require
// special visiting logic to avoid memory leaks.
// Since descriptor arrays are potentially shared, ensure that only the
// descriptors that belong to this map are marked. The first time a
// non-empty descriptor array is marked, its header is also visited. The
// slot holding the descriptor array will be implicitly recorded when the
// pointer fields of this map are visited.
DescriptorArray descriptors = map.instance_descriptors();
MarkDescriptorArrayBlack(map, descriptors);
int number_of_own_descriptors = map.NumberOfOwnDescriptors();
if (number_of_own_descriptors) {
DCHECK_LE(number_of_own_descriptors, descriptors.number_of_descriptors());
VisitDescriptors(descriptors, number_of_own_descriptors);
}
// Mark the pointer fields of the Map. Since the transitions array has
// been marked already, it is fine that one of these fields contains a
// pointer to it.
}
Map::BodyDescriptor::IterateBody(meta_map, map, size, this);
return size;
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitTransitionArray(Map map,
TransitionArray array) {
int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
collector_->AddTransitionArray(array);
return size;
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitJSWeakRef(Map map, JSWeakRef weak_ref) {
if (weak_ref.target().IsHeapObject()) {
HeapObject target = HeapObject::cast(weak_ref.target());
if (marking_state()->IsBlackOrGrey(target)) {
// Record the slot inside the JSWeakRef, since the IterateBody below
// won't visit it.
ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
collector_->RecordSlot(weak_ref, slot, target);
} else {
// JSWeakRef points to a potentially dead object. We have to process
// them when we know the liveness of the whole transitive closure.
collector_->AddWeakRef(weak_ref);
}
}
int size = JSWeakRef::BodyDescriptor::SizeOf(map, weak_ref);
JSWeakRef::BodyDescriptor::IterateBody(map, weak_ref, size, this);
return size;
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitWeakCell(Map map, WeakCell weak_cell) {
if (weak_cell.target().IsHeapObject()) {
HeapObject target = HeapObject::cast(weak_cell.target());
if (marking_state()->IsBlackOrGrey(target)) {
// Record the slot inside the WeakCell, since the IterateBody below
// won't visit it.
ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
collector_->RecordSlot(weak_cell, slot, target);
} else {
// WeakCell points to a potentially dead object. We have to process
// them when we know the liveness of the whole transitive closure.
collector_->AddWeakCell(weak_cell);
}
}
int size = WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
WeakCell::BodyDescriptor::IterateBody(map, weak_cell, size, this);
return size;
}
// class template arguments
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
// method template arguments
template <typename TSlot>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitPointerImpl(HeapObject host,
TSlot slot) {
static_assert(std::is_same<TSlot, ObjectSlot>::value ||
std::is_same<TSlot, MaybeObjectSlot>::value,
"Only ObjectSlot and MaybeObjectSlot are expected here");
typename TSlot::TObject object = *slot;
HeapObject target_object;
if (object.GetHeapObjectIfStrong(&target_object)) {
collector_->RecordSlot(host, HeapObjectSlot(slot), target_object);
MarkObject(host, target_object);
} else if (TSlot::kCanBeWeak && object.GetHeapObjectIfWeak(&target_object)) {
if (marking_state()->IsBlackOrGrey(target_object)) {
// Weak references with live values are directly processed here to reduce
// the processing time of weak cells during the main GC pause.
collector_->RecordSlot(host, HeapObjectSlot(slot), target_object);
} else {
// If we do not know about liveness of values of weak cells, we have to
// process them when we know the liveness of the whole transitive
// closure.
collector_->AddWeakReference(host, HeapObjectSlot(slot));
}
}
}
// class template arguments
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
// method template arguments
template <typename TSlot>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitPointersImpl(HeapObject host,
TSlot start, TSlot end) {
for (TSlot p = start; p < end; ++p) {
VisitPointer(host, p);
}
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitEmbeddedPointer(Code host,
RelocInfo* rinfo) {
DCHECK(RelocInfo::IsEmbeddedObjectMode(rinfo->rmode()));
HeapObject object = HeapObject::cast(rinfo->target_object());
collector_->RecordRelocSlot(host, rinfo, object);
if (!marking_state()->IsBlackOrGrey(object)) {
if (host.IsWeakObject(object)) {
collector_->AddWeakObjectInCode(object, host);
} else {
MarkObject(host, object);
}
}
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitCodeTarget(Code host,
RelocInfo* rinfo) {
DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
collector_->RecordRelocSlot(host, rinfo, target);
MarkObject(host, target);
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
MarkDescriptorArrayBlack(HeapObject host, DescriptorArray descriptors) {
// Note that WhiteToBlack is not sufficient here because it fails if the
// descriptor array is grey. So we need to do two steps: WhiteToGrey and
// GreyToBlack. Alternatively, we could check WhiteToGrey || WhiteToBlack.
if (marking_state()->WhiteToGrey(descriptors)) {
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainer(host, descriptors);
}
}
if (marking_state()->GreyToBlack(descriptors)) {
VisitPointers(descriptors, descriptors.GetFirstPointerSlot(),
descriptors.GetDescriptorSlot(0));
}
DCHECK(marking_state()->IsBlack(descriptors));
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::MarkObject(HeapObject host,
HeapObject object) {
if (marking_state()->WhiteToGrey(object)) {
marking_worklist()->Push(object);
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainer(host, object);
}
}
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
VisitFixedArrayIncremental(Map map, FixedArray object) {
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
int size = FixedArray::BodyDescriptor::SizeOf(map, object);
if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
DCHECK(FLAG_use_marking_progress_bar);
DCHECK(heap_->IsLargeObject(object));
size_t current_progress_bar = chunk->ProgressBar();
int start = static_cast<int>(current_progress_bar);
if (start == 0) start = FixedArray::BodyDescriptor::kStartOffset;
int end = Min(size, start + kProgressBarScanningChunk);
if (start < end) {
VisitPointers(object, object.RawField(start), object.RawField(end));
bool success = chunk->TrySetProgressBar(current_progress_bar, end);
CHECK(success);
if (end < size) {
DCHECK(marking_state()->IsBlack(object));
// The object can be pushed back onto the marking worklist only after
// the progress bar was updated.
marking_worklist()->Push(object);
}
}
return end - start;
}
// Non-batched processing.
FixedArray::BodyDescriptor::IterateBody(map, object, size, this);
return size;
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
VisitDescriptors(DescriptorArray descriptors,
int number_of_own_descriptors) {
// Updating the number of marked descriptors is supported only for black
// descriptor arrays.
DCHECK(marking_state()->IsBlack(descriptors));
int16_t new_marked = static_cast<int16_t>(number_of_own_descriptors);
int16_t old_marked = descriptors.UpdateNumberOfMarkedDescriptors(
mark_compact_epoch_, new_marked);
if (old_marked < new_marked) {
VisitPointers(descriptors,
MaybeObjectSlot(descriptors.GetDescriptorSlot(old_marked)),
MaybeObjectSlot(descriptors.GetDescriptorSlot(new_marked)));
}
}
void MarkCompactCollector::MarkObject(HeapObject host, HeapObject obj) {
if (marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj);
@@ -435,7 +44,7 @@ void MarkCompactCollector::MarkRootObject(Root root, HeapObject obj) {
void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) {
if (Heap::InYoungGeneration(obj) &&
non_atomic_marking_state_.WhiteToGrey(obj)) {
worklist_->Push(kMainThread, obj);
worklist_->Push(kMainThreadTask, obj);
}
}
@@ -476,16 +85,53 @@ void MarkCompactCollector::RecordSlot(MemoryChunk* source_page,
}
void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
weak_objects_.transition_arrays.Push(kMainThread, array);
weak_objects_.transition_arrays.Push(kMainThreadTask, array);
}
void MarkCompactCollector::AddBytecodeFlushingCandidate(
SharedFunctionInfo flush_candidate) {
weak_objects_.bytecode_flushing_candidates.Push(kMainThread, flush_candidate);
template <typename MarkingState>
template <typename T, typename TBodyDescriptor>
int MainMarkingVisitor<MarkingState>::VisitJSObjectSubclass(Map map, T object) {
if (!this->ShouldVisit(object)) return 0;
this->VisitMapPointer(object);
int size = TBodyDescriptor::SizeOf(map, object);
TBodyDescriptor::IterateBody(map, object, size, this);
return size;
}
void MarkCompactCollector::AddFlushedJSFunction(JSFunction flushed_function) {
weak_objects_.flushed_js_functions.Push(kMainThread, flushed_function);
template <typename MarkingState>
template <typename T>
int MainMarkingVisitor<MarkingState>::VisitLeftTrimmableArray(Map map,
T object) {
if (!this->ShouldVisit(object)) return 0;
int size = T::SizeFor(object.length());
this->VisitMapPointer(object);
T::BodyDescriptor::IterateBody(map, object, size, this);
return size;
}
template <typename MarkingState>
template <typename TSlot>
void MainMarkingVisitor<MarkingState>::RecordSlot(HeapObject object, TSlot slot,
HeapObject target) {
MarkCompactCollector::RecordSlot(object, slot, target);
}
template <typename MarkingState>
void MainMarkingVisitor<MarkingState>::RecordRelocSlot(Code host,
RelocInfo* rinfo,
HeapObject target) {
MarkCompactCollector::RecordRelocSlot(host, rinfo, target);
}
template <typename MarkingState>
void MainMarkingVisitor<MarkingState>::MarkDescriptorArrayFromWriteBarrier(
HeapObject host, DescriptorArray descriptors,
int number_of_own_descriptors) {
// This is necessary because the Scavenger records slots only for the
// promoted black objects and the marking visitor of DescriptorArray skips
// the descriptors marked by the visitor.VisitDescriptors() below.
this->MarkDescriptorArrayBlack(host, descriptors);
this->VisitDescriptors(descriptors, number_of_own_descriptors);
}
template <LiveObjectIterationMode mode>

View File

@ -21,6 +21,8 @@
#include "src/heap/item-parallel-job.h"
#include "src/heap/local-allocator-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/marking-visitor-inl.h"
#include "src/heap/marking-visitor.h"
#include "src/heap/object-stats.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/read-only-heap.h"
@@ -368,11 +370,6 @@ class FullEvacuationVerifier : public EvacuationVerifier {
// MarkCompactCollectorBase, MinorMarkCompactCollector, MarkCompactCollector
// =============================================================================
using MarkCompactMarkingVisitor =
MarkingVisitor<FixedArrayVisitationMode::kRegular,
TraceRetainingPathMode::kEnabled,
MarkCompactCollector::MarkingState>;
namespace {
int NumberOfAvailableCores() {
@@ -1606,7 +1603,7 @@ bool MarkCompactCollector::ProcessEphemerons() {
// Drain current_ephemerons and push ephemerons where key and value are still
// unreachable into next_ephemerons.
while (weak_objects_.current_ephemerons.Pop(kMainThread, &ephemeron)) {
while (weak_objects_.current_ephemerons.Pop(kMainThreadTask, &ephemeron)) {
if (ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true;
}
@@ -1619,15 +1616,15 @@ bool MarkCompactCollector::ProcessEphemerons() {
// Drain discovered_ephemerons (filled in the drain MarkingWorklist-phase
// before) and push ephemerons where key and value are still unreachable into
// next_ephemerons.
while (weak_objects_.discovered_ephemerons.Pop(kMainThread, &ephemeron)) {
while (weak_objects_.discovered_ephemerons.Pop(kMainThreadTask, &ephemeron)) {
if (ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true;
}
}
// Flush local ephemerons for main task to global pool.
weak_objects_.ephemeron_hash_tables.FlushToGlobal(kMainThread);
weak_objects_.next_ephemerons.FlushToGlobal(kMainThread);
weak_objects_.ephemeron_hash_tables.FlushToGlobal(kMainThreadTask);
weak_objects_.next_ephemerons.FlushToGlobal(kMainThreadTask);
return ephemeron_marked;
}
@@ -1642,7 +1639,7 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
DCHECK(weak_objects_.current_ephemerons.IsEmpty());
weak_objects_.current_ephemerons.Swap(weak_objects_.next_ephemerons);
while (weak_objects_.current_ephemerons.Pop(kMainThread, &ephemeron)) {
while (weak_objects_.current_ephemerons.Pop(kMainThreadTask, &ephemeron)) {
ProcessEphemeron(ephemeron.key, ephemeron.value);
if (non_atomic_marking_state()->IsWhite(ephemeron.value)) {
@@ -1669,7 +1666,8 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
kTrackNewlyDiscoveredObjects>();
}
while (weak_objects_.discovered_ephemerons.Pop(kMainThread, &ephemeron)) {
while (
weak_objects_.discovered_ephemerons.Pop(kMainThreadTask, &ephemeron)) {
ProcessEphemeron(ephemeron.key, ephemeron.value);
if (non_atomic_marking_state()->IsWhite(ephemeron.value)) {
@@ -1723,7 +1721,7 @@ void MarkCompactCollector::PerformWrapperTracing() {
LocalEmbedderHeapTracer::ProcessingScope scope(
heap_->local_embedder_heap_tracer());
HeapObject object;
while (marking_worklist()->embedder()->Pop(kMainThread, &object)) {
while (marking_worklist()->embedder()->Pop(kMainThreadTask, &object)) {
scope.TracePossibleWrapper(JSObject::cast(object));
}
}
@@ -1740,7 +1738,11 @@ void MarkCompactCollector::ProcessMarkingWorklist() {
template <MarkCompactCollector::MarkingWorklistProcessingMode mode>
void MarkCompactCollector::ProcessMarkingWorklistInternal() {
HeapObject object;
MarkCompactMarkingVisitor visitor(this, marking_state());
MarkingVisitor visitor(marking_state(), marking_worklist()->shared(),
marking_worklist()->embedder(), weak_objects(), heap_,
epoch(), Heap::GetBytecodeFlushMode(),
heap_->local_embedder_heap_tracer()->InUse(),
heap_->is_current_gc_forced());
while (!(object = marking_worklist()->Pop()).is_null()) {
// Left trimming may result in grey or black filler objects on the marking
// worklist. Ignore these objects.
@@ -1760,14 +1762,11 @@ void MarkCompactCollector::ProcessMarkingWorklistInternal() {
DCHECK(object.IsHeapObject());
DCHECK(heap()->Contains(object));
DCHECK(!(marking_state()->IsWhite(object)));
marking_state()->GreyToBlack(object);
if (mode == MarkCompactCollector::MarkingWorklistProcessingMode::
kTrackNewlyDiscoveredObjects) {
AddNewlyDiscovered(object);
}
Map map = object.map();
MarkObject(object, map);
visitor.Visit(map, object);
visitor.Visit(object.map(), object);
}
}
@@ -1779,7 +1778,7 @@ bool MarkCompactCollector::ProcessEphemeron(HeapObject key, HeapObject value) {
}
} else if (marking_state()->IsWhite(value)) {
weak_objects_.next_ephemerons.Push(kMainThread, Ephemeron{key, value});
weak_objects_.next_ephemerons.Push(kMainThreadTask, Ephemeron{key, value});
}
return false;
@@ -1790,7 +1789,7 @@ void MarkCompactCollector::ProcessEphemeronMarking() {
// Incremental marking might leave ephemerons in main task's local
// buffer, flush it into global pool.
weak_objects_.next_ephemerons.FlushToGlobal(kMainThread);
weak_objects_.next_ephemerons.FlushToGlobal(kMainThreadTask);
ProcessEphemeronsUntilFixpoint();
@@ -2025,7 +2024,7 @@ void MarkCompactCollector::ClearNonLiveReferences() {
void MarkCompactCollector::MarkDependentCodeForDeoptimization() {
std::pair<HeapObject, Code> weak_object_in_code;
while (weak_objects_.weak_objects_in_code.Pop(kMainThread,
while (weak_objects_.weak_objects_in_code.Pop(kMainThreadTask,
&weak_object_in_code)) {
HeapObject object = weak_object_in_code.first;
Code code = weak_object_in_code.second;
@@ -2141,7 +2140,7 @@ void MarkCompactCollector::ClearOldBytecodeCandidates() {
DCHECK(FLAG_flush_bytecode ||
weak_objects_.bytecode_flushing_candidates.IsEmpty());
SharedFunctionInfo flushing_candidate;
while (weak_objects_.bytecode_flushing_candidates.Pop(kMainThread,
while (weak_objects_.bytecode_flushing_candidates.Pop(kMainThreadTask,
&flushing_candidate)) {
// If the BytecodeArray is dead, flush it, which will replace the field with
// an uncompiled data object.
@@ -2161,7 +2160,7 @@ void MarkCompactCollector::ClearOldBytecodeCandidates() {
void MarkCompactCollector::ClearFlushedJsFunctions() {
DCHECK(FLAG_flush_bytecode || weak_objects_.flushed_js_functions.IsEmpty());
JSFunction flushed_js_function;
while (weak_objects_.flushed_js_functions.Pop(kMainThread,
while (weak_objects_.flushed_js_functions.Pop(kMainThreadTask,
&flushed_js_function)) {
flushed_js_function.ResetIfBytecodeFlushed();
}
@@ -2169,7 +2168,7 @@ void MarkCompactCollector::ClearFlushedJsFunctions() {
void MarkCompactCollector::ClearFullMapTransitions() {
TransitionArray array;
while (weak_objects_.transition_arrays.Pop(kMainThread, &array)) {
while (weak_objects_.transition_arrays.Pop(kMainThreadTask, &array)) {
int num_transitions = array.number_of_entries();
if (num_transitions > 0) {
Map map;
@@ -2312,7 +2311,7 @@ void MarkCompactCollector::ClearWeakCollections() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS);
EphemeronHashTable table;
while (weak_objects_.ephemeron_hash_tables.Pop(kMainThread, &table)) {
while (weak_objects_.ephemeron_hash_tables.Pop(kMainThreadTask, &table)) {
for (InternalIndex i : table.IterateEntries()) {
HeapObject key = HeapObject::cast(table.KeyAt(i));
#ifdef VERIFY_HEAP
@@ -2344,7 +2343,7 @@ void MarkCompactCollector::ClearWeakReferences() {
std::pair<HeapObject, HeapObjectSlot> slot;
HeapObjectReference cleared_weak_ref =
HeapObjectReference::ClearedValue(isolate());
while (weak_objects_.weak_references.Pop(kMainThread, &slot)) {
while (weak_objects_.weak_references.Pop(kMainThreadTask, &slot)) {
HeapObject value;
// The slot could have been overwritten, so we have to treat it
// as MaybeObjectSlot.
@@ -2370,7 +2369,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
return;
}
JSWeakRef weak_ref;
while (weak_objects_.js_weak_refs.Pop(kMainThread, &weak_ref)) {
while (weak_objects_.js_weak_refs.Pop(kMainThreadTask, &weak_ref)) {
HeapObject target = HeapObject::cast(weak_ref.target());
if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
weak_ref.set_target(ReadOnlyRoots(isolate()).undefined_value());
@@ -2381,7 +2380,7 @@ }
}
}
WeakCell weak_cell;
while (weak_objects_.weak_cells.Pop(kMainThread, &weak_cell)) {
while (weak_objects_.weak_cells.Pop(kMainThreadTask, &weak_cell)) {
HeapObject target = HeapObject::cast(weak_cell.target());
if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
DCHECK(!target.IsUndefined());

View File

@@ -185,8 +185,6 @@ enum class RememberedSetUpdatingMode { ALL, OLD_TO_NEW_ONLY };
// Base class for minor and full MC collectors.
class MarkCompactCollectorBase {
public:
static const int kMainThread = 0;
virtual ~MarkCompactCollectorBase() = default;
virtual void SetUp() = 0;
@@ -288,9 +286,9 @@ class MinorNonAtomicMarkingState final
}
};
// This marking state is used when concurrent marking is running.
class IncrementalMarkingState final
: public MarkingStateBase<IncrementalMarkingState, AccessMode::ATOMIC> {
// This is used by marking visitors.
class MajorMarkingState final
: public MarkingStateBase<MajorMarkingState, AccessMode::ATOMIC> {
public:
ConcurrentBitmap<AccessMode::ATOMIC>* bitmap(const MemoryChunk* chunk) const {
DCHECK_EQ(reinterpret_cast<intptr_t>(&chunk->marking_bitmap_) -
@@ -314,6 +312,8 @@ class IncrementalMarkingState final
}
};
// This is used by Scavenger and Evacuator in TransferColor.
// Live byte increments have to be atomic.
class MajorAtomicMarkingState final
: public MarkingStateBase<MajorAtomicMarkingState, AccessMode::ATOMIC> {
public:
@@ -355,17 +355,102 @@ class MajorNonAtomicMarkingState final
}
};
// The index of the main thread task used by concurrent/parallel GC.
const int kMainThreadTask = 0;
// This visitor is used for marking on the main thread. It is cheaper than
// the concurrent marking visitor because it does not snapshot JSObjects.
template <typename MarkingState>
class MainMarkingVisitor final
: public MarkingVisitorBase<MainMarkingVisitor<MarkingState>,
MarkingState> {
public:
// This is used for revisiting objects that were black allocated.
class RevisitScope {
public:
explicit RevisitScope(MainMarkingVisitor* visitor) : visitor_(visitor) {
DCHECK(!visitor->revisiting_object_);
visitor->revisiting_object_ = true;
}
~RevisitScope() {
DCHECK(visitor_->revisiting_object_);
visitor_->revisiting_object_ = false;
}
private:
MainMarkingVisitor<MarkingState>* visitor_;
};
MainMarkingVisitor(MarkingState* marking_state,
MarkingWorklist* marking_worklist,
EmbedderTracingWorklist* embedder_worklist,
WeakObjects* weak_objects, Heap* heap,
unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode,
bool embedder_tracing_enabled, bool is_forced_gc)
: MarkingVisitorBase<MainMarkingVisitor<MarkingState>, MarkingState>(
kMainThreadTask, marking_worklist, embedder_worklist, weak_objects,
heap, mark_compact_epoch, bytecode_flush_mode,
embedder_tracing_enabled, is_forced_gc),
marking_state_(marking_state),
revisiting_object_(false) {}
// HeapVisitor override to allow revisiting of black objects.
bool ShouldVisit(HeapObject object) {
return marking_state_->GreyToBlack(object) ||
V8_UNLIKELY(revisiting_object_);
}
void MarkDescriptorArrayFromWriteBarrier(HeapObject host,
DescriptorArray descriptors,
int number_of_own_descriptors);
private:
// Functions required by MarkingVisitorBase.
template <typename T, typename TBodyDescriptor = typename T::BodyDescriptor>
int VisitJSObjectSubclass(Map map, T object);
template <typename T>
int VisitLeftTrimmableArray(Map map, T object);
template <typename TSlot>
void RecordSlot(HeapObject object, TSlot slot, HeapObject target);
void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject target);
void SynchronizePageAccess(HeapObject heap_object) {
// Nothing to do on the main thread.
}
MarkingState* marking_state() { return marking_state_; }
TraceRetainingPathMode retaining_path_mode() {
return (V8_UNLIKELY(FLAG_track_retaining_path))
? TraceRetainingPathMode::kEnabled
: TraceRetainingPathMode::kDisabled;
}
MarkingState* const marking_state_;
friend class MarkingVisitorBase<MainMarkingVisitor<MarkingState>,
MarkingState>;
bool revisiting_object_;
};
// Collector for young and old generation.
class MarkCompactCollector final : public MarkCompactCollectorBase {
public:
#ifdef V8_CONCURRENT_MARKING
using MarkingState = IncrementalMarkingState;
using MarkingState = MajorMarkingState;
#else
using MarkingState = MajorNonAtomicMarkingState;
#endif // V8_CONCURRENT_MARKING
using AtomicMarkingState = MajorAtomicMarkingState;
using NonAtomicMarkingState = MajorNonAtomicMarkingState;
using MarkingVisitor = MainMarkingVisitor<MarkingState>;
// Wrapper for the shared worklist.
class MarkingWorklist {
public:
@@ -376,18 +461,18 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
explicit MarkingWorklist(Heap* heap) {}
void Push(HeapObject object) {
bool success = shared_.Push(kMainThread, object);
bool success = shared_.Push(kMainThreadTask, object);
USE(success);
DCHECK(success);
}
HeapObject Pop() {
HeapObject result;
if (shared_.Pop(kMainThread, &result)) return result;
if (shared_.Pop(kMainThreadTask, &result)) return result;
#ifdef V8_CONCURRENT_MARKING
// The expectation is that this work list is empty almost all the time
// and we can thus avoid the emptiness checks by putting it last.
if (on_hold_.Pop(kMainThread, &result)) return result;
if (on_hold_.Pop(kMainThreadTask, &result)) return result;
#endif
return HeapObject();
}
@@ -399,19 +484,19 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
}
bool IsEmpty() {
return shared_.IsLocalEmpty(kMainThread) &&
on_hold_.IsLocalEmpty(kMainThread) &&
return shared_.IsLocalEmpty(kMainThreadTask) &&
on_hold_.IsLocalEmpty(kMainThreadTask) &&
shared_.IsGlobalPoolEmpty() && on_hold_.IsGlobalPoolEmpty();
}
bool IsEmbedderEmpty() {
return embedder_.IsLocalEmpty(kMainThread) &&
return embedder_.IsLocalEmpty(kMainThreadTask) &&
embedder_.IsGlobalPoolEmpty();
}
int Size() {
return static_cast<int>(shared_.LocalSize(kMainThread) +
on_hold_.LocalSize(kMainThread));
return static_cast<int>(shared_.LocalSize(kMainThreadTask) +
on_hold_.LocalSize(kMainThreadTask));
}
// Calls the specified callback on each element of the deques and replaces
@@ -426,8 +511,9 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
}
void ShareWorkIfGlobalPoolIsEmpty() {
if (!shared_.IsLocalEmpty(kMainThread) && shared_.IsGlobalPoolEmpty()) {
shared_.FlushToGlobal(kMainThread);
if (!shared_.IsLocalEmpty(kMainThreadTask) &&
shared_.IsGlobalPoolEmpty()) {
shared_.FlushToGlobal(kMainThreadTask);
}
}
@@ -541,35 +627,6 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
inline void AddTransitionArray(TransitionArray array);
void AddEphemeronHashTable(EphemeronHashTable table) {
weak_objects_.ephemeron_hash_tables.Push(kMainThread, table);
}
void AddEphemeron(HeapObject key, HeapObject value) {
weak_objects_.discovered_ephemerons.Push(kMainThread,
Ephemeron{key, value});
}
void AddWeakReference(HeapObject host, HeapObjectSlot slot) {
weak_objects_.weak_references.Push(kMainThread, std::make_pair(host, slot));
}
void AddWeakObjectInCode(HeapObject object, Code code) {
weak_objects_.weak_objects_in_code.Push(kMainThread,
std::make_pair(object, code));
}
void AddWeakRef(JSWeakRef weak_ref) {
weak_objects_.js_weak_refs.Push(kMainThread, weak_ref);
}
void AddWeakCell(WeakCell weak_cell) {
weak_objects_.weak_cells.Push(kMainThread, weak_cell);
}
inline void AddBytecodeFlushingCandidate(SharedFunctionInfo flush_candidate);
inline void AddFlushedJSFunction(JSFunction flushed_function);
void AddNewlyDiscovered(HeapObject object) {
if (ephemeron_marking_.newly_discovered_overflowed) return;
@@ -813,97 +870,6 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
friend class RecordMigratedSlotVisitor;
};
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
class MarkingVisitor final
: public HeapVisitor<
int,
MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>> {
public:
using Parent = HeapVisitor<
int, MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>>;
V8_INLINE MarkingVisitor(MarkCompactCollector* collector,
MarkingState* marking_state);
V8_INLINE bool ShouldVisitMapPointer() { return false; }
V8_INLINE int VisitBytecodeArray(Map map, BytecodeArray object);
V8_INLINE int VisitDescriptorArray(Map map, DescriptorArray object);
V8_INLINE int VisitEphemeronHashTable(Map map, EphemeronHashTable object);
V8_INLINE int VisitFixedArray(Map map, FixedArray object);
V8_INLINE int VisitJSApiObject(Map map, JSObject object);
V8_INLINE int VisitJSArrayBuffer(Map map, JSArrayBuffer object);
V8_INLINE int VisitJSFunction(Map map, JSFunction object);
V8_INLINE int VisitJSDataView(Map map, JSDataView object);
V8_INLINE int VisitJSTypedArray(Map map, JSTypedArray object);
V8_INLINE int VisitMap(Map map, Map object);
V8_INLINE int VisitSharedFunctionInfo(Map map, SharedFunctionInfo object);
V8_INLINE int VisitTransitionArray(Map map, TransitionArray object);
V8_INLINE int VisitWeakCell(Map map, WeakCell object);
V8_INLINE int VisitJSWeakRef(Map map, JSWeakRef object);
// ObjectVisitor implementation.
V8_INLINE void VisitPointer(HeapObject host, ObjectSlot p) final {
VisitPointerImpl(host, p);
}
V8_INLINE void VisitPointer(HeapObject host, MaybeObjectSlot p) final {
VisitPointerImpl(host, p);
}
V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) final {
VisitPointersImpl(host, start, end);
}
V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) final {
VisitPointersImpl(host, start, end);
}
V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final;
V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final;
// Weak list pointers should be ignored during marking. The lists are
// reconstructed after GC.
void VisitCustomWeakPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) final {}
V8_INLINE void VisitDescriptors(DescriptorArray descriptors,
int number_of_own_descriptors);
// Marks the descriptor array black without pushing it on the marking work
// list and visits its header.
V8_INLINE void MarkDescriptorArrayBlack(HeapObject host,
DescriptorArray descriptors);
private:
// Granularity in which FixedArrays are scanned if |fixed_array_mode|
// is true.
static const int kProgressBarScanningChunk = kMaxRegularHeapObjectSize;
template <typename TSlot>
V8_INLINE void VisitPointerImpl(HeapObject host, TSlot p);
template <typename TSlot>
V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end);
V8_INLINE int VisitFixedArrayIncremental(Map map, FixedArray object);
template <typename T>
V8_INLINE int VisitEmbedderTracingSubclass(Map map, T object);
// Marks the object grey and pushes it on the marking work list.
V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
MarkingState* marking_state() { return marking_state_; }
MarkCompactCollector::MarkingWorklist* marking_worklist() const {
return collector_->marking_worklist();
}
Heap* const heap_;
MarkCompactCollector* const collector_;
MarkingState* const marking_state_;
const unsigned mark_compact_epoch_;
};
class EvacuationScope {
public:
explicit EvacuationScope(MarkCompactCollector* collector)

View File

@@ -23,6 +23,10 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::MarkObject(
concrete_visitor()->SynchronizePageAccess(object);
if (concrete_visitor()->marking_state()->WhiteToGrey(object)) {
marking_worklist_->Push(task_id_, object);
if (V8_UNLIKELY(concrete_visitor()->retaining_path_mode() ==
TraceRetainingPathMode::kEnabled)) {
heap_->AddRetainer(host, object);
}
}
}
@@ -110,7 +114,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitCodeTarget(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitBytecodeArray(
Map map, BytecodeArray object) {
if (!ShouldVisit(object)) return 0;
if (!concrete_visitor()->ShouldVisit(object)) return 0;
int size = BytecodeArray::BodyDescriptor::SizeOf(map, object);
this->VisitMapPointer(object);
BytecodeArray::BodyDescriptor::IterateBody(map, object, size, this);
@@ -135,7 +139,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSFunction(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitSharedFunctionInfo(
Map map, SharedFunctionInfo shared_info) {
if (!ShouldVisit(shared_info)) return 0;
if (!concrete_visitor()->ShouldVisit(shared_info)) return 0;
int size = SharedFunctionInfo::BodyDescriptor::SizeOf(map, shared_info);
this->VisitMapPointer(shared_info);
@@ -252,7 +256,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSTypedArray(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
Map map, EphemeronHashTable table) {
if (!ShouldVisit(table)) return 0;
if (!concrete_visitor()->ShouldVisit(table)) return 0;
weak_objects_->ephemeron_hash_tables.Push(task_id_, table);
for (InternalIndex i : table.IterateEntries()) {
@@ -313,7 +317,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
Map map, WeakCell weak_cell) {
if (!ShouldVisit(weak_cell)) return 0;
if (!concrete_visitor()->ShouldVisit(weak_cell)) return 0;
int size = WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
this->VisitMapPointer(weak_cell);
@@ -367,7 +371,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitDescriptors(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitDescriptorArray(
Map map, DescriptorArray array) {
if (!ShouldVisit(array)) return 0;
if (!concrete_visitor()->ShouldVisit(array)) return 0;
this->VisitMapPointer(array);
int size = DescriptorArray::BodyDescriptor::SizeOf(map, array);
VisitPointers(array, array.GetFirstPointerSlot(), array.GetDescriptorSlot(0));
@@ -378,7 +382,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitDescriptorArray(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitMap(Map meta_map,
Map map) {
if (!ShouldVisit(map)) return 0;
if (!concrete_visitor()->ShouldVisit(map)) return 0;
int size = Map::BodyDescriptor::SizeOf(meta_map, map);
if (map.CanTransition()) {
// Maps that can transition share their descriptor arrays and require
@@ -413,7 +417,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitMap(Map meta_map,
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitTransitionArray(
Map map, TransitionArray array) {
if (!ShouldVisit(array)) return 0;
if (!concrete_visitor()->ShouldVisit(array)) return 0;
this->VisitMapPointer(array);
int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);

View File

@@ -5,6 +5,7 @@
#ifndef V8_HEAP_MARKING_VISITOR_H_
#define V8_HEAP_MARKING_VISITOR_H_
#include "src/common/globals.h"
#include "src/heap/marking.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/spaces.h"
@@ -136,33 +137,35 @@ class MarkingStateBase {
//
// Derived classes are expected to provide the following:
// - ConcreteVisitor::marking_state method,
// - ConcreteVisitor::VisitJSObjectSubclass method,
// - ConcreteVisitor::VisitLeftTrimmableArray method,
// - ConcreteVisitor::retaining_path_mode method,
// - ConcreteVisitor::RecordSlot method,
// - ConcreteVisitor::RecordRelocSlot method,
// - ConcreteVisitor::SynchronizePageAccess method.
// - ConcreteVisitor::SynchronizePageAccess method,
// - ConcreteVisitor::VisitJSObjectSubclass method,
// - ConcreteVisitor::VisitLeftTrimmableArray method.
// These methods capture the difference between the concurrent and main thread
// marking visitors. For example, the concurrent visitor has to use the
// snapshotting protocol to visit JSObject and left-trimmable FixedArrays.
template <typename ConcreteVisitor, typename MarkingState>
class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
public:
MarkingVisitorBase(int task_id, MarkingWorklist* marking_worklist,
EmbedderTracingWorklist* embedder_worklist,
WeakObjects* weak_objects, unsigned mark_compact_epoch,
WeakObjects* weak_objects, Heap* heap,
unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode,
bool is_embedder_tracing_enabled, bool is_forced_gc)
: marking_worklist_(marking_worklist),
embedder_worklist_(embedder_worklist),
weak_objects_(weak_objects),
heap_(heap),
task_id_(task_id),
mark_compact_epoch_(mark_compact_epoch),
bytecode_flush_mode_(bytecode_flush_mode),
is_embedder_tracing_enabled_(is_embedder_tracing_enabled),
is_forced_gc_(is_forced_gc) {}
// HeapVisitor overrides for objects that require custom visitation.
V8_INLINE bool ShouldVisit(HeapObject object) {
return concrete_visitor()->marking_state()->GreyToBlack(object);
}
V8_INLINE int VisitBytecodeArray(Map map, BytecodeArray object);
V8_INLINE int VisitDescriptorArray(Map map, DescriptorArray object);
V8_INLINE int VisitEphemeronHashTable(Map map, EphemeronHashTable object);
@@ -235,6 +238,7 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
MarkingWorklist* const marking_worklist_;
EmbedderTracingWorklist* const embedder_worklist_;
WeakObjects* const weak_objects_;
Heap* const heap_;
const int task_id_;
const unsigned mark_compact_epoch_;
const BytecodeFlushMode bytecode_flush_mode_;

View File

@@ -968,7 +968,7 @@ class MemoryChunk : public BasicMemoryChunk {
void InitializeReservedMemory() { reservation_.Reset(); }
friend class ConcurrentMarkingState;
friend class IncrementalMarkingState;
friend class MajorMarkingState;
friend class MajorAtomicMarkingState;
friend class MajorNonAtomicMarkingState;
friend class MemoryAllocator;