diff --git a/src/heap/concurrent-marking.cc b/src/heap/concurrent-marking.cc index 5a2cbff86f..03aefd67b9 100644 --- a/src/heap/concurrent-marking.cc +++ b/src/heap/concurrent-marking.cc @@ -400,8 +400,8 @@ ConcurrentMarking::ConcurrentMarking(Heap* heap, void ConcurrentMarking::Run(JobDelegate* delegate, unsigned mark_compact_epoch, bool is_forced_gc) { - TRACE_GC1(heap_->tracer(), GCTracer::Scope::MC_BACKGROUND_MARKING, - ThreadKind::kBackground); + TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_BACKGROUND_MARKING, + ThreadKind::kBackground); size_t kBytesUntilInterruptCheck = 64 * KB; int kObjectsUntilInterrupCheck = 1000; uint8_t task_id = delegate->GetTaskId() + 1; diff --git a/src/heap/gc-tracer.cc b/src/heap/gc-tracer.cc index de1375f109..170645cbf8 100644 --- a/src/heap/gc-tracer.cc +++ b/src/heap/gc-tracer.cc @@ -48,6 +48,14 @@ double GCTracer::MonotonicallyIncreasingTimeInMs() { } } +CollectionEpoch GCTracer::CurrentEpoch(Scope::ScopeId scope_id) { + if (Scope::NeedsYoungEpoch(scope_id)) { + return heap_->epoch_young(); + } else { + return heap_->epoch_full(); + } +} + GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind) : tracer_(tracer), scope_(scope), thread_kind_(thread_kind) { start_time_ = tracer_->MonotonicallyIncreasingTimeInMs(); @@ -95,6 +103,19 @@ const char* GCTracer::Scope::Name(ScopeId id) { return nullptr; } +bool GCTracer::Scope::NeedsYoungEpoch(ScopeId id) { +#define CASE(scope) \ + case Scope::scope: \ + return true; + switch (id) { + TRACER_YOUNG_EPOCH_SCOPES(CASE) + default: + return false; + } +#undef CASE + UNREACHABLE(); +} + GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason, const char* collector_reason) : type(type), diff --git a/src/heap/gc-tracer.h b/src/heap/gc-tracer.h index 5ee77c5363..5cad6ef50f 100644 --- a/src/heap/gc-tracer.h +++ b/src/heap/gc-tracer.h @@ -41,6 +41,13 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects }; GCTracer::Scope gc_tracer_scope(tracer, 
gc_tracer_scope_id, thread_kind); \ TRACE_EVENT0(TRACE_GC_CATEGORIES, GCTracer::Scope::Name(gc_tracer_scope_id)) +#define TRACE_GC_EPOCH(tracer, scope_id, thread_kind) \ + GCTracer::Scope::ScopeId gc_tracer_scope_id(scope_id); \ + GCTracer::Scope gc_tracer_scope(tracer, gc_tracer_scope_id, thread_kind); \ + CollectionEpoch gc_tracer_epoch = tracer->CurrentEpoch(scope_id); \ + TRACE_EVENT1(TRACE_GC_CATEGORIES, GCTracer::Scope::Name(gc_tracer_scope_id), \ + "epoch", gc_tracer_epoch) + // GCTracer collects and prints ONE line after each garbage collector // invocation IFF --trace_gc is used. class V8_EXPORT_PRIVATE GCTracer { @@ -99,6 +106,7 @@ class V8_EXPORT_PRIVATE GCTracer { Scope(const Scope&) = delete; Scope& operator=(const Scope&) = delete; static const char* Name(ScopeId id); + static bool NeedsYoungEpoch(ScopeId id); private: GCTracer* tracer_; @@ -337,6 +345,8 @@ class V8_EXPORT_PRIVATE GCTracer { WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats(); + CollectionEpoch CurrentEpoch(Scope::ScopeId id); + private: FRIEND_TEST(GCTracer, AverageSpeed); FRIEND_TEST(GCTracerTest, AllocationThroughput); diff --git a/src/heap/heap.cc b/src/heap/heap.cc index 8c574998ee..9e019c9a6d 100644 --- a/src/heap/heap.cc +++ b/src/heap/heap.cc @@ -104,6 +104,14 @@ namespace v8 { namespace internal { +namespace { +std::atomic<CollectionEpoch> global_epoch{0}; + +CollectionEpoch next_epoch() { + return global_epoch.fetch_add(1, std::memory_order_relaxed) + 1; +} +} // namespace + #ifdef V8_ENABLE_THIRD_PARTY_HEAP Isolate* Heap::GetIsolateFromWritableObject(HeapObject object) { return reinterpret_cast<Isolate*>( @@ -1733,6 +1741,10 @@ void Heap::StartIncrementalMarking(int gc_flags, GarbageCollectionReason gc_reason, GCCallbackFlags gc_callback_flags) { DCHECK(incremental_marking()->IsStopped()); + + // The next GC cycle begins here. 
+ UpdateEpochFull(); + SafepointScope safepoint(this); set_current_gc_flags(gc_flags); current_gc_callback_flags_ = gc_callback_flags; @@ -1946,23 +1958,43 @@ void Heap::UpdateSurvivalStatistics(int start_new_space_size) { tracer()->AddSurvivalRatio(survival_rate); } +namespace { +GCTracer::Scope::ScopeId CollectorScopeId(GarbageCollector collector) { + switch (collector) { + case MARK_COMPACTOR: + return GCTracer::Scope::ScopeId::MARK_COMPACTOR; + case MINOR_MARK_COMPACTOR: + return GCTracer::Scope::ScopeId::MINOR_MARK_COMPACTOR; + case SCAVENGER: + return GCTracer::Scope::ScopeId::SCAVENGER; + } + UNREACHABLE(); +} +} // namespace + size_t Heap::PerformGarbageCollection( GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) { DisallowJavascriptExecution no_js(isolate()); base::Optional<SafepointScope> optional_safepoint_scope; + UpdateCurrentEpoch(collector); + // Stop time-to-collection timer before safepoint - we do not want to measure // time for safepointing. collection_barrier_->StopTimeToCollectionTimer(); + TRACE_GC_EPOCH(tracer(), CollectorScopeId(collector), ThreadKind::kMain); + if (FLAG_local_heaps) { optional_safepoint_scope.emplace(this); } + #ifdef VERIFY_HEAP if (FLAG_verify_heap) { Verify(); } #endif + tracer()->StartInSafepoint(); GarbageCollectionPrologueInSafepoint(); @@ -2042,6 +2074,16 @@ size_t Heap::PerformGarbageCollection( return freed_global_handles; } +void Heap::UpdateCurrentEpoch(GarbageCollector collector) { + if (IsYoungGenerationCollector(collector)) { + epoch_young_ = next_epoch(); + } else if (incremental_marking()->IsStopped()) { + epoch_full_ = next_epoch(); + } +} + +void Heap::UpdateEpochFull() { epoch_full_ = next_epoch(); } + void Heap::RecomputeLimits(GarbageCollector collector) { if (!((collector == MARK_COMPACTOR) || (HasLowYoungGenerationAllocationRate() && @@ -3419,8 +3461,9 @@ void Heap::FinalizeIncrementalMarkingIncrementally( HistogramTimerScope incremental_marking_scope( 
isolate()->counters()->gc_incremental_marking_finalize()); - TRACE_EVENT0("v8", "V8.GCIncrementalMarkingFinalize"); - TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE); + TRACE_EVENT1("v8", "V8.GCIncrementalMarkingFinalize", "epoch", epoch_full()); + TRACE_GC_EPOCH(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE, + ThreadKind::kMain); SafepointScope safepoint(this); InvokeIncrementalMarkingPrologueCallbacks(); diff --git a/src/heap/heap.h b/src/heap/heap.h index 5f02dccd1d..67e7fe7349 100644 --- a/src/heap/heap.h +++ b/src/heap/heap.h @@ -249,6 +249,8 @@ using EphemeronRememberedSet = std::unordered_map<EphemeronHashTable, std::unordered_set<int>, Object::Hasher>; +using CollectionEpoch = uint32_t; + class Heap { public: // Stores ephemeron entries where the EphemeronHashTable is in old-space, @@ -511,6 +513,9 @@ class Heap { void NotifyOldGenerationExpansion(AllocationSpace space, MemoryChunk* chunk); + void UpdateCurrentEpoch(GarbageCollector collector); + void UpdateEpochFull(); + inline Address* NewSpaceAllocationTopAddress(); inline Address* NewSpaceAllocationLimitAddress(); inline Address* OldSpaceAllocationTopAddress(); @@ -1558,6 +1563,9 @@ class Heap { static Isolate* GetIsolateFromWritableObject(HeapObject object); + CollectionEpoch epoch_young() { return epoch_young_; } + CollectionEpoch epoch_full() { return epoch_full_; } + private: using ExternalStringTableUpdaterCallback = String (*)(Heap* heap, FullObjectSlot pointer); @@ -2334,6 +2342,11 @@ class Heap { std::unique_ptr<third_party_heap::Heap> tp_heap_; + // We need two epochs, since there can be scavenges during incremental + // marking. + CollectionEpoch epoch_young_ = 0; + CollectionEpoch epoch_full_ = 0; + // Classes in "heap" can be friends. 
friend class AlwaysAllocateScope; friend class ArrayBufferCollector; diff --git a/src/heap/incremental-marking.cc b/src/heap/incremental-marking.cc index 2904d7cb8b..4dfaeb2b08 100644 --- a/src/heap/incremental-marking.cc +++ b/src/heap/incremental-marking.cc @@ -178,8 +178,10 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) { static_cast(gc_reason)); HistogramTimerScope incremental_marking_scope( counters->gc_incremental_marking_start()); - TRACE_EVENT0("v8", "V8.GCIncrementalMarkingStart"); - TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START); + TRACE_EVENT1("v8", "V8.GCIncrementalMarkingStart", "epoch", + heap_->epoch_full()); + TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START, + ThreadKind::kMain); heap_->tracer()->NotifyIncrementalMarkingStart(); start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs(); @@ -779,8 +781,9 @@ StepResult IncrementalMarking::AdvanceWithDeadline( StepOrigin step_origin) { HistogramTimerScope incremental_marking_scope( heap_->isolate()->counters()->gc_incremental_marking()); - TRACE_EVENT0("v8", "V8.GCIncrementalMarking"); - TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL); + TRACE_EVENT1("v8", "V8.GCIncrementalMarking", "epoch", heap_->epoch_full()); + TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL, + ThreadKind::kMain); DCHECK(!IsStopped()); ScheduleBytesToMarkBasedOnTime(heap()->MonotonicallyIncreasingTimeInMs()); diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc index d193f4b14e..4f039d769d 100644 --- a/src/heap/mark-compact.cc +++ b/src/heap/mark-compact.cc @@ -3114,8 +3114,8 @@ class PageEvacuationJob : public v8::JobTask { TRACE_GC(tracer_, evacuator->GetTracingScope()); ProcessItems(delegate, evacuator); } else { - TRACE_GC1(tracer_, evacuator->GetBackgroundTracingScope(), - ThreadKind::kBackground); + TRACE_GC_EPOCH(tracer_, evacuator->GetBackgroundTracingScope(), + ThreadKind::kBackground); ProcessItems(delegate, evacuator); 
} } @@ -3489,7 +3489,7 @@ class PointersUpdatingJob : public v8::JobTask { TRACE_GC(tracer_, scope_); UpdatePointers(delegate); } else { - TRACE_GC1(tracer_, background_scope_, ThreadKind::kBackground); + TRACE_GC_EPOCH(tracer_, background_scope_, ThreadKind::kBackground); UpdatePointers(delegate); } } @@ -4865,9 +4865,9 @@ class YoungGenerationMarkingJob : public v8::JobTask { GCTracer::Scope::MINOR_MC_MARK_PARALLEL); ProcessItems(delegate); } else { - TRACE_GC1(collector_->heap()->tracer(), - GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING, - ThreadKind::kBackground); + TRACE_GC_EPOCH(collector_->heap()->tracer(), + GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING, + ThreadKind::kBackground); ProcessItems(delegate); } } diff --git a/src/heap/scavenger.cc b/src/heap/scavenger.cc index e35e6cc40c..bf0d284d9f 100644 --- a/src/heap/scavenger.cc +++ b/src/heap/scavenger.cc @@ -182,9 +182,9 @@ void ScavengerCollector::JobTask::Run(JobDelegate* delegate) { GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL); ProcessItems(delegate, scavenger); } else { - TRACE_GC1(outer_->heap_->tracer(), - GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL, - ThreadKind::kBackground); + TRACE_GC_EPOCH(outer_->heap_->tracer(), + GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL, + ThreadKind::kBackground); ProcessItems(delegate, scavenger); } } diff --git a/src/init/heap-symbols.h b/src/init/heap-symbols.h index 7384c7f696..28cfbf119f 100644 --- a/src/init/heap-symbols.h +++ b/src/init/heap-symbols.h @@ -426,6 +426,7 @@ F(HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES) \ F(HEAP_PROLOGUE) \ F(HEAP_PROLOGUE_SAFEPOINT) \ + F(MARK_COMPACTOR) \ TOP_MC_SCOPES(F) \ F(MC_CLEAR_DEPENDENT_CODE) \ F(MC_CLEAR_FLUSHABLE_BYTECODE) \ @@ -467,6 +468,7 @@ F(MC_SWEEP_CODE) \ F(MC_SWEEP_MAP) \ F(MC_SWEEP_OLD) \ + F(MINOR_MARK_COMPACTOR) \ F(MINOR_MC) \ F(MINOR_MC_CLEAR) \ F(MINOR_MC_CLEAR_STRING_TABLE) \ @@ -492,6 +494,7 @@ F(MINOR_MC_MARKING_DEQUE) \ F(MINOR_MC_RESET_LIVENESS) \ F(MINOR_MC_SWEEPING) \ + F(SCAVENGER) \ 
F(SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS) \ F(SCAVENGER_FAST_PROMOTE) \ F(SCAVENGER_FREE_REMEMBERED_SET) \ @@ -521,4 +524,10 @@ F(MINOR_MC_BACKGROUND_MARKING) \ F(SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL) +#define TRACER_YOUNG_EPOCH_SCOPES(F) \ + F(BACKGROUND_YOUNG_ARRAY_BUFFER_SWEEP) \ + F(MINOR_MARK_COMPACTOR) \ + F(SCAVENGER) \ + F(SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL) + #endif // V8_INIT_HEAP_SYMBOLS_H_