Reland "[heap] Add epoch to GC tracing events"
This is a reland of be52501d52
Fix data race by not emitting the epoch for sweeper background jobs
at the moment.
Original change's description:
> [heap] Add epoch to GC tracing events
>
> This CL adds the TRACE_GC_EPOCH macro, which adds the epoch as an
> attribute to the trace event. Use TRACE_GC_EPOCH for top-level events;
> nested events can get the information from their parent.
>
> V8's GC needs separate epochs for young and full collections, since
> scavenges can also occur during incremental marking. The epoch is also
> process-wide, so different isolates do not reuse the same id.
>
> Change-Id: I8889bccce51e008374b4796445a50062bd87a45d
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2565247
> Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#71521}
Change-Id: Ib8f4bfdc01c459955eb6db63bb6e24a8aa068f09
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2567702
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71567}
parent 56362efc7f
commit 3238162da7
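
The description above boils down to a process-wide, monotonically increasing counter that feeds two per-heap epoch fields, one for young-generation collections and one for full collections. The real implementation is in the Heap hunks below; the following is only a minimal, self-contained sketch of that shape, with illustrative names that are not V8's:

    #include <atomic>
    #include <cstdint>

    using CollectionEpoch = uint32_t;

    // One counter per process, so ids are never reused across isolates.
    std::atomic<CollectionEpoch> global_epoch{0};

    CollectionEpoch NextEpoch() {
      return global_epoch.fetch_add(1, std::memory_order_relaxed) + 1;
    }

    // Two epochs per heap, because a scavenge can run while an incremental
    // full-GC cycle is still in progress.
    struct HeapEpochs {
      CollectionEpoch young = 0;
      CollectionEpoch full = 0;
      void OnYoungCollection() { young = NextEpoch(); }
      void OnFullCycleStart() { full = NextEpoch(); }
    };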
@@ -400,8 +400,8 @@ ConcurrentMarking::ConcurrentMarking(Heap* heap,

 void ConcurrentMarking::Run(JobDelegate* delegate, unsigned mark_compact_epoch,
                             bool is_forced_gc) {
-  TRACE_GC1(heap_->tracer(), GCTracer::Scope::MC_BACKGROUND_MARKING,
-            ThreadKind::kBackground);
+  TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_BACKGROUND_MARKING,
+                 ThreadKind::kBackground);
   size_t kBytesUntilInterruptCheck = 64 * KB;
   int kObjectsUntilInterrupCheck = 1000;
   uint8_t task_id = delegate->GetTaskId() + 1;
@@ -48,6 +48,14 @@ double GCTracer::MonotonicallyIncreasingTimeInMs() {
   }
 }

+CollectionEpoch GCTracer::CurrentEpoch(Scope::ScopeId scope_id) {
+  if (Scope::NeedsYoungEpoch(scope_id)) {
+    return heap_->epoch_young();
+  } else {
+    return heap_->epoch_full();
+  }
+}
+
 GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind)
     : tracer_(tracer), scope_(scope), thread_kind_(thread_kind) {
   start_time_ = tracer_->MonotonicallyIncreasingTimeInMs();
@@ -95,6 +103,19 @@ const char* GCTracer::Scope::Name(ScopeId id) {
   return nullptr;
 }

+bool GCTracer::Scope::NeedsYoungEpoch(ScopeId id) {
+#define CASE(scope)  \
+  case Scope::scope: \
+    return true;
+  switch (id) {
+    TRACER_YOUNG_EPOCH_SCOPES(CASE)
+    default:
+      return false;
+  }
+#undef CASE
+  UNREACHABLE();
+}
+
 GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
                        const char* collector_reason)
     : type(type),
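
For readers who have not seen the X-macro pattern used in NeedsYoungEpoch above: each entry of the TRACER_YOUNG_EPOCH_SCOPES list (defined at the end of this diff) is passed through CASE, so the switch expands to roughly the hand-written equivalent below (a sketch of the preprocessor output, with the trailing UNREACHABLE() omitted):

    bool GCTracer::Scope::NeedsYoungEpoch(ScopeId id) {
      switch (id) {
        case Scope::BACKGROUND_YOUNG_ARRAY_BUFFER_SWEEP:
          return true;
        case Scope::MINOR_MARK_COMPACTOR:
          return true;
        case Scope::SCAVENGER:
          return true;
        case Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL:
          return true;
        default:
          return false;
      }
    }

Every scope in that list is tagged with the young epoch; everything else gets the full epoch via CurrentEpoch.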
@@ -41,6 +41,13 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
   GCTracer::Scope gc_tracer_scope(tracer, gc_tracer_scope_id, thread_kind);    \
   TRACE_EVENT0(TRACE_GC_CATEGORIES, GCTracer::Scope::Name(gc_tracer_scope_id))

+#define TRACE_GC_EPOCH(tracer, scope_id, thread_kind)                          \
+  GCTracer::Scope::ScopeId gc_tracer_scope_id(scope_id);                       \
+  GCTracer::Scope gc_tracer_scope(tracer, gc_tracer_scope_id, thread_kind);    \
+  CollectionEpoch epoch = tracer->CurrentEpoch(scope_id);                      \
+  TRACE_EVENT1(TRACE_GC_CATEGORIES, GCTracer::Scope::Name(gc_tracer_scope_id), \
+               "epoch", epoch)
+
 // GCTracer collects and prints ONE line after each garbage collector
 // invocation IFF --trace_gc is used.
 class V8_EXPORT_PRIVATE GCTracer {
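
To make the macro above concrete: at a call site such as the MC_INCREMENTAL_FINALIZE one added in Heap::FinalizeIncrementalMarkingIncrementally further down, substituting the arguments into the macro body yields roughly the statements below (a hand-expanded sketch; the real expansion also goes through TRACE_EVENT1's own internals):

    // TRACE_GC_EPOCH(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE,
    //                ThreadKind::kMain);
    // expands to approximately:
    GCTracer::Scope::ScopeId gc_tracer_scope_id(
        GCTracer::Scope::MC_INCREMENTAL_FINALIZE);
    GCTracer::Scope gc_tracer_scope(tracer(), gc_tracer_scope_id,
                                    ThreadKind::kMain);
    CollectionEpoch epoch =
        tracer()->CurrentEpoch(GCTracer::Scope::MC_INCREMENTAL_FINALIZE);
    TRACE_EVENT1(TRACE_GC_CATEGORIES, GCTracer::Scope::Name(gc_tracer_scope_id),
                 "epoch", epoch);

Nested scopes keep using plain TRACE_GC/TRACE_GC1 and carry no epoch of their own; as the description above says, they inherit it from the enclosing top-level event in the trace.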
@@ -99,6 +106,7 @@ class V8_EXPORT_PRIVATE GCTracer {
     Scope(const Scope&) = delete;
     Scope& operator=(const Scope&) = delete;
     static const char* Name(ScopeId id);
+    static bool NeedsYoungEpoch(ScopeId id);

    private:
     GCTracer* tracer_;
@@ -337,6 +345,8 @@ class V8_EXPORT_PRIVATE GCTracer {

   WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats();

+  CollectionEpoch CurrentEpoch(Scope::ScopeId id);
+
  private:
   FRIEND_TEST(GCTracer, AverageSpeed);
   FRIEND_TEST(GCTracerTest, AllocationThroughput);
@@ -104,6 +104,14 @@
 namespace v8 {
 namespace internal {

+namespace {
+std::atomic<CollectionEpoch> global_epoch{0};
+
+CollectionEpoch next_epoch() {
+  return global_epoch.fetch_add(1, std::memory_order_relaxed) + 1;
+}
+}  // namespace
+
 #ifdef V8_ENABLE_THIRD_PARTY_HEAP
 Isolate* Heap::GetIsolateFromWritableObject(HeapObject object) {
   return reinterpret_cast<Isolate*>(
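
A note on the memory order above: relaxed is sufficient because the only property needed from next_epoch() is that concurrent callers obtain distinct, increasing values; no other memory is published through it. A small standalone program (illustrative, not V8 code) that checks exactly that property:

    #include <atomic>
    #include <cstdint>
    #include <cstdio>
    #include <set>
    #include <thread>
    #include <vector>

    std::atomic<uint32_t> counter{0};

    uint32_t NextId() {
      // fetch_add returns the previous value, so +1 yields ids starting at 1.
      return counter.fetch_add(1, std::memory_order_relaxed) + 1;
    }

    int main() {
      std::vector<std::thread> threads;
      std::vector<std::vector<uint32_t>> ids(4);
      for (int t = 0; t < 4; ++t) {
        threads.emplace_back([&ids, t] {
          for (int i = 0; i < 1000; ++i) ids[t].push_back(NextId());
        });
      }
      for (auto& th : threads) th.join();

      std::set<uint32_t> unique;
      for (const auto& v : ids) unique.insert(v.begin(), v.end());
      // All 4000 ids are distinct even though no stronger ordering was used.
      std::printf("distinct ids: %zu\n", unique.size());
    }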
@@ -1733,6 +1741,10 @@ void Heap::StartIncrementalMarking(int gc_flags,
                                    GarbageCollectionReason gc_reason,
                                    GCCallbackFlags gc_callback_flags) {
   DCHECK(incremental_marking()->IsStopped());
+
+  // The next GC cycle begins here.
+  UpdateEpochFull();
+
   SafepointScope safepoint(this);
   set_current_gc_flags(gc_flags);
   current_gc_callback_flags_ = gc_callback_flags;
@@ -1946,23 +1958,43 @@ void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
   tracer()->AddSurvivalRatio(survival_rate);
 }

+namespace {
+GCTracer::Scope::ScopeId CollectorScopeId(GarbageCollector collector) {
+  switch (collector) {
+    case MARK_COMPACTOR:
+      return GCTracer::Scope::ScopeId::MARK_COMPACTOR;
+    case MINOR_MARK_COMPACTOR:
+      return GCTracer::Scope::ScopeId::MINOR_MARK_COMPACTOR;
+    case SCAVENGER:
+      return GCTracer::Scope::ScopeId::SCAVENGER;
+  }
+  UNREACHABLE();
+}
+}  // namespace
+
 size_t Heap::PerformGarbageCollection(
     GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
   DisallowJavascriptExecution no_js(isolate());
   base::Optional<SafepointScope> optional_safepoint_scope;

+  UpdateCurrentEpoch(collector);
+
   // Stop time-to-collection timer before safepoint - we do not want to measure
   // time for safepointing.
   collection_barrier_->StopTimeToCollectionTimer();

+  TRACE_GC_EPOCH(tracer(), CollectorScopeId(collector), ThreadKind::kMain);
+
   if (FLAG_local_heaps) {
     optional_safepoint_scope.emplace(this);
   }

 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     Verify();
   }
 #endif

   tracer()->StartInSafepoint();

   GarbageCollectionPrologueInSafepoint();
@@ -2042,6 +2074,16 @@ size_t Heap::PerformGarbageCollection(
   return freed_global_handles;
 }

+void Heap::UpdateCurrentEpoch(GarbageCollector collector) {
+  if (IsYoungGenerationCollector(collector)) {
+    epoch_young_ = next_epoch();
+  } else if (incremental_marking()->IsStopped()) {
+    epoch_full_ = next_epoch();
+  }
+}
+
+void Heap::UpdateEpochFull() { epoch_full_ = next_epoch(); }
+
 void Heap::RecomputeLimits(GarbageCollector collector) {
   if (!((collector == MARK_COMPACTOR) ||
         (HasLowYoungGenerationAllocationRate() &&
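
Putting UpdateCurrentEpoch and UpdateEpochFull above together with the UpdateEpochFull() call added in StartIncrementalMarking earlier in this diff, a typical incremental cycle plays out as in the small simulation below (illustrative names, not V8 code; it only re-creates the update rules to show which collection ends up with which epoch):

    #include <cstdint>
    #include <cstdio>

    enum class Collector { kScavenger, kMarkCompactor };

    struct Sim {
      uint32_t counter = 0;  // stands in for the process-wide global_epoch
      uint32_t epoch_young = 0;
      uint32_t epoch_full = 0;
      bool incremental_marking_running = false;

      uint32_t NextEpoch() { return ++counter; }

      void StartIncrementalMarking() {  // mirrors the UpdateEpochFull() call site
        epoch_full = NextEpoch();
        incremental_marking_running = true;
      }

      void UpdateCurrentEpoch(Collector c) {
        if (c == Collector::kScavenger) {
          epoch_young = NextEpoch();
        } else if (!incremental_marking_running) {
          epoch_full = NextEpoch();
        }
      }
    };

    int main() {
      Sim sim;
      sim.StartIncrementalMarking();                      // full cycle gets epoch 1
      sim.UpdateCurrentEpoch(Collector::kScavenger);      // scavenge gets epoch 2
      sim.UpdateCurrentEpoch(Collector::kMarkCompactor);  // final pause keeps epoch 1
      sim.incremental_marking_running = false;
      sim.UpdateCurrentEpoch(Collector::kMarkCompactor);  // standalone full GC: epoch 3
      std::printf("young=%u full=%u\n",
                  static_cast<unsigned>(sim.epoch_young),
                  static_cast<unsigned>(sim.epoch_full));  // prints: young=2 full=3
    }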
@@ -3419,8 +3461,9 @@ void Heap::FinalizeIncrementalMarkingIncrementally(

   HistogramTimerScope incremental_marking_scope(
       isolate()->counters()->gc_incremental_marking_finalize());
-  TRACE_EVENT0("v8", "V8.GCIncrementalMarkingFinalize");
-  TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE);
+  TRACE_EVENT1("v8", "V8.GCIncrementalMarkingFinalize", "epoch", epoch_full());
+  TRACE_GC_EPOCH(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE,
+                 ThreadKind::kMain);

   SafepointScope safepoint(this);
   InvokeIncrementalMarkingPrologueCallbacks();
@@ -249,6 +249,8 @@ using EphemeronRememberedSet =
     std::unordered_map<EphemeronHashTable, std::unordered_set<int>,
                        Object::Hasher>;

+using CollectionEpoch = uint32_t;
+
 class Heap {
  public:
   // Stores ephemeron entries where the EphemeronHashTable is in old-space,
@@ -511,6 +513,9 @@ class Heap {

   void NotifyOldGenerationExpansion(AllocationSpace space, MemoryChunk* chunk);

+  void UpdateCurrentEpoch(GarbageCollector collector);
+  void UpdateEpochFull();
+
   inline Address* NewSpaceAllocationTopAddress();
   inline Address* NewSpaceAllocationLimitAddress();
   inline Address* OldSpaceAllocationTopAddress();
@@ -1558,6 +1563,9 @@ class Heap {

   static Isolate* GetIsolateFromWritableObject(HeapObject object);

+  CollectionEpoch epoch_young() { return epoch_young_; }
+  CollectionEpoch epoch_full() { return epoch_full_; }
+
  private:
   using ExternalStringTableUpdaterCallback = String (*)(Heap* heap,
                                                         FullObjectSlot pointer);
@@ -2334,6 +2342,11 @@ class Heap {

   std::unique_ptr<third_party_heap::Heap> tp_heap_;

+  // We need two epochs, since there can be scavenges during incremental
+  // marking.
+  CollectionEpoch epoch_young_ = 0;
+  CollectionEpoch epoch_full_ = 0;
+
   // Classes in "heap" can be friends.
   friend class AlwaysAllocateScope;
   friend class ArrayBufferCollector;
@@ -178,8 +178,10 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
                  static_cast<int>(gc_reason));
   HistogramTimerScope incremental_marking_scope(
       counters->gc_incremental_marking_start());
-  TRACE_EVENT0("v8", "V8.GCIncrementalMarkingStart");
-  TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START);
+  TRACE_EVENT1("v8", "V8.GCIncrementalMarkingStart", "epoch",
+               heap_->epoch_full());
+  TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START,
+                 ThreadKind::kMain);
   heap_->tracer()->NotifyIncrementalMarkingStart();

   start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
@@ -779,8 +781,9 @@ StepResult IncrementalMarking::AdvanceWithDeadline(
     StepOrigin step_origin) {
   HistogramTimerScope incremental_marking_scope(
       heap_->isolate()->counters()->gc_incremental_marking());
-  TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
-  TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
+  TRACE_EVENT1("v8", "V8.GCIncrementalMarking", "epoch", heap_->epoch_full());
+  TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL,
+                 ThreadKind::kMain);
   DCHECK(!IsStopped());

   ScheduleBytesToMarkBasedOnTime(heap()->MonotonicallyIncreasingTimeInMs());
@@ -3115,8 +3115,8 @@ class PageEvacuationJob : public v8::JobTask {
       TRACE_GC(tracer_, evacuator->GetTracingScope());
       ProcessItems(delegate, evacuator);
     } else {
-      TRACE_GC1(tracer_, evacuator->GetBackgroundTracingScope(),
-                ThreadKind::kBackground);
+      TRACE_GC_EPOCH(tracer_, evacuator->GetBackgroundTracingScope(),
+                     ThreadKind::kBackground);
       ProcessItems(delegate, evacuator);
     }
   }
@@ -3489,7 +3489,7 @@ class PointersUpdatingJob : public v8::JobTask {
       TRACE_GC(tracer_, scope_);
       UpdatePointers(delegate);
     } else {
-      TRACE_GC1(tracer_, background_scope_, ThreadKind::kBackground);
+      TRACE_GC_EPOCH(tracer_, background_scope_, ThreadKind::kBackground);
       UpdatePointers(delegate);
     }
   }
@@ -4862,9 +4862,9 @@ class YoungGenerationMarkingJob : public v8::JobTask {
                GCTracer::Scope::MINOR_MC_MARK_PARALLEL);
       ProcessItems(delegate);
     } else {
-      TRACE_GC1(collector_->heap()->tracer(),
-                GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING,
-                ThreadKind::kBackground);
+      TRACE_GC_EPOCH(collector_->heap()->tracer(),
+                     GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING,
+                     ThreadKind::kBackground);
       ProcessItems(delegate);
     }
   }
@@ -182,9 +182,9 @@ void ScavengerCollector::JobTask::Run(JobDelegate* delegate) {
              GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL);
     ProcessItems(delegate, scavenger);
   } else {
-    TRACE_GC1(outer_->heap_->tracer(),
-              GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL,
-              ThreadKind::kBackground);
+    TRACE_GC_EPOCH(outer_->heap_->tracer(),
+                   GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL,
+                   ThreadKind::kBackground);
     ProcessItems(delegate, scavenger);
   }
 }
@@ -425,6 +425,7 @@
   F(HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES)        \
   F(HEAP_PROLOGUE)                            \
   F(HEAP_PROLOGUE_SAFEPOINT)                  \
+  F(MARK_COMPACTOR)                           \
   TOP_MC_SCOPES(F)                            \
   F(MC_CLEAR_DEPENDENT_CODE)                  \
   F(MC_CLEAR_FLUSHABLE_BYTECODE)              \
@@ -466,6 +467,7 @@
   F(MC_SWEEP_CODE)                            \
   F(MC_SWEEP_MAP)                             \
   F(MC_SWEEP_OLD)                             \
+  F(MINOR_MARK_COMPACTOR)                     \
   F(MINOR_MC)                                 \
   F(MINOR_MC_CLEAR)                           \
   F(MINOR_MC_CLEAR_STRING_TABLE)              \
@@ -491,6 +493,7 @@
   F(MINOR_MC_MARKING_DEQUE)                   \
   F(MINOR_MC_RESET_LIVENESS)                  \
   F(MINOR_MC_SWEEPING)                        \
+  F(SCAVENGER)                                \
   F(SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS)   \
   F(SCAVENGER_FAST_PROMOTE)                   \
   F(SCAVENGER_FREE_REMEMBERED_SET)            \
@@ -520,4 +523,10 @@
   F(MINOR_MC_BACKGROUND_MARKING)              \
   F(SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL)

+#define TRACER_YOUNG_EPOCH_SCOPES(F)          \
+  F(BACKGROUND_YOUNG_ARRAY_BUFFER_SWEEP)      \
+  F(MINOR_MARK_COMPACTOR)                     \
+  F(SCAVENGER)                                \
+  F(SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL)
+
 #endif  // V8_INIT_HEAP_SYMBOLS_H_