heap: Inline GCTracer::Scope constructor and destructor
Tracer scopes are used in numerous places in src/heap to track the time spent in the various phases of garbage collection. They are usually introduced with the TRACE_GC* family of macros, which take the scope identifier as a parameter. At most call sites, the scope identifier is known at compile time.

This CL inlines the constructor and destructor of GCTracer::Scope, so that the C++ compiler can properly optimize the introduction of such scopes when the scope identifier is known at compile time, using constant propagation. This is expected to have a performance impact for short-lived and frequently used scopes, e.g., in incremental marking and sweeping steps.

Change-Id: I6f1a2954a437de1fa6dab5e464c20c952d84ffd4
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3581774
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Nikolaos Papaspyrou <nikolaos@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79957}
commit ae0480a01b (parent f473f10ef3)
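As context for the optimization described in the commit message, the following is a minimal, self-contained sketch of the idea; it is not the real V8 GCTracer, and the Tracer, Scope, ScopeId, and TRACE_GC names below are illustrative stand-ins only. When the scope constructor and destructor are defined inline (the role the new gc-tracer-inl.h plays), their bodies are visible at every call site, so a scope identifier that is a compile-time constant can be constant-propagated into them instead of being passed to an out-of-line constructor.

// Hedged sketch only: a simplified stand-in for the idea in this CL, not the
// real V8 classes. Names (Tracer, Scope, ScopeId, TRACE_GC) are assumptions
// made for illustration.
#include <chrono>
#include <cstdio>

enum ScopeId { MC_INCREMENTAL, MC_SWEEP, NUMBER_OF_SCOPES };

class Tracer {
 public:
  class Scope;  // analogous to the declaration kept in gc-tracer.h

  // Accumulates the measured duration for one scope id.
  void AddSample(ScopeId id, double ms) { totals_ms_[id] += ms; }
  double total_ms(ScopeId id) const { return totals_ms_[id]; }

 private:
  double totals_ms_[NUMBER_OF_SCOPES] = {};
};

// Analogue of the new gc-tracer-inl.h: with the constructor and destructor
// defined inline, their bodies are visible at every call site, so a
// compile-time-constant scope id can be constant-propagated into them.
class Tracer::Scope {
 public:
  Scope(Tracer* tracer, ScopeId id)
      : tracer_(tracer), id_(id), start_(std::chrono::steady_clock::now()) {}
  ~Scope() {
    const double ms = std::chrono::duration<double, std::milli>(
                          std::chrono::steady_clock::now() - start_)
                          .count();
    tracer_->AddSample(id_, ms);
  }
  Scope(const Scope&) = delete;
  Scope& operator=(const Scope&) = delete;

 private:
  Tracer* const tracer_;
  const ScopeId id_;
  const std::chrono::steady_clock::time_point start_;
};

// Rough stand-in for the TRACE_GC* macros: the scope id is a literal constant
// at the call site, which is what makes the inlining pay off.
#define TRACE_GC(tracer, id) Tracer::Scope _gc_scope((tracer), (id))

int main() {
  Tracer tracer;
  {
    TRACE_GC(&tracer, MC_SWEEP);
    // ... a GC phase (e.g. one sweeping step) would run here ...
  }
  std::printf("MC_SWEEP total: %.3f ms\n", tracer.total_ms(MC_SWEEP));
  return 0;
}

Built with optimization (e.g. -O2), the constant MC_SWEEP in main() can be folded directly into the inlined timing bookkeeping; the CL aims for the analogous effect at TRACE_GC* call sites where the scope identifier is known at compile time.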
@@ -1413,6 +1413,7 @@ filegroup(
        "src/heap/gc-idle-time-handler.cc",
        "src/heap/gc-idle-time-handler.h",
        "src/heap/gc-tracer.cc",
+       "src/heap/gc-tracer-inl.h",
        "src/heap/gc-tracer.h",
        "src/heap/heap-allocator-inl.h",
        "src/heap/heap-allocator.cc",

BUILD.gn (1 changed line)
@@ -2997,6 +2997,7 @@ v8_header_set("v8_internal_headers") {
    "src/heap/free-list-inl.h",
    "src/heap/free-list.h",
    "src/heap/gc-idle-time-handler.h",
+   "src/heap/gc-tracer-inl.h",
    "src/heap/gc-tracer.h",
    "src/heap/heap-allocator-inl.h",
    "src/heap/heap-allocator.h",
@@ -7,8 +7,10 @@
#include <atomic>
#include <memory>

+#include "src/heap/gc-tracer-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/objects/js-array-buffer.h"
#include "src/tasks/cancelable-task.h"
#include "src/tasks/task-utils.h"
@@ -9,7 +9,6 @@
#include "src/common/globals.h"
#include "src/execution/isolate.h"
#include "src/handles/handles.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/local-heap.h"
@@ -10,6 +10,7 @@
#include "include/v8config.h"
#include "src/common/globals.h"
#include "src/execution/isolate.h"
+#include "src/heap/gc-tracer-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
@@ -5,7 +5,6 @@
#include "src/heap/gc-idle-time-handler.h"

#include "src/flags/flags.h"
#include "src/heap/gc-tracer.h"
#include "src/utils/utils.h"

namespace v8 {

src/heap/gc-tracer-inl.h (new file, 174 lines)
@@ -0,0 +1,174 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_GC_TRACER_INL_H_
#define V8_HEAP_GC_TRACER_INL_H_

#include "src/base/platform/platform.h"
#include "src/execution/isolate.h"
#include "src/heap/gc-tracer.h"

namespace v8 {
namespace internal {

GCTracer::IncrementalMarkingInfos::IncrementalMarkingInfos()
    : duration(0), longest_step(0), steps(0) {}

void GCTracer::IncrementalMarkingInfos::Update(double delta) {
  steps++;
  duration += delta;
  if (delta > longest_step) {
    longest_step = delta;
  }
}

void GCTracer::IncrementalMarkingInfos::ResetCurrentCycle() {
  duration = 0;
  longest_step = 0;
  steps = 0;
}

GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind)
    : tracer_(tracer),
      scope_(scope),
      thread_kind_(thread_kind),
      start_time_(tracer_->MonotonicallyIncreasingTimeInMs()) {
#ifdef V8_RUNTIME_CALL_STATS
  if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
  if (thread_kind_ == ThreadKind::kMain) {
#if DEBUG
    AssertMainThread();
#endif  // DEBUG
    runtime_stats_ = tracer_->heap_->isolate_->counters()->runtime_call_stats();
    runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
  } else {
    runtime_call_stats_scope_.emplace(
        tracer->worker_thread_runtime_call_stats());
    runtime_stats_ = runtime_call_stats_scope_->Get();
    runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
  }
#endif  // defined(V8_RUNTIME_CALL_STATS)
}

GCTracer::Scope::~Scope() {
  double duration_ms = tracer_->MonotonicallyIncreasingTimeInMs() - start_time_;
  tracer_->AddScopeSample(scope_, duration_ms);

  if (thread_kind_ == ThreadKind::kMain) {
#if DEBUG
    AssertMainThread();
#endif  // DEBUG

    if (scope_ == ScopeId::MC_INCREMENTAL ||
        scope_ == ScopeId::MC_INCREMENTAL_START ||
        scope_ == ScopeId::MC_INCREMENTAL_FINALIZE) {
      auto* long_task_stats =
          tracer_->heap_->isolate_->GetCurrentLongTaskStats();
      long_task_stats->gc_full_incremental_wall_clock_duration_us +=
          static_cast<int64_t>(duration_ms *
                               base::Time::kMicrosecondsPerMillisecond);
    }
  }

#ifdef V8_RUNTIME_CALL_STATS
  if (V8_LIKELY(runtime_stats_ == nullptr)) return;
  runtime_stats_->Leave(&timer_);
#endif  // defined(V8_RUNTIME_CALL_STATS)
}

constexpr int GCTracer::Scope::IncrementalOffset(ScopeId id) {
  DCHECK_LE(FIRST_INCREMENTAL_SCOPE, id);
  DCHECK_GE(LAST_INCREMENTAL_SCOPE, id);
  return id - FIRST_INCREMENTAL_SCOPE;
}

constexpr bool GCTracer::Event::IsYoungGenerationEvent(Type type) {
  DCHECK_NE(START, type);
  return type == SCAVENGER || type == MINOR_MARK_COMPACTOR;
}

CollectionEpoch GCTracer::CurrentEpoch(Scope::ScopeId id) const {
  return Scope::NeedsYoungEpoch(id) ? epoch_young_ : epoch_full_;
}

#ifdef DEBUG
bool GCTracer::IsInObservablePause() const {
  return 0.0 < start_of_observable_pause_;
}

bool GCTracer::IsConsistentWithCollector(GarbageCollector collector) const {
  return (collector == GarbageCollector::SCAVENGER &&
          current_.type == Event::SCAVENGER) ||
         (collector == GarbageCollector::MINOR_MARK_COMPACTOR &&
          current_.type == Event::MINOR_MARK_COMPACTOR) ||
         (collector == GarbageCollector::MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR));
}

bool GCTracer::IsSweepingInProgress() const {
  return (current_.type == Event::MARK_COMPACTOR ||
          current_.type == Event::INCREMENTAL_MARK_COMPACTOR) &&
         current_.state == Event::State::SWEEPING;
}
#endif

constexpr double GCTracer::current_scope(Scope::ScopeId id) const {
  if (Scope::FIRST_INCREMENTAL_SCOPE <= id &&
      id <= Scope::LAST_INCREMENTAL_SCOPE) {
    return incremental_scope(id).duration;
  } else if (Scope::FIRST_BACKGROUND_SCOPE <= id &&
             id <= Scope::LAST_BACKGROUND_SCOPE) {
    return background_counter_[id].total_duration_ms;
  } else {
    DCHECK_GT(Scope::NUMBER_OF_SCOPES, id);
    return current_.scopes[id];
  }
}

constexpr const GCTracer::IncrementalMarkingInfos& GCTracer::incremental_scope(
    Scope::ScopeId id) const {
  return incremental_scopes_[Scope::IncrementalOffset(id)];
}

void GCTracer::AddScopeSample(Scope::ScopeId id, double duration) {
  if (Scope::FIRST_INCREMENTAL_SCOPE <= id &&
      id <= Scope::LAST_INCREMENTAL_SCOPE) {
    incremental_scopes_[Scope::IncrementalOffset(id)].Update(duration);
  } else if (Scope::FIRST_BACKGROUND_SCOPE <= id &&
             id <= Scope::LAST_BACKGROUND_SCOPE) {
    base::MutexGuard guard(&background_counter_mutex_);
    background_counter_[id].total_duration_ms += duration;
  } else {
    DCHECK_GT(Scope::NUMBER_OF_SCOPES, id);
    current_.scopes[id] += duration;
  }
}

#ifdef V8_RUNTIME_CALL_STATS
WorkerThreadRuntimeCallStats* GCTracer::worker_thread_runtime_call_stats() {
  return heap_->isolate_->counters()->worker_thread_runtime_call_stats();
}

RuntimeCallCounterId GCTracer::RCSCounterFromScope(Scope::ScopeId id) {
  STATIC_ASSERT(Scope::FIRST_SCOPE == Scope::MC_INCREMENTAL);
  return static_cast<RuntimeCallCounterId>(
      static_cast<int>(RuntimeCallCounterId::kGC_MC_INCREMENTAL) +
      static_cast<int>(id));
}
#endif  // defined(V8_RUNTIME_CALL_STATS)

double GCTracer::MonotonicallyIncreasingTimeInMs() {
  if (V8_UNLIKELY(FLAG_predictable)) {
    return heap_->MonotonicallyIncreasingTimeInMs();
  } else {
    return base::TimeTicks::Now().ToInternalValue() /
           static_cast<double>(base::Time::kMicrosecondsPerMillisecond);
  }
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_GC_TRACER_INL_H_
@@ -10,11 +10,12 @@
#include "src/base/atomic-utils.h"
#include "src/base/strings.h"
#include "src/common/globals.h"
#include "src/execution/isolate.h"
#include "src/execution/thread-id.h"
#include "src/heap/cppgc-js/cpp-heap.h"
#include "src/heap/cppgc/metric-recorder.h"
+#include "src/heap/gc-tracer-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/logging/counters.h"
@@ -36,28 +37,6 @@ static size_t CountTotalHolesSize(Heap* heap) {
  return holes_size;
}

-#ifdef V8_RUNTIME_CALL_STATS
-WorkerThreadRuntimeCallStats* GCTracer::worker_thread_runtime_call_stats() {
-  return heap_->isolate()->counters()->worker_thread_runtime_call_stats();
-}
-
-RuntimeCallCounterId GCTracer::RCSCounterFromScope(Scope::ScopeId id) {
-  STATIC_ASSERT(Scope::FIRST_SCOPE == Scope::MC_INCREMENTAL);
-  return static_cast<RuntimeCallCounterId>(
-      static_cast<int>(RuntimeCallCounterId::kGC_MC_INCREMENTAL) +
-      static_cast<int>(id));
-}
-#endif  // defined(V8_RUNTIME_CALL_STATS)
-
-double GCTracer::MonotonicallyIncreasingTimeInMs() {
-  if (V8_UNLIKELY(FLAG_predictable)) {
-    return heap_->MonotonicallyIncreasingTimeInMs();
-  } else {
-    return base::TimeTicks::Now().ToInternalValue() /
-           static_cast<double>(base::Time::kMicrosecondsPerMillisecond);
-  }
-}
-
namespace {
std::atomic<CollectionEpoch> global_epoch{0};

@@ -66,53 +45,6 @@ CollectionEpoch next_epoch() {
}
}  // namespace

-GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind)
-    : tracer_(tracer), scope_(scope), thread_kind_(thread_kind) {
-  start_time_ = tracer_->MonotonicallyIncreasingTimeInMs();
-#ifdef V8_RUNTIME_CALL_STATS
-  if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
-  if (thread_kind_ == ThreadKind::kMain) {
-#if DEBUG
-    AssertMainThread();
-#endif  // DEBUG
-    runtime_stats_ =
-        tracer_->heap_->isolate()->counters()->runtime_call_stats();
-    runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
-  } else {
-    runtime_call_stats_scope_.emplace(
-        tracer->worker_thread_runtime_call_stats());
-    runtime_stats_ = runtime_call_stats_scope_->Get();
-    runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
-  }
-#endif  // defined(V8_RUNTIME_CALL_STATS)
-}
-
-GCTracer::Scope::~Scope() {
-  double duration_ms = tracer_->MonotonicallyIncreasingTimeInMs() - start_time_;
-  tracer_->AddScopeSample(scope_, duration_ms);
-
-  if (thread_kind_ == ThreadKind::kMain) {
-#if DEBUG
-    AssertMainThread();
-#endif  // DEBUG
-
-    if (scope_ == ScopeId::MC_INCREMENTAL ||
-        scope_ == ScopeId::MC_INCREMENTAL_START ||
-        scope_ == ScopeId::MC_INCREMENTAL_FINALIZE) {
-      auto* long_task_stats =
-          tracer_->heap_->isolate()->GetCurrentLongTaskStats();
-      long_task_stats->gc_full_incremental_wall_clock_duration_us +=
-          static_cast<int64_t>(duration_ms *
-                               base::Time::kMicrosecondsPerMillisecond);
-    }
-  }
-
-#ifdef V8_RUNTIME_CALL_STATS
-  if (V8_LIKELY(runtime_stats_ == nullptr)) return;
-  runtime_stats_->Leave(&timer_);
-#endif  // defined(V8_RUNTIME_CALL_STATS)
-}
-
#if DEBUG
void GCTracer::Scope::AssertMainThread() {
  Isolate* isolate = tracer_->heap_->isolate();
@@ -9,8 +9,6 @@
#include "src/base/compiler-specific.h"
#include "src/base/macros.h"
#include "src/base/optional.h"
#include "src/base/platform/platform.h"
#include "src/base/platform/time.h"
#include "src/base/ring-buffer.h"
#include "src/common/globals.h"
#include "src/heap/heap.h"
@@ -61,21 +59,9 @@ class V8_EXPORT_PRIVATE GCTracer {
  GCTracer& operator=(const GCTracer&) = delete;

  struct IncrementalMarkingInfos {
-    IncrementalMarkingInfos() : duration(0), longest_step(0), steps(0) {}
-
-    void Update(double delta) {
-      steps++;
-      duration += delta;
-      if (delta > longest_step) {
-        longest_step = delta;
-      }
-    }
-
-    void ResetCurrentCycle() {
-      duration = 0;
-      longest_step = 0;
-      steps = 0;
-    }
+    V8_INLINE IncrementalMarkingInfos();
+    V8_INLINE void Update(double delta);
+    V8_INLINE void ResetCurrentCycle();

    double duration;      // in ms
    double longest_step;  // in ms

@@ -107,28 +93,23 @@ class V8_EXPORT_PRIVATE GCTracer {
      LAST_BACKGROUND_SCOPE = LAST_MINOR_GC_BACKGROUND_SCOPE
    };

-    Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind);
-    ~Scope();
+    V8_INLINE Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind);
+    V8_INLINE ~Scope();
    Scope(const Scope&) = delete;
    Scope& operator=(const Scope&) = delete;
    static const char* Name(ScopeId id);
    static bool NeedsYoungEpoch(ScopeId id);
-
-    static constexpr int IncrementalOffset(ScopeId id) {
-      DCHECK_LE(FIRST_INCREMENTAL_SCOPE, id);
-      DCHECK_GE(LAST_INCREMENTAL_SCOPE, id);
-      return id - FIRST_INCREMENTAL_SCOPE;
-    }
+    V8_INLINE static constexpr int IncrementalOffset(ScopeId id);

   private:
#if DEBUG
    void AssertMainThread();
#endif  // DEBUG

-    GCTracer* tracer_;
-    ScopeId scope_;
-    ThreadKind thread_kind_;
-    double start_time_;
+    GCTracer* const tracer_;
+    const ScopeId scope_;
+    const ThreadKind thread_kind_;
+    const double start_time_;
#ifdef V8_RUNTIME_CALL_STATS
    RuntimeCallTimer timer_;
    RuntimeCallStats* runtime_stats_ = nullptr;

@@ -147,10 +128,7 @@ class V8_EXPORT_PRIVATE GCTracer {
    };

    // Returns true if the event corresponds to a young generation GC.
-    static constexpr bool IsYoungGenerationEvent(Type type) {
-      DCHECK_NE(START, type);
-      return type == SCAVENGER || type == MINOR_MARK_COMPACTOR;
-    }
+    V8_INLINE static constexpr bool IsYoungGenerationEvent(Type type);

    // The state diagram for a GC cycle:
    // (NOT_RUNNING) -----(StartCycle)----->

@@ -249,14 +227,12 @@ class V8_EXPORT_PRIVATE GCTracer {
                                     double optional_speed);

#ifdef V8_RUNTIME_CALL_STATS
-  static RuntimeCallCounterId RCSCounterFromScope(Scope::ScopeId id);
+  V8_INLINE static RuntimeCallCounterId RCSCounterFromScope(Scope::ScopeId id);
#endif  // defined(V8_RUNTIME_CALL_STATS)

  explicit GCTracer(Heap* heap);

-  CollectionEpoch CurrentEpoch(Scope::ScopeId id) const {
-    return Scope::NeedsYoungEpoch(id) ? epoch_young_ : epoch_full_;
-  }
+  V8_INLINE CollectionEpoch CurrentEpoch(Scope::ScopeId id) const;

  // Start and stop an observable pause.
  void StartObservablePause();

@@ -294,26 +270,14 @@ class V8_EXPORT_PRIVATE GCTracer {
                           YoungGenerationHandling young_generation_handling);

#ifdef DEBUG
-  bool IsInObservablePause() const { return 0.0 < start_of_observable_pause_; }
+  V8_INLINE bool IsInObservablePause() const;

  // Checks if the current event is consistent with a collector.
-  bool IsConsistentWithCollector(GarbageCollector collector) const {
-    return (collector == GarbageCollector::SCAVENGER &&
-            current_.type == Event::SCAVENGER) ||
-           (collector == GarbageCollector::MINOR_MARK_COMPACTOR &&
-            current_.type == Event::MINOR_MARK_COMPACTOR) ||
-           (collector == GarbageCollector::MARK_COMPACTOR &&
-            (current_.type == Event::MARK_COMPACTOR ||
-             current_.type == Event::INCREMENTAL_MARK_COMPACTOR));
-  }
+  V8_INLINE bool IsConsistentWithCollector(GarbageCollector collector) const;

  // Checks if the current event corresponds to a full GC cycle whose sweeping
  // has not finalized yet.
-  bool IsSweepingInProgress() const {
-    return (current_.type == Event::MARK_COMPACTOR ||
-            current_.type == Event::INCREMENTAL_MARK_COMPACTOR) &&
-           current_.state == Event::State::SWEEPING;
-  }
+  V8_INLINE bool IsSweepingInProgress() const;
#endif

  // Sample and accumulate bytes allocated since the last GC.

@@ -419,19 +383,7 @@ class V8_EXPORT_PRIVATE GCTracer {
  double AverageMarkCompactMutatorUtilization() const;
  double CurrentMarkCompactMutatorUtilization() const;

-  V8_INLINE void AddScopeSample(Scope::ScopeId id, double duration) {
-    if (Scope::FIRST_INCREMENTAL_SCOPE <= id &&
-        id <= Scope::LAST_INCREMENTAL_SCOPE) {
-      incremental_scopes_[Scope::IncrementalOffset(id)].Update(duration);
-    } else if (Scope::FIRST_BACKGROUND_SCOPE <= id &&
-               id <= Scope::LAST_BACKGROUND_SCOPE) {
-      base::MutexGuard guard(&background_counter_mutex_);
-      background_counter_[id].total_duration_ms += duration;
-    } else {
-      DCHECK_GT(Scope::NUMBER_OF_SCOPES, id);
-      current_.scopes[id] += duration;
-    }
-  }
+  V8_INLINE void AddScopeSample(Scope::ScopeId id, double duration);

  void RecordGCPhasesHistograms(RecordGCPhasesInfo::Mode mode);

@@ -443,7 +395,7 @@ class V8_EXPORT_PRIVATE GCTracer {
  void RecordTimeToIncrementalMarkingTask(double time_to_task);

#ifdef V8_RUNTIME_CALL_STATS
-  WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats();
+  V8_INLINE WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats();
#endif  // defined(V8_RUNTIME_CALL_STATS)

 private:

@@ -477,23 +429,10 @@ class V8_EXPORT_PRIVATE GCTracer {
  // Note: when accessing a background scope via this method, the caller is
  // responsible for avoiding data races, e.g., by acquiring
  // background_counter_mutex_.
-  constexpr double current_scope(Scope::ScopeId id) const {
-    if (Scope::FIRST_INCREMENTAL_SCOPE <= id &&
-        id <= Scope::LAST_INCREMENTAL_SCOPE) {
-      return incremental_scope(id).duration;
-    } else if (Scope::FIRST_BACKGROUND_SCOPE <= id &&
-               id <= Scope::LAST_BACKGROUND_SCOPE) {
-      return background_counter_[id].total_duration_ms;
-    } else {
-      DCHECK_GT(Scope::NUMBER_OF_SCOPES, id);
-      return current_.scopes[id];
-    }
-  }
+  V8_INLINE constexpr double current_scope(Scope::ScopeId id) const;

-  constexpr const IncrementalMarkingInfos& incremental_scope(
-      Scope::ScopeId id) const {
-    return incremental_scopes_[Scope::IncrementalOffset(id)];
-  }
+  V8_INLINE constexpr const IncrementalMarkingInfos& incremental_scope(
+      Scope::ScopeId id) const;

  // Returns the average speed of the events in the buffer.
  // If the buffer is empty, the result is 0.

@@ -513,7 +452,7 @@ class V8_EXPORT_PRIVATE GCTracer {
  // end of the atomic pause.
  void RecordGCSumCounters();

-  double MonotonicallyIncreasingTimeInMs();
+  V8_INLINE double MonotonicallyIncreasingTimeInMs();

  // Print one detailed trace line in name=value format.
  // TODO(ernstm): Move to Heap.
@@ -47,6 +47,7 @@
#include "src/heap/embedder-tracing.h"
#include "src/heap/finalization-registry-cleanup-task.h"
#include "src/heap/gc-idle-time-handler.h"
+#include "src/heap/gc-tracer-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-controller.h"
#include "src/heap/heap-layout-tracer.h"
@@ -10,10 +10,13 @@
#include "src/heap/concurrent-marking.h"
#include "src/heap/embedder-tracing.h"
#include "src/heap/gc-idle-time-handler.h"
+#include "src/heap/gc-tracer-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/marking-barrier.h"
#include "src/heap/marking-visitor-inl.h"
#include "src/heap/marking-visitor.h"
@@ -15,6 +15,7 @@
#include "src/handles/local-handles.h"
#include "src/heap/collection-barrier.h"
#include "src/heap/concurrent-allocator.h"
+#include "src/heap/gc-tracer-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-write-barrier.h"
@@ -24,6 +24,7 @@
#include "src/heap/code-object-registry.h"
#include "src/heap/concurrent-allocator.h"
#include "src/heap/evacuation-allocator-inl.h"
+#include "src/heap/gc-tracer-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-inl.h"
@@ -11,8 +11,10 @@
#include "src/execution/isolate.h"
#include "src/flags/flags.h"
#include "src/heap/basic-memory-chunk.h"
+#include "src/heap/gc-tracer-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/memory-chunk.h"
#include "src/heap/read-only-spaces.h"
#include "src/logging/log.h"
@@ -14,6 +14,7 @@
#include "src/handles/handles.h"
#include "src/handles/local-handles.h"
#include "src/handles/persistent-handles.h"
+#include "src/heap/gc-tracer-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
@@ -8,8 +8,10 @@
#include "src/handles/global-handles.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/concurrent-allocator.h"
+#include "src/heap/gc-tracer-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/invalidated-slots-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/mark-compact.h"
@@ -16,7 +16,6 @@
#include "src/heap/base/active-system-pages.h"
#include "src/heap/combined-heap.h"
#include "src/heap/concurrent-marking.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-controller.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-inl.h"
@@ -9,6 +9,7 @@
#include "src/heap/base/active-system-pages.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/free-list-inl.h"
+#include "src/heap/gc-tracer-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/invalidated-slots-inl.h"
#include "src/heap/mark-compact-inl.h"
@@ -687,7 +687,7 @@ class WorkerThreadRuntimeCallStats final {
// Creating a WorkerThreadRuntimeCallStatsScope will provide a thread-local
// runtime call stats table, and will dump the table to an immediate trace event
// when it is destroyed.
-class V8_NODISCARD WorkerThreadRuntimeCallStatsScope final {
+class V8_EXPORT_PRIVATE V8_NODISCARD WorkerThreadRuntimeCallStatsScope final {
 public:
  WorkerThreadRuntimeCallStatsScope() = default;
  explicit WorkerThreadRuntimeCallStatsScope(
@@ -2,13 +2,15 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

+#include "src/heap/gc-tracer.h"
+
#include <cmath>
#include <limits>

#include "src/base/platform/platform.h"
#include "src/common/globals.h"
#include "src/execution/isolate.h"
-#include "src/heap/gc-tracer.h"
+#include "src/heap/gc-tracer-inl.h"
#include "test/unittests/test-utils.h"
#include "testing/gtest/include/gtest/gtest.h"