cppgc: Add UMA support

This CL introduces the cppgc::internal::MetricRecorder API, which is
similar to the v8::metrics::Recorder API and is used by cppgc to report
histogram samples to embedders. Embedders that want to collect
histograms should implement the API and provide an instance of it on
heap creation.
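
For illustration only (not part of this CL), a minimal sketch of what
such an embedder-side implementation could look like, given the
MetricRecorder interface added below in
src/heap/cppgc/metric-recorder.h. ReportToEmbedderHistogram() and the
histogram names are hypothetical placeholders for the embedder's own
reporting machinery:

  #include <cstdint>

  #include "src/heap/cppgc/metric-recorder.h"

  // Hypothetical stand-in for the embedder's histogram code (e.g. UMA);
  // stubbed out here.
  void ReportToEmbedderHistogram(const char* name, int64_t sample_ms) {}

  class EmbedderMetricRecorder final
      : public cppgc::internal::MetricRecorder {
   public:
    void AddMainThreadEvent(const CppGCCycleEndMetricSamples& event) final {
      ReportToEmbedderHistogram("CppGC.AtomicMark", event.atomic_mark_ms);
      ReportToEmbedderHistogram("CppGC.AtomicSweep", event.atomic_sweep_ms);
    }
    void AddMainThreadEvent(
        const CppGCIncrementalMarkMetricSample& event) final {
      ReportToEmbedderHistogram("CppGC.IncrementalMark", event.duration_ms);
    }
    void AddMainThreadEvent(
        const CppGCIncrementalSweepMetricSample& event) final {
      ReportToEmbedderHistogram("CppGC.IncrementalSweep", event.duration_ms);
    }
  };
  // An instance is passed on heap creation, e.g. as the metric_recorder
  // argument of the CppHeap constructor changed below.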

CppHeap uses an adaptor class that implements the MetricRecorder API
and forwards the relevant samples to the corresponding
v8::metrics::Recorder.

The API uses three data structures: two for incremental steps that need
to be reported as they happen (marking and sweeping) and one for the
end of a GC cycle that aggregates statistics over the entire cycle.
The data structures only provide the "raw" samples (e.g. atomic mark
time, incremental mark time, etc.). The embedder is expected to compute
aggregated histograms on its own (e.g. overall marking time).
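
As a sketch of that aggregation (illustration only, not part of this
CL), an overall main-thread marking sample could be derived from the
raw cycle-end data as follows; whether concurrent_mark_ms is also added
depends on how the embedder defines the histogram:

  // Sketch: derive an aggregate "overall marking" sample from the raw
  // per-phase values of CppGCCycleEndMetricSamples.
  int64_t OverallMainThreadMarkMs(
      const cppgc::internal::MetricRecorder::CppGCCycleEndMetricSamples&
          samples) {
    return samples.atomic_mark_ms + samples.incremental_mark_ms;
  }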

Bug: chromium:1056170
Change-Id: If63ef50a29a21594f654edb83084598980d221ce
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2642258
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#72256}
Authored by Omer Katz on 2021-01-22 15:16:15 +01:00; committed by Commit Bot
parent 987f0b75bb
commit adda4c5f98
17 changed files with 551 additions and 50 deletions

@@ -4646,6 +4646,7 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/marking-visitor.h",
"src/heap/cppgc/marking-worklists.cc",
"src/heap/cppgc/marking-worklists.h",
"src/heap/cppgc/metric-recorder.h",
"src/heap/cppgc/name-trait.cc",
"src/heap/cppgc/object-allocator.cc",
"src/heap/cppgc/object-allocator.h",

@@ -169,11 +169,13 @@ void UnifiedHeapMarker::AddObject(void* object) {
CppHeap::CppHeap(
v8::Isolate* isolate,
const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces)
const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces,
std::unique_ptr<cppgc::internal::MetricRecorder> metric_recorder)
: cppgc::internal::HeapBase(std::make_shared<CppgcPlatformAdapter>(isolate),
custom_spaces,
cppgc::internal::HeapBase::StackSupport::
kSupportsConservativeStackScan),
kSupportsConservativeStackScan,
std::move(metric_recorder)),
isolate_(*reinterpret_cast<Isolate*>(isolate)) {
if (isolate_.heap_profiler()) {
isolate_.heap_profiler()->AddBuildEmbedderGraphCallback(

@@ -28,9 +28,11 @@ class V8_EXPORT_PRIVATE CppHeap final : public cppgc::internal::HeapBase,
return static_cast<const CppHeap*>(heap);
}
CppHeap(v8::Isolate* isolate,
const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>&
custom_spaces);
CppHeap(
v8::Isolate* isolate,
const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces,
std::unique_ptr<cppgc::internal::MetricRecorder> metric_recorder =
nullptr);
~CppHeap() final;
CppHeap(const CppHeap&) = delete;
@@ -56,8 +58,6 @@ class V8_EXPORT_PRIVATE CppHeap final : public cppgc::internal::HeapBase,
// finalization is not needed) thus this method is left empty.
}
void PostGarbageCollection() final {}
Isolate& isolate_;
bool marking_done_ = false;
bool is_in_final_pause_ = false;

@@ -56,7 +56,8 @@ class ObjectSizeCounter : private HeapVisitor<ObjectSizeCounter> {
HeapBase::HeapBase(
std::shared_ptr<cppgc::Platform> platform,
const std::vector<std::unique_ptr<CustomSpaceBase>>& custom_spaces,
StackSupport stack_support)
StackSupport stack_support,
std::unique_ptr<MetricRecorder> histogram_recorder)
: raw_heap_(this, custom_spaces),
platform_(std::move(platform)),
#if defined(CPPGC_CAGED_HEAP)
@@ -66,7 +67,8 @@ HeapBase::HeapBase(
page_backend_(
std::make_unique<PageBackend>(platform_->GetPageAllocator())),
#endif
stats_collector_(std::make_unique<StatsCollector>()),
stats_collector_(
std::make_unique<StatsCollector>(std::move(histogram_recorder))),
stack_(std::make_unique<heap::base::Stack>(
v8::base::Stack::GetStackStart())),
prefinalizer_handler_(std::make_unique<PreFinalizerHandler>(*this)),

@@ -14,6 +14,7 @@
#include "src/base/macros.h"
#include "src/heap/cppgc/compactor.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/metric-recorder.h"
#include "src/heap/cppgc/object-allocator.h"
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/sweeper.h"
@@ -79,7 +80,8 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
HeapBase(std::shared_ptr<cppgc::Platform> platform,
const std::vector<std::unique_ptr<CustomSpaceBase>>& custom_spaces,
StackSupport stack_support);
StackSupport stack_support,
std::unique_ptr<MetricRecorder> histogram_recorder);
virtual ~HeapBase();
HeapBase(const HeapBase&) = delete;
@@ -153,9 +155,6 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
void AdvanceIncrementalGarbageCollectionOnAllocationIfNeeded();
// Notifies the heap that a GC is done.
virtual void PostGarbageCollection() = 0;
// Termination drops all roots (clears them out) and runs garbage collections
// in a bounded fixed point loop until no new objects are created in
// destructors. Exceeding the loop bound results in a crash.

@@ -86,7 +86,8 @@ void CheckConfig(Heap::Config config, Heap::MarkingType marking_support,
Heap::Heap(std::shared_ptr<cppgc::Platform> platform,
cppgc::Heap::HeapOptions options)
: HeapBase(platform, options.custom_spaces, options.stack_support),
: HeapBase(platform, options.custom_spaces, options.stack_support,
nullptr /* metric_recorder */),
gc_invoker_(this, platform_.get(), options.stack_support),
growing_(&gc_invoker_, stats_collector_.get(),
options.resource_constraints, options.marking_support,
@@ -196,8 +197,6 @@ void Heap::FinalizeGarbageCollection(Config::StackState stack_state) {
sweeper_.NotifyDoneIfNeeded();
}
void Heap::PostGarbageCollection() {}
void Heap::DisableHeapGrowingForTesting() { growing_.DisableForTesting(); }
void Heap::FinalizeIncrementalGarbageCollectionIfNeeded(

@@ -46,8 +46,6 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase,
void FinalizeIncrementalGarbageCollectionIfNeeded(Config::StackState) final;
void PostGarbageCollection() final;
Config config_;
GCInvoker gc_invoker_;
HeapGrowing growing_;

@@ -287,8 +287,8 @@ void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) {
void MarkerBase::ProcessWeakness() {
DCHECK_EQ(MarkingConfig::MarkingType::kAtomic, config_.marking_type);
StatsCollector::DisabledScope stats_scope(
heap(), StatsCollector::kWeakInvokeCallbacks);
StatsCollector::DisabledScope stats_scope(heap(),
StatsCollector::kAtomicWeak);
heap().GetWeakPersistentRegion().Trace(&visitor());
// Processing cross-thread handles requires taking the process lock.

@@ -0,0 +1,54 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_METRIC_RECORDER_H_
#define V8_HEAP_CPPGC_METRIC_RECORDER_H_
#include <cstdint>
namespace cppgc {
namespace internal {
class StatsCollector;
/**
* Base class used for reporting GC statistics histograms. Embedders interested
* in collecting histograms should implement the virtual AddMainThreadEvent
* methods below and pass an instance of the implementation during Heap
* creation.
*/
class MetricRecorder {
public:
struct CppGCCycleEndMetricSamples {
int64_t atomic_mark_ms;
int64_t atomic_weak_ms;
int64_t atomic_compact_ms;
int64_t atomic_sweep_ms;
int64_t incremental_mark_ms;
int64_t incremental_sweep_ms;
int64_t concurrent_mark_ms;
int64_t concurrent_sweep_ms;
};
struct CppGCIncrementalMarkMetricSample {
int64_t duration_ms;
};
struct CppGCIncrementalSweepMetricSample {
int64_t duration_ms;
};
virtual ~MetricRecorder() = default;
virtual void AddMainThreadEvent(const CppGCCycleEndMetricSamples& event) {}
virtual void AddMainThreadEvent(
const CppGCIncrementalMarkMetricSample& event) {}
virtual void AddMainThreadEvent(
const CppGCIncrementalSweepMetricSample& event) {}
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_METRIC_RECORDER_H_

@@ -8,6 +8,7 @@
#include <cmath>
#include "src/base/logging.h"
#include "src/heap/cppgc/metric-recorder.h"
namespace cppgc {
namespace internal {
@@ -15,6 +16,10 @@ namespace internal {
// static
constexpr size_t StatsCollector::kAllocationThresholdBytes;
StatsCollector::StatsCollector(
std::unique_ptr<MetricRecorder> histogram_recorder)
: metric_recorder_(std::move(histogram_recorder)) {}
void StatsCollector::RegisterObserver(AllocationObserver* observer) {
DCHECK_EQ(allocation_observers_.end(),
std::find(allocation_observers_.begin(),
@@ -114,6 +119,18 @@ void StatsCollector::NotifySweepingCompleted() {
gc_state_ = GarbageCollectionState::kNotRunning;
previous_ = std::move(current_);
current_ = Event();
if (metric_recorder_) {
MetricRecorder::CppGCCycleEndMetricSamples event{
previous_.scope_data[kAtomicMark].InMilliseconds(),
previous_.scope_data[kAtomicWeak].InMilliseconds(),
previous_.scope_data[kAtomicCompact].InMilliseconds(),
previous_.scope_data[kAtomicSweep].InMilliseconds(),
previous_.scope_data[kIncrementalMark].InMilliseconds(),
previous_.scope_data[kIncrementalSweep].InMilliseconds(),
previous_.concurrent_scope_data[kConcurrentMark],
previous_.concurrent_scope_data[kConcurrentSweep]};
metric_recorder_->AddMainThreadEvent(event);
}
}
size_t StatsCollector::allocated_object_size() const {
@@ -129,5 +146,25 @@ size_t StatsCollector::allocated_object_size() const {
allocated_bytes_since_end_of_marking_);
}
void StatsCollector::RecordHistogramSample(ScopeId scope_id_,
v8::base::TimeDelta time) {
switch (scope_id_) {
case kIncrementalMark: {
MetricRecorder::CppGCIncrementalMarkMetricSample event{
time.InMilliseconds()};
metric_recorder_->AddMainThreadEvent(event);
break;
}
case kIncrementalSweep: {
MetricRecorder::CppGCIncrementalSweepMetricSample event{
time.InMilliseconds()};
metric_recorder_->AddMainThreadEvent(event);
break;
}
default:
break;
}
}
} // namespace internal
} // namespace cppgc

@@ -14,17 +14,23 @@
#include "src/base/platform/time.h"
#include "src/heap/cppgc/garbage-collector.h"
#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/metric-recorder.h"
#include "src/heap/cppgc/trace-event.h"
namespace cppgc {
namespace internal {
// Histogram scopes contribute to histograms as well as to traces and metrics.
// Other scopes contribute only to traces and metrics.
#define CPPGC_FOR_ALL_HISTOGRAM_SCOPES(V) \
V(AtomicMark) \
V(AtomicWeak) \
V(AtomicCompact) \
V(AtomicSweep) \
V(IncrementalMark) \
V(IncrementalSweep)
#define CPPGC_FOR_ALL_SCOPES(V) \
V(AtomicMark) \
V(AtomicSweep) \
V(AtomicCompact) \
V(IncrementalMark) \
V(IncrementalSweep) \
V(MarkIncrementalStart) \
V(MarkIncrementalFinalize) \
V(MarkAtomicPrologue) \
@@ -43,17 +49,17 @@ namespace internal {
V(MarkVisitCrossThreadPersistents) \
V(MarkVisitStack) \
V(MarkVisitRememberedSets) \
V(WeakInvokeCallbacks) \
V(SweepInvokePreFinalizers) \
V(SweepIdleStep) \
V(SweepOnAllocation) \
V(SweepFinalize)
#define CPPGC_FOR_ALL_CONCURRENT_SCOPES(V) \
V(ConcurrentMarkProcessEphemerons) \
V(ConcurrentMark) \
#define CPPGC_FOR_ALL_HISTOGRAM_CONCURRENT_SCOPES(V) \
V(ConcurrentMark) \
V(ConcurrentSweep)
#define CPPGC_FOR_ALL_CONCURRENT_SCOPES(V) V(ConcurrentMarkProcessEphemerons)
// Sink for various time and memory statistics.
class V8_EXPORT_PRIVATE StatsCollector final {
using CollectionType = GarbageCollector::Config::CollectionType;
@@ -66,6 +72,8 @@ class V8_EXPORT_PRIVATE StatsCollector final {
enum ScopeId {
#define CPPGC_DECLARE_ENUM(name) k##name,
CPPGC_FOR_ALL_HISTOGRAM_SCOPES(CPPGC_DECLARE_ENUM)
kNumHistogramScopeIds,
CPPGC_FOR_ALL_SCOPES(CPPGC_DECLARE_ENUM)
#undef CPPGC_DECLARE_ENUM
kNumScopeIds,
@@ -73,6 +81,8 @@ class V8_EXPORT_PRIVATE StatsCollector final {
enum ConcurrentScopeId {
#define CPPGC_DECLARE_ENUM(name) k##name,
CPPGC_FOR_ALL_HISTOGRAM_CONCURRENT_SCOPES(CPPGC_DECLARE_ENUM)
kNumHistogramConcurrentScopeIds,
CPPGC_FOR_ALL_CONCURRENT_SCOPES(CPPGC_DECLARE_ENUM)
#undef CPPGC_DECLARE_ENUM
kNumConcurrentScopeIds
@@ -85,8 +95,9 @@ class V8_EXPORT_PRIVATE StatsCollector final {
struct Event final {
V8_EXPORT_PRIVATE explicit Event();
v8::base::TimeDelta scope_data[kNumScopeIds];
v8::base::Atomic32 concurrent_scope_data[kNumConcurrentScopeIds]{0};
v8::base::TimeDelta scope_data[kNumHistogramScopeIds];
v8::base::Atomic32 concurrent_scope_data[kNumHistogramConcurrentScopeIds]{
0};
size_t epoch = -1;
CollectionType collection_type = CollectionType::kMajor;
@@ -106,6 +117,7 @@ class V8_EXPORT_PRIVATE StatsCollector final {
case k##name: \
return type == CollectionType::kMajor ? "CppGC." #name \
: "CppGC." #name ".Minor";
CPPGC_FOR_ALL_HISTOGRAM_SCOPES(CPPGC_CASE)
CPPGC_FOR_ALL_SCOPES(CPPGC_CASE)
#undef CPPGC_CASE
default:
@@ -120,6 +132,7 @@ class V8_EXPORT_PRIVATE StatsCollector final {
case k##name: \
return type == CollectionType::kMajor ? "CppGC." #name \
: "CppGC." #name ".Minor";
CPPGC_FOR_ALL_HISTOGRAM_CONCURRENT_SCOPES(CPPGC_CASE)
CPPGC_FOR_ALL_CONCURRENT_SCOPES(CPPGC_CASE)
#undef CPPGC_CASE
default:
@@ -149,6 +162,10 @@ class V8_EXPORT_PRIVATE StatsCollector final {
scope_category == kMutatorThread
? static_cast<int>(kNumScopeIds)
: static_cast<int>(kNumConcurrentScopeIds));
DCHECK_NE(static_cast<int>(scope_id_),
scope_category == kMutatorThread
? static_cast<int>(kNumHistogramScopeIds)
: static_cast<int>(kNumHistogramConcurrentScopeIds));
StartTrace(args...);
}
@@ -160,6 +177,10 @@ class V8_EXPORT_PRIVATE StatsCollector final {
InternalScope(const InternalScope&) = delete;
InternalScope& operator=(const InternalScope&) = delete;
void DecreaseStartTimeForTesting(v8::base::TimeDelta delta) {
start_time_ -= delta;
}
private:
void* operator new(size_t, void*) = delete;
void* operator new(size_t) = delete;
@@ -182,7 +203,7 @@ class V8_EXPORT_PRIVATE StatsCollector final {
HeapBase& heap_;
StatsCollector* const stats_collector_;
const v8::base::TimeTicks start_time_;
v8::base::TimeTicks start_time_;
const ScopeIdType scope_id_;
};
@@ -217,7 +238,7 @@ class V8_EXPORT_PRIVATE StatsCollector final {
// reasonably interesting sizes.
static constexpr size_t kAllocationThresholdBytes = 1024;
StatsCollector() = default;
explicit StatsCollector(std::unique_ptr<MetricRecorder>);
StatsCollector(const StatsCollector&) = delete;
StatsCollector& operator=(const StatsCollector&) = delete;
@@ -248,6 +269,11 @@ class V8_EXPORT_PRIVATE StatsCollector final {
const Event& GetPreviousEventForTesting() const { return previous_; }
void SetMetricRecorderForTesting(
std::unique_ptr<MetricRecorder> histogram_recorder) {
metric_recorder_ = std::move(histogram_recorder);
}
private:
enum class GarbageCollectionState : uint8_t {
kNotRunning,
@@ -255,6 +281,9 @@ class V8_EXPORT_PRIVATE StatsCollector final {
kSweeping
};
void RecordHistogramSample(ScopeId, v8::base::TimeDelta);
void RecordHistogramSample(ConcurrentScopeId, v8::base::TimeDelta) {}
// Invokes |callback| for all registered observers.
template <typename Callback>
void ForAllAllocationObservers(Callback callback);
@ -285,6 +314,8 @@ class V8_EXPORT_PRIVATE StatsCollector final {
Event current_;
// The previous GC event which is populated at NotifySweepingFinished.
Event previous_;
std::unique_ptr<MetricRecorder> metric_recorder_;
};
template <typename Callback>
@@ -371,9 +402,17 @@ template <StatsCollector::TraceCategory trace_category,
void StatsCollector::InternalScope<trace_category,
scope_category>::IncreaseScopeTime() {
DCHECK_NE(GarbageCollectionState::kNotRunning, stats_collector_->gc_state_);
// Only record top level scopes.
if (static_cast<int>(scope_id_) >=
(scope_category == kMutatorThread
? static_cast<int>(kNumHistogramScopeIds)
: static_cast<int>(kNumHistogramConcurrentScopeIds)))
return;
v8::base::TimeDelta time = v8::base::TimeTicks::Now() - start_time_;
if (scope_category == StatsCollector::ScopeContext::kMutatorThread) {
stats_collector_->current_.scope_data[scope_id_] += time;
if (stats_collector_->metric_recorder_)
stats_collector_->RecordHistogramSample(scope_id_, time);
return;
}
// scope_category == StatsCollector::ScopeContext::kConcurrentThread

@@ -572,8 +572,6 @@ class Sweeper::SweeperImpl final {
DCHECK(notify_done_pending_);
notify_done_pending_ = false;
stats_collector_->NotifySweepingCompleted();
// Notify the heap that GC is finished.
heap_->heap()->PostGarbageCollection();
}
void NotifyDoneIfNeeded() {

@@ -103,6 +103,7 @@ v8_source_set("cppgc_unittests_sources") {
"heap/cppgc/marking-verifier-unittest.cc",
"heap/cppgc/marking-visitor-unittest.cc",
"heap/cppgc/member-unittest.cc",
"heap/cppgc/metric-recorder-unittest.cc",
"heap/cppgc/minor-gc-unittest.cc",
"heap/cppgc/name-trait-unittest.cc",
"heap/cppgc/object-start-bitmap-unittest.cc",

@@ -60,7 +60,7 @@ void FakeAllocate(StatsCollector* stats_collector, size_t bytes) {
} // namespace
TEST(HeapGrowingTest, ConservativeGCInvoked) {
StatsCollector stats_collector;
StatsCollector stats_collector(nullptr /* metric_recorder */);
MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update.
@@ -73,7 +73,7 @@ TEST(HeapGrowingTest, ConservativeGCInvoked) {
}
TEST(HeapGrowingTest, InitialHeapSize) {
StatsCollector stats_collector;
StatsCollector stats_collector(nullptr /* metric_recorder */);
MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints;
// Use larger size to avoid running into small heap optimizations.
@@ -90,7 +90,7 @@ TEST(HeapGrowingTest, InitialHeapSize) {
TEST(HeapGrowingTest, ConstantGrowingFactor) {
// Use larger size to avoid running into small heap optimizations.
constexpr size_t kObjectSize = 10 * HeapGrowing::kMinLimitIncrease;
StatsCollector stats_collector;
StatsCollector stats_collector(nullptr /* metric_recorder */);
FakeGarbageCollector gc(&stats_collector);
cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update.
@@ -108,7 +108,7 @@ TEST(HeapGrowingTest, ConstantGrowingFactor) {
TEST(HeapGrowingTest, SmallHeapGrowing) {
// Larger constant to avoid running into special handling for smaller heaps.
constexpr size_t kLargeAllocation = 100 * kMB;
StatsCollector stats_collector;
StatsCollector stats_collector(nullptr /* metric_recorder */);
FakeGarbageCollector gc(&stats_collector);
cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update.
@@ -124,7 +124,7 @@ TEST(HeapGrowingTest, SmallHeapGrowing) {
}
TEST(HeapGrowingTest, IncrementalGCStarted) {
StatsCollector stats_collector;
StatsCollector stats_collector(nullptr /* metric_recorder */);
MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints;
HeapGrowing growing(&gc, &stats_collector, constraints,
@@ -137,7 +137,7 @@ TEST(HeapGrowingTest, IncrementalGCStarted) {
}
TEST(HeapGrowingTest, IncrementalGCFinalized) {
StatsCollector stats_collector;
StatsCollector stats_collector(nullptr /* metric_recorder */);
MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints;
HeapGrowing growing(&gc, &stats_collector, constraints,

@@ -0,0 +1,368 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/metric-recorder.h"
#include "src/heap/cppgc/stats-collector.h"
#include "test/unittests/heap/cppgc/tests.h"
namespace cppgc {
namespace internal {
namespace {
class MetricRecorderImpl final : public MetricRecorder {
public:
void AddMainThreadEvent(const CppGCCycleEndMetricSamples& event) final {
CppGCCycleEndMetricSamples_event = event;
CppGCCycleEndMetricSamples_callcount++;
}
void AddMainThreadEvent(const CppGCIncrementalMarkMetricSample& event) final {
CppGCIncrementalMarkMetricSample_event = event;
CppGCIncrementalMarkMetricSample_callcount++;
}
void AddMainThreadEvent(
const CppGCIncrementalSweepMetricSample& event) final {
CppGCIncrementalSweepMetricSample_event = event;
CppGCIncrementalSweepMetricSample_callcount++;
}
static size_t CppGCCycleEndMetricSamples_callcount;
static CppGCCycleEndMetricSamples CppGCCycleEndMetricSamples_event;
static size_t CppGCIncrementalMarkMetricSample_callcount;
static CppGCIncrementalMarkMetricSample
CppGCIncrementalMarkMetricSample_event;
static size_t CppGCIncrementalSweepMetricSample_callcount;
static CppGCIncrementalSweepMetricSample
CppGCIncrementalSweepMetricSample_event;
};
// static
size_t MetricRecorderImpl::CppGCCycleEndMetricSamples_callcount = 0u;
MetricRecorderImpl::CppGCCycleEndMetricSamples
MetricRecorderImpl::CppGCCycleEndMetricSamples_event;
size_t MetricRecorderImpl::CppGCIncrementalMarkMetricSample_callcount = 0u;
MetricRecorderImpl::CppGCIncrementalMarkMetricSample
MetricRecorderImpl::CppGCIncrementalMarkMetricSample_event;
size_t MetricRecorderImpl::CppGCIncrementalSweepMetricSample_callcount = 0u;
MetricRecorderImpl::CppGCIncrementalSweepMetricSample
MetricRecorderImpl::CppGCIncrementalSweepMetricSample_event;
class MetricRecorderTest : public testing::TestWithHeap {
public:
MetricRecorderTest() : stats(Heap::From(GetHeap())->stats_collector()) {
stats->SetMetricRecorderForTesting(std::make_unique<MetricRecorderImpl>());
}
void StartGC() {
stats->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::IsForcedGC::kNotForced);
}
void EndGC(size_t marked_bytes) {
stats->NotifyMarkingCompleted(marked_bytes);
stats->NotifySweepingCompleted();
}
StatsCollector* stats;
};
} // namespace
TEST_F(MetricRecorderTest, IncrementalScopesReportedImmediately) {
MetricRecorderImpl::CppGCCycleEndMetricSamples_callcount = 0u;
MetricRecorderImpl::CppGCIncrementalMarkMetricSample_callcount = 0u;
MetricRecorderImpl::CppGCIncrementalSweepMetricSample_callcount = 0u;
StartGC();
{
EXPECT_EQ(0u,
MetricRecorderImpl::CppGCIncrementalMarkMetricSample_callcount);
{
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()),
StatsCollector::kIncrementalMark);
scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(1));
}
EXPECT_EQ(1u,
MetricRecorderImpl::CppGCIncrementalMarkMetricSample_callcount);
EXPECT_LT(
0u,
MetricRecorderImpl::CppGCIncrementalMarkMetricSample_event.duration_ms);
}
{
EXPECT_EQ(0u,
MetricRecorderImpl::CppGCIncrementalSweepMetricSample_callcount);
{
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()),
StatsCollector::kIncrementalSweep);
scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(1));
}
EXPECT_EQ(1u,
MetricRecorderImpl::CppGCIncrementalSweepMetricSample_callcount);
EXPECT_LT(0u, MetricRecorderImpl::CppGCIncrementalSweepMetricSample_event
.duration_ms);
}
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_callcount);
EndGC(0);
}
TEST_F(MetricRecorderTest, AtomicScopesNotReportedImmediately) {
MetricRecorderImpl::CppGCCycleEndMetricSamples_callcount = 0u;
MetricRecorderImpl::CppGCIncrementalMarkMetricSample_callcount = 0u;
MetricRecorderImpl::CppGCIncrementalSweepMetricSample_callcount = 0u;
StartGC();
{
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()),
StatsCollector::kAtomicMark);
}
{
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()),
StatsCollector::kAtomicWeak);
}
{
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()),
StatsCollector::kAtomicCompact);
}
{
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()),
StatsCollector::kAtomicSweep);
}
EXPECT_EQ(0u, MetricRecorderImpl::CppGCIncrementalMarkMetricSample_callcount);
EXPECT_EQ(0u,
MetricRecorderImpl::CppGCIncrementalSweepMetricSample_callcount);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_callcount);
EndGC(0);
}
TEST_F(MetricRecorderTest, CycleEndHistogramReportedOnGcEnd) {
MetricRecorderImpl::CppGCCycleEndMetricSamples_callcount = 0u;
MetricRecorderImpl::CppGCIncrementalMarkMetricSample_callcount = 0u;
MetricRecorderImpl::CppGCIncrementalSweepMetricSample_callcount = 0u;
StartGC();
EndGC(0);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCIncrementalMarkMetricSample_callcount);
EXPECT_EQ(0u,
MetricRecorderImpl::CppGCIncrementalSweepMetricSample_callcount);
EXPECT_EQ(1u, MetricRecorderImpl::CppGCCycleEndMetricSamples_callcount);
}
TEST_F(MetricRecorderTest, CycleEndHistogramReportsValuesForAtomicScopes) {
{
StartGC();
EndGC(0);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_mark_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_weak_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_compact_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_sweep_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_mark_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_sweep_ms);
}
{
StartGC();
{
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()),
StatsCollector::kAtomicMark);
scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(1));
}
EndGC(0);
EXPECT_LT(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_mark_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_weak_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_compact_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_sweep_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_mark_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_sweep_ms);
}
{
StartGC();
{
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()),
StatsCollector::kAtomicWeak);
scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(1));
}
EndGC(0);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_mark_ms);
EXPECT_LT(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_weak_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_compact_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_sweep_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_mark_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_sweep_ms);
}
{
StartGC();
{
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()),
StatsCollector::kAtomicCompact);
scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(1));
}
EndGC(0);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_mark_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_weak_ms);
EXPECT_LT(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_compact_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_sweep_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_mark_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_sweep_ms);
}
{
StartGC();
{
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()),
StatsCollector::kAtomicSweep);
scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(1));
}
EndGC(0);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_mark_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_weak_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_compact_ms);
EXPECT_LT(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_sweep_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_mark_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_sweep_ms);
}
{
StartGC();
{
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()),
StatsCollector::kIncrementalMark);
scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(1));
}
EndGC(0);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_mark_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_weak_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_compact_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_sweep_ms);
EXPECT_LT(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_mark_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_sweep_ms);
}
{
StartGC();
{
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()),
StatsCollector::kIncrementalSweep);
scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(1));
}
EndGC(0);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_mark_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_weak_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_compact_ms);
EXPECT_EQ(
0u,
MetricRecorderImpl::CppGCCycleEndMetricSamples_event.atomic_sweep_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_mark_ms);
EXPECT_LT(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.incremental_sweep_ms);
}
}
TEST_F(MetricRecorderTest, ConcurrentSamplesAreReported) {
{
StartGC();
EndGC(0);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.concurrent_mark_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.concurrent_sweep_ms);
}
{
StartGC();
{
StatsCollector::EnabledConcurrentScope scope(
*Heap::From(GetHeap()), StatsCollector::kConcurrentMark);
scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(1));
}
EndGC(0);
EXPECT_LT(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.concurrent_mark_ms);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.concurrent_sweep_ms);
}
{
StartGC();
{
StatsCollector::EnabledConcurrentScope scope(
*Heap::From(GetHeap()), StatsCollector::kConcurrentSweep);
scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(1));
}
EndGC(0);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.concurrent_mark_ms);
EXPECT_LT(0u, MetricRecorderImpl::CppGCCycleEndMetricSamples_event
.concurrent_sweep_ms);
}
}
} // namespace internal
} // namespace cppgc

@@ -227,16 +227,17 @@ TEST_F(CppgcTracingScopesTest, InitalScopesAreZero) {
stats_collector->NotifySweepingCompleted();
const StatsCollector::Event& event =
stats_collector->GetPreviousEventForTesting();
for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) {
EXPECT_TRUE(event.scope_data[i].IsZero());
}
for (int i = 0; i < StatsCollector::kNumConcurrentScopeIds; ++i) {
for (int i = 0; i < StatsCollector::kNumHistogramConcurrentScopeIds; ++i) {
EXPECT_EQ(0, event.concurrent_scope_data[i]);
}
}
TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
for (int scope_id = 0; scope_id < StatsCollector::kNumScopeIds; ++scope_id) {
for (int scope_id = 0; scope_id < StatsCollector::kNumHistogramScopeIds;
++scope_id) {
StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
stats_collector->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor,
@@ -255,21 +256,21 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
stats_collector->NotifySweepingCompleted();
const StatsCollector::Event& event =
stats_collector->GetPreviousEventForTesting();
for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) {
if (i == scope_id)
EXPECT_LT(v8::base::TimeDelta(), event.scope_data[i]);
else
EXPECT_TRUE(event.scope_data[i].IsZero());
}
for (int i = 0; i < StatsCollector::kNumConcurrentScopeIds; ++i) {
for (int i = 0; i < StatsCollector::kNumHistogramConcurrentScopeIds; ++i) {
EXPECT_EQ(0, event.concurrent_scope_data[i]);
}
}
}
TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
for (int scope_id = 0; scope_id < StatsCollector::kNumConcurrentScopeIds;
++scope_id) {
for (int scope_id = 0;
scope_id < StatsCollector::kNumHistogramConcurrentScopeIds; ++scope_id) {
StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
stats_collector->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor,
@@ -288,10 +289,10 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
stats_collector->NotifySweepingCompleted();
const StatsCollector::Event& event =
stats_collector->GetPreviousEventForTesting();
for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) {
EXPECT_TRUE(event.scope_data[i].IsZero());
}
for (int i = 0; i < StatsCollector::kNumConcurrentScopeIds; ++i) {
for (int i = 0; i < StatsCollector::kNumHistogramConcurrentScopeIds; ++i) {
if (i == scope_id)
EXPECT_LT(0, event.concurrent_scope_data[i]);
else

@@ -18,6 +18,8 @@ constexpr size_t kMinReportedSize = StatsCollector::kAllocationThresholdBytes;
class StatsCollectorTest : public ::testing::Test {
public:
StatsCollectorTest() : stats(nullptr /* metric_recorder */) {}
void FakeAllocate(size_t bytes) {
stats.NotifyAllocation(bytes);
stats.NotifySafePointForConservativeCollection();