cppgc: Add tracing scopes
This CL adds tracing scopes for the various cppgc classes. Scopes use the
TRACE_EVENT_BEGIN and TRACE_EVENT_END macros to report trace events and
therefore need to include trace-event.h. For unified heap builds,
trace-event.h forwards to V8's src/tracing/trace-event.h. For other builds,
trace-event.h provides the subset of src/tracing/trace-event.h that covers
just the parts used by cppgc.

This CL covers what is needed for traces and Blink GC metrics (up to renaming
events from BlinkGC.* to CppGC.*). UMA and UKM are not yet handled.

Bug: chromium:1056170
Change-Id: Id92e84b27259ff0aadae7692f3d79d30896fb8e7
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2540548
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71284}
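For orientation, here is a minimal usage sketch of the new scopes (not part of the diff below), modeled on the unit tests added in this CL. kMainThreadScopeForTests1 is one of the test-only scope ids declared in stats-collector.h; real phases would add their own ids to CPPGC_FOR_ALL_SCOPES.

#include "src/heap/cppgc/stats-collector.h"

namespace cppgc {
namespace internal {

void SomeTracedPhase(HeapBase& heap) {
  // Emits TRACE_EVENT_BEGIN0 in the "cppgc" category on entry and
  // TRACE_EVENT_END2 (with "epoch" and "forced" arguments) on exit, and adds
  // the elapsed wall time to the current StatsCollector::Event's scope_data.
  StatsCollector::EnabledScope scope(
      heap, StatsCollector::kMainThreadScopeForTests1);
  // ... work being timed ...
}

}  // namespace internal
}  // namespace cppgc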
This commit is contained in:
Parent: ba5fa195ed
Commit: 6a1a3a101e
BUILD.gn — 5 changed lines
@@ -3825,7 +3825,8 @@ v8_source_set("v8_base_without_compiler") {
    ":cppgc_base_config",
  ]

  defines = []
  defines = [ "CPPGC_BUILD_IN_V8" ]

  deps = [
    ":cppgc_base",
    ":torque_generated_definitions",

@@ -4426,6 +4427,7 @@ v8_source_set("cppgc_base") {
  visibility = [ ":*" ]

  sources = [
    "//base/trace_event/common/trace_event_common.h",
    "include/cppgc/allocation.h",
    "include/cppgc/common.h",
    "include/cppgc/custom-space.h",

@@ -4521,6 +4523,7 @@ v8_source_set("cppgc_base") {
    "src/heap/cppgc/sweeper.cc",
    "src/heap/cppgc/sweeper.h",
    "src/heap/cppgc/task-handle.h",
    "src/heap/cppgc/trace-event.h",
    "src/heap/cppgc/trace-trait.cc",
    "src/heap/cppgc/virtual-memory.cc",
    "src/heap/cppgc/virtual-memory.h",
@@ -23,9 +23,12 @@ class V8_EXPORT DefaultPlatform : public Platform {
  using IdleTaskSupport = v8::platform::IdleTaskSupport;
  explicit DefaultPlatform(
      int thread_pool_size = 0,
      IdleTaskSupport idle_task_support = IdleTaskSupport::kDisabled)
      : v8_platform_(v8::platform::NewDefaultPlatform(thread_pool_size,
                                                      idle_task_support)) {}
      IdleTaskSupport idle_task_support = IdleTaskSupport::kDisabled,
      std::unique_ptr<TracingController> tracing_controller = {})
      : v8_platform_(v8::platform::NewDefaultPlatform(
            thread_pool_size, idle_task_support,
            v8::platform::InProcessStackDumping::kDisabled,
            std::move(tracing_controller))) {}

  cppgc::PageAllocator* GetPageAllocator() override {
    return v8_platform_->GetPageAllocator();

@@ -48,6 +51,10 @@ class V8_EXPORT DefaultPlatform : public Platform {
    return v8_platform_->PostJob(priority, std::move(job_task));
  }

  TracingController* GetTracingController() override {
    return v8_platform_->GetTracingController();
  }

 protected:
  static constexpr v8::Isolate* kNoIsolate = nullptr;
@@ -20,6 +20,7 @@ using PageAllocator = v8::PageAllocator;
using Task = v8::Task;
using TaskPriority = v8::TaskPriority;
using TaskRunner = v8::TaskRunner;
using TracingController = v8::TracingController;

/**
 * Platform interface used by Heap. Contains allocators and executors.

@@ -113,6 +114,11 @@ class V8_EXPORT Platform {
      TaskPriority priority, std::unique_ptr<JobTask> job_task) {
    return nullptr;
  }

  /**
   * Returns an instance of a TracingController. This must be non-nullptr.
   */
  virtual TracingController* GetTracingController() = 0;
};

/**
@@ -67,6 +67,10 @@ class CppgcPlatformAdapter final : public cppgc::Platform {
    return platform_->PostJob(priority, std::move(job_task));
  }

  TracingController* GetTracingController() override {
    return platform_->GetTracingController();
  }

 private:
  v8::Platform* platform_;
  v8::Isolate* isolate_;

@@ -185,7 +189,10 @@ void CppHeap::TracePrologue(TraceFlags flags) {
  const UnifiedHeapMarker::MarkingConfig marking_config{
      UnifiedHeapMarker::MarkingConfig::CollectionType::kMajor,
      cppgc::Heap::StackState::kNoHeapPointers,
      UnifiedHeapMarker::MarkingConfig::MarkingType::kIncrementalAndConcurrent};
      UnifiedHeapMarker::MarkingConfig::MarkingType::kIncrementalAndConcurrent,
      flags == TraceFlags::kForced
          ? UnifiedHeapMarker::MarkingConfig::IsForcedGC::kForced
          : UnifiedHeapMarker::MarkingConfig::IsForcedGC::kNotForced};
  if ((flags == TraceFlags::kReduceMemory) || (flags == TraceFlags::kForced)) {
    // Only enable compaction when in a memory reduction garbage collection as
    // it may significantly increase the final garbage collection pause.
@@ -20,6 +20,7 @@ class GarbageCollector {
    using StackState = cppgc::Heap::StackState;
    using MarkingType = Marker::MarkingConfig::MarkingType;
    using SweepingType = Sweeper::SweepingConfig::SweepingType;
    using IsForcedGC = Marker::MarkingConfig::IsForcedGC;

    static constexpr Config ConservativeAtomicConfig() {
      return {CollectionType::kMajor, StackState::kMayContainHeapPointers,

@@ -50,6 +51,7 @@ class GarbageCollector {
    StackState stack_state = StackState::kMayContainHeapPointers;
    MarkingType marking_type = MarkingType::kAtomic;
    SweepingType sweeping_type = SweepingType::kAtomic;
    IsForcedGC is_forced_gc = IsForcedGC::kNotForced;
  };

  // Executes a garbage collection specified in config.
@@ -43,7 +43,8 @@ void Heap::ForceGarbageCollectionSlow(const char* source, const char* reason,
  internal::Heap::From(this)->CollectGarbage(
      {internal::GarbageCollector::Config::CollectionType::kMajor, stack_state,
       internal::GarbageCollector::Config::MarkingType::kAtomic,
       internal::GarbageCollector::Config::SweepingType::kAtomic});
       internal::GarbageCollector::Config::SweepingType::kAtomic,
       internal::GarbageCollector::Config::IsForcedGC::kForced});
}

AllocationHandle& Heap::GetAllocationHandle() {

@@ -142,7 +143,8 @@ void Heap::StartGarbageCollection(Config config) {
#endif

  const Marker::MarkingConfig marking_config{
      config.collection_type, config.stack_state, config.marking_type};
      config.collection_type, config.stack_state, config.marking_type,
      config.is_forced_gc};
  marker_ = MarkerFactory::CreateAndStartMarking<Marker>(
      AsBase(), platform_.get(), marking_config);
}
@@ -195,7 +195,8 @@ MarkerBase::~MarkerBase() {

void MarkerBase::StartMarking() {
  DCHECK(!is_marking_started_);
  heap().stats_collector()->NotifyMarkingStarted();
  heap().stats_collector()->NotifyMarkingStarted(config_.collection_type,
                                                 config_.is_forced_gc);

  is_marking_started_ = true;
  if (EnterIncrementalMarkingIfNeeded(config_, heap())) {
@@ -44,17 +44,22 @@ class V8_EXPORT_PRIVATE MarkerBase {
      kMajor,
    };
    using StackState = cppgc::Heap::StackState;
    enum MarkingType : uint8_t {
    enum class MarkingType : uint8_t {
      kAtomic,
      kIncremental,
      kIncrementalAndConcurrent
    };
    enum class IsForcedGC : uint8_t {
      kNotForced,
      kForced,
    };

    static constexpr MarkingConfig Default() { return {}; }

    const CollectionType collection_type = CollectionType::kMajor;
    StackState stack_state = StackState::kMayContainHeapPointers;
    MarkingType marking_type = MarkingType::kIncremental;
    IsForcedGC is_forced_gc = IsForcedGC::kNotForced;
  };

  virtual ~MarkerBase();
@@ -71,8 +71,16 @@ void StatsCollector::AllocatedObjectSizeSafepointImpl() {
  explicitly_freed_bytes_since_safepoint_ = 0;
}

void StatsCollector::NotifyMarkingStarted() {
StatsCollector::Event::Event() {
  static std::atomic<size_t> epoch_counter{0};
  epoch = epoch_counter.fetch_add(1);
}

void StatsCollector::NotifyMarkingStarted(CollectionType collection_type,
                                          IsForcedGC is_forced_gc) {
  DCHECK_EQ(GarbageCollectionState::kNotRunning, gc_state_);
  current_.collection_type = collection_type;
  current_.is_forced_gc = is_forced_gc;
  gc_state_ = GarbageCollectionState::kMarking;
}

@@ -101,12 +109,11 @@ double StatsCollector::GetRecentAllocationSpeedInBytesPerMs() const {
         (current_time - time_of_last_end_of_marking_).InMillisecondsF();
}

const StatsCollector::Event& StatsCollector::NotifySweepingCompleted() {
void StatsCollector::NotifySweepingCompleted() {
  DCHECK_EQ(GarbageCollectionState::kSweeping, gc_state_);
  gc_state_ = GarbageCollectionState::kNotRunning;
  previous_ = std::move(current_);
  current_ = Event();
  return previous_;
}

size_t StatsCollector::allocated_object_size() const {
@@ -12,22 +12,155 @@

#include "src/base/macros.h"
#include "src/base/platform/time.h"
#include "src/heap/cppgc/garbage-collector.h"
#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/trace-event.h"

namespace cppgc {
namespace internal {

#define CPPGC_FOR_ALL_SCOPES(V) \
  V(MainThreadScopeForTests1)   \
  V(MainThreadScopeForTests2)

#define CPPGC_FOR_ALL_CONCURRENT_SCOPES(V) V(ConcurrentThreadScopeForTests)

// Sink for various time and memory statistics.
class V8_EXPORT_PRIVATE StatsCollector final {
  using CollectionType = GarbageCollector::Config::CollectionType;
  using IsForcedGC = GarbageCollector::Config::IsForcedGC;

 public:
#if defined(CPPGC_DECLARE_ENUM)
  static_assert(false, "CPPGC_DECLARE_ENUM macro is already defined");
#endif

  enum ScopeId {
#define CPPGC_DECLARE_ENUM(name) k##name,
    CPPGC_FOR_ALL_SCOPES(CPPGC_DECLARE_ENUM)
#undef CPPGC_DECLARE_ENUM
        kNumScopeIds,
  };

  enum ConcurrentScopeId {
#define CPPGC_DECLARE_ENUM(name) k##name,
    CPPGC_FOR_ALL_CONCURRENT_SCOPES(CPPGC_DECLARE_ENUM)
#undef CPPGC_DECLARE_ENUM
        kNumConcurrentScopeIds
  };

  // POD to hold interesting data accumulated during a garbage collection cycle.
  //
  // The event is always fully populated when looking at previous events but
  // may only be partially populated when looking at the current event.
  struct Event final {
    V8_EXPORT_PRIVATE explicit Event();

    v8::base::TimeDelta scope_data[kNumScopeIds];
    v8::base::Atomic32 concurrent_scope_data[kNumConcurrentScopeIds]{0};

    size_t epoch = -1;
    CollectionType collection_type = CollectionType::kMajor;
    IsForcedGC is_forced_gc = IsForcedGC::kNotForced;
    // Marked bytes collected during marking.
    size_t marked_bytes = 0;
  };

 private:
#if defined(CPPGC_CASE)
  static_assert(false, "CPPGC_CASE macro is already defined");
#endif

  constexpr static const char* GetScopeName(ScopeId id, CollectionType type) {
    switch (id) {
#define CPPGC_CASE(name)                                   \
  case k##name:                                            \
    return type == CollectionType::kMajor ? "CppGC." #name \
                                          : "CppGC." #name ".Minor";
      CPPGC_FOR_ALL_SCOPES(CPPGC_CASE)
#undef CPPGC_CASE
      default:
        return nullptr;
    }
  }

  constexpr static const char* GetScopeName(ConcurrentScopeId id,
                                            CollectionType type) {
    switch (id) {
#define CPPGC_CASE(name)                                   \
  case k##name:                                            \
    return type == CollectionType::kMajor ? "CppGC." #name \
                                          : "CppGC." #name ".Minor";
      CPPGC_FOR_ALL_CONCURRENT_SCOPES(CPPGC_CASE)
#undef CPPGC_CASE
      default:
        return nullptr;
    }
  }
  enum TraceCategory { kEnabled, kDisabled };
  enum ScopeContext { kMutatorThread, kConcurrentThread };

  // Trace a particular scope. Will emit a trace event and record the time in
  // the corresponding StatsCollector.
  template <TraceCategory trace_category, ScopeContext scope_category>
  class InternalScope {
    using ScopeIdType = std::conditional_t<scope_category == kMutatorThread,
                                           ScopeId, ConcurrentScopeId>;

   public:
    template <typename... Args>
    InternalScope(HeapBase& heap, ScopeIdType scope_id, Args... args)
        : heap_(heap),
          stats_collector_(heap_.stats_collector()),
          start_time_(v8::base::TimeTicks::Now()),
          scope_id_(scope_id) {
      DCHECK_LE(0, scope_id_);
      DCHECK_LT(scope_id_, scope_category == kMutatorThread
                               ? kNumScopeIds
                               : kNumConcurrentScopeIds);
      StartTrace(args...);
    }

    ~InternalScope() {
      StopTrace();
      IncreaseScopeTime();
    }

   private:
    void* operator new(size_t, void*) = delete;
    void* operator new(size_t) = delete;

    inline constexpr static const char* TraceCategory();

    template <typename... Args>
    inline void StartTrace(Args... args);
    inline void StopTrace();

    inline void StartTraceImpl();
    template <typename Value1>
    inline void StartTraceImpl(const char* k1, Value1 v1);
    template <typename Value1, typename Value2>
    inline void StartTraceImpl(const char* k1, Value1 v1, const char* k2,
                               Value2 v2);
    inline void StopTraceImpl();

    inline void IncreaseScopeTime();

    HeapBase& heap_;
    StatsCollector* const stats_collector_;
    const v8::base::TimeTicks start_time_;
    const ScopeIdType scope_id_;

    DISALLOW_COPY_AND_ASSIGN(InternalScope);
  };

 public:
  using DisabledScope = InternalScope<kDisabled, kMutatorThread>;
  using EnabledScope = InternalScope<kEnabled, kMutatorThread>;
  using DisabledConcurrentScope = InternalScope<kDisabled, kConcurrentThread>;
  using EnabledConcurrentScope = InternalScope<kEnabled, kConcurrentThread>;

  // Observer for allocated object size. May be used to implement heap growing
  // heuristics.
  class AllocationObserver {
@@ -68,13 +201,13 @@ class V8_EXPORT_PRIVATE StatsCollector final {
  void NotifySafePointForConservativeCollection();

  // Indicates a new garbage collection cycle.
  void NotifyMarkingStarted();
  void NotifyMarkingStarted(CollectionType, IsForcedGC);
  // Indicates that marking of the current garbage collection cycle is
  // completed.
  void NotifyMarkingCompleted(size_t marked_bytes);
  // Indicates the end of a garbage collection cycle. This means that sweeping
  // is finished at this point.
  const Event& NotifySweepingCompleted();
  void NotifySweepingCompleted();

  // Size of live objects in bytes on the heap. Based on the most recent marked
  // bytes and the bytes allocated since last marking.

@@ -82,6 +215,8 @@ class V8_EXPORT_PRIVATE StatsCollector final {

  double GetRecentAllocationSpeedInBytesPerMs() const;

  const Event& GetPreviousEventForTesting() const { return previous_; }

 private:
  enum class GarbageCollectionState : uint8_t {
    kNotRunning,
@@ -128,6 +263,97 @@ void StatsCollector::ForAllAllocationObservers(Callback callback) {
  }
}

template <StatsCollector::TraceCategory trace_category,
          StatsCollector::ScopeContext scope_category>
constexpr const char*
StatsCollector::InternalScope<trace_category, scope_category>::TraceCategory() {
  switch (trace_category) {
    case kEnabled:
      return "cppgc";
    case kDisabled:
      return TRACE_DISABLED_BY_DEFAULT("cppgc");
  }
}

template <StatsCollector::TraceCategory trace_category,
          StatsCollector::ScopeContext scope_category>
template <typename... Args>
void StatsCollector::InternalScope<trace_category, scope_category>::StartTrace(
    Args... args) {
  if (trace_category == StatsCollector::TraceCategory::kEnabled)
    StartTraceImpl(args...);
}

template <StatsCollector::TraceCategory trace_category,
          StatsCollector::ScopeContext scope_category>
void StatsCollector::InternalScope<trace_category,
                                   scope_category>::StopTrace() {
  if (trace_category == StatsCollector::TraceCategory::kEnabled)
    StopTraceImpl();
}

template <StatsCollector::TraceCategory trace_category,
          StatsCollector::ScopeContext scope_category>
void StatsCollector::InternalScope<trace_category,
                                   scope_category>::StartTraceImpl() {
  TRACE_EVENT_BEGIN0(
      TraceCategory(),
      GetScopeName(scope_id_, stats_collector_->current_.collection_type));
}

template <StatsCollector::TraceCategory trace_category,
          StatsCollector::ScopeContext scope_category>
template <typename Value1>
void StatsCollector::InternalScope<
    trace_category, scope_category>::StartTraceImpl(const char* k1, Value1 v1) {
  TRACE_EVENT_BEGIN1(
      TraceCategory(),
      GetScopeName(scope_id_, stats_collector_->current_.collection_type), k1,
      v1);
}

template <StatsCollector::TraceCategory trace_category,
          StatsCollector::ScopeContext scope_category>
template <typename Value1, typename Value2>
void StatsCollector::InternalScope<
    trace_category, scope_category>::StartTraceImpl(const char* k1, Value1 v1,
                                                    const char* k2, Value2 v2) {
  TRACE_EVENT_BEGIN2(
      TraceCategory(),
      GetScopeName(scope_id_, stats_collector_->current_.collection_type), k1,
      v1, k2, v2);
}

template <StatsCollector::TraceCategory trace_category,
          StatsCollector::ScopeContext scope_category>
void StatsCollector::InternalScope<trace_category,
                                   scope_category>::StopTraceImpl() {
  TRACE_EVENT_END2(
      TraceCategory(),
      GetScopeName(scope_id_, stats_collector_->current_.collection_type),
      "epoch", stats_collector_->current_.epoch, "forced",
      stats_collector_->current_.is_forced_gc == IsForcedGC::kForced);
}

template <StatsCollector::TraceCategory trace_category,
          StatsCollector::ScopeContext scope_category>
void StatsCollector::InternalScope<trace_category,
                                   scope_category>::IncreaseScopeTime() {
  DCHECK_NE(GarbageCollectionState::kNotRunning, stats_collector_->gc_state_);
  v8::base::TimeDelta time = v8::base::TimeTicks::Now() - start_time_;
  if (scope_category == StatsCollector::ScopeContext::kMutatorThread) {
    stats_collector_->current_.scope_data[scope_id_] += time;
    return;
  }
  // scope_category == StatsCollector::ScopeContext::kConcurrentThread
  using Atomic32 = v8::base::Atomic32;
  const int64_t ms = time.InMicroseconds();
  DCHECK(ms <= std::numeric_limits<Atomic32>::max());
  v8::base::Relaxed_AtomicIncrement(
      &stats_collector_->current_.concurrent_scope_data[scope_id_],
      static_cast<Atomic32>(ms));
}

} // namespace internal
} // namespace cppgc
src/heap/cppgc/trace-event.h (new file, 241 lines)
@@ -0,0 +1,241 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_TRACE_EVENT_H_
#define V8_HEAP_CPPGC_TRACE_EVENT_H_

#if CPPGC_BUILD_IN_V8
#include "src/tracing/trace-event.h"
#else
// This is a subset of src/tracing/trace-event.h required to support
// tracing in the cppgc standalone library using TracingController.

#include "base/trace_event/common/trace_event_common.h"
#include "include/cppgc/platform.h"
#include "src/base/atomicops.h"
#include "src/base/macros.h"

// This header file defines implementation details of how the trace macros in
// trace_event_common.h collect and store trace events. Anything not
// implementation-specific should go in trace_macros_common.h instead of here.

// The pointer returned from GetCategoryGroupEnabled() points to a
// value with zero or more of the following bits. Used in this class only.
// The TRACE_EVENT macros should only use the value as a bool.
// These values must be in sync with macro values in trace_log.h in
// chromium.
enum CategoryGroupEnabledFlags {
  // Category group enabled for the recording mode.
  kEnabledForRecording_CategoryGroupEnabledFlags = 1 << 0,
  // Category group enabled by SetEventCallbackEnabled().
  kEnabledForEventCallback_CategoryGroupEnabledFlags = 1 << 2,
};

#define INTERNAL_TRACE_EVENT_CATEGORY_GROUP_ENABLED_FOR_RECORDING_MODE() \
  TRACE_EVENT_API_LOAD_CATEGORY_GROUP_ENABLED() &                        \
      (kEnabledForRecording_CategoryGroupEnabledFlags |                  \
       kEnabledForEventCallback_CategoryGroupEnabledFlags)

////////////////////////////////////////////////////////////////////////////////
// Implementation specific tracing API definitions.

// Get a pointer to the enabled state of the given trace category. Only
// long-lived literal strings should be given as the category group. The
// returned pointer can be held permanently in a local static for example. If
// the unsigned char is non-zero, tracing is enabled. If tracing is enabled,
// TRACE_EVENT_API_ADD_TRACE_EVENT can be called. It's OK if tracing is disabled
// between the load of the tracing state and the call to
// TRACE_EVENT_API_ADD_TRACE_EVENT, because this flag only provides an early out
// for best performance when tracing is disabled.
// const uint8_t*
// TRACE_EVENT_API_GET_CATEGORY_GROUP_ENABLED(const char* category_group)
#define TRACE_EVENT_API_GET_CATEGORY_GROUP_ENABLED \
  platform->GetTracingController()->GetCategoryGroupEnabled

// Add a trace event to the platform tracing system.
// uint64_t TRACE_EVENT_API_ADD_TRACE_EVENT(
//     char phase,
//     const uint8_t* category_group_enabled,
//     const char* name,
//     const char* scope,
//     uint64_t id,
//     uint64_t bind_id,
//     int num_args,
//     const char** arg_names,
//     const uint8_t* arg_types,
//     const uint64_t* arg_values,
//     unsigned int flags)
#define TRACE_EVENT_API_ADD_TRACE_EVENT cppgc::internal::AddTraceEventImpl

// Defines atomic operations used internally by the tracing system.
#define TRACE_EVENT_API_ATOMIC_WORD v8::base::AtomicWord
#define TRACE_EVENT_API_ATOMIC_LOAD(var) v8::base::Relaxed_Load(&(var))
#define TRACE_EVENT_API_ATOMIC_STORE(var, value) \
  v8::base::Relaxed_Store(&(var), (value))
#define TRACE_EVENT_API_LOAD_CATEGORY_GROUP_ENABLED()                \
  v8::base::Relaxed_Load(reinterpret_cast<const v8::base::Atomic8*>( \
      INTERNAL_TRACE_EVENT_UID(category_group_enabled)))

////////////////////////////////////////////////////////////////////////////////

// Implementation detail: trace event macros create temporary variables
// to keep instrumentation overhead low. These macros give each temporary
// variable a unique name based on the line number to prevent name collisions.
#define INTERNAL_TRACE_EVENT_UID3(a, b) trace_event_unique_##a##b
#define INTERNAL_TRACE_EVENT_UID2(a, b) INTERNAL_TRACE_EVENT_UID3(a, b)
#define INTERNAL_TRACE_EVENT_UID(name_prefix) \
  INTERNAL_TRACE_EVENT_UID2(name_prefix, __LINE__)

// Implementation detail: internal macro to create static category.
// No barriers are needed, because this code is designed to operate safely
// even when the unsigned char* points to garbage data (which may be the case
// on processors without cache coherency).
#define INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO_CUSTOM_VARIABLES(             \
    category_group, atomic, category_group_enabled)                          \
  category_group_enabled =                                                   \
      reinterpret_cast<const uint8_t*>(TRACE_EVENT_API_ATOMIC_LOAD(atomic)); \
  if (!category_group_enabled) {                                             \
    category_group_enabled =                                                 \
        TRACE_EVENT_API_GET_CATEGORY_GROUP_ENABLED(category_group);          \
    TRACE_EVENT_API_ATOMIC_STORE(                                            \
        atomic, reinterpret_cast<TRACE_EVENT_API_ATOMIC_WORD>(               \
                    category_group_enabled));                                \
  }

#define INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(category_group)             \
  static TRACE_EVENT_API_ATOMIC_WORD INTERNAL_TRACE_EVENT_UID(atomic) = 0; \
  const uint8_t* INTERNAL_TRACE_EVENT_UID(category_group_enabled);         \
  INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO_CUSTOM_VARIABLES(                 \
      category_group, INTERNAL_TRACE_EVENT_UID(atomic),                    \
      INTERNAL_TRACE_EVENT_UID(category_group_enabled));

// Implementation detail: internal macro to create static category and add
// event if the category is enabled.
#define INTERNAL_TRACE_EVENT_ADD(phase, category_group, name, flags, ...)     \
  DCHECK_NOT_NULL(name);                                                      \
  do {                                                                        \
    cppgc::Platform* platform = heap_.platform();                             \
    INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(category_group);                   \
    if (INTERNAL_TRACE_EVENT_CATEGORY_GROUP_ENABLED_FOR_RECORDING_MODE()) {   \
      cppgc::internal::AddTraceEvent(                                         \
          phase, INTERNAL_TRACE_EVENT_UID(category_group_enabled), name,      \
          nullptr /* scope */, 0 /* id */, 0 /* bind_id */, flags, platform,  \
          ##__VA_ARGS__);                                                     \
    }                                                                         \
  } while (false)
namespace cppgc {
namespace internal {

using ConvertableToTraceFormat = v8::ConvertableToTraceFormat;

class TraceEventHelper {
 public:
  V8_EXPORT_PRIVATE static TracingController* GetTracingController();
};

static V8_INLINE uint64_t AddTraceEventImpl(
    char phase, const uint8_t* category_group_enabled, const char* name,
    const char* scope, uint64_t id, uint64_t bind_id, int32_t num_args,
    const char** arg_names, const uint8_t* arg_types,
    const uint64_t* arg_values, unsigned int flags, Platform* platform) {
  std::unique_ptr<ConvertableToTraceFormat> arg_convertables[2];
  if (num_args > 0 && arg_types[0] == TRACE_VALUE_TYPE_CONVERTABLE) {
    arg_convertables[0].reset(reinterpret_cast<ConvertableToTraceFormat*>(
        static_cast<intptr_t>(arg_values[0])));
  }
  if (num_args > 1 && arg_types[1] == TRACE_VALUE_TYPE_CONVERTABLE) {
    arg_convertables[1].reset(reinterpret_cast<ConvertableToTraceFormat*>(
        static_cast<intptr_t>(arg_values[1])));
  }
  DCHECK_LE(num_args, 2);
  TracingController* controller = platform->GetTracingController();
  return controller->AddTraceEvent(phase, category_group_enabled, name, scope,
                                   id, bind_id, num_args, arg_names, arg_types,
                                   arg_values, arg_convertables, flags);
}

// Define SetTraceValue for each allowed type. It stores the type and value
// in the return arguments. This allows this API to avoid declaring any
// structures so that it is portable to third_party libraries.
// This is the base implementation for integer types (including bool) and enums.
template <typename T>
static V8_INLINE typename std::enable_if<
    std::is_integral<T>::value || std::is_enum<T>::value, void>::type
SetTraceValue(T arg, unsigned char* type, uint64_t* value) {
  *type = std::is_same<T, bool>::value
              ? TRACE_VALUE_TYPE_BOOL
              : std::is_signed<T>::value ? TRACE_VALUE_TYPE_INT
                                         : TRACE_VALUE_TYPE_UINT;
  *value = static_cast<uint64_t>(arg);
}

#define INTERNAL_DECLARE_SET_TRACE_VALUE(actual_type, value_type_id)        \
  static V8_INLINE void SetTraceValue(actual_type arg, unsigned char* type, \
                                      uint64_t* value) {                    \
    *type = value_type_id;                                                  \
    *value = 0;                                                             \
    STATIC_ASSERT(sizeof(arg) <= sizeof(*value));                           \
    memcpy(value, &arg, sizeof(arg));                                       \
  }
INTERNAL_DECLARE_SET_TRACE_VALUE(double, TRACE_VALUE_TYPE_DOUBLE)
INTERNAL_DECLARE_SET_TRACE_VALUE(const char*, TRACE_VALUE_TYPE_STRING)
#undef INTERNAL_DECLARE_SET_TRACE_VALUE

// These AddTraceEvent template functions are defined here instead of in
// the macro, because the arg_values could be temporary objects, such as
// std::string. In order to store pointers to the internal c_str and pass
// through to the tracing API, the arg_values must live throughout these
// procedures.

static V8_INLINE uint64_t AddTraceEvent(char phase,
                                        const uint8_t* category_group_enabled,
                                        const char* name, const char* scope,
                                        uint64_t id, uint64_t bind_id,
                                        unsigned int flags,
                                        Platform* platform) {
  return TRACE_EVENT_API_ADD_TRACE_EVENT(
      phase, category_group_enabled, name, scope, id, bind_id, 0 /* num_args */,
      nullptr, nullptr, nullptr, flags, platform);
}

template <class ARG1_TYPE>
static V8_INLINE uint64_t AddTraceEvent(
    char phase, const uint8_t* category_group_enabled, const char* name,
    const char* scope, uint64_t id, uint64_t bind_id, unsigned int flags,
    Platform* platform, const char* arg1_name, ARG1_TYPE&& arg1_val) {
  const int num_args = 1;
  uint8_t arg_type;
  uint64_t arg_value;
  SetTraceValue(std::forward<ARG1_TYPE>(arg1_val), &arg_type, &arg_value);
  return TRACE_EVENT_API_ADD_TRACE_EVENT(
      phase, category_group_enabled, name, scope, id, bind_id, num_args,
      &arg1_name, &arg_type, &arg_value, flags, platform);
}

template <class ARG1_TYPE, class ARG2_TYPE>
static V8_INLINE uint64_t AddTraceEvent(
    char phase, const uint8_t* category_group_enabled, const char* name,
    const char* scope, uint64_t id, uint64_t bind_id, unsigned int flags,
    Platform* platform, const char* arg1_name, ARG1_TYPE&& arg1_val,
    const char* arg2_name, ARG2_TYPE&& arg2_val) {
  const int num_args = 2;
  const char* arg_names[2] = {arg1_name, arg2_name};
  unsigned char arg_types[2];
  uint64_t arg_values[2];
  SetTraceValue(std::forward<ARG1_TYPE>(arg1_val), &arg_types[0],
                &arg_values[0]);
  SetTraceValue(std::forward<ARG2_TYPE>(arg2_val), &arg_types[1],
                &arg_values[1]);
  return TRACE_EVENT_API_ADD_TRACE_EVENT(
      phase, category_group_enabled, name, scope, id, bind_id, num_args,
      arg_names, arg_types, arg_values, flags, platform);
}

} // namespace internal
} // namespace cppgc

#endif // CPPGC_BUILD_IN_V8

#endif // V8_HEAP_CPPGC_TRACE_EVENT_H_
@@ -109,6 +109,7 @@ v8_source_set("cppgc_unittests_sources") {
    "heap/cppgc/prefinalizer-unittest.cc",
    "heap/cppgc/source-location-unittest.cc",
    "heap/cppgc/stack-unittest.cc",
    "heap/cppgc/stats-collector-scopes-unittest.cc",
    "heap/cppgc/stats-collector-unittest.cc",
    "heap/cppgc/sweeper-unittest.cc",
    "heap/cppgc/test-platform.cc",
@@ -72,7 +72,9 @@ class ConcurrentSweeperTest : public testing::TestWithHeap {
    ResetLinearAllocationBuffers();
    // Pretend do finish marking as StatsCollector verifies that Notify*
    // methods are called in the right order.
    heap->stats_collector()->NotifyMarkingStarted();
    heap->stats_collector()->NotifyMarkingStarted(
        GarbageCollector::Config::CollectionType::kMajor,
        GarbageCollector::Config::IsForcedGC::kNotForced);
    heap->stats_collector()->NotifyMarkingCompleted(0);
    Sweeper& sweeper = heap->sweeper();
    const Sweeper::SweepingConfig sweeping_config{
@@ -46,7 +46,8 @@ class MockTaskRunner : public cppgc::TaskRunner {
class MockPlatform : public cppgc::Platform {
 public:
  explicit MockPlatform(std::shared_ptr<TaskRunner> runner)
      : runner_(std::move(runner)) {}
      : runner_(std::move(runner)),
        tracing_controller_(std::make_unique<TracingController>()) {}

  PageAllocator* GetPageAllocator() override { return nullptr; }
  double MonotonicallyIncreasingTime() override { return 0.0; }

@@ -55,8 +56,13 @@ class MockPlatform : public cppgc::Platform {
    return runner_;
  }

  TracingController* GetTracingController() override {
    return tracing_controller_.get();
  }

 private:
  std::shared_ptr<TaskRunner> runner_;
  std::unique_ptr<TracingController> tracing_controller_;
};

} // namespace
@@ -23,7 +23,9 @@ class FakeGarbageCollector : public GarbageCollector {
  void SetLiveBytes(size_t live_bytes) { live_bytes_ = live_bytes; }

  void CollectGarbage(GarbageCollector::Config config) override {
    stats_collector_->NotifyMarkingStarted();
    stats_collector_->NotifyMarkingStarted(
        GarbageCollector::Config::CollectionType::kMajor,
        GarbageCollector::Config::IsForcedGC::kNotForced);
    stats_collector_->NotifyMarkingCompleted(live_bytes_);
    stats_collector_->NotifySweepingCompleted();
    callcount_++;
@@ -14,7 +14,7 @@ namespace internal {

namespace {

class IncrementalMarkingScheduleTest : public testing::Test {
class IncrementalMarkingScheduleTest : public ::testing::Test {
 public:
  static const size_t kObjectSize;
};
test/unittests/heap/cppgc/stats-collector-scopes-unittest.cc (new file, 287 lines)
@@ -0,0 +1,287 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/cppgc/stats-collector.h"
#include "test/unittests/heap/cppgc/tests.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace cppgc {
namespace internal {

namespace {

class DelegatingTracingControllerImpl : public TracingController {
 public:
  virtual uint64_t AddTraceEvent(
      char phase, const uint8_t* category_enabled_flag, const char* name,
      const char* scope, uint64_t id, uint64_t bind_id, int32_t num_args,
      const char** arg_names, const uint8_t* arg_types,
      const uint64_t* arg_values,
      std::unique_ptr<ConvertableToTraceFormat>* arg_convertables,
      unsigned int flags) {
    if (!check_expectations) return 0;
    static char phases[2] = {'B', 'E'};
    EXPECT_EQ(phases[AddTraceEvent_callcount], phase);
    EXPECT_TRUE(*category_enabled_flag);
    if (expected_name) EXPECT_EQ(0, strcmp(expected_name, name));
    stored_num_args += num_args;
    for (int i = 0; i < num_args; ++i) {
      stored_arg_names.push_back(arg_names[i]);
      stored_arg_types.push_back(arg_types[i]);
      stored_arg_values.push_back(arg_values[i]);
    }
    AddTraceEvent_callcount++;
    return 0;
  }

  static bool check_expectations;
  static size_t AddTraceEvent_callcount;
  static const char* expected_name;
  static int32_t stored_num_args;
  static std::vector<std::string> stored_arg_names;
  static std::vector<uint8_t> stored_arg_types;
  static std::vector<uint64_t> stored_arg_values;
};

bool DelegatingTracingControllerImpl::check_expectations = false;
size_t DelegatingTracingControllerImpl::AddTraceEvent_callcount = 0u;
const char* DelegatingTracingControllerImpl::expected_name = nullptr;
int32_t DelegatingTracingControllerImpl::stored_num_args = 0;
std::vector<std::string> DelegatingTracingControllerImpl::stored_arg_names;
std::vector<uint8_t> DelegatingTracingControllerImpl::stored_arg_types;
std::vector<uint64_t> DelegatingTracingControllerImpl::stored_arg_values;

class CppgcTracingScopesTest : public testing::TestWithHeap {
  using Config = Marker::MarkingConfig;

 public:
  CppgcTracingScopesTest() {
    SetTracingController(std::make_unique<DelegatingTracingControllerImpl>());
  }

  void StartGC() {
    Config config = {Config::CollectionType::kMajor,
                     Config::StackState::kNoHeapPointers,
                     Config::MarkingType::kIncremental};
    GetMarkerRef() = MarkerFactory::CreateAndStartMarking<Marker>(
        Heap::From(GetHeap())->AsBase(), GetPlatformHandle().get(), config);
    DelegatingTracingControllerImpl::check_expectations = true;
  }

  void EndGC() {
    DelegatingTracingControllerImpl::check_expectations = false;
    GetMarkerRef()->FinishMarking(Config::StackState::kNoHeapPointers);
    GetMarkerRef().reset();
    Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted();
  }

  void ResetTestTracingController(const char* expected_name = nullptr) {
    DelegatingTracingControllerImpl::AddTraceEvent_callcount = 0u;
    DelegatingTracingControllerImpl::stored_num_args = 0;
    DelegatingTracingControllerImpl::stored_arg_names.clear();
    DelegatingTracingControllerImpl::stored_arg_types.clear();
    DelegatingTracingControllerImpl::stored_arg_values.clear();
    DelegatingTracingControllerImpl::expected_name = expected_name;
  }

  void FindArgument(std::string name, uint8_t type, uint64_t value) {
    int i = 0;
    for (; i < DelegatingTracingControllerImpl::stored_num_args; ++i) {
      if (name.compare(DelegatingTracingControllerImpl::stored_arg_names[i]) ==
          0)
        break;
    }
    EXPECT_LT(i, DelegatingTracingControllerImpl::stored_num_args);
    EXPECT_EQ(type, DelegatingTracingControllerImpl::stored_arg_types[i]);
    EXPECT_EQ(value, DelegatingTracingControllerImpl::stored_arg_values[i]);
  }
};

} // namespace
TEST_F(CppgcTracingScopesTest, DisabledScope) {
  StartGC();
  ResetTestTracingController();
  {
    StatsCollector::DisabledScope scope(
        *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1);
  }
  EXPECT_EQ(0u, DelegatingTracingControllerImpl::AddTraceEvent_callcount);
  EndGC();
}

TEST_F(CppgcTracingScopesTest, EnabledScope) {
  {
    StartGC();
    ResetTestTracingController("CppGC.MainThreadScopeForTests1");
    {
      StatsCollector::EnabledScope scope(
          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1);
    }
    EXPECT_EQ(2u, DelegatingTracingControllerImpl::AddTraceEvent_callcount);
    EndGC();
  }
  {
    StartGC();
    ResetTestTracingController("CppGC.MainThreadScopeForTests2");
    {
      StatsCollector::EnabledScope scope(
          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests2);
    }
    EXPECT_EQ(2u, DelegatingTracingControllerImpl::AddTraceEvent_callcount);
    EndGC();
  }
}

TEST_F(CppgcTracingScopesTest, EnabledScopeWithArgs) {
  // Scopes always add 2 arguments: epoch and is_forced_gc.
  {
    StartGC();
    ResetTestTracingController();
    {
      StatsCollector::EnabledScope scope(
          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1);
    }
    EXPECT_EQ(2, DelegatingTracingControllerImpl::stored_num_args);
    EndGC();
  }
  {
    StartGC();
    ResetTestTracingController();
    {
      StatsCollector::EnabledScope scope(
          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
          "arg1", 1);
    }
    EXPECT_EQ(3, DelegatingTracingControllerImpl::stored_num_args);
    EndGC();
  }
  {
    StartGC();
    ResetTestTracingController();
    {
      StatsCollector::EnabledScope scope(
          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
          "arg1", 1, "arg2", 2);
    }
    EXPECT_EQ(4, DelegatingTracingControllerImpl::stored_num_args);
    EndGC();
  }
}
TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
  {
    StartGC();
    ResetTestTracingController();
    {
      StatsCollector::EnabledScope scope(
          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
          "uint_arg", 13u, "bool_arg", false);
    }
    FindArgument("uint_arg", TRACE_VALUE_TYPE_UINT, 13);
    FindArgument("bool_arg", TRACE_VALUE_TYPE_BOOL, false);
    EndGC();
  }
  {
    StartGC();
    ResetTestTracingController();
    {
      StatsCollector::EnabledScope scope(
          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
          "neg_int_arg", -5, "pos_int_arg", 7);
    }
    FindArgument("neg_int_arg", TRACE_VALUE_TYPE_INT, -5);
    FindArgument("pos_int_arg", TRACE_VALUE_TYPE_INT, 7);
    EndGC();
  }
  {
    StartGC();
    ResetTestTracingController();
    double double_value = 1.2;
    const char* string_value = "test";
    {
      StatsCollector::EnabledScope scope(
          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
          "string_arg", string_value, "double_arg", double_value);
    }
    FindArgument("string_arg", TRACE_VALUE_TYPE_STRING,
                 reinterpret_cast<uint64_t>(string_value));
    FindArgument("double_arg", TRACE_VALUE_TYPE_DOUBLE,
                 *reinterpret_cast<uint64_t*>(&double_value));
    EndGC();
  }
}

TEST_F(CppgcTracingScopesTest, InitalScopesAreZero) {
  StartGC();
  EndGC();
  const StatsCollector::Event& event =
      Heap::From(GetHeap())->stats_collector()->GetPreviousEventForTesting();
  for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
    EXPECT_TRUE(event.scope_data[i].IsZero());
  }
  for (int i = 0; i < StatsCollector::kNumConcurrentScopeIds; ++i) {
    EXPECT_EQ(0, event.concurrent_scope_data[i]);
  }
}

TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
  for (int scope_id = 0; scope_id < StatsCollector::kNumScopeIds; ++scope_id) {
    StartGC();
    DelegatingTracingControllerImpl::check_expectations = false;
    {
      StatsCollector::EnabledScope scope(
          *Heap::From(GetHeap()),
          static_cast<StatsCollector::ScopeId>(scope_id));
      v8::base::TimeTicks time = v8::base::TimeTicks::Now();
      while (time == v8::base::TimeTicks::Now()) {
        // Force time to progress before destroying scope.
      }
    }
    EndGC();
    const StatsCollector::Event& event =
        Heap::From(GetHeap())->stats_collector()->GetPreviousEventForTesting();
    for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
      if (i == scope_id)
        EXPECT_LT(v8::base::TimeDelta(), event.scope_data[i]);
      else
        EXPECT_TRUE(event.scope_data[i].IsZero());
    }
    for (int i = 0; i < StatsCollector::kNumConcurrentScopeIds; ++i) {
      EXPECT_EQ(0, event.concurrent_scope_data[i]);
    }
  }
}

TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
  for (int scope_id = 0; scope_id < StatsCollector::kNumConcurrentScopeIds;
       ++scope_id) {
    StartGC();
    DelegatingTracingControllerImpl::check_expectations = false;
    {
      StatsCollector::EnabledConcurrentScope scope(
          *Heap::From(GetHeap()),
          static_cast<StatsCollector::ConcurrentScopeId>(scope_id));
      v8::base::TimeTicks time = v8::base::TimeTicks::Now();
      while (time == v8::base::TimeTicks::Now()) {
        // Force time to progress before destroying scope.
      }
    }
    EndGC();
    const StatsCollector::Event& event =
        Heap::From(GetHeap())->stats_collector()->GetPreviousEventForTesting();
    for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
      EXPECT_TRUE(event.scope_data[i].IsZero());
    }
    for (int i = 0; i < StatsCollector::kNumConcurrentScopeIds; ++i) {
      if (i == scope_id)
        EXPECT_LT(0, event.concurrent_scope_data[i]);
      else
        EXPECT_EQ(0, event.concurrent_scope_data[i]);
    }
  }
}

} // namespace internal
} // namespace cppgc
@@ -34,16 +34,20 @@ class StatsCollectorTest : public ::testing::Test {
} // namespace

TEST_F(StatsCollectorTest, NoMarkedBytes) {
  stats.NotifyMarkingStarted();
  stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
                             GarbageCollector::Config::IsForcedGC::kNotForced);
  stats.NotifyMarkingCompleted(kNoMarkedBytes);
  auto event = stats.NotifySweepingCompleted();
  stats.NotifySweepingCompleted();
  auto event = stats.GetPreviousEventForTesting();
  EXPECT_EQ(0u, event.marked_bytes);
}

TEST_F(StatsCollectorTest, EventPrevGCMarkedObjectSize) {
  stats.NotifyMarkingStarted();
  stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
                             GarbageCollector::Config::IsForcedGC::kNotForced);
  stats.NotifyMarkingCompleted(1024);
  auto event = stats.NotifySweepingCompleted();
  stats.NotifySweepingCompleted();
  auto event = stats.GetPreviousEventForTesting();
  EXPECT_EQ(1024u, event.marked_bytes);
}

@@ -62,7 +66,8 @@ TEST_F(StatsCollectorTest, AlllocationReportAboveAllocationThresholdBytes) {
}

TEST_F(StatsCollectorTest, InitialAllocatedObjectSize) {
  stats.NotifyMarkingStarted();
  stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
                             GarbageCollector::Config::IsForcedGC::kNotForced);
  EXPECT_EQ(0u, stats.allocated_object_size());
  stats.NotifyMarkingCompleted(kNoMarkedBytes);
  EXPECT_EQ(0u, stats.allocated_object_size());

@@ -71,7 +76,8 @@ TEST_F(StatsCollectorTest, InitialAllocatedObjectSize) {
}

TEST_F(StatsCollectorTest, AllocatedObjectSize) {
  stats.NotifyMarkingStarted();
  stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
                             GarbageCollector::Config::IsForcedGC::kNotForced);
  FakeAllocate(kMinReportedSize);
  EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
  stats.NotifyMarkingCompleted(kMinReportedSize);

@@ -81,7 +87,8 @@ TEST_F(StatsCollectorTest, AllocatedObjectSize) {
}

TEST_F(StatsCollectorTest, AllocatedObjectSizeNoMarkedBytes) {
  stats.NotifyMarkingStarted();
  stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
                             GarbageCollector::Config::IsForcedGC::kNotForced);
  FakeAllocate(kMinReportedSize);
  EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
  stats.NotifyMarkingCompleted(kNoMarkedBytes);

@@ -91,7 +98,8 @@ TEST_F(StatsCollectorTest, AllocatedObjectSizeNoMarkedBytes) {
}

TEST_F(StatsCollectorTest, AllocatedObjectSizeAllocateAfterMarking) {
  stats.NotifyMarkingStarted();
  stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
                             GarbageCollector::Config::IsForcedGC::kNotForced);
  FakeAllocate(kMinReportedSize);
  EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
  stats.NotifyMarkingCompleted(kMinReportedSize);

@@ -127,7 +135,8 @@ TEST_F(StatsCollectorTest, ObserveAllocatedObjectSizeIncreaseAndDecrease) {
namespace {

void FakeGC(StatsCollector* stats, size_t marked_bytes) {
  stats->NotifyMarkingStarted();
  stats->NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor,
                              GarbageCollector::Config::IsForcedGC::kNotForced);
  stats->NotifyMarkingCompleted(marked_bytes);
  stats->NotifySweepingCompleted();
}
@@ -46,7 +46,9 @@ class SweeperTest : public testing::TestWithHeap {
    Sweeper& sweeper = heap->sweeper();
    // Pretend do finish marking as StatsCollector verifies that Notify*
    // methods are called in the right order.
    heap->stats_collector()->NotifyMarkingStarted();
    heap->stats_collector()->NotifyMarkingStarted(
        GarbageCollector::Config::CollectionType::kMajor,
        GarbageCollector::Config::IsForcedGC::kNotForced);
    heap->stats_collector()->NotifyMarkingCompleted(0);
    const Sweeper::SweepingConfig sweeping_config{
        Sweeper::SweepingConfig::SweepingType::kAtomic,
@@ -12,8 +12,10 @@ namespace cppgc {
namespace internal {
namespace testing {

TestPlatform::TestPlatform()
    : DefaultPlatform(0, DefaultPlatform::IdleTaskSupport::kEnabled) {}
TestPlatform::TestPlatform(
    std::unique_ptr<v8::TracingController> tracing_controller)
    : DefaultPlatform(0 /* thread_pool_size */, IdleTaskSupport::kEnabled,
                      std::move(tracing_controller)) {}

std::unique_ptr<cppgc::JobHandle> TestPlatform::PostJob(
    cppgc::TaskPriority priority, std::unique_ptr<cppgc::JobTask> job_task) {
@@ -23,7 +23,8 @@ class TestPlatform : public DefaultPlatform {
    TestPlatform* platform_;
  };

  TestPlatform();
  TestPlatform(
      std::unique_ptr<v8::TracingController> tracing_controller = nullptr);

  std::unique_ptr<cppgc::JobHandle> PostJob(
      cppgc::TaskPriority priority,
@@ -18,7 +18,8 @@ std::shared_ptr<TestPlatform> TestWithPlatform::platform_;

// static
void TestWithPlatform::SetUpTestSuite() {
  platform_ = std::make_unique<TestPlatform>();
  platform_ = std::make_unique<TestPlatform>(
      std::make_unique<DelegatingTracingController>());
  cppgc::InitializeProcess(platform_->GetPageAllocator());
}
@@ -8,12 +8,41 @@

#include "include/cppgc/heap.h"
#include "include/cppgc/platform.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/trace-event.h"
#include "test/unittests/heap/cppgc/test-platform.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace cppgc {
namespace internal {
namespace testing {
class DelegatingTracingController : public TracingController {
 public:
  virtual const uint8_t* GetCategoryGroupEnabled(const char* name) {
    static uint8_t yes = 1;
    return &yes;
  }

  virtual uint64_t AddTraceEvent(
      char phase, const uint8_t* category_enabled_flag, const char* name,
      const char* scope, uint64_t id, uint64_t bind_id, int32_t num_args,
      const char** arg_names, const uint8_t* arg_types,
      const uint64_t* arg_values,
      std::unique_ptr<ConvertableToTraceFormat>* arg_convertables,
      unsigned int flags) {
    return tracing_controller_->AddTraceEvent(
        phase, category_enabled_flag, name, scope, id, bind_id, num_args,
        arg_names, arg_types, arg_values, arg_convertables, flags);
  }

  void SetTracingController(
      std::unique_ptr<TracingController> tracing_controller_impl) {
    tracing_controller_ = std::move(tracing_controller_impl);
  }

 private:
  std::unique_ptr<TracingController> tracing_controller_ =
      std::make_unique<TracingController>();
};

class TestWithPlatform : public ::testing::Test {
 protected:

@@ -24,6 +53,12 @@ class TestWithPlatform : public ::testing::Test {

  std::shared_ptr<TestPlatform> GetPlatformHandle() const { return platform_; }

  void SetTracingController(
      std::unique_ptr<TracingController> tracing_controller_impl) {
    static_cast<DelegatingTracingController*>(platform_->GetTracingController())
        ->SetTracingController(std::move(tracing_controller_impl));
  }

 protected:
  static std::shared_ptr<TestPlatform> platform_;
};
@@ -27,6 +27,7 @@ class WeakContainerTest : public testing::TestWithHeap {

  void FinishMarking(Config::StackState stack_state) {
    GetMarkerRef()->FinishMarking(stack_state);
    GetMarkerRef().reset();
    Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted();
  }
};