cppgc: Use tracing scopes
The scopes themselves mostly have the same coverage as the current
scopes in Blink. A few exceptions due to encapsulation exist and are
highlighted as comments on the CL.

Bug: chromium:1056170
Change-Id: I48af2cfdfd53a8caa1ab5d805d377f6f13a825bc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2540552
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71285}
Parent: 6a1a3a101e
Commit: 548fe20837
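The pattern this CL rolls out across cppgc is the RAII tracing scope: a stack-allocated object samples the clock on construction and, on destruction, attributes the elapsed time to a fixed scope id (enabled scopes additionally emit a begin/end trace-event pair). A minimal, self-contained sketch of the idea follows; all names here are hypothetical stand-ins, not the real StatsCollector internals.

#include <chrono>
#include <cstdio>

// Hypothetical stand-ins; the real machinery lives in
// src/heap/cppgc/stats-collector.h.
enum ScopeId { kExamplePhase, kNumScopeIds };

struct Collector {
  double scope_ms[kNumScopeIds] = {};
  void AddTime(ScopeId id, double ms) { scope_ms[id] += ms; }
};

// RAII: construction samples the clock; destruction attributes the elapsed
// time to the scope id. An EnabledScope would also emit a begin/end
// trace-event pair here.
class TracingScope {
 public:
  TracingScope(Collector& c, ScopeId id)
      : c_(c), id_(id), start_(std::chrono::steady_clock::now()) {}
  ~TracingScope() {
    std::chrono::duration<double, std::milli> elapsed =
        std::chrono::steady_clock::now() - start_;
    c_.AddTime(id_, elapsed.count());
  }

 private:
  Collector& c_;
  ScopeId id_;
  std::chrono::steady_clock::time_point start_;
};

int main() {
  Collector collector;
  {
    TracingScope scope(collector, kExamplePhase);  // measures this block
    // ... phase work ...
  }
  std::printf("kExamplePhase: %.3f ms\n", collector.scope_ms[kExamplePhase]);
}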
@@ -206,16 +206,26 @@ void CppHeap::TracePrologue(TraceFlags flags) {
 }
 
 bool CppHeap::AdvanceTracing(double deadline_in_ms) {
-  // TODO(chromium:1056170): Replace std::numeric_limits<size_t>::max() with a
-  // proper deadline when unified heap transitions to bytes-based deadline.
-  marking_done_ = marker_->AdvanceMarkingWithMaxDuration(
-      v8::base::TimeDelta::FromMillisecondsD(deadline_in_ms));
+  v8::base::TimeDelta deadline =
+      is_in_final_pause_
+          ? v8::base::TimeDelta::Max()
+          : v8::base::TimeDelta::FromMillisecondsD(deadline_in_ms);
+  cppgc::internal::StatsCollector::EnabledScope stats_scope(
+      AsBase(),
+      is_in_final_pause_
+          ? cppgc::internal::StatsCollector::kAtomicPauseMarkTransitiveClosure
+          : cppgc::internal::StatsCollector::kUnifiedMarkingStep);
+  // TODO(chromium:1056170): Replace when unified heap transitions to
+  // bytes-based deadline.
+  marking_done_ = marker_->AdvanceMarkingWithMaxDuration(deadline);
+  DCHECK_IMPLIES(is_in_final_pause_, marking_done_);
   return marking_done_;
 }
 
 bool CppHeap::IsTracingDone() { return marking_done_; }
 
 void CppHeap::EnterFinalPause(EmbedderStackState stack_state) {
+  is_in_final_pause_ = true;
   marker_->EnterAtomicPause(stack_state);
   if (compactor_.CancelIfShouldNotCompact(
           UnifiedHeapMarker::MarkingConfig::MarkingType::kAtomic,
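Worth noting in this hunk: the deadline is chosen before the scope is opened. Inside the final (atomic) pause marking must run to completion, so the budget becomes TimeDelta::Max(); otherwise the embedder's fractional-millisecond budget is honored. A hedged sketch of that selection using standard-library types (ComputeDeadline is illustrative, not part of the CL):

#include <chrono>

// Illustrative only: maps the embedder's deadline_in_ms to an absolute
// time point, with "no deadline" inside the atomic pause.
std::chrono::steady_clock::time_point ComputeDeadline(bool in_final_pause,
                                                      double deadline_in_ms) {
  using namespace std::chrono;
  if (in_final_pause) return steady_clock::time_point::max();
  return steady_clock::now() +
         duration_cast<steady_clock::duration>(
             duration<double, std::milli>(deadline_in_ms));
}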
@@ -225,30 +235,40 @@ void CppHeap::EnterFinalPause(EmbedderStackState stack_state) {
 }
 
 void CppHeap::TraceEpilogue(TraceSummary* trace_summary) {
+  CHECK(is_in_final_pause_);
   CHECK(marking_done_);
   {
     // Weakness callbacks and pre-finalizers are forbidden from allocating
     // objects.
     cppgc::internal::ObjectAllocator::NoAllocationScope no_allocation_scope_(
         object_allocator_);
     marker_->LeaveAtomicPause();
-    prefinalizer_handler()->InvokePreFinalizers();
+    is_in_final_pause_ = false;
   }
-  marker_.reset();
-  // TODO(chromium:1056170): replace build flag with dedicated flag.
-#if DEBUG
-  UnifiedHeapMarkingVerifier verifier(*this);
-  verifier.Run(cppgc::Heap::StackState::kNoHeapPointers);
-#endif
-  cppgc::internal::Sweeper::SweepingConfig::CompactableSpaceHandling
-      compactable_space_handling = compactor_.CompactSpacesIfEnabled();
   {
-    NoGCScope no_gc(*this);
-    const cppgc::internal::Sweeper::SweepingConfig sweeping_config{
-        cppgc::internal::Sweeper::SweepingConfig::SweepingType::
-            kIncrementalAndConcurrent,
-        compactable_space_handling};
-    sweeper().Start(sweeping_config);
+    cppgc::internal::StatsCollector::EnabledScope stats(
+        AsBase(), cppgc::internal::StatsCollector::kAtomicPauseSweepAndCompact);
+
+    {
+      cppgc::internal::ObjectAllocator::NoAllocationScope no_allocation_scope_(
+          object_allocator_);
+      prefinalizer_handler()->InvokePreFinalizers();
+    }
+    marker_.reset();
+    // TODO(chromium:1056170): replace build flag with dedicated flag.
+#if DEBUG
+    UnifiedHeapMarkingVerifier verifier(*this);
+    verifier.Run(cppgc::Heap::StackState::kNoHeapPointers);
+#endif
+
+    {
+      NoGCScope no_gc(*this);
+      cppgc::internal::Sweeper::SweepingConfig::CompactableSpaceHandling
+          compactable_space_handling = compactor_.CompactSpacesIfEnabled();
+      const cppgc::internal::Sweeper::SweepingConfig sweeping_config{
+          cppgc::internal::Sweeper::SweepingConfig::SweepingType::
+              kIncrementalAndConcurrent,
+          compactable_space_handling};
+      sweeper().Start(sweeping_config);
+    }
   }
   sweeper().NotifyDoneIfNeeded();
 }
@@ -58,6 +58,7 @@ class V8_EXPORT_PRIVATE CppHeap final : public cppgc::internal::HeapBase,
 
   Isolate& isolate_;
   bool marking_done_ = false;
+  bool is_in_final_pause_ = false;
 };
 
 }  // namespace internal
@@ -16,6 +16,7 @@
 #include "src/heap/cppgc/heap-page.h"
 #include "src/heap/cppgc/heap-space.h"
 #include "src/heap/cppgc/raw-heap.h"
+#include "src/heap/cppgc/stats-collector.h"
 
 namespace cppgc {
 namespace internal {
@@ -483,6 +484,9 @@ bool Compactor::CancelIfShouldNotCompact(
 Compactor::CompactableSpaceHandling Compactor::CompactSpacesIfEnabled() {
   if (!is_enabled_) return CompactableSpaceHandling::kSweep;
 
+  StatsCollector::DisabledScope stats_scope(
+      *heap_.heap(), StatsCollector::kAtomicPauseCompaction);
+
   MovableReferences movable_references(*heap_.heap());
 
   CompactionWorklists::MovableReferencesWorklist::Local local(
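A DisabledScope is used here rather than an EnabledScope. Judging by the unittests later in this CL, both variants account wall time to their scope id within the current GC event; the difference appears to be that a DisabledScope reports under a disabled-by-default trace category, so by default no events reach the tracing controller (the DisabledScope test expects AddTraceEvent_callcount == 0). A simplified sketch of that split, with hypothetical stand-ins rather than the real InternalScope:

#include <cstdint>
#include <cstdio>

// Hypothetical stand-ins for the tracing controller, clock, and accounting.
static int64_t g_now = 0;
int64_t NowMicros() { return ++g_now; }
void EmitTraceEvent(const char* phase, int id) {
  std::printf("%s %d\n", phase, id);
}
void AccountTime(int /*id*/, int64_t /*micros*/) { /* fold into GC event */ }

template <bool kEmitTraceEvents>
class Scope {
 public:
  explicit Scope(int id) : id_(id), start_(NowMicros()) {
    if (kEmitTraceEvents) EmitTraceEvent("B", id_);  // begin event
  }
  ~Scope() {
    if (kEmitTraceEvents) EmitTraceEvent("E", id_);  // end event
    AccountTime(id_, NowMicros() - start_);  // both variants record time
  }

 private:
  int id_;
  int64_t start_;
};

using EnabledScope = Scope<true>;    // time + trace events
using DisabledScope = Scope<false>;  // time only (by default)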
@@ -10,6 +10,7 @@
 #include "src/heap/cppgc/liveness-broker.h"
 #include "src/heap/cppgc/marking-state.h"
 #include "src/heap/cppgc/marking-visitor.h"
+#include "src/heap/cppgc/stats-collector.h"
 
 namespace cppgc {
 namespace internal {
@@ -71,6 +72,9 @@ ConcurrentMarkingTask::ConcurrentMarkingTask(
     : concurrent_marker_(concurrent_marker) {}
 
 void ConcurrentMarkingTask::Run(JobDelegate* job_delegate) {
+  StatsCollector::EnabledConcurrentScope stats_scope(
+      concurrent_marker_.heap(), StatsCollector::kConcurrentMarkingStep);
+
   if (!HasWorkForConcurrentMarking(concurrent_marker_.marking_worklists()))
     return;
   ConcurrentMarkingState concurrent_marking_state(
@@ -144,16 +148,22 @@ void ConcurrentMarkingTask::ProcessWorklists(
       return;
     }
 
-    if (!DrainWorklistWithYielding(
-            job_delegate, concurrent_marking_state,
-            concurrent_marker_.incremental_marking_schedule(),
-            concurrent_marking_state.ephemeron_pairs_for_processing_worklist(),
-            [&concurrent_marking_state](
-                const MarkingWorklists::EphemeronPairItem& item) {
-              concurrent_marking_state.ProcessEphemeron(item.key,
-                                                        item.value_desc);
-            })) {
-      return;
+    {
+      StatsCollector::DisabledConcurrentScope stats_scope(
+          concurrent_marker_.heap(),
+          StatsCollector::kConcurrentMarkInvokeEphemeronCallbacks);
+      if (!DrainWorklistWithYielding(
+              job_delegate, concurrent_marking_state,
+              concurrent_marker_.incremental_marking_schedule(),
+              concurrent_marking_state
+                  .ephemeron_pairs_for_processing_worklist(),
+              [&concurrent_marking_state](
+                  const MarkingWorklists::EphemeronPairItem& item) {
+                concurrent_marking_state.ProcessEphemeron(item.key,
+                                                          item.value_desc);
+              })) {
+        return;
+      }
     }
   } while (
       !concurrent_marking_state.marking_worklist().IsLocalAndGlobalEmpty());
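The concurrent variants (EnabledConcurrentScope / DisabledConcurrentScope) take the heap explicitly because they run on job-task threads rather than the mutator thread, and the time they record has to be merged into the main event without racing it. A sketch of the thread-safety aspect only; the atomic accumulator is an assumption for illustration, not the real implementation:

#include <atomic>
#include <cstdint>

// Per-scope accumulator usable from concurrent marking/sweeping tasks.
struct ConcurrentScopeData {
  std::atomic<int64_t> micros{0};
  void Add(int64_t delta) {
    micros.fetch_add(delta, std::memory_order_relaxed);
  }
};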
@@ -69,7 +69,7 @@ HeapBase::HeapBase(
       stats_collector_(std::make_unique<StatsCollector>()),
       stack_(std::make_unique<heap::base::Stack>(
           v8::base::Stack::GetStackStart())),
-      prefinalizer_handler_(std::make_unique<PreFinalizerHandler>()),
+      prefinalizer_handler_(std::make_unique<PreFinalizerHandler>(*this)),
      compactor_(raw_heap_),
      object_allocator_(&raw_heap_, page_backend_.get(),
                        stats_collector_.get()),
@@ -12,6 +12,7 @@
 #include "src/heap/cppgc/marker.h"
 #include "src/heap/cppgc/marking-verifier.h"
 #include "src/heap/cppgc/prefinalizer-handler.h"
+#include "src/heap/cppgc/stats-collector.h"
 
 namespace cppgc {
 
@@ -155,20 +156,28 @@ void Heap::FinalizeGarbageCollection(Config::StackState stack_state) {
   config_.stack_state = stack_state;
   DCHECK(marker_);
   {
-    // Pre finalizers are forbidden from allocating objects. Note that this also
-    // guard atomic pause marking below, meaning that no internal method or
+    // This guards atomic pause marking, meaning that no internal method or
     // external callbacks are allowed to allocate new objects.
     ObjectAllocator::NoAllocationScope no_allocation_scope_(object_allocator_);
     marker_->FinishMarking(stack_state);
-    prefinalizer_handler_->InvokePreFinalizers();
   }
-  marker_.reset();
-  // TODO(chromium:1056170): replace build flag with dedicated flag.
-#if DEBUG
-  MarkingVerifier verifier(*this);
-  verifier.Run(stack_state);
-#endif
   {
+    StatsCollector::EnabledScope stats(
+        *this, StatsCollector::kAtomicPauseSweepAndCompact);
+
+    {
+      // Pre finalizers are forbidden from allocating objects.
+      ObjectAllocator::NoAllocationScope no_allocation_scope_(
+          object_allocator_);
+      prefinalizer_handler_->InvokePreFinalizers();
+    }
+    marker_.reset();
+    // TODO(chromium:1056170): replace build flag with dedicated flag.
+#if DEBUG
+    MarkingVerifier verifier(*this);
+    verifier.Run(stack_state);
+#endif
+
     NoGCScope no_gc(*this);
     const Sweeper::SweepingConfig sweeping_config{
         config_.sweeping_type,
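The sweep-and-compact scope now encloses pre-finalizers, verification, and the start of sweeping, so the resulting trace is hierarchical rather than flat. A self-contained toy showing how nested RAII scopes yield properly nested begin/end pairs; the scope names are taken from this CL's list, while the PrintScope type is hypothetical:

#include <cstdio>

struct PrintScope {
  const char* name;
  explicit PrintScope(const char* n) : name(n) { std::printf("B %s\n", name); }
  ~PrintScope() { std::printf("E %s\n", name); }
};

int main() {
  PrintScope pause("CppGC.AtomicPauseSweepAndCompact");
  {
    PrintScope prefinalizers("CppGC.InvokePreFinalizers");
    // pre-finalizers run here; allocation is forbidden
  }
  // sweeping starts here, still attributed to the outer scope
}  // prints B outer, B inner, E inner, E outer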
@@ -182,5 +191,13 @@ void Heap::PostGarbageCollection() { gc_in_progress_ = false; }
 
 void Heap::DisableHeapGrowingForTesting() { growing_.DisableForTesting(); }
 
+void Heap::FinalizeIncrementalGarbageCollectionIfNeeded(
+    Config::StackState stack_state) {
+  StatsCollector::EnabledScope stats_scope(
+      *this, StatsCollector::kIncrementalMarkingFinalize);
+
+  FinalizeGarbageCollection(stack_state);
+}
+
 }  // namespace internal
 }  // namespace cppgc
@@ -44,10 +44,7 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase,
   void StartGarbageCollection(Config);
   void FinalizeGarbageCollection(Config::StackState);
 
-  void FinalizeIncrementalGarbageCollectionIfNeeded(
-      Config::StackState stack_state) final {
-    FinalizeGarbageCollection(stack_state);
-  }
+  void FinalizeIncrementalGarbageCollectionIfNeeded(Config::StackState) final;
 
   void PostGarbageCollection() final;
 
@@ -59,6 +59,8 @@ bool ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
 void VisitRememberedSlots(HeapBase& heap,
                           MutatorMarkingState& mutator_marking_state) {
 #if defined(CPPGC_YOUNG_GENERATION)
+  StatsCollector::EnabledScope stats_scope(
+      heap(), StatsCollector::kVisitRememberedSets);
   for (void* slot : heap.remembered_slots()) {
     auto& slot_header = BasePage::FromInnerAddress(&heap, slot)
                             ->ObjectHeaderFromInnerAddress(slot);
@@ -200,6 +202,9 @@ void MarkerBase::StartMarking() {
 
   is_marking_started_ = true;
   if (EnterIncrementalMarkingIfNeeded(config_, heap())) {
+    StatsCollector::EnabledScope stats_scope(
+        heap(), StatsCollector::kIncrementalMarkingStartMarking);
+
     // Performing incremental or concurrent marking.
     schedule_.NotifyIncrementalMarkingStart();
     // Scanning the stack is expensive so we only do it at the atomic pause.
@@ -214,6 +219,9 @@ void MarkerBase::StartMarking() {
 }
 
 void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) {
+  StatsCollector::EnabledScope stats_scope(
+      heap(), StatsCollector::kAtomicPauseMarkPrologue);
+
   if (ExitIncrementalMarkingIfNeeded(config_, heap())) {
     // Cancel remaining concurrent/incremental tasks.
     concurrent_marker_->Cancel();
@@ -228,66 +236,102 @@ void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) {
   // is either cleared or the object is retained.
   g_process_mutex.Pointer()->Lock();
 
-  // VisitRoots also resets the LABs.
-  VisitRoots(config_.stack_state);
-  if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
-    mutator_marking_state_.FlushNotFullyConstructedObjects();
-    DCHECK(marking_worklists_.not_fully_constructed_worklist()->IsEmpty());
-  } else {
-    MarkNotFullyConstructedObjects();
+  {
+    StatsCollector::EnabledScope inner_stats_scope(
+        heap(), StatsCollector::kAtomicPauseMarkRoots);
+    // VisitRoots also resets the LABs.
+    VisitRoots(config_.stack_state);
+    if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
+      mutator_marking_state_.FlushNotFullyConstructedObjects();
+      DCHECK(marking_worklists_.not_fully_constructed_worklist()->IsEmpty());
+    } else {
+      MarkNotFullyConstructedObjects();
+    }
   }
 }
 
 void MarkerBase::LeaveAtomicPause() {
+  StatsCollector::EnabledScope stats_scope(
+      heap(), StatsCollector::kAtomicPauseMarkEpilogue);
   DCHECK(!incremental_marking_handle_);
   ResetRememberedSet(heap());
   heap().stats_collector()->NotifyMarkingCompleted(
       // GetOverallMarkedBytes also includes concurrently marked bytes.
       schedule_.GetOverallMarkedBytes());
   is_marking_started_ = false;
-  ProcessWeakness();
+  {
+    // Weakness callbacks are forbidden from allocating objects.
+    ObjectAllocator::NoAllocationScope no_allocation_scope_(
+        heap_.object_allocator());
+    ProcessWeakness();
+  }
   g_process_mutex.Pointer()->Unlock();
 }
 
 void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) {
   DCHECK(is_marking_started_);
   EnterAtomicPause(stack_state);
-  CHECK(ProcessWorklistsWithDeadline(std::numeric_limits<size_t>::max(),
-                                     v8::base::TimeTicks::Max()));
+  {
+    StatsCollector::EnabledScope advance_tracing_scope(
+        heap(), StatsCollector::kAtomicPauseMarkTransitiveClosure);
+    CHECK(ProcessWorklistsWithDeadline(std::numeric_limits<size_t>::max(),
+                                       v8::base::TimeTicks::Max()));
+  }
   mutator_marking_state_.Publish();
   LeaveAtomicPause();
 }
 
 void MarkerBase::ProcessWeakness() {
   DCHECK_EQ(MarkingConfig::MarkingType::kAtomic, config_.marking_type);
 
   heap().GetWeakPersistentRegion().Trace(&visitor());
   // Processing cross-thread handles requires taking the process lock.
   g_process_mutex.Get().AssertHeld();
   heap().GetWeakCrossThreadPersistentRegion().Trace(&visitor());
 
-  // Call weak callbacks on objects that may now be pointing to dead objects.
-  MarkingWorklists::WeakCallbackItem item;
-  LivenessBroker broker = LivenessBrokerFactory::Create();
-  MarkingWorklists::WeakCallbackWorklist::Local& local =
-      mutator_marking_state_.weak_callback_worklist();
-  while (local.Pop(&item)) {
-    item.callback(broker, item.parameter);
+  {
+    StatsCollector::DisabledScope stats_scope(
+        heap(), StatsCollector::kMarkWeakProcessing);
+    // Call weak callbacks on objects that may now be pointing to dead objects.
+    MarkingWorklists::WeakCallbackItem item;
+    LivenessBroker broker = LivenessBrokerFactory::Create();
+    MarkingWorklists::WeakCallbackWorklist::Local& local =
+        mutator_marking_state_.weak_callback_worklist();
+    while (local.Pop(&item)) {
+      item.callback(broker, item.parameter);
+    }
   }
   // Weak callbacks should not add any new objects for marking.
   DCHECK(marking_worklists_.marking_worklist()->IsEmpty());
 }
 
 void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) {
+  StatsCollector::EnabledScope stats_scope(heap(), StatsCollector::kVisitRoots);
+
   // Reset LABs before scanning roots. LABs are cleared to allow
   // ObjectStartBitmap handling without considering LABs.
   heap().object_allocator().ResetLinearAllocationBuffers();
 
-  heap().GetStrongPersistentRegion().Trace(&visitor());
-  if (config_.marking_type == MarkingConfig::MarkingType::kAtomic) {
-    g_process_mutex.Get().AssertHeld();
-    heap().GetStrongCrossThreadPersistentRegion().Trace(&visitor());
+  {
+    StatsCollector::DisabledScope stats_scope(
+        heap(), StatsCollector::kVisitPersistentRoots);
+
+    {
+      StatsCollector::DisabledScope inner_stats_scope(
+          heap(), StatsCollector::kVisitPersistents);
+      heap().GetStrongPersistentRegion().Trace(&visitor());
+    }
+    if (config_.marking_type == MarkingConfig::MarkingType::kAtomic) {
+      StatsCollector::DisabledScope inner_stats_scope(
+          heap(), StatsCollector::kVisitCrossThreadPersistents);
+      g_process_mutex.Get().AssertHeld();
+      heap().GetStrongCrossThreadPersistentRegion().Trace(&visitor());
+    }
   }
 
   if (stack_state != MarkingConfig::StackState::kNoHeapPointers) {
+    StatsCollector::DisabledScope stack_stats_scope(
+        heap(), StatsCollector::kVisitStackRoots);
     heap().stack()->IteratePointers(&stack_visitor());
   }
   if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
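ProcessWeakness() is now wrapped in both a stats scope and a NoAllocationScope: weakness callbacks run while the heap is in an intermediate state, so allocating from them must trap. A sketch of the counter-based guard idea, with hypothetical names; the real scope is ObjectAllocator::NoAllocationScope:

#include <cassert>
#include <cstddef>
#include <new>

static int g_no_alloc_depth = 0;  // >0 means allocation is forbidden

// RAII guard; nests correctly because it is a counter, not a flag.
class NoAllocGuard {
 public:
  NoAllocGuard() { ++g_no_alloc_depth; }
  ~NoAllocGuard() { --g_no_alloc_depth; }
};

void* AllocateObject(std::size_t size) {
  assert(g_no_alloc_depth == 0 && "allocation forbidden in this scope");
  return ::operator new(size);
}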
@@ -308,6 +352,9 @@ bool MarkerBase::IncrementalMarkingStepForTesting(
 }
 
 bool MarkerBase::IncrementalMarkingStep(MarkingConfig::StackState stack_state) {
+  StatsCollector::EnabledScope stats_scope(
+      heap(), StatsCollector::kIncrementalMarkingStep);
+
   if (stack_state == MarkingConfig::StackState::kNoHeapPointers) {
     mutator_marking_state_.FlushNotFullyConstructedObjects();
   }
@@ -333,6 +380,9 @@ bool MarkerBase::AdvanceMarkingWithDeadline(v8::base::TimeDelta max_duration) {
   if (!incremental_marking_disabled_for_testing_) {
     size_t step_size_in_bytes =
         GetNextIncrementalStepDuration(schedule_, heap_);
+    StatsCollector::EnabledScope deadline_scope(
+        heap(), StatsCollector::kIncrementalMarkingWithDeadline, "deadline_ms",
+        max_duration.InMillisecondsF());
     is_done = ProcessWorklistsWithDeadline(
         mutator_marking_state_.marked_bytes() + step_size_in_bytes,
         v8::base::TimeTicks::Now() + max_duration);
@@ -360,70 +410,97 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
       mutator_marking_state_.FlushDiscoveredEphemeronPairs();
     }
 
+    StatsCollector::EnabledScope stats_scope(
+        heap(), StatsCollector::kMarkProcessWorklists);
+
     // Bailout objects may be complicated to trace and thus might take longer
     // than other objects. Therefore we reduce the interval between deadline
     // checks to guarantee the deadline is not exceeded.
-    if (!DrainWorklistWithBytesAndTimeDeadline<kDefaultDeadlineCheckInterval /
-                                               5>(
-            mutator_marking_state_, marked_bytes_deadline, time_deadline,
-            mutator_marking_state_.concurrent_marking_bailout_worklist(),
-            [this](const MarkingWorklists::ConcurrentMarkingBailoutItem& item) {
-              mutator_marking_state_.AccountMarkedBytes(item.bailedout_size);
-              item.callback(&visitor(), item.parameter);
-            })) {
-      return false;
+    {
+      StatsCollector::EnabledScope inner_scope(
+          heap(), StatsCollector::kMarkBailOutObjects);
+      if (!DrainWorklistWithBytesAndTimeDeadline<kDefaultDeadlineCheckInterval /
+                                                 5>(
+              mutator_marking_state_, marked_bytes_deadline, time_deadline,
+              mutator_marking_state_.concurrent_marking_bailout_worklist(),
+              [this](
+                  const MarkingWorklists::ConcurrentMarkingBailoutItem& item) {
+                mutator_marking_state_.AccountMarkedBytes(item.bailedout_size);
+                item.callback(&visitor(), item.parameter);
+              })) {
+        return false;
+      }
     }
 
-    if (!DrainWorklistWithBytesAndTimeDeadline(
-            mutator_marking_state_, marked_bytes_deadline, time_deadline,
-            mutator_marking_state_.previously_not_fully_constructed_worklist(),
-            [this](HeapObjectHeader* header) {
-              mutator_marking_state_.AccountMarkedBytes(*header);
-              DynamicallyTraceMarkedObject<AccessMode::kNonAtomic>(visitor(),
-                                                                   *header);
-            })) {
-      return false;
+    {
+      StatsCollector::EnabledScope inner_scope(
+          heap(), StatsCollector::kMarkProcessNotFullyconstructeddWorklist);
+      if (!DrainWorklistWithBytesAndTimeDeadline(
+              mutator_marking_state_, marked_bytes_deadline, time_deadline,
+              mutator_marking_state_
+                  .previously_not_fully_constructed_worklist(),
+              [this](HeapObjectHeader* header) {
+                mutator_marking_state_.AccountMarkedBytes(*header);
+                DynamicallyTraceMarkedObject<AccessMode::kNonAtomic>(visitor(),
+                                                                     *header);
+              })) {
+        return false;
+      }
     }
 
-    if (!DrainWorklistWithBytesAndTimeDeadline(
-            mutator_marking_state_, marked_bytes_deadline, time_deadline,
-            mutator_marking_state_.marking_worklist(),
-            [this](const MarkingWorklists::MarkingItem& item) {
-              const HeapObjectHeader& header =
-                  HeapObjectHeader::FromPayload(item.base_object_payload);
-              DCHECK(!header.IsInConstruction<AccessMode::kNonAtomic>());
-              DCHECK(header.IsMarked<AccessMode::kNonAtomic>());
-              mutator_marking_state_.AccountMarkedBytes(header);
-              item.callback(&visitor(), item.base_object_payload);
-            })) {
-      return false;
+    {
+      StatsCollector::EnabledScope inner_scope(
+          heap(), StatsCollector::kMarkProcessMarkingWorklist);
+      if (!DrainWorklistWithBytesAndTimeDeadline(
+              mutator_marking_state_, marked_bytes_deadline, time_deadline,
+              mutator_marking_state_.marking_worklist(),
+              [this](const MarkingWorklists::MarkingItem& item) {
+                const HeapObjectHeader& header =
+                    HeapObjectHeader::FromPayload(item.base_object_payload);
+                DCHECK(!header.IsInConstruction<AccessMode::kNonAtomic>());
+                DCHECK(header.IsMarked<AccessMode::kNonAtomic>());
+                mutator_marking_state_.AccountMarkedBytes(header);
+                item.callback(&visitor(), item.base_object_payload);
+              })) {
+        return false;
+      }
     }
 
-    if (!DrainWorklistWithBytesAndTimeDeadline(
-            mutator_marking_state_, marked_bytes_deadline, time_deadline,
-            mutator_marking_state_.write_barrier_worklist(),
-            [this](HeapObjectHeader* header) {
-              mutator_marking_state_.AccountMarkedBytes(*header);
-              DynamicallyTraceMarkedObject<AccessMode::kNonAtomic>(visitor(),
-                                                                   *header);
-            })) {
-      return false;
+    {
+      StatsCollector::EnabledScope inner_scope(
+          heap(), StatsCollector::kMarkProcessWriteBarrierWorklist);
+      if (!DrainWorklistWithBytesAndTimeDeadline(
+              mutator_marking_state_, marked_bytes_deadline, time_deadline,
+              mutator_marking_state_.write_barrier_worklist(),
+              [this](HeapObjectHeader* header) {
+                mutator_marking_state_.AccountMarkedBytes(*header);
+                DynamicallyTraceMarkedObject<AccessMode::kNonAtomic>(visitor(),
+                                                                     *header);
+              })) {
+        return false;
+      }
     }
 
-    if (!DrainWorklistWithBytesAndTimeDeadline(
-            mutator_marking_state_, marked_bytes_deadline, time_deadline,
-            mutator_marking_state_.ephemeron_pairs_for_processing_worklist(),
-            [this](const MarkingWorklists::EphemeronPairItem& item) {
-              mutator_marking_state_.ProcessEphemeron(item.key,
-                                                      item.value_desc);
-            })) {
-      return false;
+    {
+      StatsCollector::EnabledScope stats_scope(
+          heap(), StatsCollector::kMarkInvokeEphemeronCallbacks);
+      if (!DrainWorklistWithBytesAndTimeDeadline(
+              mutator_marking_state_, marked_bytes_deadline, time_deadline,
+              mutator_marking_state_.ephemeron_pairs_for_processing_worklist(),
+              [this](const MarkingWorklists::EphemeronPairItem& item) {
+                mutator_marking_state_.ProcessEphemeron(item.key,
+                                                        item.value_desc);
+              })) {
+        return false;
+      }
    }
  } while (!mutator_marking_state_.marking_worklist().IsLocalAndGlobalEmpty());
  return true;
 }
 
 void MarkerBase::MarkNotFullyConstructedObjects() {
+  StatsCollector::DisabledScope stats_scope(
+      heap(), StatsCollector::kMarkNotFullyConstructedObjects);
   std::unordered_set<HeapObjectHeader*> objects =
       mutator_marking_state_.not_fully_constructed_worklist().Extract();
   for (HeapObjectHeader* object : objects) {
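The drains above all share one shape: process items until either a byte budget or a time budget is exhausted, polling the clock only every few items (bailout objects poll five times as often because a single item can be expensive to trace). A generic sketch under those assumptions, using standard containers instead of the real worklists:

#include <chrono>
#include <cstddef>
#include <deque>

using Clock = std::chrono::steady_clock;

// Returns true if the worklist was fully drained before the deadline.
template <std::size_t kDeadlineCheckInterval = 16, typename T, typename Fn>
bool DrainWithTimeDeadline(std::deque<T>& worklist, Clock::time_point deadline,
                           Fn process) {
  std::size_t processed = 0;
  while (!worklist.empty()) {
    process(worklist.front());
    worklist.pop_front();
    // Checking the clock on every item would dominate; poll periodically.
    if (++processed % kDeadlineCheckInterval == 0 &&
        Clock::now() >= deadline) {
      return worklist.empty();
    }
  }
  return true;
}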
@@ -6,6 +6,8 @@
 
 #include <unordered_set>
 
+#include "src/heap/cppgc/stats-collector.h"
+
 namespace cppgc {
 namespace internal {
 
@@ -19,6 +21,8 @@ void MutatorMarkingState::FlushNotFullyConstructedObjects() {
 }
 
 void MutatorMarkingState::FlushDiscoveredEphemeronPairs() {
+  StatsCollector::EnabledScope stats_scope(
+      heap_, StatsCollector::kMarkFlushEphemeronPairs);
   discovered_ephemeron_pairs_worklist_.Publish();
   if (!discovered_ephemeron_pairs_worklist_.IsGlobalEmpty()) {
     ephemeron_pairs_for_processing_worklist_.Merge(
@@ -120,9 +120,7 @@ class MarkingStateBase {
     return movable_slots_worklist_.get();
   }
 
-#ifdef DEBUG
   HeapBase& heap_;
-#endif  // DEBUG
 
   MarkingWorklists::MarkingWorklist::Local marking_worklist_;
   MarkingWorklists::NotFullyConstructedWorklist&
@@ -150,9 +148,7 @@ MarkingStateBase::MarkingStateBase(HeapBase& heap,
                                    MarkingWorklists& marking_worklists,
                                    CompactionWorklists* compaction_worklists)
     :
-#ifdef DEBUG
       heap_(heap),
-#endif  // DEBUG
       marking_worklist_(marking_worklists.marking_worklist()),
       not_fully_constructed_worklist_(
           *marking_worklists.not_fully_constructed_worklist()),
@@ -134,7 +134,11 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
 
   // 3. Lazily sweep pages of this heap until we find a freed area for
   // this allocation or we finish sweeping all pages of this heap.
-  // TODO(chromium:1056170): Add lazy sweep.
+  // {
+  //   StatsCollector::EnabledScope stats_scope(
+  //       *space->raw_heap()->heap(), StatsCollector::kLazySweepOnAllocation);
+  //   // TODO(chromium:1056170): Add lazy sweep.
+  // }
 
   // 4. Complete sweeping.
   raw_heap_->heap()->sweeper().FinishIfRunning();
@@ -11,6 +11,7 @@
 #include "src/heap/cppgc/heap-page.h"
 #include "src/heap/cppgc/heap.h"
 #include "src/heap/cppgc/liveness-broker.h"
+#include "src/heap/cppgc/stats-collector.h"
 
 namespace cppgc {
 namespace internal {
@@ -29,9 +30,11 @@ bool PreFinalizerRegistrationDispatcher::PreFinalizer::operator==(
   return (object == other.object) && (callback == other.callback);
 }
 
-PreFinalizerHandler::PreFinalizerHandler()
+PreFinalizerHandler::PreFinalizerHandler(HeapBase& heap)
+    : heap_(heap)
 #ifdef DEBUG
-    : creation_thread_id_(v8::base::OS::GetCurrentThreadId())
+      ,
+      creation_thread_id_(v8::base::OS::GetCurrentThreadId())
 #endif
 {
 }
@@ -45,6 +48,9 @@ void PreFinalizerHandler::RegisterPrefinalizer(PreFinalizer pre_finalizer) {
 }
 
 void PreFinalizerHandler::InvokePreFinalizers() {
+  StatsCollector::DisabledScope stats_scope(
+      heap_, StatsCollector::kInvokePreFinalizers);
+
   DCHECK(CurrentThreadIsCreationThread());
   LivenessBroker liveness_broker = LivenessBrokerFactory::Create();
   ordered_pre_finalizers_.erase(
@@ -12,12 +12,14 @@
 namespace cppgc {
 namespace internal {
 
+class HeapBase;
+
 class PreFinalizerHandler final {
  public:
   using PreFinalizer =
       cppgc::internal::PreFinalizerRegistrationDispatcher::PreFinalizer;
 
-  PreFinalizerHandler();
+  explicit PreFinalizerHandler(HeapBase& heap);
 
   void RegisterPrefinalizer(PreFinalizer pre_finalizer);
 
@@ -33,6 +35,7 @@ class PreFinalizerHandler final {
   // back-to-front.
   std::vector<PreFinalizer> ordered_pre_finalizers_;
 
+  HeapBase& heap_;
 #ifdef DEBUG
   int creation_thread_id_;
 #endif
@@ -19,11 +19,42 @@
 namespace cppgc {
 namespace internal {
 
-#define CPPGC_FOR_ALL_SCOPES(V) \
-  V(MainThreadScopeForTests1)   \
-  V(MainThreadScopeForTests2)
+#define CPPGC_FOR_ALL_SCOPES(V)              \
+  V(AtomicPauseCompaction)                   \
+  V(AtomicPauseMarkEpilogue)                 \
+  V(AtomicPauseMarkPrologue)                 \
+  V(AtomicPauseMarkRoots)                    \
+  V(AtomicPauseMarkTransitiveClosure)        \
+  V(AtomicPauseSweepAndCompact)              \
+  V(CompleteSweep)                           \
+  V(IncrementalMarkingFinalize)              \
+  V(IncrementalMarkingStartMarking)          \
+  V(IncrementalMarkingStep)                  \
+  V(IncrementalMarkingWithDeadline)          \
+  V(InvokePreFinalizers)                     \
+  V(LazySweepInIdle)                         \
+  V(LazySweepOnAllocation)                   \
+  V(MarkBailOutObjects)                      \
+  V(MarkInvokeEphemeronCallbacks)            \
+  V(MarkFlushEphemeronPairs)                 \
+  V(MarkProcessWorklists)                    \
+  V(MarkProcessMarkingWorklist)              \
+  V(MarkProcessWriteBarrierWorklist)         \
+  V(MarkProcessNotFullyconstructeddWorklist) \
+  V(MarkNotFullyConstructedObjects)          \
+  V(MarkWeakProcessing)                      \
+  V(UnifiedMarkingStep)                      \
+  V(VisitCrossThreadPersistents)             \
+  V(VisitPersistentRoots)                    \
+  V(VisitPersistents)                        \
+  V(VisitRoots)                              \
+  V(VisitStackRoots)                         \
+  V(VisitRememberedSets)
 
-#define CPPGC_FOR_ALL_CONCURRENT_SCOPES(V) V(ConcurrentThreadScopeForTests)
+#define CPPGC_FOR_ALL_CONCURRENT_SCOPES(V)  \
+  V(ConcurrentMarkInvokeEphemeronCallbacks) \
+  V(ConcurrentMarkingStep)                  \
+  V(ConcurrentSweepingStep)
 
 // Sink for various time and memory statistics.
 class V8_EXPORT_PRIVATE StatsCollector final {
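The scope lists are X-macros: each V(name) row expands into an enum constant, and the same list can be re-expanded to build the "CppGC."-prefixed names the tests below expect. An illustrative expansion with a two-entry stand-in list (the real CPPGC_FOR_ALL_SCOPES has about thirty rows):

#define EXAMPLE_FOR_ALL_SCOPES(V) \
  V(VisitRoots)                   \
  V(MarkWeakProcessing)

enum ScopeId {
#define V(name) k##name,
  EXAMPLE_FOR_ALL_SCOPES(V)
#undef V
  kNumScopeIds  // the count falls out of the list for free
};

constexpr const char* kScopeNames[] = {
#define V(name) "CppGC." #name,
    EXAMPLE_FOR_ALL_SCOPES(V)
#undef V
};
// kVisitRoots == 0 pairs with kScopeNames[0] == "CppGC.VisitRoots".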
@@ -392,9 +392,13 @@ class ConcurrentSweepTask final : public cppgc::JobTask,
   friend class HeapVisitor<ConcurrentSweepTask>;
 
  public:
-  explicit ConcurrentSweepTask(SpaceStates* states) : states_(states) {}
+  explicit ConcurrentSweepTask(HeapBase& heap, SpaceStates* states)
+      : heap_(heap), states_(states) {}
 
   void Run(cppgc::JobDelegate* delegate) final {
+    StatsCollector::EnabledConcurrentScope stats_scope(
+        heap_, StatsCollector::kConcurrentSweepingStep);
+
     for (SpaceState& state : *states_) {
       while (auto page = state.unswept_pages.Pop()) {
         Traverse(*page);
@@ -438,6 +442,7 @@ class ConcurrentSweepTask final : public cppgc::JobTask,
     return true;
   }
 
+  HeapBase& heap_;
   SpaceStates* states_;
   std::atomic_bool is_completed_{false};
 };
@@ -518,12 +523,16 @@ class Sweeper::SweeperImpl final {
   void FinishIfRunning() {
     if (!is_in_progress_) return;
 
-    if (concurrent_sweeper_handle_ && concurrent_sweeper_handle_->IsValid() &&
-        concurrent_sweeper_handle_->UpdatePriorityEnabled()) {
-      concurrent_sweeper_handle_->UpdatePriority(
-          cppgc::TaskPriority::kUserBlocking);
+    {
+      StatsCollector::EnabledScope stats_scope(*heap_->heap(),
+                                               StatsCollector::kCompleteSweep);
+      if (concurrent_sweeper_handle_ && concurrent_sweeper_handle_->IsValid() &&
+          concurrent_sweeper_handle_->UpdatePriorityEnabled()) {
+        concurrent_sweeper_handle_->UpdatePriority(
+            cppgc::TaskPriority::kUserBlocking);
+      }
+      Finish();
     }
-    Finish();
     NotifyDone();
   }
 
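Beyond adding the kCompleteSweep scope, this keeps the existing trick: when the main thread is about to block on sweeping anyway, the concurrent sweeping job is boosted to user-blocking priority so the wait is as short as possible. A sketch against the v8::JobHandle interface from include/v8-platform.h; the helper itself is hypothetical:

#include "v8-platform.h"

// Hypothetical helper: raise the job's priority before blocking on it.
void BoostThenFinish(v8::JobHandle& handle) {
  if (handle.IsValid() && handle.UpdatePriorityEnabled()) {
    handle.UpdatePriority(v8::TaskPriority::kUserBlocking);
  }
  // ... finish the remaining work on this thread, then join or cancel ...
}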
@@ -587,9 +596,16 @@ class Sweeper::SweeperImpl final {
 
       MutatorThreadSweeper sweeper(&sweeper_->space_states_,
                                    sweeper_->platform_);
-      const bool sweep_complete =
-          sweeper.SweepWithDeadline(deadline_in_seconds);
+      bool sweep_complete;
+      {
+        StatsCollector::EnabledScope stats_scope(
+            *sweeper_->heap_->heap(), StatsCollector::kLazySweepInIdle,
+            "idleDeltaInSeconds",
+            (deadline_in_seconds -
+             sweeper_->platform_->MonotonicallyIncreasingTime()));
+
+        sweep_complete = sweeper.SweepWithDeadline(deadline_in_seconds);
+      }
       if (sweep_complete) {
         sweeper_->FinalizeSweep();
         sweeper_->NotifyDone();
@@ -620,7 +636,7 @@ class Sweeper::SweeperImpl final {
 
     concurrent_sweeper_handle_ = platform_->PostJob(
         cppgc::TaskPriority::kUserVisible,
-        std::make_unique<ConcurrentSweepTask>(&space_states_));
+        std::make_unique<ConcurrentSweepTask>(*heap_->heap(), &space_states_));
   }
 
   void CancelSweepers() {
@@ -7,6 +7,7 @@
 #include "include/cppgc/allocation.h"
 #include "include/cppgc/custom-space.h"
 #include "include/cppgc/persistent.h"
+#include "src/heap/cppgc/garbage-collector.h"
 #include "src/heap/cppgc/heap-object-header.h"
 #include "src/heap/cppgc/heap-page.h"
 #include "src/heap/cppgc/marker.h"
@@ -125,7 +126,12 @@ namespace internal {
 
 TEST_F(CompactorTest, NothingToCompact) {
   StartCompaction();
+  heap()->stats_collector()->NotifyMarkingStarted(
+      GarbageCollector::Config::CollectionType::kMajor,
+      GarbageCollector::Config::IsForcedGC::kNotForced);
+  heap()->stats_collector()->NotifyMarkingCompleted(0);
   FinishCompaction();
+  heap()->stats_collector()->NotifySweepingCompleted();
 }
 
 TEST_F(CompactorTest, CancelledNothingToCompact) {
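The tests no longer rely on a full GC driver: scope data is attributed to the GC event currently open on the StatsCollector, so they open and close one explicitly. The ordering they depend on, with calls taken verbatim from this CL:

// Open a GC event; scopes created from here on land in its scope_data.
heap()->stats_collector()->NotifyMarkingStarted(
    GarbageCollector::Config::CollectionType::kMajor,
    GarbageCollector::Config::IsForcedGC::kNotForced);
heap()->stats_collector()->NotifyMarkingCompleted(0);  // 0 marked bytes
// Seals the event; it then becomes readable via GetPreviousEventForTesting().
heap()->stats_collector()->NotifySweepingCompleted();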
@@ -76,7 +76,7 @@ class CppgcTracingScopesTest : public testing::TestWithHeap {
     Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted();
   }
 
-  void ResetTestTracingController(const char* expected_name = nullptr) {
+  void ResetDelegatingTracingController(const char* expected_name = nullptr) {
     DelegatingTracingControllerImpl::AddTraceEvent_callcount = 0u;
     DelegatingTracingControllerImpl::stored_num_args = 0;
     DelegatingTracingControllerImpl::stored_arg_names.clear();
@@ -102,10 +102,10 @@ class CppgcTracingScopesTest : public testing::TestWithHeap {
 
 TEST_F(CppgcTracingScopesTest, DisabledScope) {
   StartGC();
-  ResetTestTracingController();
+  ResetDelegatingTracingController();
   {
     StatsCollector::DisabledScope scope(
-        *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1);
+        *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist);
   }
   EXPECT_EQ(0u, DelegatingTracingControllerImpl::AddTraceEvent_callcount);
   EndGC();
@@ -114,20 +114,21 @@ TEST_F(CppgcTracingScopesTest, DisabledScope) {
 TEST_F(CppgcTracingScopesTest, EnabledScope) {
   {
     StartGC();
-    ResetTestTracingController("CppGC.MainThreadScopeForTests1");
+    ResetDelegatingTracingController("CppGC.MarkProcessMarkingWorklist");
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1);
+          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist);
     }
     EXPECT_EQ(2u, DelegatingTracingControllerImpl::AddTraceEvent_callcount);
     EndGC();
   }
   {
     StartGC();
-    ResetTestTracingController("CppGC.MainThreadScopeForTests2");
+    ResetDelegatingTracingController("CppGC.MarkProcessWriteBarrierWorklist");
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests2);
+          *Heap::From(GetHeap()),
+          StatsCollector::kMarkProcessWriteBarrierWorklist);
     }
     EXPECT_EQ(2u, DelegatingTracingControllerImpl::AddTraceEvent_callcount);
     EndGC();
@@ -138,20 +139,20 @@ TEST_F(CppgcTracingScopesTest, EnabledScopeWithArgs) {
   // Scopes always add 2 arguments: epoch and is_forced_gc.
   {
     StartGC();
-    ResetTestTracingController();
+    ResetDelegatingTracingController();
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1);
+          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist);
     }
     EXPECT_EQ(2, DelegatingTracingControllerImpl::stored_num_args);
     EndGC();
   }
   {
     StartGC();
-    ResetTestTracingController();
+    ResetDelegatingTracingController();
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
+          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist,
           "arg1", 1);
     }
     EXPECT_EQ(3, DelegatingTracingControllerImpl::stored_num_args);
@@ -159,10 +160,10 @@ TEST_F(CppgcTracingScopesTest, EnabledScopeWithArgs) {
   }
   {
     StartGC();
-    ResetTestTracingController();
+    ResetDelegatingTracingController();
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
+          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist,
           "arg1", 1, "arg2", 2);
     }
     EXPECT_EQ(4, DelegatingTracingControllerImpl::stored_num_args);
@@ -173,10 +174,10 @@ TEST_F(CppgcTracingScopesTest, EnabledScopeWithArgs) {
 TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
   {
     StartGC();
-    ResetTestTracingController();
+    ResetDelegatingTracingController();
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
+          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist,
           "uint_arg", 13u, "bool_arg", false);
     }
     FindArgument("uint_arg", TRACE_VALUE_TYPE_UINT, 13);
@@ -185,10 +186,10 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
   }
   {
     StartGC();
-    ResetTestTracingController();
+    ResetDelegatingTracingController();
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
+          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist,
           "neg_int_arg", -5, "pos_int_arg", 7);
     }
     FindArgument("neg_int_arg", TRACE_VALUE_TYPE_INT, -5);
@@ -197,12 +198,12 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
   }
   {
     StartGC();
-    ResetTestTracingController();
+    ResetDelegatingTracingController();
     double double_value = 1.2;
     const char* string_value = "test";
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
+          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist,
           "string_arg", string_value, "double_arg", double_value);
     }
     FindArgument("string_arg", TRACE_VALUE_TYPE_STRING,
@@ -214,10 +215,14 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
 }
 
 TEST_F(CppgcTracingScopesTest, InitalScopesAreZero) {
-  StartGC();
-  EndGC();
+  StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
+  stats_collector->NotifyMarkingStarted(
+      GarbageCollector::Config::CollectionType::kMajor,
+      GarbageCollector::Config::IsForcedGC::kNotForced);
+  stats_collector->NotifyMarkingCompleted(0);
+  stats_collector->NotifySweepingCompleted();
   const StatsCollector::Event& event =
-      Heap::From(GetHeap())->stats_collector()->GetPreviousEventForTesting();
+      stats_collector->GetPreviousEventForTesting();
   for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
     EXPECT_TRUE(event.scope_data[i].IsZero());
   }
@@ -228,7 +233,10 @@ TEST_F(CppgcTracingScopesTest, InitalScopesAreZero) {
 
 TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
   for (int scope_id = 0; scope_id < StatsCollector::kNumScopeIds; ++scope_id) {
-    StartGC();
+    StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
+    stats_collector->NotifyMarkingStarted(
+        GarbageCollector::Config::CollectionType::kMajor,
+        GarbageCollector::Config::IsForcedGC::kNotForced);
     DelegatingTracingControllerImpl::check_expectations = false;
     {
       StatsCollector::EnabledScope scope(
@@ -239,9 +247,10 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
       // Force time to progress before destroying scope.
     }
   }
-  EndGC();
+  stats_collector->NotifyMarkingCompleted(0);
+  stats_collector->NotifySweepingCompleted();
   const StatsCollector::Event& event =
-      Heap::From(GetHeap())->stats_collector()->GetPreviousEventForTesting();
+      stats_collector->GetPreviousEventForTesting();
   for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
     if (i == scope_id)
       EXPECT_LT(v8::base::TimeDelta(), event.scope_data[i]);
@@ -257,7 +266,10 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
 TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
   for (int scope_id = 0; scope_id < StatsCollector::kNumConcurrentScopeIds;
        ++scope_id) {
-    StartGC();
+    StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
+    stats_collector->NotifyMarkingStarted(
+        GarbageCollector::Config::CollectionType::kMajor,
+        GarbageCollector::Config::IsForcedGC::kNotForced);
     DelegatingTracingControllerImpl::check_expectations = false;
     {
       StatsCollector::EnabledConcurrentScope scope(
@@ -268,9 +280,10 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
       // Force time to progress before destroying scope.
    }
  }
-  EndGC();
+  stats_collector->NotifyMarkingCompleted(0);
+  stats_collector->NotifySweepingCompleted();
   const StatsCollector::Event& event =
-      Heap::From(GetHeap())->stats_collector()->GetPreviousEventForTesting();
+      stats_collector->GetPreviousEventForTesting();
   for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
     EXPECT_TRUE(event.scope_data[i].IsZero());
   }