heap: Fix the tracing of GC cycles
Conceptually, a full GC cycle completes when the sweeping phase is finished. As sweeping is performed concurrently, this happens after Heap::CollectGarbage has returned and, at the latest, before the next full GC cycle begins. However, an arbitrary number of young GC cycles may happen in the meantime. Tracing information for the sweeping phase must be added to the corresponding full GC cycle event. Until now, this was not done correctly: this information was added to the GCTracer's current event and could thus be attributed to a subsequent young or full GC cycle.

This CL introduces the methods GCTracer::(Start|Stop)Cycle to delimit a cycle (still allowing full GC cycles to be interrupted by young GC cycles). These methods are distinct from (Start|Stop)ObservablePause, which delimit the observable pause of each GC. The events of "pending" full GC cycles are kept until they are properly amended and reported, when the sweeping phase is finished.

Bug: chromium:1154636
Change-Id: I2fbc65d4807c78656d4abc8c451043f6f86211b1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3404733
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Nikolaos Papaspyrou <nikolaos@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78905}
This commit is contained in:
parent 078f7c4fca
commit 4ad20bff97
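Before the diff, a minimal, self-contained sketch of the cycle bookkeeping described above may help. This is not V8 code: the TracerModel class, the EventType enum, and the main() driver are invented for illustration; only the method names (StartCycle, StopCycle, StopCycleIfPending) and the current_pending_ / young_gc_while_full_gc_ flags mirror the CL.

```cpp
// Minimal model of how a pending full GC event survives interleaved young GC
// cycles until sweeping finishes. Illustrative only, not V8 code.
#include <cassert>
#include <utility>

enum class EventType { kScavenger, kMarkCompactor };

struct Event {
  EventType type = EventType::kMarkCompactor;
};

class TracerModel {
 public:
  void StartCycle(EventType type) {
    // A new cycle may only start while another is pending if a young GC
    // interrupts a full GC.
    assert(!current_pending_ ||
           (type == EventType::kScavenger &&
            current_.type == EventType::kMarkCompactor));
    previous_ = current_;                        // Stash the pending full GC event.
    young_gc_while_full_gc_ = current_pending_;
    current_ = Event{type};
    current_pending_ = true;
  }

  void StopCycle(EventType type) {
    assert(current_pending_ && current_.type == type);
    current_pending_ = false;  // The finished event would be reported here.
    if (type == EventType::kScavenger && young_gc_while_full_gc_) {
      // Restore the interrupted full GC event; it stays pending until the
      // sweeping phase is finished.
      std::swap(current_, previous_);
      current_pending_ = true;
      young_gc_while_full_gc_ = false;
    }
  }

  void StopCycleIfPending() {
    if (current_pending_) StopCycle(EventType::kMarkCompactor);
  }

 private:
  Event current_, previous_;
  bool current_pending_ = false;
  bool young_gc_while_full_gc_ = false;
};

int main() {
  TracerModel tracer;
  tracer.StartCycle(EventType::kMarkCompactor);  // Full GC pause ends, sweeping continues.
  tracer.StartCycle(EventType::kScavenger);      // A young GC runs in the meantime.
  tracer.StopCycle(EventType::kScavenger);       // Young cycle reported, full GC event restored.
  tracer.StopCycleIfPending();                   // Sweeping done: report the full GC cycle.
}
```

In the actual change, Heap::CompleteSweepingFull() calls tracer()->StopCycleIfPending(), so a pending full GC event is closed exactly when concurrent sweeping finishes, while young GC cycles that ran in the meantime are reported against their own events.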
@@ -58,13 +58,13 @@ double GCTracer::MonotonicallyIncreasingTimeInMs() {
}
}

CollectionEpoch GCTracer::CurrentEpoch(Scope::ScopeId scope_id) {
if (Scope::NeedsYoungEpoch(scope_id)) {
return heap_->epoch_young();
} else {
return heap_->epoch_full();
}
namespace {
std::atomic<CollectionEpoch> global_epoch{0};

CollectionEpoch next_epoch() {
return global_epoch.fetch_add(1, std::memory_order_relaxed) + 1;
}
} // namespace

GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind)
: tracer_(tracer), scope_(scope), thread_kind_(thread_kind) {
@@ -227,6 +227,27 @@ GCTracer::GCTracer(Heap* heap)
}
}

void GCTracer::NewCurrentEvent(Event::Type type,
GarbageCollectionReason gc_reason,
const char* collector_reason) {
// If the current event is pending, we can only create a new one if
// a young generation GC is interrupting a full GC.
DCHECK_IMPLIES(current_pending_,
Event::IsYoungGenerationEvent(type) &&
!Event::IsYoungGenerationEvent(current_.type));

// We cannot start a new cycle while a young generation GC cycle has
// already interrupted a full GC cycle.
DCHECK(!young_gc_while_full_gc_);

previous_ = current_;
young_gc_while_full_gc_ = current_pending_;

current_ = Event(type, gc_reason, collector_reason);
current_.reduce_memory = heap_->ShouldReduceMemory();
current_pending_ = true;
}

void GCTracer::ResetForTesting() {
current_ = Event(Event::START, GarbageCollectionReason::kTesting, nullptr);
current_.end_time = MonotonicallyIncreasingTimeInMs();
@@ -261,51 +282,41 @@ void GCTracer::ResetForTesting() {

void GCTracer::NotifyYoungGenerationHandling(
YoungGenerationHandling young_generation_handling) {
DCHECK_GE(1, start_counter_);
DCHECK_EQ(Event::SCAVENGER, current_.type);
heap_->isolate()->counters()->young_generation_handling()->AddSample(
static_cast<int>(young_generation_handling));
}

void GCTracer::Start(GarbageCollector collector,
GarbageCollectionReason gc_reason,
const char* collector_reason) {
void GCTracer::StartObservablePause(GarbageCollector collector,
GarbageCollectionReason gc_reason,
const char* collector_reason) {
DCHECK_EQ(0, start_counter_);
start_counter_++;

previous_ = current_;

switch (collector) {
case GarbageCollector::SCAVENGER:
current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
break;
case GarbageCollector::MINOR_MARK_COMPACTOR:
current_ =
Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason);
break;
case GarbageCollector::MARK_COMPACTOR:
if (heap_->incremental_marking()->WasActivated()) {
current_ = Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason,
collector_reason);
} else {
current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
}
break;
if (!Heap::IsYoungGenerationCollector(collector) && current_pending_) {
// For incremental marking, the event has already been created and we need
// to update the GC reason here.
current_.gc_reason = gc_reason;
current_.collector_reason = collector_reason;
} else {
// An event needs to be created here and, in case we are in a full GC
// cycle, it is not incremental.
switch (collector) {
case GarbageCollector::SCAVENGER:
NewCurrentEvent(Event::SCAVENGER, gc_reason, collector_reason);
break;
case GarbageCollector::MINOR_MARK_COMPACTOR:
NewCurrentEvent(Event::MINOR_MARK_COMPACTOR, gc_reason,
collector_reason);
break;
case GarbageCollector::MARK_COMPACTOR:
NewCurrentEvent(Event::MARK_COMPACTOR, gc_reason, collector_reason);
break;
}
}

current_.reduce_memory = heap_->ShouldReduceMemory();
current_.start_time = MonotonicallyIncreasingTimeInMs();
current_.start_object_size = 0;
current_.start_memory_size = 0;
current_.start_holes_size = 0;
current_.young_object_size = 0;

current_.incremental_marking_bytes = 0;
current_.incremental_marking_duration = 0;

for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
current_.scopes[i] = 0;
}
DCHECK(IsConsistentWithCollector(collector));

Counters* counters = heap_->isolate()->counters();

@@ -322,6 +333,29 @@ void GCTracer::Start(GarbageCollector collector,
}
}

void GCTracer::StartCycle(GarbageCollector collector,
GarbageCollectionReason gc_reason,
MarkingType marking) {
// We need to create an event only if incremental marking starts a full GC
// cycle. Otherwise, we're inside the observable pause and the event has
// already been created.
switch (marking) {
case MarkingType::kAtomic:
DCHECK(IsConsistentWithCollector(collector));
break;
case MarkingType::kIncremental:
DCHECK(!Heap::IsYoungGenerationCollector(collector));
NewCurrentEvent(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, nullptr);
break;
}

if (Heap::IsYoungGenerationCollector(collector)) {
epoch_young_ = next_epoch();
} else {
epoch_full_ = next_epoch();
}
}

void GCTracer::StartInSafepoint() {
SampleAllocation(current_.start_time, heap_->NewSpaceAllocationCounter(),
heap_->OldGenerationAllocationCounter(),
@@ -351,16 +385,12 @@ void GCTracer::StopInSafepoint() {
current_.survived_young_object_size = heap_->SurvivedYoungObjectSize();
}

void GCTracer::Stop(GarbageCollector collector) {
void GCTracer::StopObservablePause(GarbageCollector collector) {
start_counter_--;
DCHECK_EQ(0, start_counter_);
DCHECK((collector == GarbageCollector::SCAVENGER &&
current_.type == Event::SCAVENGER) ||
(collector == GarbageCollector::MINOR_MARK_COMPACTOR &&
current_.type == Event::MINOR_MARK_COMPACTOR) ||
(collector == GarbageCollector::MARK_COMPACTOR &&
(current_.type == Event::MARK_COMPACTOR ||
current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));

const bool is_young = Heap::IsYoungGenerationCollector(collector);
DCHECK(IsConsistentWithCollector(collector));

current_.end_time = MonotonicallyIncreasingTimeInMs();

@@ -371,61 +401,44 @@ void GCTracer::Stop(GarbageCollector collector) {
static_cast<int64_t>(duration * base::Time::kMicrosecondsPerMillisecond);
auto* long_task_stats = heap_->isolate()->GetCurrentLongTaskStats();

switch (current_.type) {
case Event::SCAVENGER:
case Event::MINOR_MARK_COMPACTOR:
recorded_minor_gcs_total_.Push(
MakeBytesAndDuration(current_.young_object_size, duration));
recorded_minor_gcs_survived_.Push(
MakeBytesAndDuration(current_.survived_young_object_size, duration));
FetchBackgroundMinorGCCounters();
long_task_stats->gc_young_wall_clock_duration_us += duration_us;
break;
case Event::INCREMENTAL_MARK_COMPACTOR:
if (is_young) {
recorded_minor_gcs_total_.Push(
MakeBytesAndDuration(current_.young_object_size, duration));
recorded_minor_gcs_survived_.Push(
MakeBytesAndDuration(current_.survived_young_object_size, duration));
FetchBackgroundMinorGCCounters();
long_task_stats->gc_young_wall_clock_duration_us += duration_us;
} else {
if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
current_.incremental_marking_bytes = incremental_marking_bytes_;
current_.incremental_marking_duration = incremental_marking_duration_;
for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i];
current_.scopes[i] = incremental_marking_scopes_[i].duration;
}

RecordMutatorUtilization(
current_.end_time, duration + current_.incremental_marking_duration);
RecordIncrementalMarkingSpeed(current_.incremental_marking_bytes,
current_.incremental_marking_duration);
recorded_incremental_mark_compacts_.Push(
MakeBytesAndDuration(current_.end_object_size, duration));
RecordGCSumCounters(duration);
ResetIncrementalMarkingCounters();
combined_mark_compact_speed_cache_ = 0.0;
FetchBackgroundMarkCompactCounters();
long_task_stats->gc_full_atomic_wall_clock_duration_us += duration_us;
break;
case Event::MARK_COMPACTOR:
} else {
DCHECK_EQ(0u, current_.incremental_marking_bytes);
DCHECK_EQ(0, current_.incremental_marking_duration);
RecordMutatorUtilization(
current_.end_time, duration + current_.incremental_marking_duration);
recorded_mark_compacts_.Push(
MakeBytesAndDuration(current_.end_object_size, duration));
RecordGCSumCounters(duration);
ResetIncrementalMarkingCounters();
combined_mark_compact_speed_cache_ = 0.0;
FetchBackgroundMarkCompactCounters();
long_task_stats->gc_full_atomic_wall_clock_duration_us += duration_us;
break;
case Event::START:
UNREACHABLE();
}
RecordMutatorUtilization(current_.end_time,
duration + current_.incremental_marking_duration);
RecordGCSumCounters(duration);
ResetIncrementalMarkingCounters();
combined_mark_compact_speed_cache_ = 0.0;
FetchBackgroundMarkCompactCounters();
long_task_stats->gc_full_atomic_wall_clock_duration_us += duration_us;
}
FetchBackgroundGeneralCounters();

heap_->UpdateTotalGCTime(duration);

if (current_.type == Event::SCAVENGER ||
current_.type == Event::MINOR_MARK_COMPACTOR) {
ReportYoungCycleToRecorder();
if (FLAG_trace_gc_ignore_scavenger) return;
}
if (FLAG_trace_gc_ignore_scavenger && is_young) return;

if (FLAG_trace_gc_nvp) {
PrintNVP();
@@ -448,6 +461,29 @@ void GCTracer::Stop(GarbageCollector collector) {
}
}

void GCTracer::StopCycle(GarbageCollector collector) {
DCHECK(current_pending_);
current_pending_ = false;

DCHECK(IsConsistentWithCollector(collector));

if (Heap::IsYoungGenerationCollector(collector)) {
ReportYoungCycleToRecorder();
// If a young generation GC interrupted an unfinished full GC cycle, restore
// the event corresponding to the full GC cycle.
if (young_gc_while_full_gc_) {
std::swap(current_, previous_);
current_pending_ = true;
young_gc_while_full_gc_ = false;
}
}
}

void GCTracer::StopCycleIfPending() {
if (!current_pending_) return;
StopCycle(GarbageCollector::MARK_COMPACTOR);
}

void GCTracer::NotifySweepingCompleted() {
if (FLAG_trace_gc_freelists) {
PrintIsolate(heap_->isolate(),
@@ -520,7 +556,6 @@ void GCTracer::AddCompactionEvent(double duration,
MakeBytesAndDuration(live_bytes_compacted, duration));
}

void GCTracer::AddSurvivalRatio(double promotion_ratio) {
recorded_survival_ratios_.Push(promotion_ratio);
}
@@ -591,7 +626,6 @@ void GCTracer::Print() const {
current_.collector_reason != nullptr ? current_.collector_reason : "");
}

void GCTracer::PrintNVP() const {
double duration = current_.end_time - current_.start_time;
double spent_in_mutator = current_.start_time - previous_.end_time;
@@ -51,6 +51,8 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
GCTracer::Scope::Name(GCTracer::Scope::ScopeId(scope_id)), \
"epoch", tracer->CurrentEpoch(scope_id))

using CollectionEpoch = uint32_t;

// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.
class V8_EXPORT_PRIVATE GCTracer {
@@ -137,6 +139,14 @@ class V8_EXPORT_PRIVATE GCTracer {
START = 4
};

#ifdef DEBUG
// Returns true if the event corresponds to a young generation GC.
static constexpr bool IsYoungGenerationEvent(Type type) {
DCHECK_NE(START, type);
return type == SCAVENGER || type == MINOR_MARK_COMPACTOR;
}
#endif

Event(Type type, GarbageCollectionReason gc_reason,
const char* collector_reason);

@@ -211,13 +221,25 @@ class V8_EXPORT_PRIVATE GCTracer {

explicit GCTracer(Heap* heap);

// Start collecting data.
void Start(GarbageCollector collector, GarbageCollectionReason gc_reason,
const char* collector_reason);
void StartInSafepoint();
CollectionEpoch CurrentEpoch(Scope::ScopeId id) const {
return Scope::NeedsYoungEpoch(id) ? epoch_young_ : epoch_full_;
}

// Stop collecting data and print results.
void Stop(GarbageCollector collector);
// Start and stop a cycle's observable (atomic) pause.
void StartObservablePause(GarbageCollector collector,
GarbageCollectionReason gc_reason,
const char* collector_reason);
void StopObservablePause(GarbageCollector collector);

enum class MarkingType { kAtomic, kIncremental };

// Start and stop a GC cycle (collecting data and reporting results).
void StartCycle(GarbageCollector collector, GarbageCollectionReason gc_reason,
MarkingType marking);
void StopCycle(GarbageCollector collector);
void StopCycleIfPending();

void StartInSafepoint();
void StopInSafepoint();

void NotifySweepingCompleted();
@@ -227,6 +249,19 @@ class V8_EXPORT_PRIVATE GCTracer {
void NotifyYoungGenerationHandling(
YoungGenerationHandling young_generation_handling);

#ifdef DEBUG
// Checks if the current event is consistent with a collector.
bool IsConsistentWithCollector(GarbageCollector collector) const {
return (collector == GarbageCollector::SCAVENGER &&
current_.type == Event::SCAVENGER) ||
(collector == GarbageCollector::MINOR_MARK_COMPACTOR &&
current_.type == Event::MINOR_MARK_COMPACTOR) ||
(collector == GarbageCollector::MARK_COMPACTOR &&
(current_.type == Event::MARK_COMPACTOR ||
current_.type == Event::INCREMENTAL_MARK_COMPACTOR));
}
#endif

// Sample and accumulate bytes allocated since the last GC.
void SampleAllocation(double current_ms, size_t new_space_counter_bytes,
size_t old_generation_counter_bytes,
@@ -353,8 +388,6 @@ class V8_EXPORT_PRIVATE GCTracer {
WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats();
#endif // defined(V8_RUNTIME_CALL_STATS)

CollectionEpoch CurrentEpoch(Scope::ScopeId id);

private:
FRIEND_TEST(GCTracer, AverageSpeed);
FRIEND_TEST(GCTracerTest, AllocationThroughput);
@@ -428,6 +461,9 @@ class V8_EXPORT_PRIVATE GCTracer {
void ReportIncrementalMarkingStepToRecorder();
void ReportYoungCycleToRecorder();

void NewCurrentEvent(Event::Type type, GarbageCollectionReason gc_reason,
const char* collector_reason);

// Pointer to the heap that owns this tracer.
Heap* heap_;

@@ -438,6 +474,11 @@ class V8_EXPORT_PRIVATE GCTracer {
// Previous tracer event.
Event previous_;

// We need two epochs, since there can be scavenges during incremental
// marking.
CollectionEpoch epoch_young_ = 0;
CollectionEpoch epoch_full_ = 0;

// Size of incremental marking steps (in bytes) accumulated since the end of
// the last mark compact GC.
size_t incremental_marking_bytes_;
@@ -495,6 +536,15 @@ class V8_EXPORT_PRIVATE GCTracer {

bool metrics_report_pending_ = false;

// An ongoing GC cycle is considered pending if it has been started with
// |StartCycle()| but has not yet been finished with |StopCycle()|.
bool current_pending_ = false;

// When a full GC cycle is interrupted by a young generation GC cycle, the
// |previous_| event is used as temporary storage for the |current_| event
// that corresponded to the full GC cycle, and this field is set to true.
bool young_gc_while_full_gc_ = false;

v8::metrics::GarbageCollectionFullMainThreadBatchedIncrementalMark
incremental_mark_batched_events_;

@@ -115,14 +115,6 @@
namespace v8 {
namespace internal {

namespace {
std::atomic<CollectionEpoch> global_epoch{0};

CollectionEpoch next_epoch() {
return global_epoch.fetch_add(1, std::memory_order_relaxed) + 1;
}
} // namespace

#ifdef V8_ENABLE_THIRD_PARTY_HEAP
Isolate* Heap::GetIsolateFromWritableObject(HeapObject object) {
return reinterpret_cast<Isolate*>(
@@ -1787,7 +1779,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
}

{
tracer()->Start(collector, gc_reason, collector_reason);
tracer()->StartObservablePause(collector, gc_reason, collector_reason);
DCHECK(AllowGarbageCollection::IsAllowed());
DisallowGarbageCollection no_gc_during_gc;
GarbageCollectionPrologue();
@@ -1812,8 +1804,8 @@ bool Heap::CollectGarbage(AllocationSpace space,
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
tp_heap_->CollectGarbage();
} else {
freed_global_handles +=
PerformGarbageCollection(collector, gc_callback_flags);
freed_global_handles += PerformGarbageCollection(
collector, gc_reason, collector_reason, gc_callback_flags);
}
// Clear flags describing the current GC now that the current GC is
// complete. Do this before GarbageCollectionEpilogue() since that could
@@ -1859,7 +1851,10 @@ bool Heap::CollectGarbage(AllocationSpace space,
}
}

tracer()->Stop(collector);
tracer()->StopObservablePause(collector);
if (IsYoungGenerationCollector(collector)) {
tracer()->StopCycle(collector);
}
}

// Part 3: Invoke all callbacks which should happen after the actual garbage
@@ -1955,9 +1950,9 @@ void Heap::StartIncrementalMarking(int gc_flags,
VerifyCountersAfterSweeping();
#endif

// Now that sweeping is completed, we can update the current epoch for the new
// full collection.
UpdateEpochFull();
// Now that sweeping is completed, we can start the next full GC cycle.
tracer()->StartCycle(GarbageCollector::MARK_COMPACTOR, gc_reason,
GCTracer::MarkingType::kIncremental);

set_current_gc_flags(gc_flags);
current_gc_callback_flags_ = gc_callback_flags;
@@ -1971,6 +1966,7 @@ void Heap::CompleteSweepingFull() {
if (cpp_heap()) {
CppHeap::From(cpp_heap())->FinishSweepingIfRunning();
}
tracer()->StopCycleIfPending();
}

void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
@@ -2166,20 +2162,25 @@ GCTracer::Scope::ScopeId CollectorScopeId(GarbageCollector collector) {
} // namespace

size_t Heap::PerformGarbageCollection(
GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
GarbageCollector collector, GarbageCollectionReason gc_reason,
const char* collector_reason, const v8::GCCallbackFlags gc_callback_flags) {
DisallowJavascriptExecution no_js(isolate());

if (IsYoungGenerationCollector(collector)) {
CompleteSweepingYoung(collector);
tracer()->StartCycle(collector, gc_reason, GCTracer::MarkingType::kAtomic);
} else {
DCHECK_EQ(GarbageCollector::MARK_COMPACTOR, collector);
CompleteSweepingFull();
// If incremental marking has been activated, the full GC cycle has already
// started, so don't start a new one.
if (!incremental_marking_->WasActivated()) {
tracer()->StartCycle(collector, gc_reason,
GCTracer::MarkingType::kAtomic);
}
}

// The last GC cycle is done after completing sweeping. Start the next GC
// cycle.
UpdateCurrentEpoch(collector);

DCHECK(tracer()->IsConsistentWithCollector(collector));
TRACE_GC_EPOCH(tracer(), CollectorScopeId(collector), ThreadKind::kMain);

base::Optional<SafepointScope> safepoint_scope;
@@ -2303,10 +2304,8 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
v8::Locker locker(reinterpret_cast<v8::Isolate*>(isolate()));
v8::Isolate::Scope isolate_scope(reinterpret_cast<v8::Isolate*>(isolate()));

const char* collector_reason = nullptr;
GarbageCollector collector = GarbageCollector::MARK_COMPACTOR;

tracer()->Start(collector, gc_reason, collector_reason);
tracer()->StartObservablePause(GarbageCollector::MARK_COMPACTOR, gc_reason,
nullptr);

DCHECK_NOT_NULL(isolate()->global_safepoint());

@@ -2318,9 +2317,10 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
client->heap()->MakeHeapIterable();
});

PerformGarbageCollection(GarbageCollector::MARK_COMPACTOR);
PerformGarbageCollection(GarbageCollector::MARK_COMPACTOR, gc_reason,
nullptr);

tracer()->Stop(collector);
tracer()->StopObservablePause(GarbageCollector::MARK_COMPACTOR);
}

void Heap::CompleteSweepingYoung(GarbageCollector collector) {
@@ -2357,16 +2357,6 @@ void Heap::EnsureSweepingCompleted(HeapObject object) {
mark_compact_collector()->EnsurePageIsSwept(page);
}

void Heap::UpdateCurrentEpoch(GarbageCollector collector) {
if (IsYoungGenerationCollector(collector)) {
epoch_young_ = next_epoch();
} else if (incremental_marking()->IsStopped()) {
epoch_full_ = next_epoch();
}
}

void Heap::UpdateEpochFull() { epoch_full_ = next_epoch(); }

void Heap::RecomputeLimits(GarbageCollector collector) {
if (!((collector == GarbageCollector::MARK_COMPACTOR) ||
(HasLowYoungGenerationAllocationRate() &&
@@ -3808,7 +3798,9 @@ void Heap::FinalizeIncrementalMarkingIncrementally(

NestedTimedHistogramScope incremental_marking_scope(
isolate()->counters()->gc_incremental_marking_finalize());
TRACE_EVENT1("v8", "V8.GCIncrementalMarkingFinalize", "epoch", epoch_full());
TRACE_EVENT1(
"v8", "V8.GCIncrementalMarkingFinalize", "epoch",
tracer()->CurrentEpoch(GCTracer::Scope::MC_INCREMENTAL_FINALIZE));
TRACE_GC_EPOCH(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE,
ThreadKind::kMain);
@@ -270,8 +270,6 @@ using EphemeronRememberedSet =
std::unordered_map<EphemeronHashTable, std::unordered_set<int>,
Object::Hasher>;

using CollectionEpoch = uint32_t;

class Heap {
public:
// Stores ephemeron entries where the EphemeronHashTable is in old-space,
@@ -550,8 +548,6 @@ class Heap {

void NotifyOldGenerationExpansion(AllocationSpace space, MemoryChunk* chunk);

void UpdateCurrentEpoch(GarbageCollector collector);

inline Address* NewSpaceAllocationTopAddress();
inline Address* NewSpaceAllocationLimitAddress();
inline Address* OldSpaceAllocationTopAddress();
@@ -1677,11 +1673,6 @@ class Heap {

static Isolate* GetIsolateFromWritableObject(HeapObject object);

CollectionEpoch epoch_young() { return epoch_young_; }
CollectionEpoch epoch_full() { return epoch_full_; }

void UpdateEpochFull();

// Ensure that we have swept all spaces in such a way that we can iterate
// over all objects.
void MakeHeapIterable();
@@ -1821,7 +1812,8 @@ class Heap {
// Performs garbage collection in a safepoint.
// Returns the number of freed global handles.
size_t PerformGarbageCollection(
GarbageCollector collector,
GarbageCollector collector, GarbageCollectionReason gc_reason,
const char* collector_reason,
const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

// Performs garbage collection in the shared heap.
@@ -2521,11 +2513,6 @@ class Heap {

std::unique_ptr<third_party_heap::Heap> tp_heap_;

// We need two epochs, since there can be scavenges during incremental
// marking.
CollectionEpoch epoch_young_ = 0;
CollectionEpoch epoch_full_ = 0;

// Classes in "heap" can be friends.
friend class AlwaysAllocateScope;
friend class ArrayBufferCollector;
@@ -191,8 +191,9 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
static_cast<int>(gc_reason));
NestedTimedHistogramScope incremental_marking_scope(
counters->gc_incremental_marking_start());
TRACE_EVENT1("v8", "V8.GCIncrementalMarkingStart", "epoch",
heap_->epoch_full());
TRACE_EVENT1(
"v8", "V8.GCIncrementalMarkingStart", "epoch",
heap_->tracer()->CurrentEpoch(GCTracer::Scope::MC_INCREMENTAL_START));
TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START,
ThreadKind::kMain);
heap_->tracer()->NotifyIncrementalMarkingStart();
@@ -791,7 +792,8 @@ StepResult IncrementalMarking::AdvanceWithDeadline(
StepOrigin step_origin) {
NestedTimedHistogramScope incremental_marking_scope(
heap_->isolate()->counters()->gc_incremental_marking());
TRACE_EVENT1("v8", "V8.GCIncrementalMarking", "epoch", heap_->epoch_full());
TRACE_EVENT1("v8", "V8.GCIncrementalMarking", "epoch",
heap_->tracer()->CurrentEpoch(GCTracer::Scope::MC_INCREMENTAL));
TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL,
ThreadKind::kMain);
DCHECK(!IsStopped());
@@ -7004,6 +7004,9 @@ TEST(Regress978156) {
i::IncrementalMarking* marking = heap->incremental_marking();
if (marking->IsStopped()) {
SafepointScope scope(heap);
heap->tracer()->StartCycle(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting,
GCTracer::MarkingType::kIncremental);
marking->Start(i::GarbageCollectionReason::kTesting);
}
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
@@ -16,11 +16,11 @@

#include <utility>

#include "src/init/v8.h"

#include "src/handles/global-handles.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/init/v8.h"
#include "src/objects/objects-inl.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"
@@ -129,6 +129,9 @@ UNINITIALIZED_TEST(IncrementalMarkingUsingTasks) {
marking->Stop();
{
SafepointScope scope(heap);
heap->tracer()->StartCycle(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting,
GCTracer::MarkingType::kIncremental);
marking->Start(i::GarbageCollectionReason::kTesting);
}
CHECK(platform.PendingTask());
@@ -53,14 +53,41 @@ TEST(GCTracer, AverageSpeed) {

namespace {

void SampleAndAddAllocaton(v8::internal::GCTracer* tracer, double time_ms,
size_t per_space_counter_bytes) {
void SampleAndAddAllocation(GCTracer* tracer, double time_ms,
size_t per_space_counter_bytes) {
// Increment counters of all spaces.
tracer->SampleAllocation(time_ms, per_space_counter_bytes,
per_space_counter_bytes, per_space_counter_bytes);
tracer->AddAllocation(time_ms);
}

void StartTracing(GCTracer* tracer, GarbageCollector collector,
GCTracer::MarkingType marking) {
switch (marking) {
case GCTracer::MarkingType::kAtomic:
tracer->StartObservablePause(collector, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->StartCycle(collector, GarbageCollectionReason::kTesting,
GCTracer::MarkingType::kAtomic);
break;
case GCTracer::MarkingType::kIncremental:
DCHECK(!Heap::IsYoungGenerationCollector(collector));
tracer->StartCycle(collector, GarbageCollectionReason::kTesting,
GCTracer::MarkingType::kIncremental);
tracer->StartObservablePause(collector, GarbageCollectionReason::kTesting,
"collector unittest");
break;
}
}

void StopTracing(GCTracer* tracer, GarbageCollector collector) {
tracer->StopObservablePause(collector);
if (Heap::IsYoungGenerationCollector(collector))
tracer->StopCycle(collector);
else
tracer->StopCycleIfPending();
}

} // namespace

TEST_F(GCTracerTest, AllocationThroughput) {
@@ -70,17 +97,17 @@ TEST_F(GCTracerTest, AllocationThroughput) {

const int time1 = 100;
const size_t counter1 = 1000;
SampleAndAddAllocaton(tracer, time1, counter1);
SampleAndAddAllocation(tracer, time1, counter1);
const int time2 = 200;
const size_t counter2 = 2000;
SampleAndAddAllocaton(tracer, time2, counter2);
SampleAndAddAllocation(tracer, time2, counter2);
// Will only consider the current sample.
EXPECT_EQ(2 * (counter2 - counter1) / (time2 - time1),
static_cast<size_t>(
tracer->AllocationThroughputInBytesPerMillisecond(100)));
const int time3 = 1000;
const size_t counter3 = 30000;
SampleAndAddAllocaton(tracer, time3, counter3);
SampleAndAddAllocation(tracer, time3, counter3);
// Only consider last sample.
EXPECT_EQ(2 * (counter3 - counter2) / (time3 - time2),
static_cast<size_t>(
@@ -97,10 +124,10 @@ TEST_F(GCTracerTest, PerGenerationAllocationThroughput) {

const int time1 = 100;
const size_t counter1 = 1000;
SampleAndAddAllocaton(tracer, time1, counter1);
SampleAndAddAllocation(tracer, time1, counter1);
const int time2 = 200;
const size_t counter2 = 2000;
SampleAndAddAllocaton(tracer, time2, counter2);
SampleAndAddAllocation(tracer, time2, counter2);
const size_t expected_throughput1 = (counter2 - counter1) / (time2 - time1);
EXPECT_EQ(expected_throughput1,
static_cast<size_t>(
@@ -114,7 +141,7 @@ TEST_F(GCTracerTest, PerGenerationAllocationThroughput) {
tracer->EmbedderAllocationThroughputInBytesPerMillisecond()));
const int time3 = 1000;
const size_t counter3 = 30000;
SampleAndAddAllocaton(tracer, time3, counter3);
SampleAndAddAllocation(tracer, time3, counter3);
const size_t expected_throughput2 = (counter3 - counter1) / (time3 - time1);
EXPECT_EQ(expected_throughput2,
static_cast<size_t>(
@@ -134,10 +161,10 @@ TEST_F(GCTracerTest, PerGenerationAllocationThroughputWithProvidedTime) {

const int time1 = 100;
const size_t counter1 = 1000;
SampleAndAddAllocaton(tracer, time1, counter1);
SampleAndAddAllocation(tracer, time1, counter1);
const int time2 = 200;
const size_t counter2 = 2000;
SampleAndAddAllocaton(tracer, time2, counter2);
SampleAndAddAllocation(tracer, time2, counter2);
const size_t expected_throughput1 = (counter2 - counter1) / (time2 - time1);
EXPECT_EQ(
expected_throughput1,
@@ -149,7 +176,7 @@ TEST_F(GCTracerTest, PerGenerationAllocationThroughputWithProvidedTime) {
tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100)));
const int time3 = 1000;
const size_t counter3 = 30000;
SampleAndAddAllocaton(tracer, time3, counter3);
SampleAndAddAllocation(tracer, time3, counter3);
const size_t expected_throughput2 = (counter3 - counter2) / (time3 - time2);
// Only consider last sample.
EXPECT_EQ(
@@ -177,12 +204,12 @@ TEST_F(GCTracerTest, RegularScope) {
tracer->ResetForTesting();

EXPECT_DOUBLE_EQ(0.0, tracer->current_.scopes[GCTracer::Scope::MC_MARK]);
// Sample not added because it's not within a started tracer.
// Sample not added because the cycle has not started.
tracer->AddScopeSample(GCTracer::Scope::MC_MARK, 10);
StartTracing(tracer, GarbageCollector::MARK_COMPACTOR,
GCTracer::MarkingType::kAtomic);
tracer->AddScopeSample(GCTracer::Scope::MC_MARK, 100);
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
tracer->AddScopeSample(GCTracer::Scope::MC_MARK, 100);
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
StopTracing(tracer, GarbageCollector::MARK_COMPACTOR);
EXPECT_DOUBLE_EQ(100.0, tracer->current_.scopes[GCTracer::Scope::MC_MARK]);
}

@@ -194,12 +221,10 @@ TEST_F(GCTracerTest, IncrementalScope) {
0.0, tracer->current_.scopes[GCTracer::Scope::MC_INCREMENTAL_FINALIZE]);
// Sample is added because its ScopeId is listed as incremental sample.
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 100);
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
// Switch to incremental MC to enable writing back incremental scopes.
tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
StartTracing(tracer, GarbageCollector::MARK_COMPACTOR,
GCTracer::MarkingType::kIncremental);
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 100);
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
StopTracing(tracer, GarbageCollector::MARK_COMPACTOR);
EXPECT_DOUBLE_EQ(
200.0, tracer->current_.scopes[GCTracer::Scope::MC_INCREMENTAL_FINALIZE]);
}
@@ -211,15 +236,13 @@ TEST_F(GCTracerTest, IncrementalMarkingDetails) {
// Round 1.
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 50);
// Scavenger has no impact on incremental marking details.
tracer->Start(GarbageCollector::SCAVENGER, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Stop(GarbageCollector::SCAVENGER);
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
// Switch to incremental MC to enable writing back incremental scopes.
tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
StartTracing(tracer, GarbageCollector::SCAVENGER,
GCTracer::MarkingType::kAtomic);
StopTracing(tracer, GarbageCollector::SCAVENGER);
StartTracing(tracer, GarbageCollector::MARK_COMPACTOR,
GCTracer::MarkingType::kIncremental);
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 100);
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
StopTracing(tracer, GarbageCollector::MARK_COMPACTOR);
EXPECT_DOUBLE_EQ(
100,
tracer->current_
@@ -239,12 +262,10 @@ TEST_F(GCTracerTest, IncrementalMarkingDetails) {
// Round 2. Numbers should be reset.
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 13);
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 15);
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
// Switch to incremental MC to enable writing back incremental scopes.
tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
StartTracing(tracer, GarbageCollector::MARK_COMPACTOR,
GCTracer::MarkingType::kIncremental);
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 122);
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
StopTracing(tracer, GarbageCollector::MARK_COMPACTOR);
EXPECT_DOUBLE_EQ(
122,
tracer->current_
@@ -276,24 +297,22 @@ TEST_F(GCTracerTest, IncrementalMarkingSpeed) {
EXPECT_EQ(1000000 / 100,
tracer->IncrementalMarkingSpeedInBytesPerMillisecond());
// Scavenger has no impact on incremental marking details.
tracer->Start(GarbageCollector::SCAVENGER, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Stop(GarbageCollector::SCAVENGER);
StartTracing(tracer, GarbageCollector::SCAVENGER,
GCTracer::MarkingType::kAtomic);
StopTracing(tracer, GarbageCollector::SCAVENGER);
// 1000000 bytes in 100ms.
tracer->AddIncrementalMarkingStep(100, 1000000);
EXPECT_EQ(300, tracer->incremental_marking_duration_);
EXPECT_EQ(3000000u, tracer->incremental_marking_bytes_);
EXPECT_EQ(1000000 / 100,
tracer->IncrementalMarkingSpeedInBytesPerMillisecond());
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
// Switch to incremental MC.
tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
StartTracing(tracer, GarbageCollector::MARK_COMPACTOR,
GCTracer::MarkingType::kIncremental);
// 1000000 bytes in 100ms.
tracer->AddIncrementalMarkingStep(100, 1000000);
EXPECT_EQ(400, tracer->incremental_marking_duration_);
EXPECT_EQ(4000000u, tracer->incremental_marking_bytes_);
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
StopTracing(tracer, GarbageCollector::MARK_COMPACTOR);
EXPECT_EQ(400, tracer->current_.incremental_marking_duration);
EXPECT_EQ(4000000u, tracer->current_.incremental_marking_bytes);
EXPECT_EQ(0, tracer->incremental_marking_duration_);
@@ -303,11 +322,9 @@ TEST_F(GCTracerTest, IncrementalMarkingSpeed) {

// Round 2.
tracer->AddIncrementalMarkingStep(2000, 1000);
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
// Switch to incremental MC.
tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
StartTracing(tracer, GarbageCollector::MARK_COMPACTOR,
GCTracer::MarkingType::kIncremental);
StopTracing(tracer, GarbageCollector::MARK_COMPACTOR);
EXPECT_DOUBLE_EQ((4000000.0 / 400 + 1000.0 / 2000) / 2,
static_cast<double>(
tracer->IncrementalMarkingSpeedInBytesPerMillisecond()));
@@ -352,13 +369,13 @@ TEST_F(GCTracerTest, MutatorUtilization) {
TEST_F(GCTracerTest, BackgroundScavengerScope) {
GCTracer* tracer = i_isolate()->heap()->tracer();
tracer->ResetForTesting();
tracer->Start(GarbageCollector::SCAVENGER, GarbageCollectionReason::kTesting,
"collector unittest");
StartTracing(tracer, GarbageCollector::SCAVENGER,
GCTracer::MarkingType::kAtomic);
tracer->AddScopeSampleBackground(
GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL, 10);
tracer->AddScopeSampleBackground(
GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL, 1);
tracer->Stop(GarbageCollector::SCAVENGER);
StopTracing(tracer, GarbageCollector::SCAVENGER);
EXPECT_DOUBLE_EQ(
11, tracer->current_
.scopes[GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL]);
@@ -367,8 +384,8 @@ TEST_F(GCTracerTest, BackgroundScavengerScope) {
TEST_F(GCTracerTest, BackgroundMinorMCScope) {
GCTracer* tracer = i_isolate()->heap()->tracer();
tracer->ResetForTesting();
tracer->Start(GarbageCollector::MINOR_MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
StartTracing(tracer, GarbageCollector::MINOR_MARK_COMPACTOR,
GCTracer::MarkingType::kAtomic);
tracer->AddScopeSampleBackground(GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING,
10);
tracer->AddScopeSampleBackground(GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING,
@@ -381,7 +398,7 @@ TEST_F(GCTracerTest, BackgroundMinorMCScope) {
GCTracer::Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS, 30);
tracer->AddScopeSampleBackground(
GCTracer::Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS, 3);
tracer->Stop(GarbageCollector::MINOR_MARK_COMPACTOR);
StopTracing(tracer, GarbageCollector::MINOR_MARK_COMPACTOR);
EXPECT_DOUBLE_EQ(
11,
tracer->current_.scopes[GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING]);
@@ -401,14 +418,14 @@ TEST_F(GCTracerTest, BackgroundMajorMCScope) {
200);
tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_MARKING, 10);
// Scavenger should not affect the major mark-compact scopes.
tracer->Start(GarbageCollector::SCAVENGER, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Stop(GarbageCollector::SCAVENGER);
StartTracing(tracer, GarbageCollector::SCAVENGER,
GCTracer::MarkingType::kAtomic);
StopTracing(tracer, GarbageCollector::SCAVENGER);
tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_SWEEPING, 20);
tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_MARKING, 1);
tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_SWEEPING, 2);
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
StartTracing(tracer, GarbageCollector::MARK_COMPACTOR,
GCTracer::MarkingType::kAtomic);
tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_EVACUATE_COPY,
30);
tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_EVACUATE_COPY,
@@ -417,7 +434,7 @@ TEST_F(GCTracerTest, BackgroundMajorMCScope) {
GCTracer::Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS, 40);
tracer->AddScopeSampleBackground(
GCTracer::Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS, 4);
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
StopTracing(tracer, GarbageCollector::MARK_COMPACTOR);
EXPECT_DOUBLE_EQ(
111, tracer->current_.scopes[GCTracer::Scope::MC_BACKGROUND_MARKING]);
EXPECT_DOUBLE_EQ(