From 8ab39ebcf90e6e838dd5ca2d6bbfdd8111841777 Mon Sep 17 00:00:00 2001
From: mlippautz
Date: Wed, 3 May 2017 13:59:28 -0700
Subject: [PATCH] [heap] Add GC accounting to slow allocation and incremental marking job

BUG=v8:6343

Review-Url: https://codereview.chromium.org/2861763002
Cr-Commit-Position: refs/heads/master@{#45073}
---
 src/counters.h                      |  2 ++
 src/heap/heap.cc                    |  2 +-
 src/heap/incremental-marking-job.cc |  4 ++++
 src/heap/spaces.cc                  | 15 +++++++++++++--
 src/heap/spaces.h                   |  6 +++++-
 5 files changed, 25 insertions(+), 4 deletions(-)

diff --git a/src/counters.h b/src/counters.h
index 79fd2359a7..19265f0a28 100644
--- a/src/counters.h
+++ b/src/counters.h
@@ -716,6 +716,8 @@ class RuntimeCallTimer final {
   V(FunctionCallback)                    \
   V(GC)                                  \
   V(GC_AllAvailableGarbage)              \
+  V(GC_IncrementalMarkingJob)            \
+  V(GC_SlowAllocateRaw)                  \
   V(GCEpilogueCallback)                  \
   V(GCPrologueCallback)                  \
   V(GenericNamedPropertyDefinerCallback) \
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index cc85014311..92516ece91 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -978,7 +978,7 @@ bool Heap::CollectGarbage(GarbageCollector collector,
                           const char* collector_reason,
                           const v8::GCCallbackFlags gc_callback_flags) {
   // The VM is in the GC state until exiting this function.
-  VMState<GC> state(isolate_);
+  VMState<GC> state(isolate());
   RuntimeCallTimerScope(isolate(), &RuntimeCallStats::GC);
 
 #ifdef DEBUG
diff --git a/src/heap/incremental-marking-job.cc b/src/heap/incremental-marking-job.cc
index 393b9cce7e..de7264792a 100644
--- a/src/heap/incremental-marking-job.cc
+++ b/src/heap/incremental-marking-job.cc
@@ -10,6 +10,7 @@
 #include "src/heap/incremental-marking.h"
 #include "src/isolate.h"
 #include "src/v8.h"
+#include "src/vm-state-inl.h"
 
 namespace v8 {
 namespace internal {
@@ -42,6 +43,9 @@ void IncrementalMarkingJob::Task::Step(Heap* heap) {
 }
 
 void IncrementalMarkingJob::Task::RunInternal() {
+  VMState<GC> state(isolate());
+  RuntimeCallTimerScope(isolate(), &RuntimeCallStats::GC_IncrementalMarkingJob);
+
   Heap* heap = isolate()->heap();
   job_->NotifyTask();
   IncrementalMarking* incremental_marking = heap->incremental_marking();
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc
index 371dfb1e56..03295770df 100644
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -20,6 +20,7 @@
 #include "src/objects-inl.h"
 #include "src/snapshot/snapshot.h"
 #include "src/v8.h"
+#include "src/vm-state-inl.h"
 
 namespace v8 {
 namespace internal {
@@ -2896,11 +2897,21 @@ HeapObject* CompactionSpace::SweepAndRetryAllocation(int size_in_bytes) {
 }
 
 HeapObject* PagedSpace::SlowAllocateRaw(int size_in_bytes) {
+  VMState<GC> state(heap()->isolate());
+  RuntimeCallTimerScope(heap()->isolate(),
+                        &RuntimeCallStats::GC_SlowAllocateRaw);
+  return RawSlowAllocateRaw(size_in_bytes);
+}
+
+HeapObject* CompactionSpace::SlowAllocateRaw(int size_in_bytes) {
+  return RawSlowAllocateRaw(size_in_bytes);
+}
+
+HeapObject* PagedSpace::RawSlowAllocateRaw(int size_in_bytes) {
+  // Allocation in this space has failed.
   DCHECK_GE(size_in_bytes, 0);
   const int kMaxPagesToSweep = 1;
-  // Allocation in this space has failed.
-
   MarkCompactCollector* collector = heap()->mark_compact_collector();
   // Sweeping is still in progress.
   if (collector->sweeping_in_progress()) {
diff --git a/src/heap/spaces.h b/src/heap/spaces.h
index 6bac44ee6d..604be74b3e 100644
--- a/src/heap/spaces.h
+++ b/src/heap/spaces.h
@@ -2183,7 +2183,9 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
                                  int size_in_bytes);
 
   // Slow path of AllocateRaw. This function is space-dependent.
-  MUST_USE_RESULT HeapObject* SlowAllocateRaw(int size_in_bytes);
+  MUST_USE_RESULT virtual HeapObject* SlowAllocateRaw(int size_in_bytes);
+
+  MUST_USE_RESULT HeapObject* RawSlowAllocateRaw(int size_in_bytes);
 
   size_t area_size_;
 
@@ -2742,6 +2744,8 @@ class V8_EXPORT_PRIVATE CompactionSpace : public PagedSpace {
 
   MUST_USE_RESULT HeapObject* SweepAndRetryAllocation(
       int size_in_bytes) override;
+
+  MUST_USE_RESULT HeapObject* SlowAllocateRaw(int size_in_bytes) override;
 };
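
For readers unfamiliar with the accounting primitives used in this patch, the following is a minimal, standalone C++ sketch of the pattern it applies: an RAII scope object placed at the top of a GC-related path that charges one call and the elapsed time to a named counter. The names SimpleCallStats, ScopedCallTimer, and kGC_SlowAllocateRaw are illustrative stand-ins only, not V8's actual RuntimeCallStats / RuntimeCallTimerScope / VMState API.

// Sketch of the scoped-accounting pattern (hypothetical names, not V8 API).
#include <array>
#include <chrono>
#include <cstdio>

enum CounterId { kGC_SlowAllocateRaw, kGC_IncrementalMarkingJob, kNumCounters };

struct SimpleCallStats {
  std::array<int, kNumCounters> calls{};
  std::array<std::chrono::nanoseconds, kNumCounters> time{};
};

// Entering the scope records the start time; leaving it attributes the
// elapsed time and one call to the chosen counter, mirroring how a
// runtime-call timer scope charges work to a stats entry.
class ScopedCallTimer {
 public:
  ScopedCallTimer(SimpleCallStats* stats, CounterId id)
      : stats_(stats), id_(id), start_(std::chrono::steady_clock::now()) {}
  ~ScopedCallTimer() {
    stats_->calls[id_]++;
    stats_->time[id_] += std::chrono::duration_cast<std::chrono::nanoseconds>(
        std::chrono::steady_clock::now() - start_);
  }

 private:
  SimpleCallStats* stats_;
  CounterId id_;
  std::chrono::steady_clock::time_point start_;
};

SimpleCallStats g_stats;

// Stand-in for a slow allocation path: the timer is the first statement, so
// the entire slow path is accounted to kGC_SlowAllocateRaw.
void SlowAllocateRaw() {
  ScopedCallTimer timer(&g_stats, kGC_SlowAllocateRaw);
  // ... slow-path work would happen here ...
}

int main() {
  SlowAllocateRaw();
  std::printf("GC_SlowAllocateRaw calls: %d\n",
              g_stats.calls[kGC_SlowAllocateRaw]);
  return 0;
}

Placing the scope at the very top of SlowAllocateRaw and Task::RunInternal, as the patch does, ensures the whole slow path or marking step is attributed to the new GC_SlowAllocateRaw and GC_IncrementalMarkingJob counters, even on early returns.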