diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index c220b451c5..dbf0c8ed9a 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -265,7 +265,7 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
   }
 
   if (incremental_marking()->NeedsFinalization() &&
-      OldGenerationSpaceAvailable() == 0) {
+      AllocationLimitOvershotByLargeMargin()) {
     *reason = "Incremental marking needs finalization";
     return MARK_COMPACTOR;
   }
@@ -5320,7 +5320,7 @@ bool Heap::ShouldExpandOldGenerationOnSlowAllocation() {
   if (ShouldOptimizeForMemoryUsage()) return false;
 
   if (incremental_marking()->NeedsFinalization()) {
-    return false;
+    return !AllocationLimitOvershotByLargeMargin();
   }
 
   if (incremental_marking()->IsStopped() &&
diff --git a/src/heap/heap.h b/src/heap/heap.h
index f2123d4b9f..4d408b6e46 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -1823,6 +1823,19 @@ class Heap {
             static_cast<intptr_t>(PromotedTotalSize());
   }
 
+  // We allow incremental marking to overshoot the allocation limit for
+  // performance reasons. If the overshoot is too large then we are more
+  // eager to finalize incremental marking.
+  inline bool AllocationLimitOvershotByLargeMargin() {
+    if (old_generation_allocation_limit_ >= PromotedTotalSize()) return false;
+    uint64_t overshoot = PromotedTotalSize() - old_generation_allocation_limit_;
+    // Overshoot margin is 50% of allocation limit or half-way to the max heap.
+    uint64_t margin =
+        Min(old_generation_allocation_limit_ / 2,
+            (max_old_generation_size_ - old_generation_allocation_limit_) / 2);
+    return overshoot >= margin;
+  }
+
   void UpdateTotalGCTime(double duration);
 
   bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
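
The new predicate can be exercised in isolation to see where the cut-off lands. The sketch below is a minimal standalone rewrite of the heuristic, not V8 code: the HeapLimits struct, its field names, and the sample sizes are assumptions made for illustration, and std::min stands in for V8's Min helper. With a 128 MB limit and a 256 MB maximum old generation, the margin works out to 64 MB, so marking keeps running at 150 MB of promoted data but is finalized once promotion reaches 200 MB.

```cpp
// Standalone sketch of the overshoot heuristic from the patch above.
// Names and numbers are illustrative assumptions, not V8 internals.
#include <algorithm>
#include <cstdint>
#include <iostream>

struct HeapLimits {
  uint64_t promoted_total_size;              // bytes currently promoted
  uint64_t old_generation_allocation_limit;  // current soft limit
  uint64_t max_old_generation_size;          // hard cap of the old generation
};

// True when the limit was overshot by at least min(limit / 2, (max - limit) / 2).
bool AllocationLimitOvershotByLargeMargin(const HeapLimits& h) {
  if (h.old_generation_allocation_limit >= h.promoted_total_size) return false;
  uint64_t overshoot =
      h.promoted_total_size - h.old_generation_allocation_limit;
  uint64_t margin = std::min(
      h.old_generation_allocation_limit / 2,
      (h.max_old_generation_size - h.old_generation_allocation_limit) / 2);
  return overshoot >= margin;
}

int main() {
  // Hypothetical sizes: 128 MB limit, 256 MB max heap -> margin = 64 MB.
  HeapLimits h{0, 128u << 20, 256u << 20};
  h.promoted_total_size = 150u << 20;  // 22 MB over the limit: keep marking.
  std::cout << AllocationLimitOvershotByLargeMargin(h) << "\n";  // prints 0
  h.promoted_total_size = 200u << 20;  // 72 MB over the limit: finalize now.
  std::cout << AllocationLimitOvershotByLargeMargin(h) << "\n";  // prints 1
  return 0;
}
```

Taking the smaller of the two candidate margins means the tolerance never exceeds half the remaining headroom to the hard cap, so a heap close to its maximum size still forces finalization promptly.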