diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index e2230e42e4..d6f2383e4d 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -1555,7 +1555,8 @@ bool Heap::CollectGarbage(AllocationSpace space,
     }
   }
 
-  bool next_gc_likely_to_collect_more = false;
+  size_t freed_global_handles = 0;
+
   size_t committed_memory_before = 0;
 
   if (collector == MARK_COMPACTOR) {
@@ -1581,18 +1582,69 @@ bool Heap::CollectGarbage(AllocationSpace space,
     OptionalTimedHistogramScope histogram_timer_priority_scope(
         gc_type_priority_timer, isolate_, mode);
 
-    next_gc_likely_to_collect_more =
-        PerformGarbageCollection(collector, gc_callback_flags);
+    if (!IsYoungGenerationCollector(collector)) {
+      PROFILE(isolate_, CodeMovingGCEvent());
+    }
+
+    GCType gc_type = collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact
+                                                 : kGCTypeScavenge;
+    {
+      GCCallbacksScope scope(this);
+      // Temporary override any embedder stack state as callbacks may create
+      // their own state on the stack and recursively trigger GC.
+      EmbedderStackStateScope embedder_scope(
+          local_embedder_heap_tracer(),
+          EmbedderHeapTracer::EmbedderStackState::kUnknown);
+      if (scope.CheckReenter()) {
+        AllowHeapAllocation allow_allocation;
+        AllowJavascriptExecution allow_js(isolate());
+        TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
+        VMState<EXTERNAL> state(isolate_);
+        HandleScope handle_scope(isolate_);
+        CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags);
+      }
+    }
+
+    if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
+      tp_heap_->CollectGarbage();
+    } else {
+      freed_global_handles +=
+          PerformGarbageCollection(collector, gc_callback_flags);
+    }
+    // Clear is_current_gc_forced now that the current GC is complete. Do this
+    // before GarbageCollectionEpilogue() since that could trigger another
+    // unforced GC.
+    is_current_gc_forced_ = false;
+
+    {
+      TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);
+      gc_post_processing_depth_++;
+      {
+        AllowHeapAllocation allow_allocation;
+        AllowJavascriptExecution allow_js(isolate());
+        freed_global_handles +=
+            isolate_->global_handles()->PostGarbageCollectionProcessing(
+                collector, gc_callback_flags);
+      }
+      gc_post_processing_depth_--;
+    }
+
+    {
+      GCCallbacksScope scope(this);
+      if (scope.CheckReenter()) {
+        AllowHeapAllocation allow_allocation;
+        AllowJavascriptExecution allow_js(isolate());
+        TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
+        VMState<EXTERNAL> state(isolate_);
+        HandleScope handle_scope(isolate_);
+        CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
+      }
+    }
 
     if (collector == MARK_COMPACTOR || collector == SCAVENGER) {
       tracer()->RecordGCPhasesHistograms(gc_type_timer);
     }
   }
 
-  // Clear is_current_gc_forced now that the current GC is complete. Do this
-  // before GarbageCollectionEpilogue() since that could trigger another
-  // unforced GC.
-  is_current_gc_forced_ = false;
-
   GarbageCollectionEpilogue();
   if (collector == MARK_COMPACTOR && FLAG_track_detached_contexts) {
     isolate()->CheckDetachedContextsAfterGC();
   }
@@ -1639,7 +1691,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
         kGCCallbackScheduleIdleGarbageCollection);
   }
 
-  return next_gc_likely_to_collect_more;
+  return freed_global_handles > 0;
 }
 
@@ -2004,61 +2056,22 @@ void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
   tracer()->AddSurvivalRatio(survival_rate);
 }
 
-bool Heap::PerformGarbageCollection(
+size_t Heap::PerformGarbageCollection(
     GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
   DisallowJavascriptExecution no_js(isolate());
-
-  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
-    return tp_heap_->CollectGarbage();
-  }
-
-  size_t freed_global_handles = 0;
-
-  if (!IsYoungGenerationCollector(collector)) {
-    PROFILE(isolate_, CodeMovingGCEvent());
-  }
-
-#ifdef VERIFY_HEAP
-  if (FLAG_verify_heap) {
-    VerifyStringTable(this->isolate());
-  }
-#endif
-
-  GCType gc_type =
-      collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;
-
-  {
-    GCCallbacksScope scope(this);
-    // Temporary override any embedder stack state as callbacks may create their
-    // own state on the stack and recursively trigger GC.
-    EmbedderStackStateScope embedder_scope(
-        local_embedder_heap_tracer(),
-        EmbedderHeapTracer::EmbedderStackState::kUnknown);
-    if (scope.CheckReenter()) {
-      AllowHeapAllocation allow_allocation;
-      AllowJavascriptExecution allow_js(isolate());
-      TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
-      VMState<EXTERNAL> state(isolate_);
-      HandleScope handle_scope(isolate_);
-      CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags);
-    }
-  }
-
+  base::Optional<SafepointScope> optional_safepoint_scope;
   if (FLAG_local_heaps) {
-    safepoint()->Start();
-
+    optional_safepoint_scope.emplace(this);
     // Fill and reset all LABs
     safepoint()->IterateLocalHeaps(
         [](LocalHeap* local_heap) { local_heap->FreeLinearAllocationArea(); });
   }
-
-  tracer()->StartInSafepoint();
-
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     Verify();
   }
 #endif
+  tracer()->StartInSafepoint();
 
   EnsureFromSpaceIsCommitted();
 
@@ -2067,34 +2080,13 @@ bool Heap::PerformGarbageCollection(
 
   switch (collector) {
     case MARK_COMPACTOR:
-      UpdateOldGenerationAllocationCounter();
-      // Perform mark-sweep with optional compaction.
       MarkCompact();
-      old_generation_size_configured_ = true;
-      // This should be updated before PostGarbageCollectionProcessing, which
-      // can cause another GC. Take into account the objects promoted during
-      // GC.
-      old_generation_allocation_counter_at_last_gc_ +=
-          static_cast<size_t>(promoted_objects_size_);
-      old_generation_size_at_last_gc_ = OldGenerationSizeOfObjects();
-      global_memory_at_last_gc_ = GlobalSizeOfObjects();
       break;
     case MINOR_MARK_COMPACTOR:
       MinorMarkCompact();
       break;
     case SCAVENGER:
-      if ((fast_promotion_mode_ &&
-           CanExpandOldGeneration(new_space()->Size() +
-                                  new_lo_space()->Size()))) {
-        tracer()->NotifyYoungGenerationHandling(
-            YoungGenerationHandling::kFastPromotionDuringScavenge);
-        EvacuateYoungGeneration();
-      } else {
-        tracer()->NotifyYoungGenerationHandling(
-            YoungGenerationHandling::kRegularScavenge);
-
-        Scavenge();
-      }
+      Scavenge();
       break;
   }
 
@@ -2116,6 +2108,13 @@ bool Heap::PerformGarbageCollection(
 
   isolate_->counters()->objs_since_last_young()->Set(0);
 
+  isolate_->eternal_handles()->PostGarbageCollectionProcessing();
+
+  // Update relocatables.
+  Relocatable::PostGarbageCollectionProcessing(isolate_);
+
+  size_t freed_global_handles;
+
   {
     TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);
     // First round weak callbacks are not supposed to allocate and trigger
@@ -2146,45 +2145,7 @@ bool Heap::PerformGarbageCollection(
 
   tracer()->StopInSafepoint();
 
-  if (FLAG_local_heaps) safepoint()->End();
-
-  {
-    TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);
-    gc_post_processing_depth_++;
-    {
-      AllowHeapAllocation allow_allocation;
-      AllowJavascriptExecution allow_js(isolate());
-      freed_global_handles +=
-          isolate_->global_handles()->PostGarbageCollectionProcessing(
-              collector, gc_callback_flags);
-    }
-    gc_post_processing_depth_--;
-  }
-
-  isolate_->eternal_handles()->PostGarbageCollectionProcessing();
-
-  // Update relocatables.
-  Relocatable::PostGarbageCollectionProcessing(isolate_);
-
-  {
-    GCCallbacksScope scope(this);
-    if (scope.CheckReenter()) {
-      AllowHeapAllocation allow_allocation;
-      AllowJavascriptExecution allow_js(isolate());
-      TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
-      VMState<EXTERNAL> state(isolate_);
-      HandleScope handle_scope(isolate_);
-      CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
-    }
-  }
-
-#ifdef VERIFY_HEAP
-  if (FLAG_verify_heap) {
-    VerifyStringTable(this->isolate());
-  }
-#endif
-
-  return freed_global_handles > 0;
+  return freed_global_handles;
 }
 
 void Heap::RecomputeLimits(GarbageCollector collector) {
@@ -2296,10 +2257,11 @@ void Heap::MarkCompact() {
 
   LOG(isolate_, ResourceEvent("markcompact", "begin"));
 
-  uint64_t size_of_objects_before_gc = SizeOfObjects();
-
   CodeSpaceMemoryModificationScope code_modifcation(this);
 
+  UpdateOldGenerationAllocationCounter();
+  uint64_t size_of_objects_before_gc = SizeOfObjects();
+
   mark_compact_collector()->Prepare();
 
   ms_count_++;
@@ -2315,6 +2277,14 @@ void Heap::MarkCompact() {
   if (FLAG_allocation_site_pretenuring) {
     EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);
   }
+  old_generation_size_configured_ = true;
+  // This should be updated before PostGarbageCollectionProcessing, which
+  // can cause another GC. Take into account the objects promoted during
+  // GC.
+  old_generation_allocation_counter_at_last_gc_ +=
+      static_cast<size_t>(promoted_objects_size_);
+  old_generation_size_at_last_gc_ = OldGenerationSizeOfObjects();
+  global_memory_at_last_gc_ = GlobalSizeOfObjects();
 }
 
 void Heap::MinorMarkCompact() {
@@ -2429,6 +2399,16 @@ void Heap::EvacuateYoungGeneration() {
 }
 
 void Heap::Scavenge() {
+  if ((fast_promotion_mode_ &&
+       CanExpandOldGeneration(new_space()->Size() + new_lo_space()->Size()))) {
+    tracer()->NotifyYoungGenerationHandling(
+        YoungGenerationHandling::kFastPromotionDuringScavenge);
+    EvacuateYoungGeneration();
+    return;
+  }
+  tracer()->NotifyYoungGenerationHandling(
+      YoungGenerationHandling::kRegularScavenge);
+
   TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE);
   base::MutexGuard guard(relocation_mutex());
   ConcurrentMarking::PauseScope pause_scope(concurrent_marking());
@@ -4240,6 +4220,7 @@ void Heap::Verify() {
   lo_space_->Verify(isolate());
   code_lo_space_->Verify(isolate());
   new_lo_space_->Verify(isolate());
+  VerifyStringTable(isolate());
 }
 
 void Heap::VerifyReadOnlyHeap() {
diff --git a/src/heap/heap.h b/src/heap/heap.h
index 47e2b931f8..888d174c02 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -1635,10 +1635,9 @@ class Heap {
   // over all objects. May cause a GC.
   void MakeHeapIterable();
 
-  // Performs garbage collection
-  // Returns whether there is a chance another major GC could
-  // collect more garbage.
-  bool PerformGarbageCollection(
+  // Performs garbage collection in a safepoint.
+  // Returns the number of freed global handles.
+  size_t PerformGarbageCollection(
       GarbageCollector collector,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
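
Reviewer note (not part of the patch): the embedder-visible effect of moving CallGCPrologueCallbacks()/CallGCEpilogueCallbacks() from PerformGarbageCollection() into CollectGarbage() is that callbacks registered through the public API now run outside the safepoint, wrapped in AllowHeapAllocation/AllowJavascriptExecution, and the epilogue additionally runs after global-handle post-processing. A minimal standalone embedder exercising this, assuming the stock v8.h/libplatform.h public API of this era (the callback names are illustrative, not V8 identifiers):

#include <libplatform/libplatform.h>
#include <v8.h>

#include <iostream>
#include <memory>

// Plain-function GC callbacks matching v8::Isolate::GCCallback.
static void OnGCPrologue(v8::Isolate* isolate, v8::GCType type,
                         v8::GCCallbackFlags flags) {
  // With this patch the prologue runs in CollectGarbage(), before the
  // safepoint inside PerformGarbageCollection(), so allocation and JS
  // execution are permitted here.
  std::cout << "GC prologue, type=" << type << "\n";
}

static void OnGCEpilogue(v8::Isolate* isolate, v8::GCType type,
                         v8::GCCallbackFlags flags) {
  // The epilogue now also runs after
  // GlobalHandles::PostGarbageCollectionProcessing().
  std::cout << "GC epilogue, type=" << type << "\n";
}

int main(int argc, char* argv[]) {
  v8::V8::InitializeICUDefaultLocation(argv[0]);
  v8::V8::InitializeExternalStartupData(argv[0]);
  std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform();
  v8::V8::InitializePlatform(platform.get());
  v8::V8::Initialize();

  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator =
      v8::ArrayBuffer::Allocator::NewDefaultAllocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);

  // kGCTypeAll is the default filter; spelled out for clarity.
  isolate->AddGCPrologueCallback(OnGCPrologue, v8::kGCTypeAll);
  isolate->AddGCEpilogueCallback(OnGCEpilogue, v8::kGCTypeAll);

  // Nudge V8 into a full GC so both callbacks fire.
  isolate->LowMemoryNotification();

  isolate->Dispose();
  v8::V8::Dispose();
  v8::V8::ShutdownPlatform();
  delete create_params.array_buffer_allocator;
  return 0;
}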
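
Reviewer note on base::Optional<SafepointScope>: the patch replaces the paired safepoint()->Start()/safepoint()->End() calls with an RAII scope, so the safepoint is released on every path out of PerformGarbageCollection(); the same early-return-friendly shape shows up in the restructured Scavenge(). A standalone illustration of the pattern in plain C++17 (std::optional standing in for base::Optional; Safepoint and SafepointScope here are toy stand-ins, not the V8 classes):

#include <iostream>
#include <optional>

class Safepoint {
 public:
  void Start() { std::cout << "safepoint entered\n"; }
  void End() { std::cout << "safepoint left\n"; }
};

// RAII wrapper: the constructor enters the safepoint and the destructor
// leaves it, so no explicit End() call can be forgotten.
class SafepointScope {
 public:
  explicit SafepointScope(Safepoint* safepoint) : safepoint_(safepoint) {
    safepoint_->Start();
  }
  ~SafepointScope() { safepoint_->End(); }

 private:
  Safepoint* const safepoint_;
};

void PerformCollection(Safepoint* safepoint, bool local_heaps_enabled) {
  // emplace() enters the safepoint only when the flag is on, mirroring the
  // FLAG_local_heaps check; the optional's destructor leaves it on every
  // exit path from this function, including early returns.
  std::optional<SafepointScope> optional_safepoint_scope;
  if (local_heaps_enabled) optional_safepoint_scope.emplace(safepoint);

  std::cout << "collecting\n";
  if (!local_heaps_enabled) return;  // early return is safe either way
  std::cout << "post-processing inside safepoint\n";
}

int main() {
  Safepoint safepoint;
  PerformCollection(&safepoint, true);
  PerformCollection(&safepoint, false);
  return 0;
}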