[heap] Refactor Heap::PerformGarbageCollection

This ensures that PerformGarbageCollection runs entirely within a
LocalHeap safepoint. The external GC prologue and epilogue callbacks,
which may trigger GC and run JS, are moved outside of the safepoint,
into Heap::CollectGarbage.
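
In outline: the explicit safepoint()->Start() / safepoint()->End() pair is
replaced by an RAII SafepointScope held in a base::Optional, so the safepoint
spans exactly the body of PerformGarbageCollection and is released on every
exit path. Below is a minimal, self-contained sketch of that pattern; it uses
std::optional as a stand-in for V8's base::Optional, and the SafepointScope
here is an illustrative mock, not V8's class:

    #include <iostream>
    #include <optional>

    // Illustrative stand-in for V8's SafepointScope: constructing it parks
    // all LocalHeap threads; destroying it resumes them.
    class SafepointScope {
     public:
      SafepointScope() { std::cout << "safepoint entered\n"; }
      ~SafepointScope() { std::cout << "safepoint left\n"; }
    };

    size_t PerformGarbageCollection(bool local_heaps_enabled) {
      // Mirrors `optional_safepoint_scope.emplace(this)` in the patch: the
      // scope is constructed only when local heaps are enabled, and its
      // destructor runs on every return path; an explicit Start()/End()
      // pair, by contrast, could be skipped by a future early return.
      std::optional<SafepointScope> optional_safepoint_scope;
      if (local_heaps_enabled) optional_safepoint_scope.emplace();

      std::cout << "collecting inside the safepoint\n";
      return 0;  // number of freed global handles (none in this toy)
    }

    int main() {
      PerformGarbageCollection(true);
      return 0;
    }

Moving the prologue/epilogue callbacks out of this scope is consistent with
the DisallowJavascriptExecution guard at the top of PerformGarbageCollection:
nothing that can run JS may execute while the safepoint is held.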

Bug: v8:10315

Change-Id: I5c0081f0791ba5d27152c119a2a0d454056656d3
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2190756
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67736}
Author: Ulan Degenbaev <ulan@chromium.org>
Date: 2020-05-08 22:21:29 +02:00
Committed by: Commit Bot
Commit: d0dbee4772 (parent 3df36990b3)

2 changed files with 99 additions and 119 deletions

diff --git a/src/heap/heap.cc b/src/heap/heap.cc

@@ -1555,7 +1555,8 @@ bool Heap::CollectGarbage(AllocationSpace space,
     }
   }
 
-  bool next_gc_likely_to_collect_more = false;
+  size_t freed_global_handles = 0;
+
   size_t committed_memory_before = 0;
 
   if (collector == MARK_COMPACTOR) {
@@ -1581,18 +1582,69 @@ bool Heap::CollectGarbage(AllocationSpace space,
     OptionalTimedHistogramScope histogram_timer_priority_scope(
         gc_type_priority_timer, isolate_, mode);
 
-    next_gc_likely_to_collect_more =
-        PerformGarbageCollection(collector, gc_callback_flags);
-    if (collector == MARK_COMPACTOR || collector == SCAVENGER) {
-      tracer()->RecordGCPhasesHistograms(gc_type_timer);
+    if (!IsYoungGenerationCollector(collector)) {
+      PROFILE(isolate_, CodeMovingGCEvent());
+    }
+
+    GCType gc_type = collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact
+                                                 : kGCTypeScavenge;
+    {
+      GCCallbacksScope scope(this);
+      // Temporary override any embedder stack state as callbacks may create
+      // their own state on the stack and recursively trigger GC.
+      EmbedderStackStateScope embedder_scope(
+          local_embedder_heap_tracer(),
+          EmbedderHeapTracer::EmbedderStackState::kUnknown);
+      if (scope.CheckReenter()) {
+        AllowHeapAllocation allow_allocation;
+        AllowJavascriptExecution allow_js(isolate());
+        TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
+        VMState<EXTERNAL> state(isolate_);
+        HandleScope handle_scope(isolate_);
+        CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags);
+      }
+    }
+
+    if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
+      tp_heap_->CollectGarbage();
+    } else {
+      freed_global_handles +=
+          PerformGarbageCollection(collector, gc_callback_flags);
     }
 
     // Clear is_current_gc_forced now that the current GC is complete. Do this
     // before GarbageCollectionEpilogue() since that could trigger another
     // unforced GC.
     is_current_gc_forced_ = false;
+
+    {
+      TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);
+      gc_post_processing_depth_++;
+      {
+        AllowHeapAllocation allow_allocation;
+        AllowJavascriptExecution allow_js(isolate());
+        freed_global_handles +=
+            isolate_->global_handles()->PostGarbageCollectionProcessing(
+                collector, gc_callback_flags);
+      }
+      gc_post_processing_depth_--;
+    }
+
+    {
+      GCCallbacksScope scope(this);
+      if (scope.CheckReenter()) {
+        AllowHeapAllocation allow_allocation;
+        AllowJavascriptExecution allow_js(isolate());
+        TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
+        VMState<EXTERNAL> state(isolate_);
+        HandleScope handle_scope(isolate_);
+        CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
+      }
+    }
+
+    if (collector == MARK_COMPACTOR || collector == SCAVENGER) {
+      tracer()->RecordGCPhasesHistograms(gc_type_timer);
+    }
   }
 
   GarbageCollectionEpilogue();
   if (collector == MARK_COMPACTOR && FLAG_track_detached_contexts) {
     isolate()->CheckDetachedContextsAfterGC();
@@ -1639,7 +1691,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
         kGCCallbackScheduleIdleGarbageCollection);
   }
 
-  return next_gc_likely_to_collect_more;
+  return freed_global_handles > 0;
 }
@@ -2004,61 +2056,22 @@ void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
   tracer()->AddSurvivalRatio(survival_rate);
 }
 
-bool Heap::PerformGarbageCollection(
+size_t Heap::PerformGarbageCollection(
     GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
   DisallowJavascriptExecution no_js(isolate());
 
-  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
-    return tp_heap_->CollectGarbage();
-  }
-
-  size_t freed_global_handles = 0;
-
-  if (!IsYoungGenerationCollector(collector)) {
-    PROFILE(isolate_, CodeMovingGCEvent());
-  }
-
-#ifdef VERIFY_HEAP
-  if (FLAG_verify_heap) {
-    VerifyStringTable(this->isolate());
-  }
-#endif
-
-  GCType gc_type =
-      collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;
-
-  {
-    GCCallbacksScope scope(this);
-    // Temporary override any embedder stack state as callbacks may create their
-    // own state on the stack and recursively trigger GC.
-    EmbedderStackStateScope embedder_scope(
-        local_embedder_heap_tracer(),
-        EmbedderHeapTracer::EmbedderStackState::kUnknown);
-    if (scope.CheckReenter()) {
-      AllowHeapAllocation allow_allocation;
-      AllowJavascriptExecution allow_js(isolate());
-      TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
-      VMState<EXTERNAL> state(isolate_);
-      HandleScope handle_scope(isolate_);
-      CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags);
-    }
-  }
+  base::Optional<SafepointScope> optional_safepoint_scope;
 
   if (FLAG_local_heaps) {
-    safepoint()->Start();
+    optional_safepoint_scope.emplace(this);
     // Fill and reset all LABs
     safepoint()->IterateLocalHeaps(
         [](LocalHeap* local_heap) { local_heap->FreeLinearAllocationArea(); });
   }
 
+  tracer()->StartInSafepoint();
+
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     Verify();
   }
 #endif
 
-  tracer()->StartInSafepoint();
-
   EnsureFromSpaceIsCommitted();
@@ -2067,34 +2080,13 @@ bool Heap::PerformGarbageCollection(
   switch (collector) {
     case MARK_COMPACTOR:
-      UpdateOldGenerationAllocationCounter();
-      // Perform mark-sweep with optional compaction.
       MarkCompact();
-      old_generation_size_configured_ = true;
-      // This should be updated before PostGarbageCollectionProcessing, which
-      // can cause another GC. Take into account the objects promoted during
-      // GC.
-      old_generation_allocation_counter_at_last_gc_ +=
-          static_cast<size_t>(promoted_objects_size_);
-      old_generation_size_at_last_gc_ = OldGenerationSizeOfObjects();
-      global_memory_at_last_gc_ = GlobalSizeOfObjects();
       break;
     case MINOR_MARK_COMPACTOR:
       MinorMarkCompact();
       break;
     case SCAVENGER:
-      if ((fast_promotion_mode_ &&
-           CanExpandOldGeneration(new_space()->Size() +
-                                  new_lo_space()->Size()))) {
-        tracer()->NotifyYoungGenerationHandling(
-            YoungGenerationHandling::kFastPromotionDuringScavenge);
-        EvacuateYoungGeneration();
-      } else {
-        tracer()->NotifyYoungGenerationHandling(
-            YoungGenerationHandling::kRegularScavenge);
-        Scavenge();
-      }
+      Scavenge();
       break;
   }
@@ -2116,6 +2108,13 @@ bool Heap::PerformGarbageCollection(
   isolate_->counters()->objs_since_last_young()->Set(0);
 
+  isolate_->eternal_handles()->PostGarbageCollectionProcessing();
+
+  // Update relocatables.
+  Relocatable::PostGarbageCollectionProcessing(isolate_);
+
+  size_t freed_global_handles;
+
   {
     TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);
     // First round weak callbacks are not supposed to allocate and trigger
@@ -2146,45 +2145,7 @@ bool Heap::PerformGarbageCollection(
   tracer()->StopInSafepoint();
 
-  if (FLAG_local_heaps) safepoint()->End();
-
-  {
-    TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);
-    gc_post_processing_depth_++;
-    {
-      AllowHeapAllocation allow_allocation;
-      AllowJavascriptExecution allow_js(isolate());
-      freed_global_handles +=
-          isolate_->global_handles()->PostGarbageCollectionProcessing(
-              collector, gc_callback_flags);
-    }
-    gc_post_processing_depth_--;
-  }
-
-  isolate_->eternal_handles()->PostGarbageCollectionProcessing();
-
-  // Update relocatables.
-  Relocatable::PostGarbageCollectionProcessing(isolate_);
-
-  {
-    GCCallbacksScope scope(this);
-    if (scope.CheckReenter()) {
-      AllowHeapAllocation allow_allocation;
-      AllowJavascriptExecution allow_js(isolate());
-      TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
-      VMState<EXTERNAL> state(isolate_);
-      HandleScope handle_scope(isolate_);
-      CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
-    }
-  }
-
-#ifdef VERIFY_HEAP
-  if (FLAG_verify_heap) {
-    VerifyStringTable(this->isolate());
-  }
-#endif
-
-  return freed_global_handles > 0;
+  return freed_global_handles;
 }
 
 void Heap::RecomputeLimits(GarbageCollector collector) {
@@ -2296,10 +2257,11 @@ void Heap::MarkCompact() {
   LOG(isolate_, ResourceEvent("markcompact", "begin"));
 
-  uint64_t size_of_objects_before_gc = SizeOfObjects();
-
   CodeSpaceMemoryModificationScope code_modifcation(this);
 
+  UpdateOldGenerationAllocationCounter();
+  uint64_t size_of_objects_before_gc = SizeOfObjects();
+
   mark_compact_collector()->Prepare();
 
   ms_count_++;
@@ -2315,6 +2277,14 @@ void Heap::MarkCompact() {
   if (FLAG_allocation_site_pretenuring) {
     EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);
   }
+
+  old_generation_size_configured_ = true;
+  // This should be updated before PostGarbageCollectionProcessing, which
+  // can cause another GC. Take into account the objects promoted during
+  // GC.
+  old_generation_allocation_counter_at_last_gc_ +=
+      static_cast<size_t>(promoted_objects_size_);
+  old_generation_size_at_last_gc_ = OldGenerationSizeOfObjects();
+  global_memory_at_last_gc_ = GlobalSizeOfObjects();
 }
 
 void Heap::MinorMarkCompact() {
@@ -2429,6 +2399,16 @@ void Heap::EvacuateYoungGeneration() {
 }
 
 void Heap::Scavenge() {
+  if ((fast_promotion_mode_ &&
+       CanExpandOldGeneration(new_space()->Size() + new_lo_space()->Size()))) {
+    tracer()->NotifyYoungGenerationHandling(
+        YoungGenerationHandling::kFastPromotionDuringScavenge);
+    EvacuateYoungGeneration();
+    return;
+  }
+  tracer()->NotifyYoungGenerationHandling(
+      YoungGenerationHandling::kRegularScavenge);
+
   TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE);
   base::MutexGuard guard(relocation_mutex());
   ConcurrentMarking::PauseScope pause_scope(concurrent_marking());
@@ -4240,6 +4220,7 @@ void Heap::Verify() {
   lo_space_->Verify(isolate());
   code_lo_space_->Verify(isolate());
   new_lo_space_->Verify(isolate());
+  VerifyStringTable(isolate());
 }
 
 void Heap::VerifyReadOnlyHeap() {

diff --git a/src/heap/heap.h b/src/heap/heap.h

@@ -1635,10 +1635,9 @@ class Heap {
   // over all objects. May cause a GC.
   void MakeHeapIterable();
 
-  // Performs garbage collection
-  // Returns whether there is a chance another major GC could
-  // collect more garbage.
-  bool PerformGarbageCollection(
+  // Performs garbage collection in a safepoint.
+  // Returns the number of freed global handles.
+  size_t PerformGarbageCollection(
       GarbageCollector collector,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
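
For reference, the visible contract change, condensed from the hunks above
(no new code): PerformGarbageCollection no longer answers "is another GC
likely to collect more?" itself. It returns the raw count of freed global
handles, and its single caller, Heap::CollectGarbage, derives the old
boolean from that count:

    // Before:
    bool PerformGarbageCollection(...);    // ended with: return freed_global_handles > 0;

    // After:
    size_t PerformGarbageCollection(...);  // ends with:  return freed_global_handles;
    // In Heap::CollectGarbage:
    //   freed_global_handles += PerformGarbageCollection(collector, gc_callback_flags);
    //   ...
    //   return freed_global_handles > 0;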