Reland "[heap] Refactor Heap::PerformGarbageCollection"

This reverts commit 6fba287c53.

Reason for revert: Not the cause.

Original change's description:
> Revert "[heap] Refactor Heap::PerformGarbageCollection"
>
> This reverts commit d0dbee4772.
>
> Reason for revert: Breaks MSVC bot (https://cr-buildbucket.appspot.com/build/8880517266974148704)
>
> Original change's description:
> > [heap] Refactor Heap::PerformGarbageCollection
> >
> > This ensures that PerformGarbageCollection runs completely within a
> > LocalHeap safepoint. External prologues and epilogues that may trigger
> > GC and run JS are moved outside.
> >
> > Bug: v8:10315
> >
> > Change-Id: I5c0081f0791ba5d27152c119a2a0d454056656d3
> > Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2190756
> > Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
> > Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
> > Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> > Cr-Commit-Position: refs/heads/master@{#67736}
>
> TBR=ulan@chromium.org,mlippautz@chromium.org,dinfuehr@chromium.org
>
> Change-Id: I62e62d3f4cd50a3e8f0037902f158baef68cb3b1
> No-Presubmit: true
> No-Tree-Checks: true
> No-Try: true
> Bug: v8:10315
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2195823
> Reviewed-by: Leszek Swirski <leszeks@chromium.org>
> Commit-Queue: Leszek Swirski <leszeks@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#67739}

TBR=ulan@chromium.org,mlippautz@chromium.org,leszeks@chromium.org,dinfuehr@chromium.org

Bug: v8:10315
Change-Id: I8b9046c51fd43ca48066250085f589f6aa81d5f5

# Reland without changes.

No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Change-Id: I8b9046c51fd43ca48066250085f589f6aa81d5f5
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2196301
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67743}
commit 040e832414
parent faa6d7ad76
src/heap/heap.cc (211 changed lines)
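What the relanded CL does, in one paragraph: the GC prologue/epilogue callbacks, the embedder stack-state override, and global-handle post-processing, all of which may allocate and re-enter JavaScript, now run in Heap::CollectGarbage, outside the collection itself, while Heap::PerformGarbageCollection only performs the collection inside a LocalHeap safepoint and returns the number of freed global handles. A minimal stand-alone C++ sketch of that control flow follows; every class and function in it is a simplified stand-in for illustration, not the real V8 API.

#include <cstddef>
#include <iostream>

// Stand-in for the RAII safepoint scope used by the CL: background
// (local-heap) threads are parked for the lifetime of this object.
struct SafepointScope {
  SafepointScope() { std::cout << "enter safepoint\n"; }
  ~SafepointScope() { std::cout << "leave safepoint\n"; }
};

class Heap {
 public:
  // Mirrors the new shape of Heap::CollectGarbage: everything that may run
  // JavaScript stays outside PerformGarbageCollection.
  bool CollectGarbage() {
    CallGCPrologueCallbacks();          // may allocate / run JS
    std::size_t freed = PerformGarbageCollection();
    PostGarbageCollectionProcessing();  // weak callbacks, may allocate / run JS
    CallGCEpilogueCallbacks();          // may allocate / run JS
    return freed > 0;                   // old boolean answer, derived from the count
  }

 private:
  // Mirrors the new Heap::PerformGarbageCollection: the whole body runs
  // inside the safepoint and reports how many global handles were freed.
  std::size_t PerformGarbageCollection() {
    SafepointScope safepoint;
    std::cout << "mark-compact or scavenge\n";
    return 1;  // pretend one global handle was freed
  }

  void CallGCPrologueCallbacks() { std::cout << "prologue callbacks\n"; }
  void CallGCEpilogueCallbacks() { std::cout << "epilogue callbacks\n"; }
  void PostGarbageCollectionProcessing() { std::cout << "weak-callback processing\n"; }
};

int main() {
  Heap heap;
  heap.CollectGarbage();
  return 0;
}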
src/heap/heap.cc:

@@ -1555,7 +1555,8 @@ bool Heap::CollectGarbage(AllocationSpace space,
     }
   }

-  bool next_gc_likely_to_collect_more = false;
+  size_t freed_global_handles = 0;
+
   size_t committed_memory_before = 0;

   if (collector == MARK_COMPACTOR) {
@@ -1581,18 +1582,69 @@ bool Heap::CollectGarbage(AllocationSpace space,
       OptionalTimedHistogramScope histogram_timer_priority_scope(
           gc_type_priority_timer, isolate_, mode);

-      next_gc_likely_to_collect_more =
-          PerformGarbageCollection(collector, gc_callback_flags);
+      if (!IsYoungGenerationCollector(collector)) {
+        PROFILE(isolate_, CodeMovingGCEvent());
+      }
+
+      GCType gc_type = collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact
+                                                   : kGCTypeScavenge;
+      {
+        GCCallbacksScope scope(this);
+        // Temporary override any embedder stack state as callbacks may create
+        // their own state on the stack and recursively trigger GC.
+        EmbedderStackStateScope embedder_scope(
+            local_embedder_heap_tracer(),
+            EmbedderHeapTracer::EmbedderStackState::kUnknown);
+        if (scope.CheckReenter()) {
+          AllowHeapAllocation allow_allocation;
+          AllowJavascriptExecution allow_js(isolate());
+          TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
+          VMState<EXTERNAL> state(isolate_);
+          HandleScope handle_scope(isolate_);
+          CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags);
+        }
+      }
+
+      if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
+        tp_heap_->CollectGarbage();
+      } else {
+        freed_global_handles +=
+            PerformGarbageCollection(collector, gc_callback_flags);
+      }
+      // Clear is_current_gc_forced now that the current GC is complete. Do this
+      // before GarbageCollectionEpilogue() since that could trigger another
+      // unforced GC.
+      is_current_gc_forced_ = false;
+
+      {
+        TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);
+        gc_post_processing_depth_++;
+        {
+          AllowHeapAllocation allow_allocation;
+          AllowJavascriptExecution allow_js(isolate());
+          freed_global_handles +=
+              isolate_->global_handles()->PostGarbageCollectionProcessing(
+                  collector, gc_callback_flags);
+        }
+        gc_post_processing_depth_--;
+      }
+
+      {
+        GCCallbacksScope scope(this);
+        if (scope.CheckReenter()) {
+          AllowHeapAllocation allow_allocation;
+          AllowJavascriptExecution allow_js(isolate());
+          TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
+          VMState<EXTERNAL> state(isolate_);
+          HandleScope handle_scope(isolate_);
+          CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
+        }
+      }
       if (collector == MARK_COMPACTOR || collector == SCAVENGER) {
         tracer()->RecordGCPhasesHistograms(gc_type_timer);
       }
     }

-    // Clear is_current_gc_forced now that the current GC is complete. Do this
-    // before GarbageCollectionEpilogue() since that could trigger another
-    // unforced GC.
-    is_current_gc_forced_ = false;
-
     GarbageCollectionEpilogue();
     if (collector == MARK_COMPACTOR && FLAG_track_detached_contexts) {
       isolate()->CheckDetachedContextsAfterGC();
@@ -1639,7 +1691,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
         kGCCallbackScheduleIdleGarbageCollection);
   }

-  return next_gc_likely_to_collect_more;
+  return freed_global_handles > 0;
 }

@@ -2004,61 +2056,22 @@ void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
   tracer()->AddSurvivalRatio(survival_rate);
 }

-bool Heap::PerformGarbageCollection(
+size_t Heap::PerformGarbageCollection(
     GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
   DisallowJavascriptExecution no_js(isolate());
-
-  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
-    return tp_heap_->CollectGarbage();
-  }
-
-  size_t freed_global_handles = 0;
-
-  if (!IsYoungGenerationCollector(collector)) {
-    PROFILE(isolate_, CodeMovingGCEvent());
-  }
-
-#ifdef VERIFY_HEAP
-  if (FLAG_verify_heap) {
-    VerifyStringTable(this->isolate());
-  }
-#endif
-
-  GCType gc_type =
-      collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;
-
-  {
-    GCCallbacksScope scope(this);
-    // Temporary override any embedder stack state as callbacks may create their
-    // own state on the stack and recursively trigger GC.
-    EmbedderStackStateScope embedder_scope(
-        local_embedder_heap_tracer(),
-        EmbedderHeapTracer::EmbedderStackState::kUnknown);
-    if (scope.CheckReenter()) {
-      AllowHeapAllocation allow_allocation;
-      AllowJavascriptExecution allow_js(isolate());
-      TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
-      VMState<EXTERNAL> state(isolate_);
-      HandleScope handle_scope(isolate_);
-      CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags);
-    }
-  }
-
+  base::Optional<SafepointScope> optional_safepoint_scope;
   if (FLAG_local_heaps) {
-    safepoint()->Start();
-
+    optional_safepoint_scope.emplace(this);
     // Fill and reset all LABs
     safepoint()->IterateLocalHeaps(
         [](LocalHeap* local_heap) { local_heap->FreeLinearAllocationArea(); });
   }
-
-  tracer()->StartInSafepoint();
-
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     Verify();
   }
 #endif
+  tracer()->StartInSafepoint();

   EnsureFromSpaceIsCommitted();

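A note on the safepoint change in the hunk above: the explicit safepoint()->Start() call (paired with the safepoint()->End() that a later hunk removes) is replaced by a base::Optional<SafepointScope> that is emplace()d only when FLAG_local_heaps is enabled and destroyed when PerformGarbageCollection returns, so the whole function body stays inside the safepoint. A small sketch of that conditional-RAII pattern, with std::optional and invented names standing in for the V8 types:

#include <iostream>
#include <optional>

// Stand-in for V8's SafepointScope: enter on construction, leave on destruction.
struct SafepointScope {
  SafepointScope() { std::cout << "safepoint entered\n"; }  // roughly safepoint()->Start()
  ~SafepointScope() { std::cout << "safepoint left\n"; }    // roughly safepoint()->End()
};

void PerformGarbageCollection(bool local_heaps_enabled) {
  std::optional<SafepointScope> optional_safepoint_scope;
  if (local_heaps_enabled) optional_safepoint_scope.emplace();
  std::cout << "collecting\n";
}  // if engaged, the scope ends exactly here, covering the whole body

int main() {
  PerformGarbageCollection(true);   // runs inside a safepoint
  PerformGarbageCollection(false);  // no safepoint, e.g. local heaps disabled
}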
@@ -2067,34 +2080,13 @@ bool Heap::PerformGarbageCollection(

   switch (collector) {
     case MARK_COMPACTOR:
-      UpdateOldGenerationAllocationCounter();
-      // Perform mark-sweep with optional compaction.
       MarkCompact();
-      old_generation_size_configured_ = true;
-      // This should be updated before PostGarbageCollectionProcessing, which
-      // can cause another GC. Take into account the objects promoted during
-      // GC.
-      old_generation_allocation_counter_at_last_gc_ +=
-          static_cast<size_t>(promoted_objects_size_);
-      old_generation_size_at_last_gc_ = OldGenerationSizeOfObjects();
-      global_memory_at_last_gc_ = GlobalSizeOfObjects();
       break;
     case MINOR_MARK_COMPACTOR:
       MinorMarkCompact();
       break;
     case SCAVENGER:
-      if ((fast_promotion_mode_ &&
-           CanExpandOldGeneration(new_space()->Size() +
-                                  new_lo_space()->Size()))) {
-        tracer()->NotifyYoungGenerationHandling(
-            YoungGenerationHandling::kFastPromotionDuringScavenge);
-        EvacuateYoungGeneration();
-      } else {
-        tracer()->NotifyYoungGenerationHandling(
-            YoungGenerationHandling::kRegularScavenge);
-
-        Scavenge();
-      }
+      Scavenge();
       break;
   }

@@ -2116,6 +2108,13 @@ bool Heap::PerformGarbageCollection(

   isolate_->counters()->objs_since_last_young()->Set(0);

+  isolate_->eternal_handles()->PostGarbageCollectionProcessing();
+
+  // Update relocatables.
+  Relocatable::PostGarbageCollectionProcessing(isolate_);
+
+  size_t freed_global_handles;
+
   {
     TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);
     // First round weak callbacks are not supposed to allocate and trigger
@@ -2146,45 +2145,7 @@ bool Heap::PerformGarbageCollection(

   tracer()->StopInSafepoint();

-  if (FLAG_local_heaps) safepoint()->End();
-
-  {
-    TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);
-    gc_post_processing_depth_++;
-    {
-      AllowHeapAllocation allow_allocation;
-      AllowJavascriptExecution allow_js(isolate());
-      freed_global_handles +=
-          isolate_->global_handles()->PostGarbageCollectionProcessing(
-              collector, gc_callback_flags);
-    }
-    gc_post_processing_depth_--;
-  }
-
-  isolate_->eternal_handles()->PostGarbageCollectionProcessing();
-
-  // Update relocatables.
-  Relocatable::PostGarbageCollectionProcessing(isolate_);
-
-  {
-    GCCallbacksScope scope(this);
-    if (scope.CheckReenter()) {
-      AllowHeapAllocation allow_allocation;
-      AllowJavascriptExecution allow_js(isolate());
-      TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
-      VMState<EXTERNAL> state(isolate_);
-      HandleScope handle_scope(isolate_);
-      CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
-    }
-  }
-
-#ifdef VERIFY_HEAP
-  if (FLAG_verify_heap) {
-    VerifyStringTable(this->isolate());
-  }
-#endif
-
-  return freed_global_handles > 0;
+  return freed_global_handles;
 }

 void Heap::RecomputeLimits(GarbageCollector collector) {
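The signature change visible in the two return statements above, spelled out: PerformGarbageCollection now reports the number of global handles freed during weak-callback processing, and CollectGarbage (earlier hunk) turns that count back into the old boolean "likely to collect more" answer. A tiny illustration with hypothetical stand-in functions, not V8 code:

#include <cstddef>
#include <iostream>

// Stand-in collector: pretend weak-callback processing freed three handles.
static std::size_t PerformGarbageCollection() {
  std::size_t freed_global_handles = 3;
  return freed_global_handles;
}

// Stand-in caller: derive the previous bool result from the new count.
static bool CollectGarbage() {
  std::size_t freed_global_handles = PerformGarbageCollection();
  return freed_global_handles > 0;  // same observable result as before the CL
}

int main() { std::cout << std::boolalpha << CollectGarbage() << '\n'; }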
@@ -2296,10 +2257,11 @@ void Heap::MarkCompact() {

   LOG(isolate_, ResourceEvent("markcompact", "begin"));

-  uint64_t size_of_objects_before_gc = SizeOfObjects();
-
   CodeSpaceMemoryModificationScope code_modifcation(this);

+  UpdateOldGenerationAllocationCounter();
+  uint64_t size_of_objects_before_gc = SizeOfObjects();
+
   mark_compact_collector()->Prepare();

   ms_count_++;
@@ -2315,6 +2277,14 @@ void Heap::MarkCompact() {
   if (FLAG_allocation_site_pretenuring) {
     EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);
   }
+  old_generation_size_configured_ = true;
+  // This should be updated before PostGarbageCollectionProcessing, which
+  // can cause another GC. Take into account the objects promoted during
+  // GC.
+  old_generation_allocation_counter_at_last_gc_ +=
+      static_cast<size_t>(promoted_objects_size_);
+  old_generation_size_at_last_gc_ = OldGenerationSizeOfObjects();
+  global_memory_at_last_gc_ = GlobalSizeOfObjects();
 }

 void Heap::MinorMarkCompact() {
@@ -2429,6 +2399,16 @@ void Heap::EvacuateYoungGeneration() {
 }

 void Heap::Scavenge() {
+  if ((fast_promotion_mode_ &&
+       CanExpandOldGeneration(new_space()->Size() + new_lo_space()->Size()))) {
+    tracer()->NotifyYoungGenerationHandling(
+        YoungGenerationHandling::kFastPromotionDuringScavenge);
+    EvacuateYoungGeneration();
+    return;
+  }
+  tracer()->NotifyYoungGenerationHandling(
+      YoungGenerationHandling::kRegularScavenge);
+
   TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE);
   base::MutexGuard guard(relocation_mutex());
   ConcurrentMarking::PauseScope pause_scope(concurrent_marking());
@@ -4240,6 +4220,7 @@ void Heap::Verify() {
   lo_space_->Verify(isolate());
   code_lo_space_->Verify(isolate());
   new_lo_space_->Verify(isolate());
+  VerifyStringTable(isolate());
 }

 void Heap::VerifyReadOnlyHeap() {
src/heap/heap.h (declaration of Heap::PerformGarbageCollection):

@@ -1635,10 +1635,9 @@ class Heap {
   // over all objects. May cause a GC.
   void MakeHeapIterable();

-  // Performs garbage collection
-  // Returns whether there is a chance another major GC could
-  // collect more garbage.
-  bool PerformGarbageCollection(
+  // Performs garbage collection in a safepoint.
+  // Returns the number of freed global handles.
+  size_t PerformGarbageCollection(
       GarbageCollector collector,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);