[heap] Separate GC phases flag.

This CL introduces the --separate-gc-phases flag, which prevents young and old generation GCs from happening at the same time. When a young generation GC is triggered while incremental marking is in progress, marking is forced to finish and the resulting full collection takes care of the young generation.
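Embedders can opt in through the usual V8 flag machinery before the VM starts up. A minimal sketch (illustrative only, not part of this CL):

  #include "include/v8.h"

  int main() {
    // Enable the new behavior before creating any isolates. Passing
    // --separate-gc-phases on the d8 command line works the same way.
    v8::V8::SetFlagsFromString("--separate-gc-phases");
    // ... regular platform and isolate setup follows ...
    return 0;
  }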

Bug: v8:12503
Change-Id: Ia3e4814f46bff0fdc404b0ac618dfd48fe7cf20c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3351973
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79799}
Hannes Payer 2022-04-05 12:53:59 +02:00 committed by V8 LUCI CQ
parent c2852992fc
commit e220866e61
8 changed files with 29 additions and 15 deletions

View File

@@ -1186,6 +1186,8 @@ DEFINE_BOOL(huge_max_old_generation_size, true,
"Increase max size of the old space to 4 GB for x64 systems with"
"the physical memory bigger than 16 GB")
DEFINE_SIZE_T(initial_old_space_size, 0, "initial old space size (in Mbytes)")
+DEFINE_BOOL(separate_gc_phases, false,
+            "young and full garbage collection phases are not overlapping")
DEFINE_BOOL(global_gc_scheduling, true,
"enable GC scheduling based on global memory")
DEFINE_BOOL(gc_global, false, "always perform global GCs")

View File

@@ -472,6 +472,12 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
return GarbageCollector::MARK_COMPACTOR;
}
+  if (FLAG_separate_gc_phases && incremental_marking()->IsMarking()) {
+    // TODO(v8:12503): Remove previous condition when flag gets removed.
+    *reason = "Incremental marking forced finalization";
+    return GarbageCollector::MARK_COMPACTOR;
+  }
if (!CanPromoteYoungAndExpandOldGeneration(0)) {
isolate_->counters()
->gc_compactor_caused_by_oldspace_exhaustion()
@@ -2702,6 +2708,7 @@ void Heap::EvacuateYoungGeneration() {
void Heap::Scavenge() {
DCHECK_NOT_NULL(new_space());
+  DCHECK_IMPLIES(FLAG_separate_gc_phases, !incremental_marking()->IsMarking());
if (FLAG_trace_incremental_marking && !incremental_marking()->IsStopped()) {
isolate()->PrintWithTimestamp(

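The DCHECK in Heap::Scavenge() above encodes the new invariant: with --separate-gc-phases, a Scavenge can never run while incremental marking is active. A minimal cctest-style sketch of the intended behavior (a hypothetical test, assuming the usual helpers from test/cctest/heap/heap-utils.h; not part of this CL):

  TEST(SeparateGCPhasesUpgradesScavenge) {  // hypothetical test name
    if (!FLAG_incremental_marking) return;
    FLAG_separate_gc_phases = true;
    CcTest::InitializeVM();
    Heap* heap = CcTest::heap();
    // Start incremental (full) marking, but leave it unfinished.
    heap::SimulateIncrementalMarking(heap, false);
    CHECK(heap->incremental_marking()->IsMarking());
    // A young generation GC request must now finalize marking and run a
    // full Mark-Compact instead of a Scavenge.
    CcTest::CollectGarbage(i::NEW_SPACE);
    CHECK(!heap->incremental_marking()->IsMarking());
  }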
View File

@@ -210,7 +210,7 @@ TEST(ArrayBuffer_UnregisterDuringSweep) {
}
TEST(ArrayBuffer_NonLivePromotion) {
-  if (!FLAG_incremental_marking) return;
+  if (!FLAG_incremental_marking || FLAG_separate_gc_phases) return;
FLAG_concurrent_array_buffer_sweeping = false;
ManualGCScope manual_gc_scope;
// The test verifies that the marking state is preserved when promoting
@@ -248,7 +248,7 @@ TEST(ArrayBuffer_NonLivePromotion) {
}
TEST(ArrayBuffer_LivePromotion) {
-  if (!FLAG_incremental_marking) return;
+  if (!FLAG_incremental_marking || FLAG_separate_gc_phases) return;
FLAG_concurrent_array_buffer_sweeping = false;
ManualGCScope manual_gc_scope;
// The test verifies that the marking state is preserved when promoting

View File

@@ -1505,7 +1505,8 @@ TEST(TestInternalWeakLists) {
// Some flags turn Scavenge collections into Mark-sweep collections
// and hence are incompatible with this test case.
if (FLAG_gc_global || FLAG_stress_compaction ||
-      FLAG_stress_incremental_marking || FLAG_single_generation)
+      FLAG_stress_incremental_marking || FLAG_single_generation ||
+      FLAG_separate_gc_phases)
return;
FLAG_retain_maps_for_n_gc = 0;
@@ -5772,7 +5773,7 @@ class StaticOneByteResource : public v8::String::ExternalOneByteStringResource {
};
TEST(Regress631969) {
-  if (!FLAG_incremental_marking) return;
+  if (!FLAG_incremental_marking || FLAG_separate_gc_phases) return;
FLAG_manual_evacuation_candidates_selection = true;
FLAG_parallel_compaction = false;
ManualGCScope manual_gc_scope;
@@ -6199,7 +6200,7 @@ TEST(RememberedSet_InsertInLargePage) {
}
TEST(RememberedSet_InsertOnPromotingObjectToOld) {
-  if (FLAG_single_generation) return;
+  if (FLAG_single_generation || FLAG_stress_incremental_marking) return;
FLAG_stress_concurrent_allocation = false; // For SealCurrentObjects.
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
@@ -6225,11 +6226,12 @@ TEST(RememberedSet_InsertOnPromotingObjectToOld) {
CcTest::CollectGarbage(i::NEW_SPACE);
CHECK(heap->InOldSpace(*arr));
CHECK(heap->InYoungGeneration(arr->get(0)));
+  CHECK_EQ(1, GetRememberedSetSize<OLD_TO_NEW>(*arr));
}
TEST(RememberedSet_RemoveStaleOnScavenge) {
-  if (FLAG_single_generation) return;
+  if (FLAG_single_generation || FLAG_stress_incremental_marking) return;
FLAG_stress_concurrent_allocation = false; // For SealCurrentObjects.
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
@@ -6504,7 +6506,7 @@ Isolate* oom_isolate = nullptr;
void OOMCallback(const char* location, bool is_heap_oom) {
Heap* heap = oom_isolate->heap();
-  size_t kSlack = heap->new_space() ? heap->new_space()->Capacity() : 0;
+  size_t kSlack = heap->new_space() ? heap->MaxSemiSpaceSize() : 0;
CHECK_LE(heap->OldGenerationCapacity(), kHeapLimit + kSlack);
CHECK_LE(heap->memory_allocator()->Size(), heap->MaxReserved() + kSlack);
base::OS::ExitProcess(0);
@@ -6709,9 +6711,9 @@ UNINITIALIZED_TEST(OutOfMemorySmallObjects) {
}
}
CHECK_LE(state.old_generation_capacity_at_oom,
-           kOldGenerationLimit + state.new_space_capacity_at_oom);
-  CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
-                                    state.new_space_capacity_at_oom);
+           kOldGenerationLimit + heap->MaxSemiSpaceSize());
+  CHECK_LE(kOldGenerationLimit,
+           state.old_generation_capacity_at_oom + heap->MaxSemiSpaceSize());
CHECK_LE(
state.memory_allocator_size_at_oom,
MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
@@ -6931,7 +6933,7 @@ TEST(CodeObjectRegistry) {
TEST(Regress9701) {
ManualGCScope manual_gc_scope;
-  if (!FLAG_incremental_marking) return;
+  if (!FLAG_incremental_marking || FLAG_separate_gc_phases) return;
CcTest::InitializeVM();
Heap* heap = CcTest::heap();
// Start with an empty new space.

View File

@@ -176,7 +176,8 @@ TEST(WeakReferencesOldToCleared) {
}
TEST(ObjectMovesBeforeClearingWeakField) {
-  if (!FLAG_incremental_marking || FLAG_single_generation) {
+  if (!FLAG_incremental_marking || FLAG_single_generation ||
+      FLAG_separate_gc_phases) {
return;
}
ManualGCScope manual_gc_scope;
@@ -277,7 +278,7 @@ TEST(ObjectWithWeakReferencePromoted) {
}
TEST(ObjectWithClearedWeakReferencePromoted) {
-  if (FLAG_single_generation) return;
+  if (FLAG_single_generation || FLAG_stress_incremental_marking) return;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();

View File

@@ -7897,6 +7897,7 @@ static void ResetUseValueAndSetFlag(
}
void v8::internal::heap::HeapTester::ResetWeakHandle(bool global_gc) {
+  if (FLAG_stress_incremental_marking) return;
using v8::Context;
using v8::Local;
using v8::Object;

View File

@@ -925,7 +925,8 @@ TEST(JSWeakRefScavengedInWorklist) {
}
TEST(JSWeakRefTenuredInWorklist) {
-  if (!FLAG_incremental_marking || FLAG_single_generation) {
+  if (!FLAG_incremental_marking || FLAG_single_generation ||
+      FLAG_separate_gc_phases) {
return;
}

View File

@@ -191,7 +191,7 @@ TEST(WeakMapPromotionMarkCompact) {
}
TEST(WeakMapScavenge) {
-  if (i::FLAG_single_generation) return;
+  if (i::FLAG_single_generation || i::FLAG_stress_incremental_marking) return;
LocalContext context;
Isolate* isolate = GetIsolateFrom(&context);
Factory* factory = isolate->factory();