heap: Rename compaction flags

Rename the compaction flags to align them with other flags, which are named
in the enabled sense, and drop the "never" prefix.

Drive-by: Refactor compaction entry point.

Bug: v8:12251
Change-Id: If2b189152f3cd22038b87fe3cc2ba0db4953ae23
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3270534
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77813}
Michael Lippautz <mlippautz@chromium.org>, 2021-11-10 09:07:47 +01:00 (committed by V8 LUCI CQ)
parent 90a9d6cb13
commit c88140fed6
14 changed files with 96 additions and 101 deletions

@ -1335,15 +1335,23 @@ DEFINE_INT(heap_growing_percent, 0,
"specifies heap growing factor as (1 + heap_growing_percent/100)")
DEFINE_INT(v8_os_page_size, 0, "override OS page size (in KBytes)")
DEFINE_BOOL(allocation_buffer_parking, true, "allocation buffer parking")
DEFINE_BOOL(always_compact, false, "Perform compaction on every full GC")
DEFINE_BOOL(never_compact, false,
"Never perform compaction on full GC - testing only")
DEFINE_BOOL(never_compact_with_stack, false,
"Never perform compaction when finalizing a full GC with stack")
DEFINE_BOOL(compact, true,
"Perform compaction on full GCs based on V8's default heuristics")
DEFINE_BOOL(compact_code_space, true,
"Perform code space compaction on full collections.")
DEFINE_BOOL(compact_on_every_full_gc, false,
"Perform compaction on every full GC")
DEFINE_BOOL(compact_with_stack, true,
"Perform compaction when finalizing a full GC with stack")
DEFINE_BOOL(
never_compact_code_space_with_stack, false,
"Never perform code space compaction when finalizing a full GC with stack")
DEFINE_BOOL(compact_code_space, true, "Compact code space on full collections")
DEFINE_BOOL(
compact_code_space_with_stack, true,
"Perform code space compaction when finalizing a full GC with stack")
DEFINE_BOOL(stress_compaction, false,
"Stress GC compaction to flush out bugs (implies "
"--force_marking_deque_overflows)")
DEFINE_BOOL(stress_compaction_random, false,
"Stress GC compaction by selecting random percent of pages as "
"evacuation candidates. Overrides stress_compaction.")
DEFINE_BOOL(flush_baseline_code, false,
"flush of baseline code when it has not been executed recently")
DEFINE_BOOL(flush_bytecode, true,
@ -1358,12 +1366,6 @@ DEFINE_BOOL(stress_per_context_marking_worklist, false,
DEFINE_BOOL(force_marking_deque_overflows, false,
"force overflows of marking deque by reducing it's size "
"to 64 words")
DEFINE_BOOL(stress_compaction, false,
"stress the GC compactor to flush out bugs (implies "
"--force_marking_deque_overflows)")
DEFINE_BOOL(stress_compaction_random, false,
"Stress GC compaction by selecting random percent of pages as "
"evacuation candidates. It overrides stress_compaction.")
DEFINE_BOOL(stress_incremental_marking, false,
"force incremental marking for small heaps and run it more often")
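
All of the definitions above are DEFINE_BOOL flags, so each renamed flag still surfaces as a --flag / --no-flag pair on the command line. Below is a minimal standalone sketch of the resulting old-to-new spelling map, assuming V8's usual --no-* negation convention for boolean flags; the pairs are read off the hunk above, while the program itself is purely illustrative and not part of V8.

#include <cstdio>

// Old flag spelling -> new flag spelling, as taken from the flag hunk above.
struct FlagRename {
  const char* old_name;
  const char* new_name;
  bool sense_inverted;  // true if --old_name corresponds to --no-new_name
};

int main() {
  const FlagRename renames[] = {
      {"never_compact", "compact", true},
      {"always_compact", "compact_on_every_full_gc", false},
      {"never_compact_with_stack", "compact_with_stack", true},
      {"never_compact_code_space_with_stack",
       "compact_code_space_with_stack", true},
  };
  for (const FlagRename& r : renames) {
    std::printf("--%s  ->  --%s%s\n", r.old_name,
                r.sense_inverted ? "no-" : "", r.new_name);
  }
  return 0;
}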

@ -231,7 +231,8 @@ void IncrementalMarking::StartMarking() {
heap_->InvokeIncrementalMarkingPrologueCallbacks();
is_compacting_ = !FLAG_never_compact && collector_->StartCompaction();
is_compacting_ = collector_->StartCompaction(
MarkCompactCollector::StartCompactionMode::kIncremental);
collector_->StartMarking();
SetState(MARKING);

@ -6,6 +6,7 @@
#include <unordered_map>
#include "src/base/logging.h"
#include "src/base/optional.h"
#include "src/base/utils/random-number-generator.h"
#include "src/codegen/compilation-cache.h"
@ -468,16 +469,10 @@ int MarkCompactCollectorBase::NumberOfParallelCompactionTasks() {
MarkCompactCollector::MarkCompactCollector(Heap* heap)
: MarkCompactCollectorBase(heap),
page_parallel_job_semaphore_(0),
#ifdef DEBUG
state_(IDLE),
#endif
is_shared_heap_(heap->IsShared()),
was_marked_incrementally_(false),
evacuation_(false),
compacting_(false),
black_allocation_(false),
have_code_to_deoptimize_(false),
sweeper_(new Sweeper(heap, non_atomic_marking_state())) {
}
@ -524,30 +519,32 @@ static void TraceFragmentation(PagedSpace* space) {
static_cast<int>(free), static_cast<double>(free) * 100 / reserved);
}
bool MarkCompactCollector::StartCompaction() {
if (!compacting_) {
DCHECK(evacuation_candidates_.empty());
bool MarkCompactCollector::StartCompaction(StartCompactionMode mode) {
DCHECK(!compacting_);
DCHECK(evacuation_candidates_.empty());
if (FLAG_gc_experiment_less_compaction && !heap_->ShouldReduceMemory())
return false;
CollectEvacuationCandidates(heap()->old_space());
if (FLAG_compact_code_space &&
(heap()->IsGCWithoutStack() ||
!FLAG_never_compact_code_space_with_stack)) {
CollectEvacuationCandidates(heap()->code_space());
} else if (FLAG_trace_fragmentation) {
TraceFragmentation(heap()->code_space());
}
if (FLAG_trace_fragmentation) {
TraceFragmentation(heap()->map_space());
}
compacting_ = !evacuation_candidates_.empty();
// Bailouts for completely disabled compaction.
if (!FLAG_compact ||
(mode == StartCompactionMode::kAtomic && !heap()->IsGCWithoutStack() &&
!FLAG_compact_with_stack) ||
(FLAG_gc_experiment_less_compaction && !heap_->ShouldReduceMemory())) {
return false;
}
CollectEvacuationCandidates(heap()->old_space());
if (FLAG_compact_code_space &&
(heap()->IsGCWithoutStack() || FLAG_compact_code_space_with_stack)) {
CollectEvacuationCandidates(heap()->code_space());
} else if (FLAG_trace_fragmentation) {
TraceFragmentation(heap()->code_space());
}
if (FLAG_trace_fragmentation) {
TraceFragmentation(heap()->map_space());
}
compacting_ = !evacuation_candidates_.empty();
return compacting_;
}
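
To make the new bailout structure easier to follow, here is a minimal compilable sketch of just the early-return block, with plain bools standing in for the V8 flags and heap queries involved; only the branch structure is taken from the hunk above, everything else is illustrative.

#include <cassert>

enum class StartCompactionMode { kIncremental, kAtomic };

struct Flags {
  bool compact = true;             // stands in for FLAG_compact
  bool compact_with_stack = true;  // stands in for FLAG_compact_with_stack
  bool less_compaction = false;    // stands in for FLAG_gc_experiment_less_compaction
};

// Mirrors only the early-return block of the new StartCompaction(); collecting
// evacuation candidates is out of scope here.
bool MayStartCompaction(StartCompactionMode mode, const Flags& f,
                        bool gc_without_stack, bool should_reduce_memory) {
  if (!f.compact) return false;
  // Only the atomic pause bails out on the stack condition: an incremental
  // cycle started here may still end up being finalized without a stack.
  if (mode == StartCompactionMode::kAtomic && !gc_without_stack &&
      !f.compact_with_stack) {
    return false;
  }
  if (f.less_compaction && !should_reduce_memory) return false;
  return true;
}

int main() {
  Flags f;
  // Starting compaction incrementally is not blocked by the presence of a
  // stack...
  assert(MayStartCompaction(StartCompactionMode::kIncremental, f,
                            /*gc_without_stack=*/false,
                            /*should_reduce_memory=*/false));
  // ...whereas the atomic pause with a stack is, once compact_with_stack is
  // disabled.
  f.compact_with_stack = false;
  assert(!MayStartCompaction(StartCompactionMode::kAtomic, f,
                             /*gc_without_stack=*/false,
                             /*should_reduce_memory=*/false));
  return 0;
}
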
@ -735,7 +732,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
const bool in_standard_path =
!(FLAG_manual_evacuation_candidates_selection ||
FLAG_stress_compaction_random || FLAG_stress_compaction ||
FLAG_always_compact);
FLAG_compact_on_every_full_gc);
// Those variables will only be initialized if |in_standard_path|, and are not
// used otherwise.
size_t max_evacuated_bytes;
@ -847,7 +844,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
for (size_t i = 0; i < pages.size(); i++) {
size_t live_bytes = pages[i].first;
DCHECK_GE(area_size, live_bytes);
if (FLAG_always_compact ||
if (FLAG_compact_on_every_full_gc ||
((total_live_bytes + live_bytes) <= max_evacuated_bytes)) {
candidate_count++;
total_live_bytes += live_bytes;
@ -870,7 +867,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
DCHECK_LE(estimated_new_pages, candidate_count);
int estimated_released_pages = candidate_count - estimated_new_pages;
// Avoid (compact -> expand) cycles.
if ((estimated_released_pages == 0) && !FLAG_always_compact) {
if ((estimated_released_pages == 0) && !FLAG_compact_on_every_full_gc) {
candidate_count = 0;
}
for (int i = 0; i < candidate_count; i++) {
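
Two of the FLAG_compact_on_every_full_gc uses above change how evacuation candidates are selected. The following rough standalone sketch of that selection heuristic assumes each page's live bytes fit in the page area and that pages arrive sorted, as in the real collector; only the two flag checks and the release-estimate bailout mirror the diff, the rest is condensed.

#include <cassert>
#include <cstddef>
#include <vector>

// Simplified candidate selection; see the assumptions in the lead-in above.
size_t SelectCandidateCount(const std::vector<size_t>& live_bytes_per_page,
                            size_t area_size, size_t max_evacuated_bytes,
                            bool compact_on_every_full_gc) {
  size_t candidate_count = 0;
  size_t total_live_bytes = 0;
  for (size_t live_bytes : live_bytes_per_page) {
    // With compact_on_every_full_gc, pages are selected even past the budget.
    if (compact_on_every_full_gc ||
        total_live_bytes + live_bytes <= max_evacuated_bytes) {
      candidate_count++;
      total_live_bytes += live_bytes;
    }
  }
  // Avoid (compact -> expand) cycles: drop the candidates if evacuating them
  // would not release a single page, unless compaction is forced anyway.
  const size_t estimated_new_pages =
      (total_live_bytes + area_size - 1) / area_size;
  const size_t estimated_released_pages = candidate_count - estimated_new_pages;
  if (estimated_released_pages == 0 && !compact_on_every_full_gc) {
    candidate_count = 0;
  }
  return candidate_count;
}

int main() {
  // Three pages with 10 KiB of live bytes each, 16 KiB page area, 16 KiB
  // evacuation budget: the default heuristic selects one page and then drops
  // it (no page would be released); forcing compaction keeps all three.
  const std::vector<size_t> pages = {10 * 1024, 10 * 1024, 10 * 1024};
  assert(SelectCandidateCount(pages, 16 * 1024, 16 * 1024, false) == 0);
  assert(SelectCandidateCount(pages, 16 * 1024, 16 * 1024, true) == 3);
  return 0;
}
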
@ -910,7 +907,6 @@ void MarkCompactCollector::Prepare() {
state_ = PREPARE_GC;
#endif
DCHECK(!FLAG_never_compact || !FLAG_always_compact);
DCHECK(!sweeping_in_progress());
if (!was_marked_incrementally_) {
@ -919,11 +915,7 @@ void MarkCompactCollector::Prepare() {
heap_->local_embedder_heap_tracer()->TracePrologue(
heap_->flags_for_embedder_tracer());
}
const bool should_compact =
heap()->IsGCWithoutStack() || !FLAG_never_compact_with_stack;
if (!FLAG_never_compact && should_compact) {
StartCompaction();
}
StartCompaction(StartCompactionMode::kAtomic);
StartMarking();
}
@ -3578,11 +3570,9 @@ void MarkCompactCollector::EvacuatePagesInParallel() {
}
if (!heap()->IsGCWithoutStack()) {
if (FLAG_never_compact_with_stack ||
FLAG_never_compact_code_space_with_stack) {
if (!FLAG_compact_with_stack || !FLAG_compact_code_space_with_stack) {
for (Page* page : old_space_evacuation_pages_) {
if (FLAG_never_compact_with_stack ||
page->owner_identity() == CODE_SPACE) {
if (!FLAG_compact_with_stack || page->owner_identity() == CODE_SPACE) {
ReportAbortedEvacuationCandidateDueToFlags(page->area_start(), page);
// Set this flag early on in this case to allow filtering such pages
// below.
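
The rewritten condition above decides, when a full GC is finalized with a stack, whether a given evacuation candidate has to be aborted: everything is aborted if compact_with_stack is off, otherwise only code-space pages when compact_code_space_with_stack is off. A small compilable sketch of that decision follows, with a plain enum standing in for the page's owning space (illustrative stand-ins, not V8's types).

#include <cassert>

enum class Space { kOldSpace, kCodeSpace };

// Mirrors the branch structure in the EvacuatePagesInParallel() hunk above;
// the bool parameters stand in for the flags and the IsGCWithoutStack() query.
bool ShouldAbortEvacuation(Space owner, bool gc_without_stack,
                           bool compact_with_stack,
                           bool compact_code_space_with_stack) {
  if (gc_without_stack) return false;
  if (compact_with_stack && compact_code_space_with_stack) return false;
  return !compact_with_stack || owner == Space::kCodeSpace;
}

int main() {
  // With a stack and code-space compaction disabled, only code-space
  // candidates are aborted; old-space candidates are still evacuated.
  assert(ShouldAbortEvacuation(Space::kCodeSpace, false, true, false));
  assert(!ShouldAbortEvacuation(Space::kOldSpace, false, true, false));
  // With compact_with_stack disabled, every candidate is aborted.
  assert(ShouldAbortEvacuation(Space::kOldSpace, false, false, true));
  return 0;
}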

@ -451,6 +451,11 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
kTrackNewlyDiscoveredObjects
};
enum class StartCompactionMode {
kIncremental,
kAtomic,
};
MarkingState* marking_state() { return &marking_state_; }
NonAtomicMarkingState* non_atomic_marking_state() {
@ -474,7 +479,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// it to complete as requested by |stop_request|).
void FinishConcurrentMarking();
bool StartCompaction();
// Returns whether compaction is running.
bool StartCompaction(StartCompactionMode mode);
void AbortCompaction();
@ -728,7 +734,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
void RightTrimDescriptorArray(DescriptorArray array, int descriptors_to_trim);
base::Mutex mutex_;
base::Semaphore page_parallel_job_semaphore_;
base::Semaphore page_parallel_job_semaphore_{0};
#ifdef DEBUG
enum CollectorState{IDLE,
@ -745,17 +751,13 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
const bool is_shared_heap_;
bool was_marked_incrementally_;
bool evacuation_;
bool was_marked_incrementally_ = false;
bool evacuation_ = false;
// True if we are collecting slots to perform evacuation from evacuation
// candidates.
bool compacting_;
bool black_allocation_;
bool have_code_to_deoptimize_;
bool compacting_ = false;
bool black_allocation_ = false;
bool have_code_to_deoptimize_ = false;
MarkingWorklists marking_worklists_;
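
The header change above is the mechanical half of the constructor cleanup seen earlier: the flag-like members now use in-class default member initializers instead of a long constructor initializer list. A generic illustration of the idiom, with placeholder names rather than V8's:

// Before: every default spelled out in the constructor's initializer list.
class CollectorBefore {
 public:
  CollectorBefore() : evacuation_(false), compacting_(false) {}

 private:
  bool evacuation_;
  bool compacting_;
};

// After: defaults live next to the declarations, so a newly added member
// cannot silently stay uninitialized when a constructor forgets it.
class CollectorAfter {
 private:
  bool evacuation_ = false;
  bool compacting_ = false;
};

int main() {
  CollectorBefore before;
  CollectorAfter after;
  (void)before;
  (void)after;
  return 0;
}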

@ -133,7 +133,7 @@ TEST(ArrayBuffer_ScavengeAndMC) {
}
TEST(ArrayBuffer_Compaction) {
if (FLAG_never_compact) return;
if (!FLAG_compact) return;
ManualGCScope manual_gc_scope;
FLAG_manual_evacuation_candidates_selection = true;
FLAG_concurrent_array_buffer_sweeping = false;
@ -437,7 +437,7 @@ TEST(ArrayBuffer_ExternalBackingStoreSizeDecreases) {
}
TEST(ArrayBuffer_ExternalBackingStoreSizeIncreasesMarkCompact) {
if (FLAG_never_compact) return;
if (!FLAG_compact) return;
ManualGCScope manual_gc_scope;
FLAG_manual_evacuation_candidates_selection = true;
FLAG_concurrent_array_buffer_sweeping = false;

@ -43,7 +43,7 @@ void CheckAllObjectsOnPage(const std::vector<Handle<FixedArray>>& handles,
} // namespace
HEAP_TEST(CompactionFullAbortedPage) {
if (FLAG_never_compact || FLAG_crash_on_aborted_evacuation) return;
if (!FLAG_compact || FLAG_crash_on_aborted_evacuation) return;
// Test the scenario where we reach OOM during compaction and the whole page
// is aborted.
@ -106,7 +106,7 @@ int GetObjectSize(int objects_per_page) {
} // namespace
HEAP_TEST(CompactionPartiallyAbortedPage) {
if (FLAG_never_compact || FLAG_crash_on_aborted_evacuation) return;
if (!FLAG_compact || FLAG_crash_on_aborted_evacuation) return;
// Test the scenario where we reach OOM during compaction and parts of the
// page have already been migrated to a new one.
@ -186,7 +186,7 @@ HEAP_TEST(CompactionPartiallyAbortedPage) {
}
HEAP_TEST(CompactionPartiallyAbortedPageWithInvalidatedSlots) {
if (FLAG_never_compact || FLAG_crash_on_aborted_evacuation) return;
if (!FLAG_compact || FLAG_crash_on_aborted_evacuation) return;
// Test evacuating a page partially when it contains recorded
// slots and invalidated objects.
@ -269,7 +269,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithInvalidatedSlots) {
}
HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
if (FLAG_never_compact || FLAG_crash_on_aborted_evacuation) return;
if (!FLAG_compact || FLAG_crash_on_aborted_evacuation) return;
// Test the scenario where we reach OOM during compaction and parts of the
// page have already been migrated to a new one. Objects on the aborted page
// are linked together. This test makes sure that intra-aborted page pointers
@ -362,7 +362,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
}
HEAP_TEST(CompactionPartiallyAbortedPageWithRememberedSetEntries) {
if (FLAG_never_compact) return;
if (!FLAG_compact) return;
// Test the scenario where we reach OOM during compaction and parts of the
// page have already been migrated to a new one. Objects on the aborted page
// are linked together and the very first object on the aborted page points

@ -94,7 +94,7 @@ TEST(ExternalString_ExternalBackingStoreSizeDecreases) {
}
TEST(ExternalString_ExternalBackingStoreSizeIncreasesMarkCompact) {
if (FLAG_never_compact) return;
if (!FLAG_compact) return;
ManualGCScope manual_gc_scope;
FLAG_manual_evacuation_candidates_selection = true;
CcTest::InitializeVM();

@ -625,7 +625,7 @@ TEST(DeleteWeakGlobalHandle) {
}
TEST(BytecodeArray) {
if (FLAG_never_compact) return;
if (!FLAG_compact) return;
static const uint8_t kRawBytes[] = {0xC3, 0x7E, 0xA5, 0x5A};
static const int kRawBytesSize = sizeof(kRawBytes);
static const int32_t kFrameSize = 32;
@ -1231,7 +1231,7 @@ UNINITIALIZED_TEST(Regress10843) {
FLAG_max_semi_space_size = 2;
FLAG_min_semi_space_size = 2;
FLAG_max_old_space_size = 8;
FLAG_always_compact = true;
FLAG_compact_on_every_full_gc = true;
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate* isolate = v8::Isolate::New(create_params);
@ -3113,7 +3113,7 @@ TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
TEST(ReleaseOverReservedPages) {
if (FLAG_never_compact) return;
if (!FLAG_compact) return;
FLAG_trace_gc = true;
// The optimizer can allocate stuff, messing up the test.
#ifndef V8_LITE_MODE
@ -3744,7 +3744,7 @@ TEST(Regress169928) {
TEST(LargeObjectSlotRecording) {
if (!FLAG_incremental_marking) return;
if (FLAG_never_compact) return;
if (!FLAG_compact) return;
ManualGCScope manual_gc_scope;
FLAG_manual_evacuation_candidates_selection = true;
CcTest::InitializeVM();
@ -6947,7 +6947,7 @@ TEST(AllocateExternalBackingStore) {
TEST(CodeObjectRegistry) {
// We turn off compaction to ensure that code is not moving.
FLAG_never_compact = true;
FLAG_compact = false;
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
@ -7260,7 +7260,7 @@ TEST(IsPendingAllocationLOSpace) {
}
TEST(Regress10900) {
FLAG_always_compact = true;
FLAG_compact_on_every_full_gc = true;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
@ -7308,7 +7308,7 @@ void GenerateGarbage() {
} // anonymous namespace
TEST(Regress11181) {
FLAG_always_compact = true;
FLAG_compact_on_every_full_gc = true;
CcTest::InitializeVM();
TracingFlags::runtime_stats.store(
v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE,

@ -183,9 +183,9 @@ HEAP_TEST(MarkCompactCollector) {
}
HEAP_TEST(DoNotEvacuatePinnedPages) {
if (FLAG_never_compact || !FLAG_single_generation) return;
if (!FLAG_compact || !FLAG_single_generation) return;
FLAG_always_compact = true;
FLAG_compact_on_every_full_gc = true;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
@ -217,8 +217,8 @@ HEAP_TEST(DoNotEvacuatePinnedPages) {
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted();
// always_compact ensures that this page is an evacuation candidate, so with
// the pin flag cleared compaction should now move it.
// `compact_on_every_full_gc` ensures that this page is an evacuation
// candidate, so with the pin flag cleared compaction should now move it.
for (Handle<FixedArray> object : handles) {
CHECK_NE(page, Page::FromHeapObject(*object));
}

@ -13888,7 +13888,7 @@ UNINITIALIZED_TEST(SetJitCodeEventHandler) {
#if ENABLE_SPARKPLUG
i::FLAG_baseline_batch_compilation = false;
#endif
if (i::FLAG_never_compact) return;
if (!i::FLAG_compact) return;
const char* script =
"function bar() {"
" var sum = 0;"
@ -21748,7 +21748,7 @@ class RegExpInterruptTest {
TEST(RegExpInterruptAndCollectAllGarbage) {
// Move all movable objects on GC.
i::FLAG_always_compact = true;
i::FLAG_compact_on_every_full_gc = true;
// We want to be stuck regexp execution, so no fallback to linear-time
// engine.
// TODO(mbid,v8:10765): Find a way to test interrupt support of the

@ -442,7 +442,7 @@ void TestSmiIndex(MacroAssembler* masm, Label* exit, int id, int x) {
TEST(EmbeddedObj) {
#ifdef V8_COMPRESS_POINTERS
FLAG_always_compact = true;
FLAG_compact_on_every_full_gc = true;
v8::V8::Initialize();
Isolate* isolate = CcTest::i_isolate();

@ -2037,7 +2037,7 @@ TEST(CodeSerializerLargeCodeObject) {
TEST(CodeSerializerLargeCodeObjectWithIncrementalMarking) {
if (!FLAG_incremental_marking) return;
if (FLAG_never_compact) return;
if (!FLAG_compact) return;
ManualGCScope manual_gc_scope;
FLAG_always_opt = false;
const char* filter_flag = "--turbo-filter=NOTHING";

@ -226,9 +226,9 @@ TEST(WeakMapScavenge) {
// Test that weak map values on an evacuation candidate which are not reachable
// by other paths are correctly recorded in the slots buffer.
TEST(Regress2060a) {
if (i::FLAG_never_compact) return;
if (!i::FLAG_compact) return;
if (i::FLAG_enable_third_party_heap) return;
FLAG_always_compact = true;
FLAG_compact_on_every_full_gc = true;
FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace.
LocalContext context;
Isolate* isolate = GetIsolateFrom(&context);
@ -259,7 +259,7 @@ TEST(Regress2060a) {
}
// Force compacting garbage collection.
CHECK(FLAG_always_compact);
CHECK(FLAG_compact_on_every_full_gc);
CcTest::CollectAllGarbage();
}
@ -267,8 +267,8 @@ TEST(Regress2060a) {
// Test that weak map keys on an evacuation candidate which are reachable by
// other strong paths are correctly recorded in the slots buffer.
TEST(Regress2060b) {
if (i::FLAG_never_compact) return;
FLAG_always_compact = true;
if (!i::FLAG_compact) return;
FLAG_compact_on_every_full_gc = true;
#ifdef VERIFY_HEAP
FLAG_verify_heap = true;
#endif
@ -303,7 +303,7 @@ TEST(Regress2060b) {
// Force compacting garbage collection. The subsequent collections are used
// to verify that key references were actually updated.
CHECK(FLAG_always_compact);
CHECK(FLAG_compact_on_every_full_gc);
CcTest::CollectAllGarbage();
CcTest::CollectAllGarbage();
CcTest::CollectAllGarbage();

@ -163,9 +163,9 @@ TEST(WeakSet_Shrinking) {
// Test that weak set values on an evacuation candidate which are not reachable
// by other paths are correctly recorded in the slots buffer.
TEST(WeakSet_Regress2060a) {
if (i::FLAG_never_compact) return;
if (!i::FLAG_compact) return;
if (i::FLAG_enable_third_party_heap) return;
FLAG_always_compact = true;
FLAG_compact_on_every_full_gc = true;
FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace.
LocalContext context;
Isolate* isolate = GetIsolateFrom(&context);
@ -196,7 +196,7 @@ TEST(WeakSet_Regress2060a) {
}
// Force compacting garbage collection.
CHECK(FLAG_always_compact);
CHECK(FLAG_compact_on_every_full_gc);
CcTest::CollectAllGarbage();
}
@ -204,9 +204,9 @@ TEST(WeakSet_Regress2060a) {
// Test that weak set keys on an evacuation candidate which are reachable by
// other strong paths are correctly recorded in the slots buffer.
TEST(WeakSet_Regress2060b) {
if (i::FLAG_never_compact) return;
if (!i::FLAG_compact) return;
if (i::FLAG_enable_third_party_heap) return;
FLAG_always_compact = true;
FLAG_compact_on_every_full_gc = true;
#ifdef VERIFY_HEAP
FLAG_verify_heap = true;
#endif
@ -241,7 +241,7 @@ TEST(WeakSet_Regress2060b) {
// Force compacting garbage collection. The subsequent collections are used
// to verify that key references were actually updated.
CHECK(FLAG_always_compact);
CHECK(FLAG_compact_on_every_full_gc);
CcTest::CollectAllGarbage();
CcTest::CollectAllGarbage();
CcTest::CollectAllGarbage();