Disable --stress-concurrent-allocation for tests that change free lists
Tests that use SimulateFullSpace and SealCurrentObjects do not work if
there is a background thread allocating concurrently.

Bug: v8:10315
Change-Id: I73a4c9db8eb32fdf3e07fcb8f5dda309de797709
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2390765
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69693}
parent 92993c1baf
commit 8832a9e1f7
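The change has two parts: the heap-utils helpers gain a `CHECK` that the stress flag is off, and every affected test clears the flag up front. As a reading aid, here is a minimal sketch of the pattern the tests follow. The test name is hypothetical; everything else is the existing cctest machinery, and the flag is cleared before `CcTest::InitializeVM()` on the assumption that the stress allocation thread is started during VM setup:

```cpp
// Hypothetical test illustrating the pattern applied throughout this commit.
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"

namespace v8 {
namespace internal {

TEST(HypotheticalFreeListTest) {
  // Clear the flag first so no background thread allocates concurrently.
  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  // With the stress thread disabled, old space can be filled
  // deterministically and its free lists stay empty afterwards.
  heap::SimulateFullSpace(heap->old_space());
}

}  // namespace internal
}  // namespace v8
```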
```diff
@@ -23,6 +23,10 @@ void InvokeScavenge() { CcTest::CollectGarbage(i::NEW_SPACE); }
 void InvokeMarkSweep() { CcTest::CollectAllGarbage(); }
 
 void SealCurrentObjects(Heap* heap) {
+  // If you see this check failing, disable the flag at the start of your test:
+  //   FLAG_stress_concurrent_allocation = false;
+  // Background thread allocating concurrently interferes with this function.
+  CHECK(!FLAG_stress_concurrent_allocation);
   CcTest::CollectAllGarbage();
   CcTest::CollectAllGarbage();
   heap->mark_compact_collector()->EnsureSweepingCompleted();
```
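For orientation: `SealCurrentObjects` is the helper tests call when later allocations must land on a fresh page; beyond the two full GCs and completed sweeping shown in the truncated hunk, the helper also marks the swept pages as unavailable for further allocation. A minimal sketch of a caller, with a hypothetical test name; the allocation at the end stands in for whatever object the real tests inspect:

```cpp
// Hypothetical caller of SealCurrentObjects; not part of this commit.
TEST(HypotheticalSealTest) {
  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  // After sealing, every live object sits on a fully swept page, so this
  // old-space allocation is expected to open a fresh page.
  heap::SealCurrentObjects(heap);
  Handle<FixedArray> array = CcTest::i_isolate()->factory()->NewFixedArray(
      16, AllocationType::kOld);
  USE(array);
}
```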
```diff
@@ -154,6 +158,10 @@ bool FillCurrentPageButNBytes(v8::internal::NewSpace* space, int extra_bytes,
 
 void SimulateFullSpace(v8::internal::NewSpace* space,
                        std::vector<Handle<FixedArray>>* out_handles) {
+  // If you see this check failing, disable the flag at the start of your test:
+  //   FLAG_stress_concurrent_allocation = false;
+  // Background thread allocating concurrently interferes with this function.
+  CHECK(!FLAG_stress_concurrent_allocation);
   while (heap::FillCurrentPage(space, out_handles) || space->AddFreshPage()) {
   }
 }
```
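The loop above reads as: `FillCurrentPage` consumes whatever still fits on the current new-space page (optionally retaining handles so the filler arrays stay live), and `AddFreshPage` advances to the next semispace page; the loop only terminates once both fail, i.e. new space is genuinely full. A sketch of a caller using the `out_handles` parameter, with a hypothetical test name:

```cpp
// Hypothetical caller of the new-space variant; not part of this commit.
TEST(HypotheticalNewSpaceFullTest) {
  if (FLAG_single_generation) return;
  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  std::vector<Handle<FixedArray>> handles;
  // The handles keep the filler arrays alive, so the subsequent scavenge
  // really has a full semispace to evacuate.
  heap::SimulateFullSpace(CcTest::heap()->new_space(), &handles);
  CcTest::CollectGarbage(NEW_SPACE);
}
```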
```diff
@@ -190,7 +198,10 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
 }
 
 void SimulateFullSpace(v8::internal::PagedSpace* space) {
-  SafepointScope scope(space->heap());
+  // If you see this check failing, disable the flag at the start of your test:
+  //   FLAG_stress_concurrent_allocation = false;
+  // Background thread allocating concurrently interferes with this function.
+  CHECK(!FLAG_stress_concurrent_allocation);
   CodeSpaceMemoryModificationScope modification_scope(space->heap());
   i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
   if (collector->sweeping_in_progress()) {
```
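This hunk shows why a guard replaces the earlier `SafepointScope`: stopping background threads only while the helper runs is not enough, because the helper establishes a heap-wide invariant ("the free lists are empty", "all pages are full") that a concurrent allocator can silently break again at any point after the helper returns. A toy, V8-free illustration of that race; none of the names below are V8 API:

```cpp
// Toy model of the race; standalone C++, nothing here is V8 code.
#include <atomic>
#include <mutex>
#include <thread>
#include <vector>

struct ToyFreeList {
  std::mutex mu;
  std::vector<int> chunks;
  void Drain() {  // What SimulateFullSpace effectively does.
    std::lock_guard<std::mutex> lock(mu);
    chunks.clear();
  }
  void Refill(int chunk) {  // What the stress allocator may do at any time.
    std::lock_guard<std::mutex> lock(mu);
    chunks.push_back(chunk);
  }
  bool Empty() {
    std::lock_guard<std::mutex> lock(mu);
    return chunks.empty();
  }
};

int main() {
  ToyFreeList list;
  std::atomic<bool> stop{false};
  // Stand-in for the --stress-concurrent-allocation background thread.
  std::thread background([&] {
    while (!stop.load()) list.Refill(42);
  });
  list.Drain();
  // The invariant "free list is empty" may already be false here, even
  // though every individual operation was perfectly synchronized. This is
  // why the CHECK above insists the stress thread never runs at all.
  bool still_empty = list.Empty();
  stop.store(true);
  background.join();
  return still_empty ? 0 : 1;  // Nondeterministic: either exit can occur.
}
```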
```diff
@@ -96,6 +96,8 @@ Handle<Object> HeapTester::TestAllocateAfterFailures() {
 
 
 HEAP_TEST(StressHandles) {
+  // For TestAllocateAfterFailures.
+  FLAG_stress_concurrent_allocation = false;
   v8::HandleScope scope(CcTest::isolate());
   v8::Local<v8::Context> env = v8::Context::New(CcTest::isolate());
   env->Enter();
@@ -128,6 +130,8 @@ Handle<AccessorInfo> TestAccessorInfo(
 
 
 TEST(StressJS) {
+  // For TestAllocateAfterFailures in TestGetter.
+  FLAG_stress_concurrent_allocation = false;
   Isolate* isolate = CcTest::i_isolate();
   Factory* factory = isolate->factory();
   v8::HandleScope scope(CcTest::isolate());
```
```diff
@@ -1820,6 +1820,7 @@ static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
 // Test the case where allocation must be done from the free list, so filler
 // may precede or follow the object.
 TEST(TestAlignedOverAllocation) {
+  ManualGCScope manual_gc_scope;
   Heap* heap = CcTest::heap();
   // Test checks for fillers before and behind objects and requires a fresh
   // page and empty free list.
```
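Some tests in this commit are fixed by adding `ManualGCScope` rather than an explicit flag assignment. Assuming `ManualGCScope` (declared in test/cctest/cctest.h) also saves and clears `FLAG_stress_concurrent_allocation` — which would explain why adding it suffices here — its shape is roughly the RAII pattern below. This is an illustrative reconstruction, not the actual cctest code:

```cpp
// Illustrative RAII reconstruction; see test/cctest/cctest.h for the real one.
class ManualGCScopeSketch {
 public:
  ManualGCScopeSketch()
      : saved_stress_concurrent_allocation_(
            v8::internal::FLAG_stress_concurrent_allocation) {
    // Make GC and allocation timing deterministic for the enclosing test.
    v8::internal::FLAG_stress_concurrent_allocation = false;
  }
  ~ManualGCScopeSketch() {
    // Restore the previous value when the test body exits.
    v8::internal::FLAG_stress_concurrent_allocation =
        saved_stress_concurrent_allocation_;
  }

 private:
  bool saved_stress_concurrent_allocation_;
};
```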
```diff
@@ -1943,7 +1944,7 @@ TEST(GrowAndShrinkNewSpace) {
   // Avoid shrinking new space in GC epilogue. This can happen if allocation
   // throughput samples have been taken while executing the benchmark.
   FLAG_predictable = true;
-
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   CcTest::InitializeVM();
   Heap* heap = CcTest::heap();
   NewSpace* new_space = heap->new_space();
```
```diff
@@ -1999,6 +2000,7 @@ TEST(GrowAndShrinkNewSpace) {
 
 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
   if (FLAG_single_generation) return;
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   CcTest::InitializeVM();
   Heap* heap = CcTest::heap();
   if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
@@ -2393,6 +2395,7 @@ TEST(IdleNotificationFinishMarking) {
 TEST(OptimizedAllocationAlwaysInNewSpace) {
   if (FLAG_single_generation) return;
   FLAG_allow_natives_syntax = true;
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
   if (FLAG_gc_global || FLAG_stress_compaction ||
@@ -5024,6 +5027,7 @@ TEST(Regress388880) {
   if (!FLAG_incremental_marking) return;
   FLAG_stress_incremental_marking = false;
   FLAG_expose_gc = true;
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   Isolate* isolate = CcTest::i_isolate();
```
```diff
@@ -5779,6 +5783,7 @@ Handle<FixedArray> ShrinkArrayAndCheckSize(Heap* heap, int length) {
 }
 
 TEST(Regress609761) {
+  ManualGCScope manual_gc_scope;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   Heap* heap = CcTest::heap();
@@ -5788,6 +5793,7 @@ TEST(Regress609761) {
 }
 
 TEST(LiveBytes) {
+  ManualGCScope manual_gc_scope;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   Heap* heap = CcTest::heap();
```
```diff
@@ -5905,6 +5911,7 @@ TEST(Regress631969) {
 
 TEST(LeftTrimFixedArrayInBlackArea) {
   if (!FLAG_incremental_marking) return;
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   Heap* heap = CcTest::heap();
@@ -5945,6 +5952,7 @@ TEST(LeftTrimFixedArrayInBlackArea) {
 
 TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
   if (!FLAG_incremental_marking) return;
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   Heap* heap = CcTest::heap();
@@ -6013,6 +6021,7 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
 
 TEST(ContinuousRightTrimFixedArrayInBlackArea) {
   if (!FLAG_incremental_marking) return;
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   Heap* heap = CcTest::heap();
@@ -6221,6 +6230,7 @@ static size_t GetRememberedSetSize(HeapObject obj) {
 
 TEST(RememberedSet_InsertOnWriteBarrier) {
   if (FLAG_single_generation) return;
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   Factory* factory = isolate->factory();
@@ -6248,6 +6258,7 @@ TEST(RememberedSet_InsertOnWriteBarrier) {
 
 TEST(RememberedSet_InsertInLargePage) {
   if (FLAG_single_generation) return;
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   Factory* factory = isolate->factory();
@@ -6274,6 +6285,7 @@ TEST(RememberedSet_InsertInLargePage) {
 
 TEST(RememberedSet_InsertOnPromotingObjectToOld) {
   if (FLAG_single_generation) return;
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   Factory* factory = isolate->factory();
@@ -6303,6 +6315,7 @@ TEST(RememberedSet_InsertOnPromotingObjectToOld) {
 
 TEST(RememberedSet_RemoveStaleOnScavenge) {
   if (FLAG_single_generation) return;
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   Factory* factory = isolate->factory();
```
```diff
@@ -6342,7 +6355,7 @@ TEST(RememberedSet_RemoveStaleOnScavenge) {
 // that compaction has happened and otherwise relies on code's self-validation.
 TEST(RememberedSet_OldToOld) {
   if (FLAG_stress_incremental_marking) return;
-
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   Factory* factory = isolate->factory();
```
```diff
@@ -6733,6 +6746,7 @@ HEAP_TEST(Regress779503) {
   // that it currently processes because it might allocate over the currently
   // processed slot.
   if (FLAG_single_generation) return;
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   const int kArraySize = 2048;
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
```
```diff
@@ -7158,7 +7172,6 @@ TEST(GarbageCollectionWithLocalHeap) {
 }
 
 TEST(Regress10698) {
-  ManualGCScope manual_gc_scope;
   CcTest::InitializeVM();
   Heap* heap = CcTest::i_isolate()->heap();
   Factory* factory = CcTest::i_isolate()->factory();
```
```diff
@@ -100,6 +100,7 @@ class MockPlatform : public TestPlatform {
 
 TEST(IncrementalMarkingUsingTasks) {
   if (!i::FLAG_incremental_marking) return;
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   FLAG_stress_incremental_marking = false;
   CcTest::InitializeVM();
   MockPlatform platform;
@@ -48,6 +48,7 @@ Page* HeapTester::AllocateByteArraysOnPage(
 }
 
 HEAP_TEST(InvalidatedSlotsNoInvalidatedRanges) {
+  FLAG_stress_concurrent_allocation = false;  // For AllocateByteArraysOnPage.
   CcTest::InitializeVM();
   Heap* heap = CcTest::heap();
   std::vector<ByteArray> byte_arrays;
@@ -63,6 +64,7 @@ HEAP_TEST(InvalidatedSlotsNoInvalidatedRanges) {
 }
 
 HEAP_TEST(InvalidatedSlotsSomeInvalidatedRanges) {
+  FLAG_stress_concurrent_allocation = false;  // For AllocateByteArraysOnPage.
   CcTest::InitializeVM();
   Heap* heap = CcTest::heap();
   std::vector<ByteArray> byte_arrays;
@@ -87,6 +89,7 @@ HEAP_TEST(InvalidatedSlotsSomeInvalidatedRanges) {
 }
 
 HEAP_TEST(InvalidatedSlotsAllInvalidatedRanges) {
+  FLAG_stress_concurrent_allocation = false;  // For AllocateByteArraysOnPage.
   CcTest::InitializeVM();
   Heap* heap = CcTest::heap();
   std::vector<ByteArray> byte_arrays;
@@ -156,6 +159,7 @@ HEAP_TEST(InvalidatedSlotsEvacuationCandidate) {
 }
 
 HEAP_TEST(InvalidatedSlotsResetObjectRegression) {
+  FLAG_stress_concurrent_allocation = false;  // For AllocateByteArraysOnPage.
   CcTest::InitializeVM();
   Heap* heap = CcTest::heap();
   std::vector<ByteArray> byte_arrays;
```
```diff
@@ -53,6 +53,7 @@ namespace heap {
 
 TEST(Promotion) {
   if (FLAG_single_generation) return;
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   {
@@ -74,6 +75,7 @@ TEST(Promotion) {
 
 HEAP_TEST(NoPromotion) {
   if (FLAG_always_promote_young_mc) return;
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   // Page promotion allows pages to be moved to old space even in the case of
   // OOM scenarios.
   FLAG_page_promotion = false;
@@ -425,6 +427,7 @@ UNINITIALIZED_TEST(RegressJoinThreadsOnIsolateDeinit) {
 }
 
 TEST(Regress5829) {
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   v8::HandleScope sc(CcTest::isolate());
```
```diff
@@ -565,6 +565,7 @@ UNINITIALIZED_TEST(InlineAllocationObserverCadence) {
 }
 
 HEAP_TEST(Regress777177) {
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
@@ -650,6 +651,7 @@ HEAP_TEST(Regress791582) {
 
 TEST(ShrinkPageToHighWaterMarkFreeSpaceEnd) {
   FLAG_stress_incremental_marking = false;
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   HandleScope scope(isolate);
@@ -678,6 +680,7 @@ TEST(ShrinkPageToHighWaterMarkFreeSpaceEnd) {
 }
 
 TEST(ShrinkPageToHighWaterMarkNoFiller) {
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   HandleScope scope(isolate);
@@ -700,6 +703,7 @@ TEST(ShrinkPageToHighWaterMarkNoFiller) {
 }
 
 TEST(ShrinkPageToHighWaterMarkOneWordFiller) {
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   HandleScope scope(isolate);
@@ -727,6 +731,7 @@ TEST(ShrinkPageToHighWaterMarkOneWordFiller) {
 }
 
 TEST(ShrinkPageToHighWaterMarkTwoWordFiller) {
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   HandleScope scope(isolate);
```
```diff
@@ -51,6 +51,7 @@ class MockPlatformForUnmapper : public TestPlatform {
 };
 
 TEST(EagerUnmappingInCollectAllAvailableGarbage) {
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   CcTest::InitializeVM();
   MockPlatformForUnmapper platform;
   Heap* heap = CcTest::heap();
@@ -685,6 +685,7 @@ TEST(MakingExternalOneByteStringConditions) {
 
 
 TEST(MakingExternalUnalignedOneByteString) {
+  i::FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   LocalContext env;
   v8::HandleScope scope(env->GetIsolate());
 
@@ -13809,6 +13810,7 @@ static void event_handler(const v8::JitCodeEvent* event) {
 UNINITIALIZED_TEST(SetJitCodeEventHandler) {
   i::FLAG_stress_compaction = true;
   i::FLAG_incremental_marking = false;
+  i::FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   if (i::FLAG_never_compact) return;
   const char* script =
       "function bar() {"
```
```diff
@@ -17970,6 +17972,8 @@ TEST(GCCallbacksWithData) {
 }
 
 TEST(GCCallbacks) {
+  // For SimulateFullSpace in PrologueCallbackAlloc and EpilogueCallbackAlloc.
+  i::FLAG_stress_concurrent_allocation = false;
   LocalContext context;
   v8::Isolate* isolate = context->GetIsolate();
   gc_callbacks_isolate = isolate;
@@ -297,6 +297,8 @@ static void TestHashMapDoesNotCauseGC(Handle<HashMap> table) {
 
 TEST(ObjectHashTableCausesGC) {
   i::FLAG_stress_compaction = false;
+  // For SimulateFullSpace in TestHashMapDoesNotCauseGC.
+  i::FLAG_stress_concurrent_allocation = false;
   LocalContext context;
   v8::HandleScope scope(context->GetIsolate());
   Isolate* isolate = CcTest::i_isolate();
@@ -1749,6 +1749,7 @@ TEST(ExternalStringIndexOf) {
 
 #define GC_INSIDE_NEW_STRING_FROM_UTF8_SUB_STRING(NAME, STRING) \
   TEST(GCInsideNewStringFromUtf8SubStringWith##NAME) { \
+    FLAG_stress_concurrent_allocation = false; /* For SimulateFullSpace. */ \
     CcTest::InitializeVM(); \
     LocalContext context; \
     v8::HandleScope scope(CcTest::isolate()); \
```
```diff
@@ -231,6 +231,7 @@ TEST(WeakMapScavenge) {
 TEST(Regress2060a) {
   if (i::FLAG_never_compact) return;
   FLAG_always_compact = true;
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   LocalContext context;
   Isolate* isolate = GetIsolateFrom(&context);
   Factory* factory = isolate->factory();
@@ -272,6 +273,7 @@ TEST(Regress2060b) {
 #ifdef VERIFY_HEAP
   FLAG_verify_heap = true;
 #endif
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
 
   LocalContext context;
   Isolate* isolate = GetIsolateFrom(&context);
@@ -165,6 +165,7 @@ TEST(WeakSet_Shrinking) {
 TEST(WeakSet_Regress2060a) {
   if (i::FLAG_never_compact) return;
   FLAG_always_compact = true;
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   LocalContext context;
   Isolate* isolate = GetIsolateFrom(&context);
   Factory* factory = isolate->factory();
@@ -206,6 +207,7 @@ TEST(WeakSet_Regress2060b) {
 #ifdef VERIFY_HEAP
   FLAG_verify_heap = true;
 #endif
+  FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
 
   LocalContext context;
   Isolate* isolate = GetIsolateFrom(&context);
```