[heap] Fix failed tests when enabling single generation
* Filtered some tests that rely on incremental_marking and shape tracking

Bug: v8:11644
Change-Id: Ic9833bf1e49e6413422484858cd1054dd2500092
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2822284
Commit-Queue: Wenyu Zhao <wenyu.zhao@anu.edu.au>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74078}
This commit is contained in: parent 1786ab50dc, commit 2cd77745d9
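Most of the change applies one of two early-return guards at the top of the affected cctest/unittest bodies. A minimal sketch of the pattern, with hypothetical test names (the real call sites are in the hunks below):

// Tests that drive incremental marking directly are skipped when the flag is off.
TEST(HypotheticalIncrementalMarkingTest) {
  if (!FLAG_incremental_marking) return;
  // ... original test body ...
}

// Tests that depend on a young generation are skipped in single-generation mode.
TEST(HypotheticalYoungGenerationTest) {
  if (FLAG_single_generation) return;
  // ... original test body ...
}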
BUILD.gn (1 line added):
@@ -1928,6 +1928,7 @@ action("v8_dump_build_config") {
     "v8_enable_atomic_object_field_writes=" +
         "$v8_enable_atomic_object_field_writes",
     "v8_enable_concurrent_marking=$v8_enable_concurrent_marking",
+    "v8_enable_single_generation=$v8_enable_single_generation",
     "v8_enable_i18n_support=$v8_enable_i18n_support",
     "v8_enable_verify_predictable=$v8_enable_verify_predictable",
     "v8_enable_verify_csa=$v8_enable_verify_csa",
@@ -1181,7 +1181,7 @@ Handle<Context> Factory::NewCatchContext(Handle<Context> previous,
       isolate()->catch_context_map(), Context::SizeFor(variadic_part_length),
       variadic_part_length, AllocationType::kYoung);
   DisallowGarbageCollection no_gc;
-  DCHECK(Heap::InYoungGeneration(context));
+  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
   context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
   context.set_previous(*previous, SKIP_WRITE_BARRIER);
   context.set(Context::THROWN_OBJECT_INDEX, *thrown_object, SKIP_WRITE_BARRIER);
@@ -1206,7 +1206,7 @@ Handle<Context> Factory::NewDebugEvaluateContext(Handle<Context> previous,
       Context::SizeFor(variadic_part_length),
       variadic_part_length, AllocationType::kYoung);
   DisallowGarbageCollection no_gc;
-  DCHECK(Heap::InYoungGeneration(context));
+  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
   context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
   context.set_previous(*previous, SKIP_WRITE_BARRIER);
   context.set_extension(*ext, SKIP_WRITE_BARRIER);
@@ -1229,7 +1229,7 @@ Handle<Context> Factory::NewWithContext(Handle<Context> previous,
       isolate()->with_context_map(), Context::SizeFor(variadic_part_length),
       variadic_part_length, AllocationType::kYoung);
   DisallowGarbageCollection no_gc;
-  DCHECK(Heap::InYoungGeneration(context));
+  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
   context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
   context.set_previous(*previous, SKIP_WRITE_BARRIER);
   context.set_extension(*extension, SKIP_WRITE_BARRIER);
@@ -1245,7 +1245,7 @@ Handle<Context> Factory::NewBlockContext(Handle<Context> previous,
       isolate()->block_context_map(), Context::SizeFor(variadic_part_length),
       variadic_part_length, AllocationType::kYoung);
   DisallowGarbageCollection no_gc;
-  DCHECK(Heap::InYoungGeneration(context));
+  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
   context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
   context.set_previous(*previous, SKIP_WRITE_BARRIER);
   return handle(context, isolate());
@@ -1258,7 +1258,7 @@ Handle<Context> Factory::NewBuiltinContext(Handle<NativeContext> native_context,
       isolate()->function_context_map(), Context::SizeFor(variadic_part_length),
       variadic_part_length, AllocationType::kYoung);
   DisallowGarbageCollection no_gc;
-  DCHECK(Heap::InYoungGeneration(context));
+  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
   context.set_scope_info(read_only_roots().empty_scope_info(),
                          SKIP_WRITE_BARRIER);
   context.set_previous(*native_context, SKIP_WRITE_BARRIER);
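The factory hunks above relax an unconditional young-generation assertion. In V8, DCHECK_IMPLIES(a, b) asserts "a implies b", i.e. it behaves like DCHECK(!(a) || (b)). A hedged sketch of the relaxed check, wrapped in a hypothetical helper for illustration:

// Hypothetical helper restating the assertion from the hunks above.
void CheckNewContextPlacement(Context context) {
  // Still an unconditional placement check in normal builds; vacuously true
  // when FLAG_single_generation is set, since AllocationType::kYoung requests
  // are then presumably served outside the young generation.
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
}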
@@ -87,6 +87,7 @@ HeapObject SemiSpaceObjectIterator::Next() {
 AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
                                        AllocationAlignment alignment,
                                        AllocationOrigin origin) {
+  DCHECK(!FLAG_single_generation);
 #if DEBUG
   VerifyTop();
 #endif
@@ -715,6 +715,7 @@ int FixedArrayLenFromSize(int size) {
 }
 
 void FillUpOneNewSpacePage(Isolate* isolate, Heap* heap) {
+  DCHECK(!FLAG_single_generation);
   PauseAllocationObserversScope pause_observers(heap);
   NewSpace* space = heap->new_space();
   // We cannot rely on `space->limit()` to point to the end of the current page
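The two hunks above add the complementary assertion: code paths that manipulate the new space directly must simply never run when there is no young generation. A minimal hedged sketch of that pattern (hypothetical helper name):

// Hypothetical helper: code that touches the semi-spaces directly first
// asserts that this build configuration actually has a young generation.
void FillNewSpaceForTest(Heap* heap) {
  DCHECK(!FLAG_single_generation);
  NewSpace* space = heap->new_space();
  USE(space);  // ... fill pages, trigger allocation observers, etc. ...
}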
@@ -122,7 +122,8 @@ std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
     CHECK((allocation == AllocationType::kYoung &&
            heap->new_space()->Contains(*handles.back())) ||
           (allocation == AllocationType::kOld &&
-           heap->InOldSpace(*handles.back())));
+           heap->InOldSpace(*handles.back())) ||
+          FLAG_single_generation);
     free_memory -= handles.back()->Size();
   }
   return handles;
@@ -398,15 +398,16 @@ TEST(ArrayBuffer_ExternalBackingStoreSizeIncreases) {
   Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
   ExternalBackingStoreType type = ExternalBackingStoreType::kArrayBuffer;
 
-  const size_t backing_store_before =
-      heap->new_space()->ExternalBackingStoreBytes(type);
+  const Space* space = FLAG_incremental_marking
+                           ? static_cast<Space*>(heap->new_space())
+                           : static_cast<Space*>(heap->old_space());
+  const size_t backing_store_before = space->ExternalBackingStoreBytes(type);
   {
     const size_t kArraybufferSize = 117;
     v8::HandleScope handle_scope(isolate);
     Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, kArraybufferSize);
     USE(ab);
-    const size_t backing_store_after =
-        heap->new_space()->ExternalBackingStoreBytes(type);
+    const size_t backing_store_after = space->ExternalBackingStoreBytes(type);
     CHECK_EQ(kArraybufferSize, backing_store_after - backing_store_before);
   }
 }
@@ -340,6 +340,7 @@ class ConcurrentBlackAllocationThread final : public v8::base::Thread {
 };
 
 UNINITIALIZED_TEST(ConcurrentBlackAllocation) {
+  if (!FLAG_incremental_marking) return;
   v8::Isolate::CreateParams create_params;
   create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
   v8::Isolate* isolate = v8::Isolate::New(create_params);
@@ -399,6 +400,7 @@ class ConcurrentWriteBarrierThread final : public v8::base::Thread {
 };
 
 UNINITIALIZED_TEST(ConcurrentWriteBarrier) {
+  if (!FLAG_incremental_marking) return;
   if (!FLAG_concurrent_marking) {
     // The test requires concurrent marking barrier.
     return;
@@ -463,6 +465,7 @@ class ConcurrentRecordRelocSlotThread final : public v8::base::Thread {
 };
 
 UNINITIALIZED_TEST(ConcurrentRecordRelocSlot) {
+  if (!FLAG_incremental_marking) return;
   if (!FLAG_concurrent_marking) {
     // The test requires concurrent marking barrier.
     return;
@@ -104,6 +104,7 @@ TEST(ConcurrentMarkingPreemptAndReschedule) {
 }
 
 TEST(ConcurrentMarkingMarkedBytes) {
+  if (!FLAG_incremental_marking) return;
   if (!i::FLAG_concurrent_marking) return;
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
@@ -124,6 +125,7 @@ TEST(ConcurrentMarkingMarkedBytes) {
 }
 
 UNINITIALIZED_TEST(ConcurrentMarkingStoppedOnTeardown) {
+  if (!FLAG_incremental_marking) return;
   if (!i::FLAG_concurrent_marking) return;
 
   v8::Isolate::CreateParams create_params;
@@ -251,6 +251,7 @@ TEST(FinalizeTracingIsNoopWhenNotMarking) {
 }
 
 TEST(FinalizeTracingWhenMarking) {
+  if (!FLAG_incremental_marking) return;
   ManualGCScope manual_gc;
   CcTest::InitializeVM();
   v8::Isolate* isolate = CcTest::isolate();
@@ -709,6 +710,7 @@ TEST(TracedGlobalSetFinalizationCallbackMarkSweep) {
 
 TEST(TracePrologueCallingIntoV8WriteBarrier) {
   // Regression test: https://crbug.com/940003
+  if (!FLAG_incremental_marking) return;
   ManualGCScope manual_gc;
   CcTest::InitializeVM();
   v8::Isolate* isolate = CcTest::isolate();
@@ -1265,6 +1265,7 @@ UNINITIALIZED_TEST(Regress10843) {
 
 // Tests that spill slots from optimized code don't have weak pointers.
 TEST(Regress10774) {
+  if (FLAG_single_generation) return;
   i::FLAG_allow_natives_syntax = true;
   i::FLAG_turboprop = true;
   i::FLAG_turbo_dynamic_map_checks = true;
@@ -1325,6 +1326,7 @@ TEST(Regress10774) {
 #ifndef V8_LITE_MODE
 
 TEST(TestOptimizeAfterBytecodeFlushingCandidate) {
+  if (FLAG_single_generation) return;
   FLAG_opt = true;
   FLAG_always_opt = false;
 #if ENABLE_SPARKPLUG
@@ -1803,6 +1805,7 @@ static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
 
 
 TEST(TestAlignedAllocation) {
+  if (FLAG_single_generation) return;
   // Double misalignment is 4 on 32-bit platforms or when pointer compression
   // is enabled, 0 on 64-bit ones when pointer compression is disabled.
   const intptr_t double_misalignment = kDoubleSize - kTaggedSize;
@@ -6583,6 +6586,7 @@ HEAP_TEST(RegressMissingWriteBarrierInAllocate) {
 }
 
 HEAP_TEST(MarkCompactEpochCounter) {
+  if (!FLAG_incremental_marking) return;
   ManualGCScope manual_gc_scope;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
@@ -6949,6 +6953,7 @@ TEST(Regress8014) {
 }
 
 TEST(Regress8617) {
+  if (!FLAG_incremental_marking) return;
   ManualGCScope manual_gc_scope;
   FLAG_manual_evacuation_candidates_selection = true;
   LocalContext env;
@@ -6991,6 +6996,7 @@ TEST(Regress8617) {
 }
 
 HEAP_TEST(MemoryReducerActivationForSmallHeaps) {
+  if (FLAG_single_generation) return;
   ManualGCScope manual_gc_scope;
   LocalContext env;
   Isolate* isolate = CcTest::i_isolate();
@@ -7160,6 +7166,7 @@ TEST(GarbageCollectionWithLocalHeap) {
 }
 
 TEST(Regress10698) {
+  if (!FLAG_incremental_marking) return;
   CcTest::InitializeVM();
   Heap* heap = CcTest::i_isolate()->heap();
   Factory* factory = CcTest::i_isolate()->factory();
@@ -199,6 +199,7 @@ Handle<FixedArray> AllocateArrayOnEvacuationCandidate(Isolate* isolate,
 }
 
 HEAP_TEST(InvalidatedSlotsRightTrimFixedArray) {
+  if (!FLAG_incremental_marking) return;
   FLAG_manual_evacuation_candidates_selection = true;
   FLAG_parallel_compaction = false;
   ManualGCScope manual_gc_scope;
@@ -230,6 +231,7 @@ HEAP_TEST(InvalidatedSlotsRightTrimFixedArray) {
 }
 
 HEAP_TEST(InvalidatedSlotsRightTrimLargeFixedArray) {
+  if (!FLAG_incremental_marking) return;
   FLAG_manual_evacuation_candidates_selection = true;
   FLAG_parallel_compaction = false;
   ManualGCScope manual_gc_scope;
@@ -267,6 +269,7 @@ HEAP_TEST(InvalidatedSlotsRightTrimLargeFixedArray) {
 }
 
 HEAP_TEST(InvalidatedSlotsLeftTrimFixedArray) {
+  if (!FLAG_incremental_marking) return;
   FLAG_manual_evacuation_candidates_selection = true;
   FLAG_parallel_compaction = false;
   ManualGCScope manual_gc_scope;
@@ -298,6 +301,7 @@ HEAP_TEST(InvalidatedSlotsLeftTrimFixedArray) {
 }
 
 HEAP_TEST(InvalidatedSlotsFastToSlow) {
+  if (!FLAG_incremental_marking) return;
   FLAG_manual_evacuation_candidates_selection = true;
   FLAG_parallel_compaction = false;
   ManualGCScope manual_gc_scope;
@@ -427,6 +427,7 @@ UNINITIALIZED_TEST(RegressJoinThreadsOnIsolateDeinit) {
 }
 
 TEST(Regress5829) {
+  if (!FLAG_incremental_marking) return;
   FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
@@ -269,6 +269,7 @@ TEST(ComputeDiscardMemoryAreas) {
 }
 
 TEST(NewSpace) {
+  if (FLAG_single_generation) return;
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
   TestMemoryAllocatorScope test_allocator_scope(isolate, heap->MaxReserved(),
@@ -516,6 +517,7 @@ void testAllocationObserver(Isolate* i_isolate, T* space) {
 }
 
 UNINITIALIZED_TEST(AllocationObserver) {
+  if (FLAG_single_generation) return;
   v8::Isolate::CreateParams create_params;
   create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
   v8::Isolate* isolate = v8::Isolate::New(create_params);
@@ -538,6 +540,7 @@ UNINITIALIZED_TEST(AllocationObserver) {
 }
 
 UNINITIALIZED_TEST(InlineAllocationObserverCadence) {
+  if (FLAG_single_generation) return;
   v8::Isolate::CreateParams create_params;
   create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
   v8::Isolate* isolate = v8::Isolate::New(create_params);
@@ -617,6 +620,7 @@ HEAP_TEST(Regress777177) {
 }
 
 HEAP_TEST(Regress791582) {
+  if (FLAG_single_generation) return;
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
@@ -16,6 +16,7 @@ namespace internal {
 namespace heap {
 
 HEAP_TEST(WriteBarrier_Marking) {
+  if (!FLAG_incremental_marking) return;
   ManualGCScope manual_gc_scope;
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
@@ -56,6 +57,7 @@ HEAP_TEST(WriteBarrier_Marking) {
 }
 
 HEAP_TEST(WriteBarrier_MarkingExtension) {
+  if (!FLAG_incremental_marking) return;
   ManualGCScope manual_gc_scope;
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
@@ -1849,6 +1849,7 @@ TEST(CodeSerializerLargeCodeObject) {
 }
 
 TEST(CodeSerializerLargeCodeObjectWithIncrementalMarking) {
+  if (!FLAG_incremental_marking) return;
   if (FLAG_never_compact) return;
   ManualGCScope manual_gc_scope;
   FLAG_always_opt = false;
@@ -1480,4 +1480,20 @@
 'regress/wasm/regress-1010272': [SKIP],
 }],
 
+##############################################################################
+['single_generation', {
+  # These tests rely on allocation site tracking which only works in the young generation.
+  'array-constructor-feedback': [SKIP],
+  'wasm/generic-wrapper': [SKIP],
+  'regress/regress-trap-allocation-memento': [SKIP],
+  'regress/regress-crbug-1151890': [SKIP],
+  'regress/regress-crbug-1163184': [SKIP],
+  'regress/regress-11519': [SKIP],
+  'regress/regress-4121': [SKIP],
+  'packed-elements': [SKIP],
+  'const-dict-tracking': [SKIP],
+  'compiler/native-context-specialization-hole-check': [SKIP],
+  'compiler/test-literal-map-migration': [SKIP],
+}],  # single_generation
+
 ]
@@ -66,6 +66,7 @@ TEST_F(UnifiedHeapTest, FindingV8ToBlinkReference) {
 }
 
 TEST_F(UnifiedHeapTest, WriteBarrierV8ToCppReference) {
+  if (!FLAG_incremental_marking) return;
   v8::HandleScope scope(v8_isolate());
   v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
   v8::Context::Scope context_scope(context);
@@ -92,6 +93,7 @@ TEST_F(UnifiedHeapTest, WriteBarrierV8ToCppReference) {
 }
 
 TEST_F(UnifiedHeapTest, WriteBarrierCppToV8Reference) {
+  if (!FLAG_incremental_marking) return;
   v8::HandleScope scope(v8_isolate());
   v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
   v8::Context::Scope context_scope(context);
@@ -172,6 +172,7 @@ class BuildConfig(object):
     self.cfi_vptr = build_config['is_cfi']
     self.control_flow_integrity = build_config['v8_control_flow_integrity']
     self.concurrent_marking = build_config['v8_enable_concurrent_marking']
+    self.single_generation = build_config['v8_enable_single_generation']
     self.dcheck_always_on = build_config['dcheck_always_on']
     self.gcov_coverage = build_config['is_gcov_coverage']
     self.is_android = build_config['is_android']
@@ -664,6 +665,7 @@ class BaseTestRunner(object):
       "cfi_vptr": self.build_config.cfi_vptr,
       "control_flow_integrity": self.build_config.control_flow_integrity,
       "concurrent_marking": self.build_config.concurrent_marking,
+      "single_generation": self.build_config.single_generation,
       "dcheck_always_on": self.build_config.dcheck_always_on,
       "deopt_fuzzer": False,
       "endurance_fuzzer": False,
@@ -23,6 +23,7 @@
   "v8_enable_pointer_compression": true,
   "v8_enable_pointer_compression_shared_cage": true,
   "v8_control_flow_integrity": false,
+  "v8_enable_single_generation": false,
   "v8_enable_third_party_heap": false,
   "v8_enable_webassembly": true
 }
@@ -23,6 +23,7 @@
   "v8_enable_pointer_compression": false,
   "v8_enable_pointer_compression_shared_cage": false,
   "v8_control_flow_integrity": false,
+  "v8_enable_single_generation": false,
   "v8_enable_third_party_heap": false,
   "v8_enable_webassembly": true
 }