[test] Fix or skip CHECKs in tests for TPH

* Most are heap/space/page checks.

Bug: v8:11641
Change-Id: Ia1726f414109ac5e8a3bdb18ccaa46a63db6bc95
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2849823
Commit-Queue: Wenyu Zhao <wenyu.zhao@anu.edu.au>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74317}
Authored by Wenyu Zhao on 2021-04-30 12:54:25 +10:00; committed by V8 LUCI CQ
parent 1a9c9f0fc0
commit 7134d7f656
13 changed files with 76 additions and 38 deletions
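
The fix pattern repeated across the files below: assertions about V8-managed pages and spaces (MemoryChunk ownership, InOldSpace, pending allocations) do not hold when a third-party heap (TPH) backs allocation, so each such CHECK is either downgraded to CHECK_IMPLIES(!FLAG_enable_third_party_heap, ...) or the test returns early on the flag. The following standalone sketch illustrates the two idioms; it is not V8 code: the CHECK/CHECK_IMPLIES macros are simplified stand-ins with the same semantics as V8's, and the flag variable and page predicate are illustrative only.

#include <cstdio>
#include <cstdlib>

// Simplified stand-ins for V8's CHECK / CHECK_IMPLIES macros.
// CHECK_IMPLIES(a, b) asserts !(a) || (b), i.e. it is vacuously
// satisfied whenever the premise is false.
#define CHECK(cond)                                      \
  do {                                                   \
    if (!(cond)) {                                       \
      std::fprintf(stderr, "Check failed: %s\n", #cond); \
      std::abort();                                      \
    }                                                    \
  } while (false)
#define CHECK_IMPLIES(premise, cond) CHECK(!(premise) || (cond))

// Stand-in for V8's FLAG_enable_third_party_heap.
static bool FLAG_enable_third_party_heap = true;

int main() {
  // Idiom 1: guard the assertion. Under TPH an object does not sit on a
  // V8-managed page, so a page-identity check would fail; the guarded
  // form only asserts when V8's own heap is in use.
  bool on_v8_large_object_page = false;  // what a TPH build would observe
  CHECK_IMPLIES(!FLAG_enable_third_party_heap, on_v8_large_object_page);

  // Idiom 2: skip the whole test when its setup (forced compaction,
  // remembered sets, young generation) has no meaning under TPH.
  if (FLAG_enable_third_party_heap) return 0;
  CHECK(on_v8_large_object_page);  // would run only on the V8 heap
}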


@@ -568,7 +568,7 @@ StartupData SnapshotCreator::CreateBlob(
       i::GarbageCollectionReason::kSnapshotCreator);
   {
     i::HandleScope scope(isolate);
-    isolate->heap()->CompactWeakArrayLists(internal::AllocationType::kOld);
+    isolate->heap()->CompactWeakArrayLists();
   }
   i::Snapshot::ClearReconstructableDataForSerialization(


@@ -465,7 +465,12 @@ bool Heap::InToPage(HeapObject heap_object) {
   return BasicMemoryChunk::FromHeapObject(heap_object)->IsToPage();
 }
 
-bool Heap::InOldSpace(Object object) { return old_space_->Contains(object); }
+bool Heap::InOldSpace(Object object) {
+  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL)
+    return object.IsHeapObject() &&
+           third_party_heap::Heap::InOldSpace(object.ptr());
+  return old_space_->Contains(object);
+}
 
 // static
 Heap* Heap::FromWritableHeapObject(HeapObject obj) {


@@ -3134,7 +3134,8 @@ bool Heap::IsImmovable(HeapObject object) {
 
 bool Heap::IsLargeObject(HeapObject object) {
   if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL)
-    return third_party_heap::Heap::InLargeObjectSpace(object.address());
+    return third_party_heap::Heap::InLargeObjectSpace(object.address()) ||
+           third_party_heap::Heap::InSpace(object.address(), CODE_LO_SPACE);
   return BasicMemoryChunk::FromHeapObject(object)->IsLargePage();
 }
@@ -4116,6 +4117,8 @@ bool Heap::SharedHeapContains(HeapObject value) const {
 }
 
 bool Heap::InSpace(HeapObject value, AllocationSpace space) const {
+  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL)
+    return third_party_heap::Heap::InSpace(value.address(), space);
   if (memory_allocator()->IsOutsideAllocatedSpace(value.address())) {
     return false;
   }
@@ -5825,7 +5828,7 @@ Handle<WeakArrayList> CompactWeakArrayList(Heap* heap,
 
 }  // anonymous namespace
 
-void Heap::CompactWeakArrayLists(AllocationType allocation) {
+void Heap::CompactWeakArrayLists() {
   // Find known PrototypeUsers and compact them.
   std::vector<Handle<PrototypeInfo>> prototype_infos;
   {
@@ -5842,20 +5845,18 @@ void Heap::CompactWeakArrayLists(AllocationType allocation) {
   for (auto& prototype_info : prototype_infos) {
     Handle<WeakArrayList> array(
         WeakArrayList::cast(prototype_info->prototype_users()), isolate());
-    DCHECK_IMPLIES(allocation == AllocationType::kOld,
-                   InOldSpace(*array) ||
-                       *array == ReadOnlyRoots(this).empty_weak_array_list());
+    DCHECK(InOldSpace(*array) ||
+           *array == ReadOnlyRoots(this).empty_weak_array_list());
     WeakArrayList new_array = PrototypeUsers::Compact(
-        array, this, JSObject::PrototypeRegistryCompactionCallback, allocation);
+        array, this, JSObject::PrototypeRegistryCompactionCallback,
+        AllocationType::kOld);
     prototype_info->set_prototype_users(new_array);
   }
 
   // Find known WeakArrayLists and compact them.
   Handle<WeakArrayList> scripts(script_list(), isolate());
-  DCHECK_IMPLIES(
-      !V8_ENABLE_THIRD_PARTY_HEAP_BOOL && allocation == AllocationType::kOld,
-      InOldSpace(*scripts));
-  scripts = CompactWeakArrayList(this, scripts, allocation);
+  DCHECK(InOldSpace(*scripts));
+  scripts = CompactWeakArrayList(this, scripts, AllocationType::kOld);
   set_script_list(*scripts);
 }
@@ -6190,7 +6191,7 @@ HeapObjectIterator::~HeapObjectIterator() {
 #ifdef DEBUG
   // Assert that in filtering mode we have iterated through all
   // objects. Otherwise, heap will be left in an inconsistent state.
-  if (!V8_ENABLE_THIRD_PARTY_HEAP_BOOL && filtering_ != kNoFiltering) {
+  if (filtering_ != kNoFiltering) {
     DCHECK_NULL(object_iterator_);
   }
 #endif


@@ -751,7 +751,7 @@ class Heap {
   size_t backing_store_bytes() const { return backing_store_bytes_; }
 
-  void CompactWeakArrayLists(AllocationType allocation);
+  void CompactWeakArrayLists();
 
   V8_EXPORT_PRIVATE void AddRetainedMap(Handle<NativeContext> context,
                                         Handle<Map> map);


@@ -37,6 +37,12 @@ const base::AddressRegion& Heap::GetCodeRange() {
   return no_region;
 }
 
+// static
+bool Heap::InSpace(Address, AllocationSpace) { return false; }
+
+// static
+bool Heap::InOldSpace(Address) { return false; }
+
 // static
 bool Heap::InCodeSpace(Address) { return false; }


@@ -26,6 +26,10 @@ class Heap {
   const base::AddressRegion& GetCodeRange();
 
+  static bool InSpace(Address address, AllocationSpace space);
+
+  static bool InOldSpace(Address address);
+
   static bool InCodeSpace(Address address);
 
   static bool InReadOnlySpace(Address address);


@@ -198,11 +198,13 @@ bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver,
         // have recorded slots in free space.
         isolate->heap()->ClearRecordedSlot(*receiver,
                                            receiver->RawField(index.offset()));
-        MemoryChunk* chunk = MemoryChunk::FromHeapObject(*receiver);
-        chunk->InvalidateRecordedSlots(*receiver);
+        if (!FLAG_enable_third_party_heap) {
+          MemoryChunk* chunk = MemoryChunk::FromHeapObject(*receiver);
+          chunk->InvalidateRecordedSlots(*receiver);
+        }
       }
     }
   }
   // If the {receiver_map} was marked stable before, then there could be
   // optimized code that depends on the assumption that no object that
   // reached this {receiver_map} transitions away from it without triggering


@@ -1927,6 +1927,7 @@ TEST(TestAlignedOverAllocation) {
 }
 
 TEST(HeapNumberAlignment) {
+  if (!FLAG_allocation_site_pretenuring) return;
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   Factory* factory = isolate->factory();
@@ -3847,11 +3848,13 @@ static void TestFillersFromPersistentHandles(bool promote) {
   // GC should retain the trimmed array but drop all of the three fillers.
   CcTest::CollectGarbage(NEW_SPACE);
-  if (promote) {
-    CHECK(heap->InOldSpace(*tail));
-  } else {
-    CHECK(Heap::InYoungGeneration(*tail));
-  }
+  if (!FLAG_single_generation) {
+    if (promote) {
+      CHECK(heap->InOldSpace(*tail));
+    } else {
+      CHECK(Heap::InYoungGeneration(*tail));
+    }
+  }
 
   CHECK_EQ(n - 6, (*tail).length());
   CHECK(!filler_1->IsHeapObject());
   CHECK(!filler_2->IsHeapObject());
@@ -3859,12 +3862,12 @@
 }
 
 TEST(DoNotEvacuateFillersFromPersistentHandles) {
-  if (FLAG_single_generation) return;
+  if (FLAG_single_generation || FLAG_move_object_start) return;
   TestFillersFromPersistentHandles(false /*promote*/);
 }
 
 TEST(DoNotPromoteFillersFromPersistentHandles) {
-  if (FLAG_single_generation) return;
+  if (FLAG_single_generation || FLAG_move_object_start) return;
   TestFillersFromPersistentHandles(true /*promote*/);
 }
@@ -6247,7 +6250,8 @@ TEST(UncommitUnusedLargeObjectMemory) {
   Handle<FixedArray> array =
       isolate->factory()->NewFixedArray(200000, AllocationType::kOld);
   MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array);
-  CHECK(chunk->owner_identity() == LO_SPACE);
+  CHECK_IMPLIES(!FLAG_enable_third_party_heap,
+                chunk->owner_identity() == LO_SPACE);
 
   intptr_t size_before = array->Size();
   size_t committed_memory_before = chunk->CommittedPhysicalMemory();
@@ -6438,6 +6442,7 @@ TEST(RememberedSet_OldToOld) {
 }
 
 TEST(RememberedSetRemoveRange) {
+  if (FLAG_single_generation) return;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   Heap* heap = CcTest::heap();
@@ -6446,7 +6451,8 @@ TEST(RememberedSetRemoveRange) {
   Handle<FixedArray> array = isolate->factory()->NewFixedArray(
       Page::kPageSize / kTaggedSize, AllocationType::kOld);
   MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array);
-  CHECK(chunk->owner_identity() == LO_SPACE);
+  CHECK_IMPLIES(!FLAG_enable_third_party_heap,
+                chunk->owner_identity() == LO_SPACE);
   Address start = array->address();
   // Maps slot to boolean indicator of whether the slot should be in the set.
   std::map<Address, bool> slots;
@@ -7206,7 +7212,7 @@ class TestAllocationTracker : public HeapObjectAllocationTracker {
 
 HEAP_TEST(CodeLargeObjectSpace) {
   Heap* heap = CcTest::heap();
   int size_in_bytes =
-      MemoryChunkLayout::MaxRegularCodeObjectSize() + kTaggedSize;
+      Heap::MaxRegularHeapObjectSize(AllocationType::kCode) + kTaggedSize;
   TestAllocationTracker allocation_tracker{size_in_bytes};
   heap->AddHeapObjectAllocationTracker(&allocation_tracker);
@@ -7240,7 +7246,7 @@ UNINITIALIZED_HEAP_TEST(CodeLargeObjectSpace64k) {
   // Allocate a regular code object.
   {
     int size_in_bytes =
-        MemoryChunkLayout::MaxRegularCodeObjectSize() - kTaggedSize;
+        Heap::MaxRegularHeapObjectSize(AllocationType::kCode) - kTaggedSize;
     TestAllocationTracker allocation_tracker{size_in_bytes};
     heap->AddHeapObjectAllocationTracker(&allocation_tracker);
@@ -7262,7 +7268,7 @@ UNINITIALIZED_HEAP_TEST(CodeLargeObjectSpace64k) {
   // Allocate a large code object.
   {
     int size_in_bytes =
-        MemoryChunkLayout::MaxRegularCodeObjectSize() + kTaggedSize;
+        Heap::MaxRegularHeapObjectSize(AllocationType::kCode) + kTaggedSize;
     TestAllocationTracker allocation_tracker{size_in_bytes};
     heap->AddHeapObjectAllocationTracker(&allocation_tracker);
@@ -7291,7 +7297,8 @@ TEST(IsPendingAllocationNewSpace) {
   Factory* factory = isolate->factory();
   HandleScope handle_scope(isolate);
   Handle<FixedArray> object = factory->NewFixedArray(5, AllocationType::kYoung);
-  CHECK(heap->IsPendingAllocation(*object));
+  CHECK_IMPLIES(!FLAG_enable_third_party_heap,
+                heap->IsPendingAllocation(*object));
   heap->PublishPendingAllocations();
   CHECK(!heap->IsPendingAllocation(*object));
 }
@@ -7304,7 +7311,8 @@ TEST(IsPendingAllocationNewLOSpace) {
   HandleScope handle_scope(isolate);
   Handle<FixedArray> object = factory->NewFixedArray(
       FixedArray::kMaxRegularLength + 1, AllocationType::kYoung);
-  CHECK(heap->IsPendingAllocation(*object));
+  CHECK_IMPLIES(!FLAG_enable_third_party_heap,
+                heap->IsPendingAllocation(*object));
   heap->PublishPendingAllocations();
   CHECK(!heap->IsPendingAllocation(*object));
 }
@@ -7316,7 +7324,8 @@ TEST(IsPendingAllocationOldSpace) {
   Factory* factory = isolate->factory();
   HandleScope handle_scope(isolate);
   Handle<FixedArray> object = factory->NewFixedArray(5, AllocationType::kOld);
-  CHECK(heap->IsPendingAllocation(*object));
+  CHECK_IMPLIES(!FLAG_enable_third_party_heap,
+                heap->IsPendingAllocation(*object));
   heap->PublishPendingAllocations();
   CHECK(!heap->IsPendingAllocation(*object));
 }
@@ -7329,7 +7338,8 @@ TEST(IsPendingAllocationLOSpace) {
   HandleScope handle_scope(isolate);
   Handle<FixedArray> object = factory->NewFixedArray(
       FixedArray::kMaxRegularLength + 1, AllocationType::kOld);
-  CHECK(heap->IsPendingAllocation(*object));
+  CHECK_IMPLIES(!FLAG_enable_third_party_heap,
+                heap->IsPendingAllocation(*object));
   heap->PublishPendingAllocations();
   CHECK(!heap->IsPendingAllocation(*object));
 }


@@ -75,7 +75,7 @@ TEST(HeapObjectIterator) {
   for (HeapObject obj = iterator.Next(); !obj.is_null();
        obj = iterator.Next()) {
-    CHECK(!ReadOnlyHeap::Contains(obj));
+    CHECK_IMPLIES(!FLAG_enable_third_party_heap, !ReadOnlyHeap::Contains(obj));
     CHECK(CcTest::heap()->Contains(obj));
     if (sample_object == obj) seen_sample_object = true;
   }


@@ -16754,8 +16754,11 @@ THREADED_TEST(GetHeapStatistics) {
   CHECK_EQ(0u, heap_statistics.used_heap_size());
   c1->GetIsolate()->GetHeapStatistics(&heap_statistics);
   CHECK_NE(static_cast<int>(heap_statistics.total_heap_size()), 0);
-  CHECK_NE(static_cast<int>(heap_statistics.used_heap_size()), 0);
+  if (!v8::internal::FLAG_enable_third_party_heap) {
+    // TODO(wenyuzhao): Get used size from third_party_heap interface
+    CHECK_NE(static_cast<int>(heap_statistics.used_heap_size()), 0);
+  }
 }
 
 TEST(GetHeapSpaceStatistics) {
   LocalContext c1;


@@ -61,7 +61,7 @@ TEST(Factory_CodeBuilder) {
   // Create a big function that ends up in CODE_LO_SPACE.
   const int instruction_size =
-      MemoryChunkLayout::MaxRegularCodeObjectSize() + 1;
+      Heap::MaxRegularHeapObjectSize(AllocationType::kCode) + 1;
   std::unique_ptr<byte[]> instructions(new byte[instruction_size]);
 
   CodeDesc desc;


@@ -230,6 +230,7 @@ TEST(WeakMapScavenge) {
 // by other paths are correctly recorded in the slots buffer.
 TEST(Regress2060a) {
   if (i::FLAG_never_compact) return;
+  if (i::FLAG_enable_third_party_heap) return;
   FLAG_always_compact = true;
   FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   LocalContext context;
@@ -253,7 +254,8 @@ TEST(Regress2060a) {
       Handle<JSObject> object =
           factory->NewJSObject(function, AllocationType::kOld);
       CHECK(!Heap::InYoungGeneration(*object));
-      CHECK(!first_page->Contains(object->address()));
+      CHECK_IMPLIES(!FLAG_enable_third_party_heap,
+                    !first_page->Contains(object->address()));
       int32_t hash = key->GetOrCreateHash(isolate).value();
       JSWeakCollection::Set(weakmap, key, object, hash);
     }
@@ -292,7 +294,8 @@ TEST(Regress2060b) {
   for (int i = 0; i < 32; i++) {
     keys[i] = factory->NewJSObject(function, AllocationType::kOld);
     CHECK(!Heap::InYoungGeneration(*keys[i]));
-    CHECK(!first_page->Contains(keys[i]->address()));
+    CHECK_IMPLIES(!FLAG_enable_third_party_heap,
+                  !first_page->Contains(keys[i]->address()));
   }
   Handle<JSWeakMap> weakmap = isolate->factory()->NewJSWeakMap();
   for (int i = 0; i < 32; i++) {


@@ -164,6 +164,7 @@ TEST(WeakSet_Shrinking) {
 // by other paths are correctly recorded in the slots buffer.
 TEST(WeakSet_Regress2060a) {
   if (i::FLAG_never_compact) return;
+  if (i::FLAG_enable_third_party_heap) return;
   FLAG_always_compact = true;
   FLAG_stress_concurrent_allocation = false;  // For SimulateFullSpace.
   LocalContext context;
@@ -187,7 +188,8 @@ TEST(WeakSet_Regress2060a) {
       Handle<JSObject> object =
          factory->NewJSObject(function, AllocationType::kOld);
       CHECK(!Heap::InYoungGeneration(*object));
-      CHECK(!first_page->Contains(object->address()));
+      CHECK_IMPLIES(!FLAG_enable_third_party_heap,
+                    !first_page->Contains(object->address()));
       int32_t hash = key->GetOrCreateHash(isolate).value();
       JSWeakCollection::Set(weakset, key, object, hash);
     }
@@ -203,6 +205,7 @@ TEST(WeakSet_Regress2060a) {
 // other strong paths are correctly recorded in the slots buffer.
 TEST(WeakSet_Regress2060b) {
   if (i::FLAG_never_compact) return;
+  if (i::FLAG_enable_third_party_heap) return;
   FLAG_always_compact = true;
 #ifdef VERIFY_HEAP
   FLAG_verify_heap = true;
@@ -226,7 +229,8 @@ TEST(WeakSet_Regress2060b) {
   for (int i = 0; i < 32; i++) {
     keys[i] = factory->NewJSObject(function, AllocationType::kOld);
     CHECK(!Heap::InYoungGeneration(*keys[i]));
-    CHECK(!first_page->Contains(keys[i]->address()));
+    CHECK_IMPLIES(!FLAG_enable_third_party_heap,
+                  !first_page->Contains(keys[i]->address()));
   }
   Handle<JSWeakSet> weakset = AllocateJSWeakSet(isolate);
   for (int i = 0; i < 32; i++) {