[heap] Move Verify* methods out of the heap class

The Verify* methods are now defined as static methods of the new HeapVerifier class in heap-verifier.h.

Bug: v8:11708
Change-Id: I13e7f1760598f3659ad6aa31082840caf2e44038
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3857558
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Jakob Linke <jgruber@chromium.org>
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82810}
Authored by Dominik Inführ on 2022-08-30 09:18:43 +02:00; committed by V8 LUCI CQ
parent c9c490891a
commit 810a0b5ff7
15 changed files with 227 additions and 143 deletions
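In short: heap verification entry points move from instance methods on Heap to static methods on the new HeapVerifier class. A minimal before/after sketch of a call site (a reading aid, not part of the diff; it assumes a Heap* named heap and an Isolate* named initiator in scope):

  // Before this CL: verification is invoked through the Heap instance.
  heap->Verify();
  heap->VerifyReadOnlyHeap();
  heap->VerifySharedHeap(initiator);

  // After this CL: the same checks go through static HeapVerifier methods,
  // declared in the new src/heap/heap-verifier.h.
  HeapVerifier::VerifyHeap(heap);
  HeapVerifier::VerifyReadOnlyHeap(heap);
  HeapVerifier::VerifySharedHeap(heap, initiator);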


@@ -1461,6 +1461,8 @@ filegroup(
"src/heap/heap-inl.h",
"src/heap/heap-layout-tracer.cc",
"src/heap/heap-layout-tracer.h",
"src/heap/heap-verifier.cc",
"src/heap/heap-verifier.h",
"src/heap/heap-write-barrier-inl.h",
"src/heap/heap-write-barrier.cc",
"src/heap/heap-write-barrier.h",


@@ -24,6 +24,7 @@ include_rules = [
# TODO(v8:10496): Don't expose so much (through transitive includes) outside
# of heap/.
"+src/heap/heap.h",
"+src/heap/heap-verifier.h",
"+src/heap/heap-inl.h",
"+src/heap/heap-write-barrier-inl.h",
"+src/heap/heap-write-barrier.h",


@@ -63,7 +63,7 @@
#include "src/handles/persistent-handles.h"
#include "src/handles/shared-object-conveyors.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/heap-verifier.h"
#include "src/heap/local-heap.h"
#include "src/heap/parked-scope.h"
#include "src/heap/read-only-heap.h"
@@ -4364,7 +4364,7 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data,
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
heap_.VerifyReadOnlyHeap();
HeapVerifier::VerifyReadOnlyHeap(&heap_);
}
#endif


@@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/heap-verifier.h"
#include "include/v8-locker.h"
#include "src/codegen/assembler-inl.h"
#include "src/codegen/reloc-info.h"
@@ -13,6 +15,7 @@
#include "src/heap/new-spaces.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/paged-spaces.h"
#include "src/heap/read-only-heap.h"
#include "src/heap/read-only-spaces.h"
#include "src/heap/remembered-set.h"
#include "src/heap/safepoint.h"
@@ -26,18 +29,44 @@
namespace v8 {
namespace internal {
void Heap::Verify() {
CHECK(HasBeenSetUp());
IgnoreLocalGCRequests ignore_gc_requests(this);
SafepointScope safepoint_scope(this);
class HeapVerification final {
public:
explicit HeapVerification(Heap* heap) : heap_(heap) {}
void Verify();
void VerifyReadOnlyHeap();
void VerifySharedHeap(Isolate* initiator);
private:
void VerifyInvalidatedObjectSize();
ReadOnlySpace* read_only_space() const { return heap_->read_only_space(); }
NewSpace* new_space() const { return heap_->new_space(); }
OldSpace* old_space() const { return heap_->old_space(); }
MapSpace* map_space() const { return heap_->map_space(); }
CodeSpace* code_space() const { return heap_->code_space(); }
LargeObjectSpace* lo_space() const { return heap_->lo_space(); }
CodeLargeObjectSpace* code_lo_space() const { return heap_->code_lo_space(); }
NewLargeObjectSpace* new_lo_space() const { return heap_->new_lo_space(); }
Isolate* isolate() const { return heap_->isolate(); }
Heap* heap() const { return heap_; }
Heap* heap_;
};
void HeapVerification::Verify() {
CHECK(heap()->HasBeenSetUp());
IgnoreLocalGCRequests ignore_gc_requests(heap());
SafepointScope safepoint_scope(heap());
HandleScope scope(isolate());
MakeHeapIterable();
heap()->MakeHeapIterable();
array_buffer_sweeper()->EnsureFinished();
heap()->array_buffer_sweeper()->EnsureFinished();
VerifyPointersVisitor visitor(this);
IterateRoots(&visitor, {});
VerifyPointersVisitor visitor(heap());
heap()->IterateRoots(&visitor, {});
if (!isolate()->context().is_null() &&
!isolate()->normalized_map_cache()->IsUndefined(isolate())) {
@@ -52,27 +81,27 @@ void Heap::Verify() {
if (isolate()->has_active_deserializer()) return;
VerifySmisVisitor smis_visitor;
IterateSmiRoots(&smis_visitor);
heap()->IterateSmiRoots(&smis_visitor);
if (new_space_) new_space_->Verify(isolate());
if (new_space()) new_space()->Verify(isolate());
old_space_->Verify(isolate(), &visitor);
if (map_space_) {
map_space_->Verify(isolate(), &visitor);
old_space()->Verify(isolate(), &visitor);
if (map_space()) {
map_space()->Verify(isolate(), &visitor);
}
VerifyPointersVisitor no_dirty_regions_visitor(this);
code_space_->Verify(isolate(), &no_dirty_regions_visitor);
VerifyPointersVisitor no_dirty_regions_visitor(heap());
code_space()->Verify(isolate(), &no_dirty_regions_visitor);
lo_space_->Verify(isolate());
code_lo_space_->Verify(isolate());
if (new_lo_space_) new_lo_space_->Verify(isolate());
lo_space()->Verify(isolate());
code_lo_space()->Verify(isolate());
if (new_lo_space()) new_lo_space()->Verify(isolate());
isolate()->string_table()->VerifyIfOwnedBy(isolate());
VerifyInvalidatedObjectSize();
#if DEBUG
VerifyCommittedPhysicalMemory();
heap()->VerifyCommittedPhysicalMemory();
#endif // DEBUG
}
@@ -87,8 +116,8 @@ void VerifyInvalidatedSlots(InvalidatedSlots* invalidated_slots) {
}
} // namespace
void Heap::VerifyInvalidatedObjectSize() {
OldGenerationMemoryChunkIterator chunk_iterator(this);
void HeapVerification::VerifyInvalidatedObjectSize() {
OldGenerationMemoryChunkIterator chunk_iterator(heap());
MemoryChunk* chunk;
while ((chunk = chunk_iterator.next()) != nullptr) {
@@ -98,29 +127,9 @@ void Heap::VerifyInvalidatedObjectSize() {
}
}
void Heap::VerifyReadOnlyHeap() {
CHECK(!read_only_space_->writable());
read_only_space_->Verify(isolate());
}
void Heap::VerifySharedHeap(Isolate* initiator) {
DCHECK(IsShared());
// Stop all client isolates attached to this isolate.
GlobalSafepointScope global_safepoint(initiator);
// Migrate shared isolate to the main thread of the initiator isolate.
v8::Locker locker(reinterpret_cast<v8::Isolate*>(isolate()));
v8::Isolate::Scope isolate_scope(reinterpret_cast<v8::Isolate*>(isolate()));
DCHECK_NOT_NULL(isolate()->global_safepoint());
// Free all shared LABs to make the shared heap iterable.
isolate()->global_safepoint()->IterateClientIsolates([](Isolate* client) {
client->heap()->FreeSharedLinearAllocationAreas();
});
Verify();
void HeapVerification::VerifyReadOnlyHeap() {
CHECK(!read_only_space()->writable());
read_only_space()->Verify(isolate());
}
class SlotVerifyingVisitor : public ObjectVisitorWithCageBases {
@@ -268,45 +277,6 @@ void CollectSlots(MemoryChunk* chunk, Address start, Address end,
});
}
void Heap::VerifyRememberedSetFor(HeapObject object) {
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
DCHECK_IMPLIES(chunk->mutex() == nullptr, ReadOnlyHeap::Contains(object));
// In RO_SPACE chunk->mutex() may be nullptr, so just ignore it.
base::LockGuard<base::Mutex, base::NullBehavior::kIgnoreIfNull> lock_guard(
chunk->mutex());
PtrComprCageBase cage_base(isolate());
Address start = object.address();
Address end = start + object.Size(cage_base);
if (chunk->InSharedHeap() || InYoungGeneration(object)) {
CHECK_NULL(chunk->slot_set<OLD_TO_NEW>());
CHECK_NULL(chunk->typed_slot_set<OLD_TO_NEW>());
CHECK_NULL(chunk->slot_set<OLD_TO_OLD>());
CHECK_NULL(chunk->typed_slot_set<OLD_TO_OLD>());
}
if (!InYoungGeneration(object)) {
std::set<Address> old_to_new;
std::set<std::pair<SlotType, Address>> typed_old_to_new;
CollectSlots<OLD_TO_NEW>(chunk, start, end, &old_to_new, &typed_old_to_new);
OldToNewSlotVerifyingVisitor old_to_new_visitor(
isolate(), &old_to_new, &typed_old_to_new,
&this->ephemeron_remembered_set_);
object.IterateBody(cage_base, &old_to_new_visitor);
std::set<Address> old_to_shared;
std::set<std::pair<SlotType, Address>> typed_old_to_shared;
CollectSlots<OLD_TO_SHARED>(chunk, start, end, &old_to_shared,
&typed_old_to_shared);
OldToSharedSlotVerifyingVisitor old_to_shared_visitor(
isolate(), &old_to_shared, &typed_old_to_shared);
object.IterateBody(cage_base, &old_to_shared_visitor);
}
// TODO(v8:11797): Add old to old slot set verification once all weak objects
// have their own instance types and slots are recorded for all weak fields.
}
// Helper class for collecting slot addresses.
class SlotCollectingVisitor final : public ObjectVisitor {
public:
@@ -351,43 +321,123 @@ class SlotCollectingVisitor final : public ObjectVisitor {
#endif
};
void Heap::VerifyObjectLayoutChange(HeapObject object, Map new_map) {
// static
void HeapVerifier::VerifyHeap(Heap* heap) {
HeapVerification verifier(heap);
verifier.Verify();
}
// static
void HeapVerifier::VerifyReadOnlyHeap(Heap* heap) {
HeapVerification verifier(heap);
verifier.VerifyReadOnlyHeap();
}
// static
void HeapVerifier::VerifySharedHeap(Heap* heap, Isolate* initiator) {
DCHECK(heap->IsShared());
Isolate* isolate = heap->isolate();
// Stop all client isolates attached to this isolate.
GlobalSafepointScope global_safepoint(initiator);
// Migrate shared isolate to the main thread of the initiator isolate.
v8::Locker locker(reinterpret_cast<v8::Isolate*>(isolate));
v8::Isolate::Scope isolate_scope(reinterpret_cast<v8::Isolate*>(isolate));
DCHECK_NOT_NULL(isolate->global_safepoint());
// Free all shared LABs to make the shared heap iterable.
isolate->global_safepoint()->IterateClientIsolates([](Isolate* client) {
client->heap()->FreeSharedLinearAllocationAreas();
});
HeapVerifier::VerifyHeap(heap);
}
// static
void HeapVerifier::VerifyRememberedSetFor(Heap* heap, HeapObject object) {
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
DCHECK_IMPLIES(chunk->mutex() == nullptr, ReadOnlyHeap::Contains(object));
// In RO_SPACE chunk->mutex() may be nullptr, so just ignore it.
base::LockGuard<base::Mutex, base::NullBehavior::kIgnoreIfNull> lock_guard(
chunk->mutex());
PtrComprCageBase cage_base(heap->isolate());
Address start = object.address();
Address end = start + object.Size(cage_base);
if (chunk->InSharedHeap() || Heap::InYoungGeneration(object)) {
CHECK_NULL(chunk->slot_set<OLD_TO_NEW>());
CHECK_NULL(chunk->typed_slot_set<OLD_TO_NEW>());
CHECK_NULL(chunk->slot_set<OLD_TO_OLD>());
CHECK_NULL(chunk->typed_slot_set<OLD_TO_OLD>());
}
if (!Heap::InYoungGeneration(object)) {
std::set<Address> old_to_new;
std::set<std::pair<SlotType, Address>> typed_old_to_new;
CollectSlots<OLD_TO_NEW>(chunk, start, end, &old_to_new, &typed_old_to_new);
OldToNewSlotVerifyingVisitor old_to_new_visitor(
heap->isolate(), &old_to_new, &typed_old_to_new,
&heap->ephemeron_remembered_set_);
object.IterateBody(cage_base, &old_to_new_visitor);
std::set<Address> old_to_shared;
std::set<std::pair<SlotType, Address>> typed_old_to_shared;
CollectSlots<OLD_TO_SHARED>(chunk, start, end, &old_to_shared,
&typed_old_to_shared);
OldToSharedSlotVerifyingVisitor old_to_shared_visitor(
heap->isolate(), &old_to_shared, &typed_old_to_shared);
object.IterateBody(cage_base, &old_to_shared_visitor);
}
// TODO(v8:11797): Add old to old slot set verification once all weak objects
// have their own instance types and slots are recorded for all weak fields.
}
// static
void HeapVerifier::VerifyObjectLayoutChange(Heap* heap, HeapObject object,
Map new_map) {
// Object layout changes are currently not supported on background threads.
DCHECK_NULL(LocalHeap::Current());
if (!FLAG_verify_heap) return;
PtrComprCageBase cage_base(isolate());
PtrComprCageBase cage_base(heap->isolate());
// Check that Heap::NotifyObjectLayoutChange was called for object transitions
// that are not safe for concurrent marking.
// If you see this check triggering for a freshly allocated object,
// use object->set_map_after_allocation() to initialize its map.
if (pending_layout_change_object_.is_null()) {
VerifySafeMapTransition(object, new_map);
if (heap->pending_layout_change_object_.is_null()) {
VerifySafeMapTransition(heap, object, new_map);
} else {
DCHECK_EQ(pending_layout_change_object_, object);
pending_layout_change_object_ = HeapObject();
DCHECK_EQ(heap->pending_layout_change_object_, object);
heap->pending_layout_change_object_ = HeapObject();
}
}
void Heap::VerifySafeMapTransition(HeapObject object, Map new_map) {
PtrComprCageBase cage_base(isolate());
// static
void HeapVerifier::VerifySafeMapTransition(Heap* heap, HeapObject object,
Map new_map) {
PtrComprCageBase cage_base(heap->isolate());
if (object.IsJSObject(cage_base)) {
// Without double unboxing all in-object fields of a JSObject are tagged.
return;
}
if (object.IsString(cage_base) &&
(new_map == ReadOnlyRoots(this).thin_string_map() ||
new_map == ReadOnlyRoots(this).thin_one_byte_string_map() ||
new_map == ReadOnlyRoots(this).shared_thin_string_map() ||
new_map == ReadOnlyRoots(this).shared_thin_one_byte_string_map())) {
(new_map == ReadOnlyRoots(heap).thin_string_map() ||
new_map == ReadOnlyRoots(heap).thin_one_byte_string_map() ||
new_map == ReadOnlyRoots(heap).shared_thin_string_map() ||
new_map == ReadOnlyRoots(heap).shared_thin_one_byte_string_map())) {
// When transitioning a string to ThinString,
// Heap::NotifyObjectLayoutChange doesn't need to be invoked because only
// tagged fields are introduced.
return;
}
if (FLAG_shared_string_table && object.IsString(cage_base) &&
InstanceTypeChecker::IsInternalizedString(new_map.instance_type())) {
// In-place internalization does not change a string's fields.
@@ -397,6 +447,7 @@ void Heap::VerifySafeMapTransition(HeapObject object, Map new_map) {
// DCHECKs to fail.
return;
}
// Check that the set of slots before and after the transition match.
SlotCollectingVisitor old_visitor;
object.IterateFast(cage_base, &old_visitor);
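The file above is structured as a thin static facade over a file-local helper: HeapVerifier's static methods construct a HeapVerification object that holds the Heap* and provides the space accessors (new_space(), old_space(), ...) the moved code previously had as Heap members. A condensed sketch of that delegation, using names from the hunks above:

  class HeapVerification final {
   public:
    explicit HeapVerification(Heap* heap) : heap_(heap) {}
    void Verify();  // full heap verification; walks roots and spaces via heap_

   private:
    NewSpace* new_space() const { return heap_->new_space(); }  // etc. for the other spaces
    Heap* heap_;
  };

  // static
  void HeapVerifier::VerifyHeap(Heap* heap) {
    HeapVerification verifier(heap);
    verifier.Verify();
  }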

src/heap/heap-verifier.h (new file, 54 lines)

@@ -0,0 +1,54 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_HEAP_VERIFIER_H_
#define V8_HEAP_HEAP_VERIFIER_H_
#include "src/common/globals.h"
#include "src/heap/read-only-heap.h"
#ifdef VERIFY_HEAP
namespace v8 {
namespace internal {
class Heap;
class ReadOnlyHeap;
class HeapVerifier final {
public:
// Verify the heap is in its normal state before or after a GC.
V8_EXPORT_PRIVATE static void VerifyHeap(Heap* heap);
// Verify the read-only heap after all read-only heap objects have been
// created.
static void VerifyReadOnlyHeap(Heap* heap);
// Verify the shared heap, initiating from a client heap. This performs a
// global safepoint, then the normal heap verification.
static void VerifySharedHeap(Heap* heap, Isolate* initiator);
// Verifies OLD_TO_NEW and OLD_TO_SHARED remembered sets for this object.
static void VerifyRememberedSetFor(Heap* heap, HeapObject object);
// Checks that this is a safe map transition.
V8_EXPORT_PRIVATE static void VerifySafeMapTransition(Heap* heap,
HeapObject object,
Map new_map);
// This function checks that either
// - the map transition is safe,
// - or it was communicated to GC using NotifyObjectLayoutChange.
V8_EXPORT_PRIVATE static void VerifyObjectLayoutChange(Heap* heap,
HeapObject object,
Map new_map);
private:
HeapVerifier();
};
} // namespace internal
} // namespace v8
#endif // VERIFY_HEAP
#endif // V8_HEAP_HEAP_VERIFIER_H_
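Because the whole header is wrapped in #ifdef VERIFY_HEAP, HeapVerifier only exists in builds with heap verification compiled in, which is why every caller in the hunks below keeps its existing guard. A typical call site then looks like this (sketch, assuming a Heap* heap in scope):

  #ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    HeapVerifier::VerifyHeap(heap);
  }
  #endif  // VERIFY_HEAP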


@@ -2309,7 +2309,7 @@ size_t Heap::PerformGarbageCollection(
// We don't really perform a GC here but need this scope for the nested
// SafepointScope inside Verify().
AllowGarbageCollection allow_gc;
Verify();
HeapVerifier::VerifyHeap(this);
}
#endif // VERIFY_HEAP
@@ -2393,7 +2393,7 @@ size_t Heap::PerformGarbageCollection(
// We don't really perform a GC here but need this scope for the nested
// SafepointScope inside Verify().
AllowGarbageCollection allow_gc;
Verify();
HeapVerifier::VerifyHeap(this);
}
#endif // VERIFY_HEAP
@@ -5709,13 +5709,13 @@ void Heap::StartTearDown() {
// tear down parts of the Isolate.
if (FLAG_verify_heap) {
AllowGarbageCollection allow_gc;
Verify();
HeapVerifier::VerifyHeap(this);
// If this is a client Isolate of a shared Isolate, verify that there are no
// shared-to-local pointers before tearing down the client Isolate and
// creating dangling pointers.
if (Isolate* shared_isolate = isolate()->shared_isolate()) {
shared_isolate->heap()->VerifySharedHeap(isolate());
HeapVerifier::VerifySharedHeap(shared_isolate->heap(), isolate());
}
}
#endif


@@ -1151,17 +1151,6 @@ class Heap {
void NotifyObjectSizeChange(HeapObject, int old_size, int new_size,
ClearRecordedSlots clear_recorded_slots);
#ifdef VERIFY_HEAP
// This function checks that either
// - the map transition is safe,
// - or it was communicated to GC using NotifyObjectLayoutChange.
V8_EXPORT_PRIVATE void VerifyObjectLayoutChange(HeapObject object,
Map new_map);
// Checks that this is a safe map transition.
V8_EXPORT_PRIVATE void VerifySafeMapTransition(HeapObject object,
Map new_map);
#endif
// ===========================================================================
// Deoptimization support API. ===============================================
// ===========================================================================
@@ -1631,24 +1620,7 @@ class Heap {
// it supports a forwarded map. Fails if the map is not the code map.
Map GcSafeMapOfCodeSpaceObject(HeapObject object);
// =============================================================================
#ifdef VERIFY_HEAP
// Verify the heap is in its normal state before or after a GC.
V8_EXPORT_PRIVATE void Verify();
// Verify the read-only heap after all read-only heap objects have been
// created.
void VerifyReadOnlyHeap();
// Verify the shared heap, initiating from a client heap. This performs a
// global safepoint, then the normal heap verification.
void VerifySharedHeap(Isolate* initiator);
void VerifyRememberedSetFor(HeapObject object);
// Verify that cached size of invalidated object is up-to-date.
void VerifyInvalidatedObjectSize();
#endif
// =============================================================================
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
void V8_EXPORT_PRIVATE set_allocation_timeout(int allocation_timeout);
@@ -2464,6 +2436,7 @@ class Heap {
friend class GlobalHandleMarkingVisitor;
friend class HeapAllocator;
friend class HeapObjectIterator;
friend class HeapVerifier;
friend class ScavengeTaskObserver;
friend class IgnoreLocalGCRequests;
friend class IncrementalMarking;
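In the heap.h hunks above, the old Verify* declarations are deleted and a single friend class HeapVerifier is added, so the verifier can keep touching private Heap state (for example pending_layout_change_object_ and ephemeron_remembered_set_) through the Heap* it is handed, instead of going through new public accessors. A condensed illustration of that friend access, based on the hunks in this CL:

  class Heap {
    // ...
   private:
    HeapObject pending_layout_change_object_;  // still private
    friend class HeapVerifier;                 // added by this CL
  };

  // static
  void HeapVerifier::VerifyObjectLayoutChange(Heap* heap, HeapObject object, Map new_map) {
    if (heap->pending_layout_change_object_.is_null()) { /* ... */ }
  }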


@@ -409,7 +409,7 @@ void LargeObjectSpace::Verify(Isolate* isolate) {
object.ObjectVerify(isolate);
if (!FLAG_verify_heap_skip_remembered_set) {
heap()->VerifyRememberedSetFor(object);
HeapVerifier::VerifyRememberedSetFor(heap(), object);
}
// Byte arrays and strings don't have interior pointers.


@@ -809,7 +809,7 @@ void PagedSpaceBase::Verify(Isolate* isolate, ObjectVisitor* visitor) const {
object.ObjectVerify(isolate);
if (identity() != RO_SPACE && !FLAG_verify_heap_skip_remembered_set) {
isolate->heap()->VerifyRememberedSetFor(object);
HeapVerifier::VerifyRememberedSetFor(isolate->heap(), object);
}
// All the interior pointers should be contained in the heap.


@@ -20,6 +20,7 @@
#include "src/common/ptr-compr-inl.h"
#include "src/handles/handles-inl.h"
#include "src/heap/factory.h"
#include "src/heap/heap-verifier.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/heap/read-only-heap-inl.h"
#include "src/numbers/conversions-inl.h"
@@ -833,10 +834,10 @@ void HeapObject::set_map(Map value, MemoryOrder order, VerificationMode mode) {
if (FLAG_verify_heap && !value.is_null()) {
Heap* heap = GetHeapFromWritableObject(*this);
if (mode == VerificationMode::kSafeMapTransition) {
heap->VerifySafeMapTransition(*this, value);
HeapVerifier::VerifySafeMapTransition(heap, *this, value);
} else {
DCHECK_EQ(mode, VerificationMode::kPotentialLayoutChange);
heap->VerifyObjectLayoutChange(*this, value);
HeapVerifier::VerifyObjectLayoutChange(heap, *this, value);
}
}
#endif


@@ -2727,7 +2727,7 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
#ifdef VERIFY_HEAP
Heap* debug_heap = heap_;
if (FLAG_verify_heap) {
debug_heap->Verify();
HeapVerifier::VerifyHeap(debug_heap);
}
#endif
@@ -2735,7 +2735,7 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
debug_heap->Verify();
HeapVerifier::VerifyHeap(debug_heap);
}
#endif


@@ -354,7 +354,7 @@ void Snapshot::SerializeDeserializeAndVerifyForTesting(
CHECK(new_native_context->IsNativeContext());
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) new_isolate->heap()->Verify();
if (FLAG_verify_heap) HeapVerifier::VerifyHeap(new_isolate->heap());
#endif // VERIFY_HEAP
}
new_isolate->Exit();


@@ -45,6 +45,7 @@
#include "src/heap/factory.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-verifier.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/large-spaces.h"
#include "src/heap/mark-compact.h"
@@ -4339,7 +4340,7 @@ TEST(NewSpaceObjectsInOptimizedCode) {
CcTest::CollectGarbage(OLD_SPACE);
CHECK(!Heap::InYoungGeneration(*foo));
#ifdef VERIFY_HEAP
CcTest::heap()->Verify();
HeapVerifier::VerifyHeap(CcTest::heap());
#endif
CHECK(!bar->code().marked_for_deoptimization());
code = handle(FromCodeT(bar->code()), isolate);


@@ -17,6 +17,7 @@
#include "src/debug/debug.h"
#include "src/execution/isolate.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-verifier.h"
#include "src/numbers/hash-seed-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-promise-inl.h"
@@ -1877,7 +1878,7 @@ TEST(AllocateJSObjectFromMap) {
CHECK_EQ(result->elements(), *empty_fixed_array);
CHECK(result->HasFastProperties());
#ifdef VERIFY_HEAP
isolate->heap()->Verify();
HeapVerifier::VerifyHeap(isolate->heap());
#endif
}
}
@@ -1906,7 +1907,7 @@ TEST(AllocateJSObjectFromMap) {
}
CHECK(!result->HasFastProperties());
#ifdef VERIFY_HEAP
isolate->heap()->Verify();
HeapVerifier::VerifyHeap(isolate->heap());
#endif
}
}
@@ -3803,7 +3804,7 @@ TEST(SmallOrderedHashMapAllocate) {
capacity = capacity << 1;
}
#ifdef VERIFY_HEAP
isolate->heap()->Verify();
HeapVerifier::VerifyHeap(isolate->heap());
#endif
}
@@ -3841,7 +3842,7 @@ TEST(SmallOrderedHashSetAllocate) {
capacity = capacity << 1;
}
#ifdef VERIFY_HEAP
isolate->heap()->Verify();
HeapVerifier::VerifyHeap(isolate->heap());
#endif
}


@@ -264,7 +264,7 @@ static void SanityCheck(v8::Isolate* v8_isolate) {
Isolate* isolate = reinterpret_cast<Isolate*>(v8_isolate);
v8::HandleScope scope(v8_isolate);
#ifdef VERIFY_HEAP
isolate->heap()->Verify();
HeapVerifier::VerifyHeap(isolate->heap());
#endif
CHECK(isolate->global_object()->IsJSObject());
CHECK(isolate->native_context()->IsContext());