Add verifier for retaining paths in heap snapshots
The web app owner who notified me about bugs v8:12112 and v8:12126 asked me a reasonable question: "how am I ever supposed to trust the retaining paths in the devtools, if the heap snapshot is generated by a different component than the actual marking code?". This change is my attempt to answer that question. If verification is enabled, the heap snapshot generator will visit each heap object with a realistic marking visitor to find all references from that object. It will then check that those references match the HeapGraphEdges in the snapshot.

I also considered the idea that we could collect retaining information during the last GC cycle before taking the heap snapshot, or during an extra GC cycle immediately after. However, running the full GC provides the embedder with the opportunity to run arbitrary code (including JS) both before and after PerformGarbageCollection, so there is no clear guarantee that the heap state during the snapshot actually matches the heap state during marking.

Bug: v8:12112, v8:12126
Change-Id: Id29e75ecf9eee19e35daedbdb4a3e1df64785380
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3299590
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Commit-Queue: Seth Brenith <seth.brenith@microsoft.com>
Cr-Commit-Position: refs/heads/main@{#78952}
parent 2ce2c9c77c
commit d937a0bb0c
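As a rough mental model of the verification step described above, the check is a two-way comparison between the edges recorded in the snapshot and the references found by a marking-style visitor. The following is a self-contained toy sketch of that comparison; every type and name in it is invented for illustration and none of it is V8 code:

// Toy sketch of the two-way check: uses a trivial string-keyed graph
// instead of real heap objects, snapshot edges, and marking visitors.
#include <cassert>
#include <map>
#include <set>
#include <string>

using NodeId = std::string;
using Graph = std::map<NodeId, std::set<NodeId>>;

// Verifies one node: every snapshot edge must also be found by the
// marker-style summary (no invented edges), and every summarized reference
// must have been emitted as an edge (no missing retaining relationships).
void VerifyNode(const NodeId& node, const Graph& snapshot_edges,
                const Graph& summarized_refs) {
  const std::set<NodeId>& summary = summarized_refs.at(node);
  std::set<NodeId> checked;
  for (const NodeId& target : snapshot_edges.at(node)) {
    assert(summary.count(target) && "snapshot edge not found by marker");
    checked.insert(target);
  }
  for (const NodeId& target : summary) {
    assert(checked.count(target) && "marker reference missing from snapshot");
  }
}

int main() {
  // Both components agree that "host" points at "a" and "b", so this passes;
  // a mismatch in either direction would trip an assert.
  Graph snapshot = {{"host", {"a", "b"}}};
  Graph summary = {{"host", {"a", "b"}}};
  VerifyNode("host", snapshot, summary);
  return 0;
}

In the actual patch below, the forward direction corresponds to HeapEntryVerifier::CheckStrongReference and CheckWeakReference, and the reverse direction to HeapEntryVerifier::CheckAllReferencesWereChecked.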
 BUILD.gn | 18
@@ -342,6 +342,9 @@ declare_args() {
   # When it's disabled, the --turbo-allocation-folding runtime flag will be ignored.
   v8_enable_allocation_folding = true
 
+  # Enable runtime verification of heap snapshots produced for devtools.
+  v8_enable_heap_snapshot_verify = ""
+
   # Enable global allocation site tracking.
   v8_allocation_site_tracking = true
 
@@ -385,6 +388,10 @@ if (v8_enable_test_features == "") {
 if (v8_enable_v8_checks == "") {
   v8_enable_v8_checks = v8_enable_debugging_features
 }
+if (v8_enable_heap_snapshot_verify == "") {
+  v8_enable_heap_snapshot_verify =
+      v8_enable_debugging_features || dcheck_always_on
+}
 if (v8_enable_snapshot_code_comments) {
   assert(v8_code_comments == true || v8_code_comments == "",
          "v8_enable_snapshot_code_comments conflicts with v8_code_comments.")
@@ -891,6 +898,9 @@ config("features") {
   if (v8_enable_debug_code) {
     defines += [ "V8_ENABLE_DEBUG_CODE" ]
   }
+  if (v8_enable_heap_snapshot_verify) {
+    defines += [ "V8_ENABLE_HEAP_SNAPSHOT_VERIFY" ]
+  }
   if (v8_enable_snapshot_native_code_counters) {
     defines += [ "V8_SNAPSHOT_NATIVE_CODE_COUNTERS" ]
   }
@@ -3572,6 +3582,10 @@ v8_header_set("v8_internal_headers") {
     ]
   }
 
+  if (v8_enable_heap_snapshot_verify) {
+    sources += [ "src/heap/reference-summarizer.h" ]
+  }
+
   if (v8_current_cpu == "x86") {
     sources += [ ### gcmole(arch:ia32) ###
       "src/baseline/ia32/baseline-assembler-ia32-inl.h",
@@ -4513,6 +4527,10 @@ v8_source_set("v8_base_without_compiler") {
     ]
   }
 
+  if (v8_enable_heap_snapshot_verify) {
+    sources += [ "src/heap/reference-summarizer.cc" ]
+  }
+
   if (v8_current_cpu == "x86") {
     sources += [ ### gcmole(arch:ia32) ###
       "src/codegen/ia32/assembler-ia32.cc",
 src/DEPS | 1

@@ -36,6 +36,7 @@ include_rules = [
   "+src/heap/parked-scope.h",
   "+src/heap/read-only-heap-inl.h",
   "+src/heap/read-only-heap.h",
+  "+src/heap/reference-summarizer.h",
   "+src/heap/safepoint.h",
   "+src/heap/base/stack.h",
   "+src/heap/conservative-stack-visitor.h",
@@ -1618,6 +1618,11 @@ DEFINE_INT(heap_snapshot_string_limit, 1024,
            "truncate strings to this length in the heap snapshot")
 DEFINE_BOOL(heap_profiler_show_hidden_objects, false,
             "use 'native' rather than 'hidden' node type in snapshot")
+#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
+DEFINE_BOOL(heap_snapshot_verify, false,
+            "verify that heap snapshot matches marking visitor behavior")
+DEFINE_IMPLICATION(enable_slow_asserts, heap_snapshot_verify)
+#endif
 
 // sampling-heap-profiler.cc
 DEFINE_BOOL(sampling_heap_profiler_suppress_randomness, false,
@@ -26,6 +26,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::MarkObject(
     HeapObject host, HeapObject object) {
   DCHECK(ReadOnlyHeap::Contains(object) || heap_->Contains(object));
   concrete_visitor()->SynchronizePageAccess(object);
+  AddStrongReferenceForReferenceSummarizer(host, object);
   if (concrete_visitor()->marking_state()->WhiteToGrey(object)) {
     local_marking_worklists_->Push(object);
     if (V8_UNLIKELY(concrete_visitor()->retaining_path_mode() ==
@@ -65,6 +66,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::ProcessWeakHeapObject(
     // the reference when we know the liveness of the whole transitive
     // closure.
     local_weak_objects_->weak_references_local.Push(std::make_pair(host, slot));
+    AddWeakReferenceForReferenceSummarizer(host, heap_object);
   }
 }
 
@@ -117,6 +119,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEmbeddedPointer(
   if (host.IsWeakObject(object)) {
     local_weak_objects_->weak_objects_in_code_local.Push(
         std::make_pair(object, host));
+    AddWeakReferenceForReferenceSummarizer(host, object);
   } else {
     MarkObject(host, object);
   }
@@ -244,7 +247,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitFixedArray(
   // in the large object space.
   ProgressBar& progress_bar =
       MemoryChunk::FromHeapObject(object)->ProgressBar();
-  return progress_bar.IsEnabled()
+  return CanUpdateValuesInHeap() && progress_bar.IsEnabled()
              ? VisitFixedArrayWithProgressBar(map, object, progress_bar)
              : concrete_visitor()->VisitLeftTrimmableArray(map, object);
 }
@@ -344,6 +347,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
 
     concrete_visitor()->SynchronizePageAccess(key);
     concrete_visitor()->RecordSlot(table, key_slot, key);
+    AddWeakReferenceForReferenceSummarizer(table, key);
 
     ObjectSlot value_slot =
         table.RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));
@@ -357,6 +361,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
       HeapObject value = HeapObject::cast(value_obj);
       concrete_visitor()->SynchronizePageAccess(value);
       concrete_visitor()->RecordSlot(table, value_slot, value);
+      AddWeakReferenceForReferenceSummarizer(table, value);
 
       // Revisit ephemerons with both key and value unreachable at end
      // of concurrent marking cycle.
@@ -387,6 +392,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef(
       // JSWeakRef points to a potentially dead object. We have to process
       // them when we know the liveness of the whole transitive closure.
       local_weak_objects_->js_weak_refs_local.Push(weak_ref);
+      AddWeakReferenceForReferenceSummarizer(weak_ref, target);
     }
   }
   return size;
@@ -417,6 +423,8 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
     // token. We have to process them when we know the liveness of the whole
     // transitive closure.
     local_weak_objects_->weak_cells_local.Push(weak_cell);
+    AddWeakReferenceForReferenceSummarizer(weak_cell, target);
+    AddWeakReferenceForReferenceSummarizer(weak_cell, unregister_token);
   }
   return size;
 }
@@ -443,8 +451,11 @@ template <typename ConcreteVisitor, typename MarkingState>
 void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitDescriptors(
     DescriptorArray descriptor_array, int number_of_own_descriptors) {
   int16_t new_marked = static_cast<int16_t>(number_of_own_descriptors);
-  int16_t old_marked = descriptor_array.UpdateNumberOfMarkedDescriptors(
-      mark_compact_epoch_, new_marked);
+  int16_t old_marked = 0;
+  if (CanUpdateValuesInHeap()) {
+    old_marked = descriptor_array.UpdateNumberOfMarkedDescriptors(
+        mark_compact_epoch_, new_marked);
+  }
   if (old_marked < new_marked) {
     VisitPointers(
         descriptor_array,
@@ -25,6 +25,11 @@ struct EphemeronMarking {
 template <typename ConcreteState, AccessMode access_mode>
 class MarkingStateBase {
  public:
+  // Declares that this marking state is not collecting retainers, so the
+  // marking visitor may update the heap state to store information about
+  // progress, and may avoid fully visiting an object if it is safe to do so.
+  static constexpr bool kCollectRetainers = false;
+
   explicit MarkingStateBase(PtrComprCageBase cage_base)
 #if V8_COMPRESS_POINTERS
       : cage_base_(cage_base)
@@ -102,6 +107,15 @@ class MarkingStateBase {
     static_cast<ConcreteState*>(this)->SetLiveBytes(chunk, 0);
   }
 
+  void AddStrongReferenceForReferenceSummarizer(HeapObject host,
+                                                HeapObject obj) {
+    // This is not a reference summarizer, so there is nothing to do here.
+  }
+
+  void AddWeakReferenceForReferenceSummarizer(HeapObject host, HeapObject obj) {
+    // This is not a reference summarizer, so there is nothing to do here.
+  }
+
  private:
 #if V8_COMPRESS_POINTERS
   const PtrComprCageBase cage_base_;
@@ -258,6 +272,23 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
   // Marks the object grey and pushes it on the marking work list.
   V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
 
+  V8_INLINE void AddStrongReferenceForReferenceSummarizer(HeapObject host,
+                                                          HeapObject obj) {
+    concrete_visitor()
+        ->marking_state()
+        ->AddStrongReferenceForReferenceSummarizer(host, obj);
+  }
+
+  V8_INLINE void AddWeakReferenceForReferenceSummarizer(HeapObject host,
+                                                        HeapObject obj) {
+    concrete_visitor()->marking_state()->AddWeakReferenceForReferenceSummarizer(
+        host, obj);
+  }
+
+  constexpr bool CanUpdateValuesInHeap() {
+    return !MarkingState::kCollectRetainers;
+  }
+
   MarkingWorklists::Local* const local_marking_worklists_;
   WeakObjects::Local* const local_weak_objects_;
   Heap* const heap_;
 src/heap/reference-summarizer.cc | 116 (new file)

@@ -0,0 +1,116 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/heap/reference-summarizer.h"
+
+#include "src/heap/mark-compact-inl.h"
+#include "src/heap/marking-visitor-inl.h"
+#include "src/objects/embedder-data-array-inl.h"
+#include "src/objects/js-array-buffer-inl.h"
+
+namespace v8 {
+namespace internal {
+
+namespace {
+
+// A class which acts as a MarkingState but does not actually update any marking
+// bits. It reports all objects as white and all transitions as successful. It
+// also tracks which objects are retained by the primary object according to the
+// marking visitor.
+class ReferenceSummarizerMarkingState final {
+ public:
+  // Declares that this marking state is collecting retainers, so the marking
+  // visitor must fully visit each object and can't update on-heap state.
+  static constexpr bool kCollectRetainers = true;
+
+  explicit ReferenceSummarizerMarkingState(HeapObject object)
+      : primary_object_(object),
+        local_marking_worklists_(&marking_worklists_),
+        local_weak_objects_(&weak_objects_) {}
+
+  ~ReferenceSummarizerMarkingState() {
+    // Clean up temporary state.
+    local_weak_objects_.Publish();
+    weak_objects_.Clear();
+    local_marking_worklists_.Publish();
+    marking_worklists_.Clear();
+  }
+
+  // Retrieves the references that were collected by this marker. This operation
+  // transfers ownership of the set, so calling it again would yield an empty
+  // result.
+  ReferenceSummary DestructivelyRetrieveReferences() {
+    ReferenceSummary tmp = std::move(references_);
+    references_.Clear();
+    return tmp;
+  }
+
+  // Standard marking visitor functions:
+
+  bool IsWhite(HeapObject obj) const { return true; }
+
+  bool IsBlackOrGrey(HeapObject obj) const { return false; }
+
+  bool WhiteToGrey(HeapObject obj) { return true; }
+
+  bool GreyToBlack(HeapObject obj) { return true; }
+
+  // Adds a retaining relationship found by the marking visitor.
+  void AddStrongReferenceForReferenceSummarizer(HeapObject host,
+                                                HeapObject obj) {
+    AddReference(host, obj, references_.strong_references());
+  }
+
+  // Adds a non-retaining weak reference found by the marking visitor. The value
+  // in an ephemeron hash table entry is also included here, since it is not
+  // known to be strong without further information about the key.
+  void AddWeakReferenceForReferenceSummarizer(HeapObject host, HeapObject obj) {
+    AddReference(host, obj, references_.weak_references());
+  }
+
+  // Other member functions, not part of the marking visitor contract:
+
+  MarkingWorklists::Local* local_marking_worklists() {
+    return &local_marking_worklists_;
+  }
+  WeakObjects::Local* local_weak_objects() { return &local_weak_objects_; }
+
+ private:
+  void AddReference(
+      HeapObject host, HeapObject obj,
+      std::unordered_set<HeapObject, Object::Hasher>& references) {
+    // It's possible that the marking visitor handles multiple objects at once,
+    // such as a Map and its DescriptorArray, but we're only interested in
+    // references from the primary object.
+    if (host == primary_object_) {
+      references.insert(obj);
+    }
+  }
+
+  ReferenceSummary references_;
+  HeapObject primary_object_;
+  MarkingWorklists marking_worklists_;
+  MarkingWorklists::Local local_marking_worklists_;
+  WeakObjects weak_objects_;
+  WeakObjects::Local local_weak_objects_;
+};
+
+}  // namespace
+
+ReferenceSummary ReferenceSummary::SummarizeReferencesFrom(Heap* heap,
+                                                           HeapObject obj) {
+  ReferenceSummarizerMarkingState marking_state(obj);
+
+  MainMarkingVisitor<ReferenceSummarizerMarkingState> visitor(
+      &marking_state, marking_state.local_marking_worklists(),
+      marking_state.local_weak_objects(), heap, 0 /*mark_compact_epoch*/,
+      {} /*code_flush_mode*/, false /*embedder_tracing_enabled*/,
+      true /*should_keep_ages_unchanged*/);
+  visitor.Visit(obj.map(heap->isolate()), obj);
+
+  return marking_state.DestructivelyRetrieveReferences();
+}
+
+}  // namespace internal
+}  // namespace v8
 src/heap/reference-summarizer.h | 55 (new file)

@@ -0,0 +1,55 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_HEAP_REFERENCE_SUMMARIZER_H_
+#define V8_HEAP_REFERENCE_SUMMARIZER_H_
+
+#include <unordered_set>
+
+#include "src/objects/heap-object.h"
+
+namespace v8 {
+namespace internal {
+
+class Heap;
+
+class ReferenceSummary {
+ public:
+  ReferenceSummary() = default;
+  ReferenceSummary(ReferenceSummary&& other) V8_NOEXCEPT
+      : strong_references_(std::move(other.strong_references_)),
+        weak_references_(std::move(other.weak_references_)) {}
+
+  // Produces a set of objects referred to by the object. This function uses a
+  // realistic marking visitor, so its results are likely to match real GC
+  // behavior. Intended only for verification.
+  static ReferenceSummary SummarizeReferencesFrom(Heap* heap, HeapObject obj);
+
+  // All objects which the chosen object has strong pointers to.
+  std::unordered_set<HeapObject, Object::Hasher>& strong_references() {
+    return strong_references_;
+  }
+
+  // All objects which the chosen object has weak pointers to. The values in
+  // ephemeron hash tables are also included here, even though they aren't
+  // normal weak pointers.
+  std::unordered_set<HeapObject, Object::Hasher>& weak_references() {
+    return weak_references_;
+  }
+
+  void Clear() {
+    strong_references_.clear();
+    weak_references_.clear();
+  }
+
+ private:
+  std::unordered_set<HeapObject, Object::Hasher> strong_references_;
+  std::unordered_set<HeapObject, Object::Hasher> weak_references_;
+  DISALLOW_GARBAGE_COLLECTION(no_gc)
+};
+
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_HEAP_REFERENCE_SUMMARIZER_H_
@@ -42,6 +42,152 @@
 namespace v8 {
 namespace internal {
 
+#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
+class HeapEntryVerifier {
+ public:
+  HeapEntryVerifier(HeapSnapshotGenerator* generator, HeapObject obj)
+      : generator_(generator),
+        primary_object_(obj),
+        reference_summary_(
+            ReferenceSummary::SummarizeReferencesFrom(generator->heap(), obj)) {
+    generator->set_verifier(this);
+  }
+  ~HeapEntryVerifier() {
+    CheckAllReferencesWereChecked();
+    generator_->set_verifier(nullptr);
+  }
+
+  // Checks that `host` retains `target`, according to the marking visitor. This
+  // allows us to verify, when adding edges to the snapshot, that they
+  // correspond to real retaining relationships.
+  void CheckStrongReference(HeapObject host, HeapObject target) {
+    // All references should be from the current primary object.
+    CHECK_EQ(host, primary_object_);
+
+    checked_objects_.insert(target);
+
+    // Check whether there is a direct strong reference from host to target.
+    if (reference_summary_.strong_references().find(target) !=
+        reference_summary_.strong_references().end()) {
+      return;
+    }
+
+    // There is no direct reference from host to target, but sometimes heap
+    // snapshots include references that skip one, two, or three objects, such
+    // as __proto__ on a JSObject referring to its Map's prototype, or a
+    // property getter that bypasses the property array and accessor info. At
+    // this point, we must check for those indirect references.
+    for (size_t level = 0; level < 3; ++level) {
+      const std::unordered_set<HeapObject, Object::Hasher>& indirect =
+          GetIndirectStrongReferences(level);
+      if (indirect.find(target) != indirect.end()) {
+        return;
+      }
+    }
+
+    FATAL("Could not find any matching reference");
+  }
+
+  // Checks that `host` has a weak reference to `target`, according to the
+  // marking visitor.
+  void CheckWeakReference(HeapObject host, HeapObject target) {
+    // All references should be from the current primary object.
+    CHECK_EQ(host, primary_object_);
+
+    checked_objects_.insert(target);
+    CHECK_NE(reference_summary_.weak_references().find(target),
+             reference_summary_.weak_references().end());
+  }
+
+  // Marks the relationship between `host` and `target` as checked, even if the
+  // marking visitor found no such relationship. This is necessary for
+  // ephemerons, where a pair of objects is required to retain the target.
+  // Use this function with care, since it bypasses verification.
+  void MarkReferenceCheckedWithoutChecking(HeapObject host, HeapObject target) {
+    if (host == primary_object_) {
+      checked_objects_.insert(target);
+    }
+  }
+
+  // Verifies that all of the references found by the marking visitor were
+  // checked via a call to CheckStrongReference or CheckWeakReference, or
+  // deliberately skipped via a call to MarkReferenceCheckedWithoutChecking.
+  // This ensures that there aren't retaining relationships found by the marking
+  // visitor which were omitted from the heap snapshot.
+  void CheckAllReferencesWereChecked() {
+    // Both loops below skip pointers to read-only objects, because the heap
+    // snapshot deliberately omits many of those (see IsEssentialObject).
+    // Read-only objects can't ever retain normal read-write objects, so these
+    // are fine to skip.
+    for (HeapObject obj : reference_summary_.strong_references()) {
+      if (!BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
+        CHECK_NE(checked_objects_.find(obj), checked_objects_.end());
+      }
+    }
+    for (HeapObject obj : reference_summary_.weak_references()) {
+      if (!BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
+        CHECK_NE(checked_objects_.find(obj), checked_objects_.end());
+      }
+    }
+  }
+
+ private:
+  const std::unordered_set<HeapObject, Object::Hasher>&
+  GetIndirectStrongReferences(size_t level) {
+    CHECK_GE(indirect_strong_references_.size(), level);
+
+    if (indirect_strong_references_.size() == level) {
+      // Expansion is needed.
+      indirect_strong_references_.resize(level + 1);
+      const std::unordered_set<HeapObject, Object::Hasher>& previous =
+          level == 0 ? reference_summary_.strong_references()
+                     : indirect_strong_references_[level - 1];
+      for (HeapObject obj : previous) {
+        if (BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
+          // Marking visitors don't expect to visit objects in read-only space,
+          // and will fail DCHECKs if they are used on those objects. Read-only
+          // objects can never retain anything outside read-only space, so
+          // skipping those objects doesn't weaken verification.
+          continue;
+        }
+
+        // Indirect references should only bypass internal structures, not
+        // user-visible objects or contexts.
+        if (obj.IsJSReceiver() || obj.IsString() || obj.IsContext()) {
+          continue;
+        }
+
+        ReferenceSummary summary =
+            ReferenceSummary::SummarizeReferencesFrom(generator_->heap(), obj);
+        indirect_strong_references_[level].insert(
+            summary.strong_references().begin(),
+            summary.strong_references().end());
+      }
+    }
+
+    return indirect_strong_references_[level];
+  }
+
+  DISALLOW_GARBAGE_COLLECTION(no_gc)
+  HeapSnapshotGenerator* generator_;
+  HeapObject primary_object_;
+
+  // All objects referred to by primary_object_, according to a marking visitor.
+  ReferenceSummary reference_summary_;
+
+  // Objects that have been checked via a call to CheckStrongReference or
+  // CheckWeakReference, or deliberately skipped via a call to
+  // MarkReferenceCheckedWithoutChecking.
+  std::unordered_set<HeapObject, Object::Hasher> checked_objects_;
+
+  // Objects transitively retained by the primary object. The objects in the set
+  // at index i are retained by the primary object via a chain of i+1
+  // intermediate objects.
+  std::vector<std::unordered_set<HeapObject, Object::Hasher>>
+      indirect_strong_references_;
+};
+#endif
+
 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, HeapEntry* from,
                              HeapEntry* to)
     : bit_field_(TypeField::encode(type) |
@@ -78,29 +224,84 @@ HeapEntry::HeapEntry(HeapSnapshot* snapshot, int index, Type type,
   DCHECK_GE(index, 0);
 }
 
-void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
-                                  const char* name,
-                                  HeapEntry* entry) {
-  ++children_count_;
-  snapshot_->edges().emplace_back(type, name, this, entry);
-}
-
-void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
-                                    int index,
-                                    HeapEntry* entry) {
-  ++children_count_;
-  snapshot_->edges().emplace_back(type, index, this, entry);
-}
+void HeapEntry::VerifyReference(HeapGraphEdge::Type type, HeapEntry* entry,
+                                HeapSnapshotGenerator* generator,
+                                ReferenceVerification verification) {
+#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
+  if (verification == kOffHeapPointer || generator->verifier() == nullptr) {
+    // Off-heap pointers are outside the scope of this verification; we just
+    // trust the embedder to provide accurate data. If the verifier is null,
+    // then verification is disabled.
+    return;
+  }
+  if (verification == kCustomWeakPointer) {
+    // The caller declared that this is a weak pointer ignored by the marking
+    // visitor. All we can verify at this point is that the edge type declares
+    // it to be weak.
+    CHECK_EQ(type, HeapGraphEdge::kWeak);
+    return;
+  }
+  Address from_address =
+      reinterpret_cast<Address>(generator->FindHeapThingForHeapEntry(this));
+  Address to_address =
+      reinterpret_cast<Address>(generator->FindHeapThingForHeapEntry(entry));
+  if (from_address == kNullAddress || to_address == kNullAddress) {
+    // One of these entries doesn't correspond to a real heap object.
+    // Verification is not possible.
+    return;
+  }
+  HeapObject from_obj = HeapObject::cast(Object(from_address));
+  HeapObject to_obj = HeapObject::cast(Object(to_address));
+  if (BasicMemoryChunk::FromHeapObject(to_obj)->InReadOnlySpace()) {
+    // We can't verify pointers into read-only space, because marking visitors
+    // might not mark those. For example, every Map has a pointer to the
+    // MetaMap, but marking visitors don't bother with following that link.
+    // Read-only objects are immortal and can never point to things outside of
+    // read-only space, so ignoring these objects is safe from the perspective
+    // of ensuring accurate retaining paths for normal read-write objects.
+    // Therefore, do nothing.
+  } else if (verification == kEphemeron) {
+    // Ephemerons can't be verified because they aren't marked directly by the
+    // marking visitor.
+    generator->verifier()->MarkReferenceCheckedWithoutChecking(from_obj,
+                                                               to_obj);
+  } else if (type == HeapGraphEdge::kWeak) {
+    generator->verifier()->CheckWeakReference(from_obj, to_obj);
+  } else {
+    generator->verifier()->CheckStrongReference(from_obj, to_obj);
+  }
+#endif
+}
+
+void HeapEntry::SetNamedReference(HeapGraphEdge::Type type, const char* name,
+                                  HeapEntry* entry,
+                                  HeapSnapshotGenerator* generator,
+                                  ReferenceVerification verification) {
+  ++children_count_;
+  snapshot_->edges().emplace_back(type, name, this, entry);
+  VerifyReference(type, entry, generator, verification);
+}
+
+void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type, int index,
+                                    HeapEntry* entry,
+                                    HeapSnapshotGenerator* generator,
+                                    ReferenceVerification verification) {
+  ++children_count_;
+  snapshot_->edges().emplace_back(type, index, this, entry);
+  VerifyReference(type, entry, generator, verification);
+}
 
 void HeapEntry::SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                            const char* description,
                                            HeapEntry* child,
-                                           StringsStorage* names) {
+                                           StringsStorage* names,
+                                           HeapSnapshotGenerator* generator,
+                                           ReferenceVerification verification) {
   int index = children_count_ + 1;
   const char* name = description
                          ? names->GetFormatted("%d / %s", index, description)
                          : names->GetName(index);
-  SetNamedReference(type, name, child);
+  SetNamedReference(type, name, child, generator, verification);
 }
 
 void HeapEntry::Print(const char* prefix, const char* edge_name, int max_depth,
@@ -1019,9 +1220,11 @@ void V8HeapExplorer::ExtractEphemeronHashTableReferences(
             key_entry->name(), key_entry->id(), value_entry->name(),
             value_entry->id(), table_entry->id());
       key_entry->SetNamedAutoIndexReference(HeapGraphEdge::kInternal, edge_name,
-                                            value_entry, names_);
-      table_entry->SetNamedAutoIndexReference(HeapGraphEdge::kInternal,
-                                              edge_name, value_entry, names_);
+                                            value_entry, names_, generator_,
+                                            HeapEntry::kEphemeron);
+      table_entry->SetNamedAutoIndexReference(
+          HeapGraphEdge::kInternal, edge_name, value_entry, names_, generator_,
+          HeapEntry::kEphemeron);
     }
   }
 }
@@ -1082,11 +1285,12 @@ void V8HeapExplorer::ExtractContextReferences(HeapEntry* entry,
 
     SetWeakReference(entry, "optimized_code_list",
                      context.get(Context::OPTIMIZED_CODE_LIST),
-                     Context::OffsetOfElementAt(Context::OPTIMIZED_CODE_LIST));
-    SetWeakReference(
-        entry, "deoptimized_code_list",
-        context.get(Context::DEOPTIMIZED_CODE_LIST),
-        Context::OffsetOfElementAt(Context::DEOPTIMIZED_CODE_LIST));
+                     Context::OffsetOfElementAt(Context::OPTIMIZED_CODE_LIST),
+                     HeapEntry::kCustomWeakPointer);
+    SetWeakReference(entry, "deoptimized_code_list",
+                     context.get(Context::DEOPTIMIZED_CODE_LIST),
+                     Context::OffsetOfElementAt(Context::DEOPTIMIZED_CODE_LIST),
+                     HeapEntry::kCustomWeakPointer);
     STATIC_ASSERT(Context::OPTIMIZED_CODE_LIST == Context::FIRST_WEAK_SLOT);
     STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
                   Context::NATIVE_CONTEXT_SLOTS);
@@ -1334,7 +1538,7 @@ void V8HeapExplorer::ExtractJSArrayBufferReferences(HeapEntry* entry,
   HeapEntry* data_entry =
       generator_->FindOrAddEntry(buffer.backing_store(), &allocator);
   entry->SetNamedReference(HeapGraphEdge::kInternal, "backing_store",
-                           data_entry);
+                           data_entry, generator_, HeapEntry::kOffHeapPointer);
 }
 
 void V8HeapExplorer::ExtractJSPromiseReferences(HeapEntry* entry,
@@ -1386,7 +1590,8 @@ void V8HeapExplorer::ExtractNumberReference(HeapEntry* entry, Object number) {
   SnapshotObjectId id = heap_object_map_->get_next_id();
   HeapEntry* child_entry =
       snapshot_->AddEntry(HeapEntry::kString, name, id, 0, 0);
-  entry->SetNamedReference(HeapGraphEdge::kInternal, "value", child_entry);
+  entry->SetNamedReference(HeapGraphEdge::kInternal, "value", child_entry,
+                           generator_);
 }
 
 void V8HeapExplorer::ExtractFeedbackVectorReferences(
@@ -1694,6 +1899,18 @@ bool V8HeapExplorer::IterateAndExtractReferences(
       visited_fields_.resize(max_pointer, false);
     }
 
+#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
+    std::unique_ptr<HeapEntryVerifier> verifier;
+    // MarkingVisitorBase doesn't expect that we will ever visit read-only
+    // objects, and fails DCHECKs if we attempt to. Read-only objects can
+    // never retain read-write objects, so there is no risk in skipping
+    // verification for them.
+    if (FLAG_heap_snapshot_verify &&
+        !BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
+      verifier = std::make_unique<HeapEntryVerifier>(generator, obj);
+    }
+#endif
+
     HeapEntry* entry = GetEntry(obj);
     ExtractReferences(entry, obj);
     SetInternalReference(entry, "map", obj.map(cage_base),
@@ -1757,7 +1974,8 @@ void V8HeapExplorer::SetContextReference(HeapEntry* parent_entry,
   HeapEntry* child_entry = GetEntry(child_obj);
   if (child_entry == nullptr) return;
   parent_entry->SetNamedReference(HeapGraphEdge::kContextVariable,
-                                  names_->GetName(reference_name), child_entry);
+                                  names_->GetName(reference_name), child_entry,
+                                  generator_);
   MarkVisitedField(field_offset);
 }
 
@@ -1774,15 +1992,15 @@ void V8HeapExplorer::SetNativeBindReference(HeapEntry* parent_entry,
   HeapEntry* child_entry = GetEntry(child_obj);
   if (child_entry == nullptr) return;
   parent_entry->SetNamedReference(HeapGraphEdge::kShortcut, reference_name,
-                                  child_entry);
+                                  child_entry, generator_);
 }
 
 void V8HeapExplorer::SetElementReference(HeapEntry* parent_entry, int index,
                                          Object child_obj) {
   HeapEntry* child_entry = GetEntry(child_obj);
   if (child_entry == nullptr) return;
-  parent_entry->SetIndexedReference(HeapGraphEdge::kElement, index,
-                                    child_entry);
+  parent_entry->SetIndexedReference(HeapGraphEdge::kElement, index, child_entry,
+                                    generator_);
 }
 
 void V8HeapExplorer::SetInternalReference(HeapEntry* parent_entry,
@@ -1794,7 +2012,7 @@ void V8HeapExplorer::SetInternalReference(HeapEntry* parent_entry,
   HeapEntry* child_entry = GetEntry(child_obj);
   DCHECK_NOT_NULL(child_entry);
   parent_entry->SetNamedReference(HeapGraphEdge::kInternal, reference_name,
-                                  child_entry);
+                                  child_entry, generator_);
   MarkVisitedField(field_offset);
 }
 
@@ -1806,7 +2024,8 @@ void V8HeapExplorer::SetInternalReference(HeapEntry* parent_entry, int index,
   HeapEntry* child_entry = GetEntry(child_obj);
   DCHECK_NOT_NULL(child_entry);
   parent_entry->SetNamedReference(HeapGraphEdge::kInternal,
-                                  names_->GetName(index), child_entry);
+                                  names_->GetName(index), child_entry,
+                                  generator_);
   MarkVisitedField(field_offset);
 }
 
@@ -1822,20 +2041,20 @@ void V8HeapExplorer::SetHiddenReference(HeapObject parent_obj,
   DCHECK_NOT_NULL(child_entry);
   if (IsEssentialHiddenReference(parent_obj, field_offset)) {
     parent_entry->SetIndexedReference(HeapGraphEdge::kHidden, index,
-                                      child_entry);
+                                      child_entry, generator_);
   }
 }
 
-void V8HeapExplorer::SetWeakReference(HeapEntry* parent_entry,
-                                      const char* reference_name,
-                                      Object child_obj, int field_offset) {
+void V8HeapExplorer::SetWeakReference(
+    HeapEntry* parent_entry, const char* reference_name, Object child_obj,
+    int field_offset, HeapEntry::ReferenceVerification verification) {
   if (!IsEssentialObject(child_obj)) {
     return;
   }
   HeapEntry* child_entry = GetEntry(child_obj);
   DCHECK_NOT_NULL(child_entry);
   parent_entry->SetNamedReference(HeapGraphEdge::kWeak, reference_name,
-                                  child_entry);
+                                  child_entry, generator_, verification);
   MarkVisitedField(field_offset);
 }
 
@@ -1847,8 +2066,9 @@ void V8HeapExplorer::SetWeakReference(HeapEntry* parent_entry, int index,
   }
   HeapEntry* child_entry = GetEntry(child_obj);
   DCHECK_NOT_NULL(child_entry);
-  parent_entry->SetNamedReference(
-      HeapGraphEdge::kWeak, names_->GetFormatted("%d", index), child_entry);
+  parent_entry->SetNamedReference(HeapGraphEdge::kWeak,
+                                  names_->GetFormatted("%d", index),
+                                  child_entry, generator_);
   if (field_offset.has_value()) {
     MarkVisitedField(*field_offset);
   }
@@ -1885,25 +2105,25 @@ void V8HeapExplorer::SetPropertyReference(HeapEntry* parent_entry,
                          .get())
                    : names_->GetName(reference_name);
 
-  parent_entry->SetNamedReference(type, name, child_entry);
+  parent_entry->SetNamedReference(type, name, child_entry, generator_);
   MarkVisitedField(field_offset);
 }
 
 void V8HeapExplorer::SetRootGcRootsReference() {
-  snapshot_->root()->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
-                                                  snapshot_->gc_roots());
+  snapshot_->root()->SetIndexedAutoIndexReference(
+      HeapGraphEdge::kElement, snapshot_->gc_roots(), generator_);
 }
 
 void V8HeapExplorer::SetUserGlobalReference(Object child_obj) {
   HeapEntry* child_entry = GetEntry(child_obj);
   DCHECK_NOT_NULL(child_entry);
-  snapshot_->root()->SetNamedAutoIndexReference(HeapGraphEdge::kShortcut,
-                                                nullptr, child_entry, names_);
+  snapshot_->root()->SetNamedAutoIndexReference(
+      HeapGraphEdge::kShortcut, nullptr, child_entry, names_, generator_);
 }
 
 void V8HeapExplorer::SetGcRootsReference(Root root) {
   snapshot_->gc_roots()->SetIndexedAutoIndexReference(
-      HeapGraphEdge::kElement, snapshot_->gc_subroot(root));
+      HeapGraphEdge::kElement, snapshot_->gc_subroot(root), generator_);
 }
 
 void V8HeapExplorer::SetGcSubrootReference(Root root, const char* description,
@@ -1921,11 +2141,11 @@ void V8HeapExplorer::SetGcSubrootReference(Root root, const char* description,
   HeapGraphEdge::Type edge_type =
       is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kInternal;
   if (name != nullptr) {
-    snapshot_->gc_subroot(root)->SetNamedReference(edge_type, name,
-                                                   child_entry);
+    snapshot_->gc_subroot(root)->SetNamedReference(edge_type, name, child_entry,
+                                                   generator_);
   } else {
     snapshot_->gc_subroot(root)->SetNamedAutoIndexReference(
-        edge_type, description, child_entry, names_);
+        edge_type, description, child_entry, names_, generator_);
   }
 
   // For full heap snapshots we do not emit user roots but rather rely on
@@ -2222,7 +2442,8 @@ bool NativeObjectsExplorer::IterateAndExtractReferences(
     if (auto* entry = EntryForEmbedderGraphNode(node.get())) {
      if (node->IsRootNode()) {
        snapshot_->root()->SetIndexedAutoIndexReference(
-            HeapGraphEdge::kElement, entry);
+            HeapGraphEdge::kElement, entry, generator_,
+            HeapEntry::kOffHeapPointer);
       }
       if (node->WrapperNode()) {
         MergeNodeIntoEntry(entry, node.get(), node->WrapperNode());
@@ -2238,10 +2459,13 @@ bool NativeObjectsExplorer::IterateAndExtractReferences(
       HeapEntry* to = EntryForEmbedderGraphNode(edge.to);
       if (!to) continue;
       if (edge.name == nullptr) {
-        from->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, to);
+        from->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, to,
+                                           generator_,
+                                           HeapEntry::kOffHeapPointer);
       } else {
        from->SetNamedReference(HeapGraphEdge::kInternal,
-                                names_->GetCopy(edge.name), to);
+                                names_->GetCopy(edge.name), to, generator_,
+                                HeapEntry::kOffHeapPointer);
       }
     }
   }
@@ -2250,16 +2474,13 @@ bool NativeObjectsExplorer::IterateAndExtractReferences(
 }
 
 HeapSnapshotGenerator::HeapSnapshotGenerator(
-    HeapSnapshot* snapshot,
-    v8::ActivityControl* control,
-    v8::HeapProfiler::ObjectNameResolver* resolver,
-    Heap* heap)
+    HeapSnapshot* snapshot, v8::ActivityControl* control,
+    v8::HeapProfiler::ObjectNameResolver* resolver, Heap* heap)
     : snapshot_(snapshot),
       control_(control),
       v8_heap_explorer_(snapshot_, this, resolver),
       dom_explorer_(snapshot_, this),
-      heap_(heap) {
-}
+      heap_(heap) {}
 
 namespace {
 class V8_NODISCARD NullContextForSnapshotScope {
@@ -24,6 +24,10 @@
 #include "src/profiler/strings-storage.h"
 #include "src/strings/string-hasher.h"
 
+#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
+#include "src/heap/reference-summarizer.h"
+#endif
+
 namespace v8 {
 namespace internal {
 
@@ -141,17 +145,40 @@ class HeapEntry {
   }
   uint8_t detachedness() const { return detachedness_; }
 
-  void SetIndexedReference(
-      HeapGraphEdge::Type type, int index, HeapEntry* entry);
-  void SetNamedReference(
-      HeapGraphEdge::Type type, const char* name, HeapEntry* entry);
-  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
-                                    HeapEntry* child) {
-    SetIndexedReference(type, children_count_ + 1, child);
+  enum ReferenceVerification {
+    // Verify that the reference can be found via marking, if verification is
+    // enabled.
+    kVerify,
+
+    // Skip verifying that the reference can be found via marking, for any of
+    // the following reasons:
+
+    kEphemeron,
+    kOffHeapPointer,
+    kCustomWeakPointer,
+  };
+
+  void VerifyReference(HeapGraphEdge::Type type, HeapEntry* entry,
+                       HeapSnapshotGenerator* generator,
+                       ReferenceVerification verification);
+  void SetIndexedReference(HeapGraphEdge::Type type, int index,
+                           HeapEntry* entry, HeapSnapshotGenerator* generator,
+                           ReferenceVerification verification = kVerify);
+  void SetNamedReference(HeapGraphEdge::Type type, const char* name,
+                         HeapEntry* entry, HeapSnapshotGenerator* generator,
+                         ReferenceVerification verification = kVerify);
+  void SetIndexedAutoIndexReference(
+      HeapGraphEdge::Type type, HeapEntry* child,
+      HeapSnapshotGenerator* generator,
+      ReferenceVerification verification = kVerify) {
+    SetIndexedReference(type, children_count_ + 1, child, generator,
+                        verification);
   }
   void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                   const char* description, HeapEntry* child,
-                                  StringsStorage* strings);
+                                  StringsStorage* strings,
+                                  HeapSnapshotGenerator* generator,
+                                  ReferenceVerification verification = kVerify);
 
   V8_EXPORT_PRIVATE void Print(const char* prefix, const char* edge_name,
                                int max_depth, int indent) const;
@@ -438,8 +465,10 @@ class V8_EXPORT_PRIVATE V8HeapExplorer : public HeapEntriesAllocator {
                             int field_offset = -1);
   void SetHiddenReference(HeapObject parent_obj, HeapEntry* parent_entry,
                           int index, Object child, int field_offset);
-  void SetWeakReference(HeapEntry* parent_entry, const char* reference_name,
-                        Object child_obj, int field_offset);
+  void SetWeakReference(
+      HeapEntry* parent_entry, const char* reference_name, Object child_obj,
+      int field_offset,
+      HeapEntry::ReferenceVerification verification = HeapEntry::kVerify);
   void SetWeakReference(HeapEntry* parent_entry, int index, Object child_obj,
                         base::Optional<int> field_offset);
   void SetPropertyReference(HeapEntry* parent_entry, Name reference_name,
@@ -511,6 +540,8 @@ class NativeObjectsExplorer {
   friend class GlobalHandlesExtractor;
 };
 
+class HeapEntryVerifier;
+
 class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
  public:
   // The HeapEntriesMap instance is used to track a mapping between
@@ -539,10 +570,33 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
   }
 
   HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
-    return entries_map_.emplace(ptr, allocator->AllocateEntry(ptr))
-        .first->second;
+    HeapEntry* result =
+        entries_map_.emplace(ptr, allocator->AllocateEntry(ptr)).first->second;
+#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
+    if (FLAG_heap_snapshot_verify) {
+      reverse_entries_map_.emplace(result, ptr);
+    }
+#endif
+    return result;
   }
 
+#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
+  HeapThing FindHeapThingForHeapEntry(HeapEntry* entry) {
+    // The reverse lookup map is only populated if the verification flag is
+    // enabled.
+    DCHECK(FLAG_heap_snapshot_verify);
+
+    auto it = reverse_entries_map_.find(entry);
+    return it == reverse_entries_map_.end() ? nullptr : it->second;
+  }
+
+  HeapEntryVerifier* verifier() const { return verifier_; }
+  void set_verifier(HeapEntryVerifier* verifier) {
+    DCHECK_IMPLIES(verifier_, !verifier);
+    verifier_ = verifier;
+  }
+#endif
+
   HeapEntry* AddEntry(Smi smi, HeapEntriesAllocator* allocator) {
     return smis_map_.emplace(smi.value(), allocator->AllocateEntry(smi))
         .first->second;
@@ -558,6 +612,8 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
     return entry != nullptr ? entry : AddEntry(smi, allocator);
   }
 
+  Heap* heap() const { return heap_; }
+
  private:
   bool FillReferences();
   void ProgressStep() override;
@@ -575,6 +631,11 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
   uint32_t progress_counter_;
   uint32_t progress_total_;
   Heap* heap_;
+
+#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
+  std::unordered_map<HeapEntry*, HeapThing> reverse_entries_map_;
+  HeapEntryVerifier* verifier_ = nullptr;
+#endif
 };
 
 class OutputStreamWriter;