[handles] Remove precise on-stack representation of global handles

Since https://crrev.com/c/3806439, on-stack traced handles are marked
conservatively when used in combination with CppHeap.

This change removes the precise on-stack representation of the
internal traced nodes, as these nodes would be marked conservatively
anyway. The effects are:
- cheaper representation (just a single node space);
- uniform handling: no checks to distinguish on-stack vs on-heap;
- no brittleness around cleaning on-stack handles when the event loop
  is empty.

Change-Id: Id859623bfed77a66bdd064ea8065536264515eae
Bug: v8:13141
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3812039
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82306}
Author: Michael Lippautz, 2022-08-09 14:43:31 +02:00 (committed by V8 LUCI CQ)
Parent: d757c72e09
Commit: 6953b5550e
17 changed files with 257 additions and 429 deletions
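
At its core, the change swaps per-handle bookkeeping for a range query: any word found on the stack during marking is checked against the bounds of the traced-node blocks. Below is a minimal, self-contained sketch of that query, mirroring the `GlobalHandleMarkingVisitor::VisitPointer()` logic added further down; `MayPointIntoTracedNode` is an illustrative name, while `NodeBounds` matches the alias this change adds to `GlobalHandles`.

#include <algorithm>
#include <iterator>
#include <utility>
#include <vector>

// Same shape as the GlobalHandles::NodeBounds alias introduced below.
using NodeBounds = std::vector<std::pair<const void*, const void*>>;

// Returns true if `address` points into any [begin, end) traced-node block.
// Assumes `bounds` is sorted by block begin, as GetTracedNodeBounds()
// provides for the real data structure.
bool MayPointIntoTracedNode(const NodeBounds& bounds, const void* address) {
  const auto upper_it = std::upper_bound(
      bounds.begin(), bounds.end(), address,
      [](const void* needle, const auto& pair) { return needle < pair.first; });
  // upper_it == begin() also covers the empty-bounds case.
  if (upper_it == bounds.begin()) return false;
  return address < std::prev(upper_it)->second;
}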

BUILD.bazel

@@ -1450,6 +1450,8 @@ filegroup(
"src/heap/gc-tracer.cc",
"src/heap/gc-tracer-inl.h",
"src/heap/gc-tracer.h",
"src/heap/global-handle-marking-visitor.cc",
"src/heap/global-handle-marking-visitor.h",
"src/heap/heap-allocator-inl.h",
"src/heap/heap-allocator.cc",
"src/heap/heap-allocator.h",

BUILD.gn

@@ -3121,6 +3121,7 @@ v8_header_set("v8_internal_headers") {
"src/heap/gc-idle-time-handler.h",
"src/heap/gc-tracer-inl.h",
"src/heap/gc-tracer.h",
"src/heap/global-handle-marking-visitor.h",
"src/heap/heap-allocator-inl.h",
"src/heap/heap-allocator.h",
"src/heap/heap-controller.h",
@@ -4506,6 +4507,7 @@ v8_source_set("v8_base_without_compiler") {
"src/heap/free-list.cc",
"src/heap/gc-idle-time-handler.cc",
"src/heap/gc-tracer.cc",
"src/heap/global-handle-marking-visitor.cc",
"src/heap/heap-allocator.cc",
"src/heap/heap-controller.cc",
"src/heap/heap-layout-tracer.cc",

src/api/api.cc

@@ -10350,7 +10350,7 @@ void HeapProfiler::SetGetDetachednessCallback(GetDetachednessCallback callback,
void EmbedderHeapTracer::SetStackStart(void* stack_start) {
CHECK(v8_isolate_);
reinterpret_cast<i::Isolate*>(v8_isolate_)
->global_handles()
->heap()
->SetStackStart(stack_start);
}

src/handles/global-handles.cc

@@ -663,16 +663,6 @@ class GlobalHandles::TracedNode final
void clear_markbit() { flags_ = Markbit::update(flags_, false); }
template <AccessMode access_mode = AccessMode::NON_ATOMIC>
bool is_on_stack() const {
if constexpr (access_mode == AccessMode::NON_ATOMIC) {
return IsOnStack::decode(flags_);
}
return IsOnStack::decode(base::AsAtomic8::Relaxed_Load(&flags_));
}
void set_is_on_stack(bool v) { flags_ = IsOnStack::update(flags_, v); }
void clear_object() {
reinterpret_cast<std::atomic<Address>*>(&object_)->store(
kNullAddress, std::memory_order_relaxed);
@@ -702,16 +692,14 @@ class GlobalHandles::TracedNode final
using NodeState = base::BitField8<State, 0, 2>;
using IsInYoungList = NodeState::Next<bool, 1>;
using IsRoot = IsInYoungList::Next<bool, 1>;
using IsOnStack = IsRoot::Next<bool, 1>;
// The markbit is the exception as it can be set from the main and marker
// threads at the same time.
using Markbit = IsOnStack::Next<bool, 1>;
using Markbit = IsRoot::Next<bool, 1>;
void ClearImplFields() {
set_root(true);
// Nodes are black allocated for simplicity.
set_markbit();
set_is_on_stack(false);
}
void CheckNodeIsFreeNodeImpl() const {
@@ -723,143 +711,6 @@ class GlobalHandles::TracedNode final
friend class NodeBase<GlobalHandles::TracedNode>;
};
// Space to keep track of on-stack handles (e.g. TracedReference). Such
// references are treated as root for any V8 garbage collection. The data
// structure is self-healing and pessimistically filters outdated entries on
// insertion and iteration.
//
// Design doc: http://bit.ly/on-stack-traced-reference
class GlobalHandles::OnStackTracedNodeSpace final {
public:
static GlobalHandles* GetGlobalHandles(const TracedNode* on_stack_node) {
// An on-stack node should never be picked up by the concurrent marker.
DCHECK(on_stack_node->is_on_stack());
return reinterpret_cast<const NodeEntry*>(on_stack_node)->global_handles;
}
explicit OnStackTracedNodeSpace(GlobalHandles* global_handles)
: global_handles_(global_handles) {}
void SetStackStart(void* stack_start) {
CHECK(on_stack_nodes_.empty());
stack_.SetStackStart(base::Stack::GetRealStackAddressForSlot(stack_start));
}
V8_INLINE bool IsOnStack(uintptr_t slot) const;
void Iterate(RootVisitor* v);
TracedNode* Allocate(uintptr_t address);
void CleanupBelowCurrentStackPosition();
void NotifyEmptyEmbedderStack();
size_t NumberOfHandlesForTesting() const { return on_stack_nodes_.size(); }
private:
struct NodeEntry {
TracedNode node;
// Used to find the owning GlobalHandles from a Node on copy. Needs to
// follow node.
GlobalHandles* global_handles;
};
// Keeps track of registered handles. The data structure is cleaned on
// iteration and when adding new references using the current stack address.
// Cleaning is based on the current stack address and the key of the map,
// which is slightly different for ASAN configs -- see below.
#ifdef V8_USE_ADDRESS_SANITIZER
// Mapping from stack slots or real stack frames to the corresponding nodes.
// In case a reference is part of a fake frame, we map it to the real stack
// frame base instead of the actual stack slot. The list keeps all nodes for
// a particular real frame.
std::map<uintptr_t, std::list<NodeEntry>> on_stack_nodes_;
#else // !V8_USE_ADDRESS_SANITIZER
// Mapping from stack slots to the corresponding nodes. We don't expect
// aliasing with overlapping lifetimes of nodes.
std::map<uintptr_t, NodeEntry> on_stack_nodes_;
#endif // !V8_USE_ADDRESS_SANITIZER
::heap::base::Stack stack_;
GlobalHandles* global_handles_ = nullptr;
size_t acquire_count_ = 0;
};
bool GlobalHandles::OnStackTracedNodeSpace::IsOnStack(uintptr_t slot) const {
// By the time this function is called, the stack start may not be set (i.e.
// SetStackStart() was not called). In that case, assume the slot is not on
// stack.
if (!stack_.stack_start()) return false;
return stack_.IsOnStack(reinterpret_cast<void*>(slot));
}
void GlobalHandles::OnStackTracedNodeSpace::NotifyEmptyEmbedderStack() {
on_stack_nodes_.clear();
}
void GlobalHandles::OnStackTracedNodeSpace::Iterate(RootVisitor* v) {
#ifdef V8_USE_ADDRESS_SANITIZER
for (auto& pair : on_stack_nodes_) {
for (auto& node_entry : pair.second) {
TracedNode& node = node_entry.node;
if (node.IsRetainer()) {
v->VisitRootPointer(Root::kGlobalHandles, "on-stack TracedReference",
node.location());
}
}
}
#else // !V8_USE_ADDRESS_SANITIZER
// Handles have been cleaned from the GC entry point which is higher up the
// stack.
for (auto& pair : on_stack_nodes_) {
TracedNode& node = pair.second.node;
if (node.IsRetainer()) {
v->VisitRootPointer(Root::kGlobalHandles, "on-stack TracedReference",
node.location());
}
}
#endif // !V8_USE_ADDRESS_SANITIZER
}
GlobalHandles::TracedNode* GlobalHandles::OnStackTracedNodeSpace::Allocate(
uintptr_t slot) {
constexpr size_t kAcquireCleanupThresholdLog2 = 8;
constexpr size_t kAcquireCleanupThresholdMask =
(size_t{1} << kAcquireCleanupThresholdLog2) - 1;
DCHECK(IsOnStack(slot));
if (((acquire_count_++) & kAcquireCleanupThresholdMask) == 0) {
CleanupBelowCurrentStackPosition();
}
NodeEntry entry;
entry.node.Free(nullptr);
entry.global_handles = global_handles_;
#ifdef V8_USE_ADDRESS_SANITIZER
auto pair = on_stack_nodes_.insert(
{base::Stack::GetRealStackAddressForSlot(slot), {}});
pair.first->second.push_back(std::move(entry));
TracedNode* result = &(pair.first->second.back().node);
#else // !V8_USE_ADDRESS_SANITIZER
auto pair = on_stack_nodes_.insert(
{base::Stack::GetRealStackAddressForSlot(slot), std::move(entry)});
if (!pair.second) {
// Insertion failed because there already was an entry present for that
// stack address. This can happen because cleanup is conservative about the
// stack limits it uses. Reusing the entry is fine as there's no aliasing of
// different references with the same stack slot.
pair.first->second.node.Free(nullptr);
}
TracedNode* result = &(pair.first->second.node);
#endif // !V8_USE_ADDRESS_SANITIZER
result->CheckNodeIsFreeNode();
return result;
}
void GlobalHandles::OnStackTracedNodeSpace::CleanupBelowCurrentStackPosition() {
if (on_stack_nodes_.empty()) return;
const uintptr_t stack_ptr = reinterpret_cast<uintptr_t>(
::heap::base::Stack::GetCurrentStackPointerForLocalVariables());
const auto it = on_stack_nodes_.upper_bound(stack_ptr);
on_stack_nodes_.erase(on_stack_nodes_.begin(), it);
}
// static
void GlobalHandles::EnableMarkingBarrier(Isolate* isolate) {
auto* global_handles = isolate->global_handles();
@@ -880,9 +731,6 @@ void GlobalHandles::TracedNode::Verify(GlobalHandles* global_handles,
#ifdef DEBUG
const TracedNode* node = FromLocation(*slot);
DCHECK(node->IsInUse());
const bool slot_on_stack = global_handles->on_stack_nodes_->IsOnStack(
reinterpret_cast<uintptr_t>(slot));
DCHECK_EQ(slot_on_stack, node->is_on_stack<AccessMode::ATOMIC>());
auto* incremental_marking =
global_handles->isolate()->heap()->incremental_marking();
if (incremental_marking && incremental_marking->IsMarking()) {
@@ -896,12 +744,9 @@ void GlobalHandles::TracedNode::Verify(GlobalHandles* global_handles,
node->markbit<AccessMode::ATOMIC>());
}
}
if (!node->is_on_stack<AccessMode::ATOMIC>()) {
// On-heap nodes have separate lists for young generation processing.
bool is_young_gen_object = ObjectInYoungGeneration(node->object());
DCHECK_IMPLIES(is_young_gen_object, node->is_in_young_list());
}
bool in_young_list =
DCHECK_IMPLIES(ObjectInYoungGeneration(node->object()),
node->is_in_young_list());
const bool in_young_list =
std::find(global_handles->traced_young_nodes_.begin(),
global_handles->traced_young_nodes_.end(),
node) != global_handles->traced_young_nodes_.end();
@@ -909,14 +754,6 @@ void GlobalHandles::TracedNode::Verify(GlobalHandles* global_handles,
#endif // DEBUG
}
void GlobalHandles::CleanupOnStackReferencesBelowCurrentStackPosition() {
on_stack_nodes_->CleanupBelowCurrentStackPosition();
}
size_t GlobalHandles::NumberOfOnStackHandlesForTesting() {
return on_stack_nodes_->NumberOfHandlesForTesting();
}
size_t GlobalHandles::TotalSize() const {
return regular_nodes_->TotalSize() + traced_nodes_->TotalSize();
}
@@ -930,21 +767,13 @@ size_t GlobalHandles::handles_count() const {
return regular_nodes_->handles_count() + traced_nodes_->handles_count();
}
void GlobalHandles::SetStackStart(void* stack_start) {
on_stack_nodes_->SetStackStart(stack_start);
}
void GlobalHandles::NotifyEmptyEmbedderStack() {
on_stack_nodes_->NotifyEmptyEmbedderStack();
}
GlobalHandles::GlobalHandles(Isolate* isolate)
: isolate_(isolate),
regular_nodes_(new NodeSpace<GlobalHandles::Node>(this)),
traced_nodes_(new NodeSpace<GlobalHandles::TracedNode>(this)),
on_stack_nodes_(new OnStackTracedNodeSpace(this)) {}
regular_nodes_(std::make_unique<NodeSpace<GlobalHandles::Node>>(this)),
traced_nodes_(
std::make_unique<NodeSpace<GlobalHandles::TracedNode>>(this)) {}
GlobalHandles::~GlobalHandles() { regular_nodes_.reset(nullptr); }
GlobalHandles::~GlobalHandles() = default;
namespace {
@@ -970,28 +799,13 @@ Handle<Object> GlobalHandles::Create(Address value) {
Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot,
GlobalHandleStoreMode store_mode) {
return CreateTraced(
value, slot, store_mode,
on_stack_nodes_->IsOnStack(reinterpret_cast<uintptr_t>(slot)));
}
Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot,
GlobalHandleStoreMode store_mode,
bool is_on_stack) {
GlobalHandles::TracedNode* node;
if (is_on_stack) {
node = on_stack_nodes_->Allocate(reinterpret_cast<uintptr_t>(slot));
node->set_is_on_stack(true);
// No write barrier needed as on-stack nodes are treated as roots.
} else {
node = traced_nodes_->Allocate();
if (NeedsTrackingInYoungNodes(value, node)) {
traced_young_nodes_.push_back(node);
node->set_in_young_list(true);
}
if (store_mode != GlobalHandleStoreMode::kInitializingStore) {
WriteBarrier::MarkingFromGlobalHandle(value);
}
GlobalHandles::TracedNode* node = traced_nodes_->Allocate();
if (NeedsTrackingInYoungNodes(value, node)) {
traced_young_nodes_.push_back(node);
node->set_in_young_list(true);
}
if (store_mode != GlobalHandleStoreMode::kInitializingStore) {
WriteBarrier::MarkingFromGlobalHandle(value);
}
return node->Publish(value);
}
@@ -1026,8 +840,7 @@ void GlobalHandles::CopyTracedReference(const Address* const* from,
DCHECK_NOT_NULL(*from);
DCHECK_NULL(*to);
const TracedNode* from_node = TracedNode::FromLocation(*from);
// TODO(chromium:1322114): Temporary sanity check.
CHECK_NE(kGlobalHandleZapValue, from_node->raw_object());
DCHECK_NE(kGlobalHandleZapValue, from_node->raw_object());
GlobalHandles* global_handles =
GlobalHandles::From(const_cast<TracedNode*>(from_node));
Handle<Object> o = global_handles->CreateTraced(
@@ -1043,6 +856,7 @@ void GlobalHandles::CopyTracedReference(const Address* const* from,
#endif // VERIFY_HEAP
}
// static
void GlobalHandles::MoveGlobal(Address** from, Address** to) {
DCHECK_NOT_NULL(*from);
DCHECK_NOT_NULL(*to);
@@ -1054,6 +868,7 @@ void GlobalHandles::MoveGlobal(Address** from, Address** to) {
// Strong handles do not require fixups.
}
// static
void GlobalHandles::MoveTracedReference(Address** from, Address** to) {
// Fast path for moving from an empty reference.
if (!*from) {
@@ -1070,78 +885,26 @@ void GlobalHandles::MoveTracedReference(Address** from, Address** to) {
#ifdef DEBUG
global_handles = GlobalHandles::From(from_node);
#endif // DEBUG
bool from_on_stack = from_node->is_on_stack<AccessMode::ATOMIC>();
bool to_on_stack = false;
if (!to_node) {
// Figure out whether stack or heap to allow fast path for heap->heap move.
global_handles = GlobalHandles::From(from_node);
to_on_stack = global_handles->on_stack_nodes_->IsOnStack(
reinterpret_cast<uintptr_t>(to));
} else {
to_on_stack = to_node->is_on_stack<AccessMode::ATOMIC>();
}
// Moving.
if (from_on_stack || to_on_stack) {
// Move involving a stack slot.
if (!to_node) {
DCHECK(global_handles);
// TODO(chromium:1322114): Temporary sanity check.
CHECK_NE(kGlobalHandleZapValue, from_node->raw_object());
Handle<Object> o = global_handles->CreateTraced(
from_node->object(), reinterpret_cast<Address*>(to),
GlobalHandleStoreMode::kAssigningStore, to_on_stack);
SetSlotThreadSafe(to, o.location());
to_node = TracedNode::FromLocation(*to);
// The node was newly acquired which implies that the node markbit is
// already set.
DCHECK(to_node->markbit());
} else {
DCHECK(to_node->IsInUse());
// TODO(chromium:1322114): Temporary sanity check.
CHECK_NE(kGlobalHandleZapValue, from_node->raw_object());
to_node->CopyObjectReference(*from_node);
if (!to_node->is_on_stack() && !to_node->is_in_young_list() &&
ObjectInYoungGeneration(to_node->object())) {
global_handles = GlobalHandles::From(from_node);
global_handles->traced_young_nodes_.push_back(to_node);
to_node->set_in_young_list(true);
}
if (!to_on_stack) {
// The node was reused, so there's no need for a node write barrier
// here.
WriteBarrier::MarkingFromGlobalHandle(to_node->object());
}
}
DestroyTracedReference(*from);
SetSlotThreadSafe(from, nullptr);
} else {
// Pure heap move.
DCHECK_IMPLIES(*to, to_node->IsInUse());
// TODO(chromium:1322114): Temporary sanity checks.
CHECK_NE(kGlobalHandleZapValue, from_node->raw_object());
if (*to) {
CHECK_NE(kGlobalHandleZapValue, to_node->raw_object());
}
DestroyTracedReference(*to);
SetSlotThreadSafe(to, *from);
to_node = from_node;
DCHECK_NOT_NULL(*from);
DCHECK_NOT_NULL(*to);
DCHECK_EQ(*from, *to);
// Write barrier needs to cover node as well as object.
to_node->set_markbit<AccessMode::ATOMIC>();
WriteBarrier::MarkingFromGlobalHandle(to_node->object());
SetSlotThreadSafe(from, nullptr);
}
// Pure heap move.
DCHECK_IMPLIES(*to, to_node->IsInUse());
DCHECK_IMPLIES(*to, kGlobalHandleZapValue != to_node->raw_object());
DCHECK_NE(kGlobalHandleZapValue, from_node->raw_object());
DestroyTracedReference(*to);
SetSlotThreadSafe(to, *from);
to_node = from_node;
DCHECK_NOT_NULL(*from);
DCHECK_NOT_NULL(*to);
DCHECK_EQ(*from, *to);
// Write barrier needs to cover node as well as object.
to_node->set_markbit<AccessMode::ATOMIC>();
WriteBarrier::MarkingFromGlobalHandle(to_node->object());
SetSlotThreadSafe(from, nullptr);
TracedNode::Verify(global_handles, to);
}
// static
GlobalHandles* GlobalHandles::From(const TracedNode* node) {
return node->is_on_stack<AccessMode::ATOMIC>()
? OnStackTracedNodeSpace::GetGlobalHandles(node)
: NodeBlock<TracedNode>::From(node)->global_handles();
return NodeBlock<TracedNode>::From(node)->global_handles();
}
// static
@@ -1175,12 +938,6 @@ void GlobalHandles::Destroy(Address* location) {
void GlobalHandles::DestroyTracedReference(Address* location) {
if (location != nullptr) {
TracedNode* node = TracedNode::FromLocation(location);
if (node->is_on_stack<AccessMode::ATOMIC>()) {
node->Release(nullptr);
return;
}
DCHECK(!node->is_on_stack<AccessMode::ATOMIC>());
auto* global_handles = GlobalHandles::From(node);
// When marking is off the handle may be freed immediately. Note that this
// includes also the case when invoking the first pass callbacks during the
@@ -1195,15 +952,8 @@ void GlobalHandles::DestroyTracedReference(Address* location) {
//
// On-heap traced nodes are released in the atomic pause in
// `IterateWeakRootsForPhantomHandles()` when they are discovered as not
// marked.
//
// Eagerly clear out the object here to avoid needlessly marking it from
// this point on. Also clear out callback and backreference for the version
// with callbacks to avoid calling into possibly dead memory later.
//
// In the case this happens during incremental marking, the node may
// still be spuriously marked as live and is then only reclaimed on the
// next cycle.
// marked. Eagerly clear out the object here to avoid needlessly marking it
// from this point on. The node will be reclaimed on the next cycle.
node->clear_object();
}
}
@@ -1510,10 +1260,6 @@ void GlobalHandles::IterateStrongRoots(RootVisitor* v) {
}
}
void GlobalHandles::IterateStrongStackRoots(RootVisitor* v) {
on_stack_nodes_->Iterate(v);
}
void GlobalHandles::IterateWeakRoots(RootVisitor* v) {
for (Node* node : *regular_nodes_) {
if (node->IsWeak()) {
@@ -1541,7 +1287,6 @@ void GlobalHandles::IterateAllRoots(RootVisitor* v) {
v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
}
}
on_stack_nodes_->Iterate(v);
}
DISABLE_CFI_PERF
@@ -1557,7 +1302,6 @@ void GlobalHandles::IterateAllYoungRoots(RootVisitor* v) {
v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
}
}
on_stack_nodes_->Iterate(v);
}
DISABLE_CFI_PERF
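
A detail visible in the `TracedNode` hunks above: with `IsOnStack` gone, `Markbit` packs directly after `IsRoot`, one bit lower than before. The following self-contained re-creation of the flag layout uses a stand-in that only mimics `v8::base::BitField8` (src/base/bit-field.h); the `State` enumerators are illustrative.

#include <cstdint>

// Simplified stand-in for v8::base::BitField8.
template <typename T, int kShift, int kSize>
struct BitField8 {
  static constexpr uint8_t kMask =
      static_cast<uint8_t>(((1u << kSize) - 1u) << kShift);
  static constexpr uint8_t update(uint8_t previous, T value) {
    return static_cast<uint8_t>(
        (previous & static_cast<uint8_t>(~kMask)) |
        ((static_cast<uint8_t>(value) << kShift) & kMask));
  }
  static constexpr T decode(uint8_t packed) {
    return static_cast<T>((packed & kMask) >> kShift);
  }
  template <typename U, int kNextSize>
  using Next = BitField8<U, kShift + kSize, kNextSize>;
};

enum class State : uint8_t { kFree = 0, kInUse = 1 };  // illustrative values

// Layout after this change: the bit formerly used by IsOnStack is gone and
// Markbit packs directly after IsRoot, in bit 4 of the flag byte.
using NodeState = BitField8<State, 0, 2>;
using IsInYoungList = NodeState::Next<bool, 1>;
using IsRoot = IsInYoungList::Next<bool, 1>;
using Markbit = IsRoot::Next<bool, 1>;

static_assert(Markbit::kMask == 0x10, "Markbit now lives in bit 4");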

src/handles/global-handles.h

@@ -94,9 +94,6 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
template <typename T>
inline Handle<T> Create(T value);
Handle<Object> CreateTraced(Object value, Address* slot,
GlobalHandleStoreMode store_mode,
bool is_on_stack);
Handle<Object> CreateTraced(Object value, Address* slot,
GlobalHandleStoreMode store_mode);
Handle<Object> CreateTraced(Address value, Address* slot,
@@ -112,7 +109,6 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags);
void IterateStrongRoots(RootVisitor* v);
void IterateStrongStackRoots(RootVisitor* v);
void IterateWeakRoots(RootVisitor* v);
void IterateAllRoots(RootVisitor* v);
void IterateAllYoungRoots(RootVisitor* v);
@@ -157,15 +153,9 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
size_t TotalSize() const;
size_t UsedSize() const;
// Number of global handles.
size_t handles_count() const;
void SetStackStart(void* stack_start);
void NotifyEmptyEmbedderStack();
void CleanupOnStackReferencesBelowCurrentStackPosition();
size_t NumberOfOnStackHandlesForTesting();
using NodeBounds = std::vector<std::pair<const void*, const void*>>;
NodeBounds GetTracedNodeBounds() const;
@@ -185,7 +175,6 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
class NodeSpace;
class PendingPhantomCallback;
class TracedNode;
class OnStackTracedNodeSpace;
static GlobalHandles* From(const TracedNode*);
@@ -213,7 +202,6 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
std::unique_ptr<NodeSpace<TracedNode>> traced_nodes_;
std::vector<TracedNode*> traced_young_nodes_;
std::unique_ptr<OnStackTracedNodeSpace> on_stack_nodes_;
std::vector<std::pair<Node*, PendingPhantomCallback>>
regular_pending_phantom_callbacks_;

src/heap/cppgc-js/cpp-heap.cc

@@ -46,6 +46,7 @@
#include "src/heap/embedder-tracing-inl.h"
#include "src/heap/embedder-tracing.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/global-handle-marking-visitor.h"
#include "src/heap/marking-worklist.h"
#include "src/heap/sweeper.h"
#include "src/init/v8.h"
@@ -252,42 +253,26 @@ class UnifiedHeapConservativeMarkingVisitor final
public:
UnifiedHeapConservativeMarkingVisitor(
HeapBase& heap, MutatorMarkingState& mutator_marking_state,
cppgc::Visitor& visitor, UnifiedHeapMarkingState& marking_state)
: ConservativeMarkingVisitor(heap, mutator_marking_state, visitor),
marking_state_(marking_state) {}
cppgc::Visitor& visitor)
: ConservativeMarkingVisitor(heap, mutator_marking_state, visitor) {}
~UnifiedHeapConservativeMarkingVisitor() override = default;
void SetTracedNodeBounds(GlobalHandles::NodeBounds traced_node_bounds) {
traced_node_bounds_ = std::move(traced_node_bounds);
void SetGlobalHandlesMarkingVisitor(
std::unique_ptr<GlobalHandleMarkingVisitor>
global_handle_marking_visitor) {
global_handle_marking_visitor_ = std::move(global_handle_marking_visitor);
}
void TraceConservativelyIfNeeded(const void* address) override {
ConservativeMarkingVisitor::TraceConservativelyIfNeeded(address);
TraceTracedNodesConservatively(address);
}
private:
void TraceTracedNodesConservatively(const void* address) {
const auto upper_it =
std::upper_bound(traced_node_bounds_.begin(), traced_node_bounds_.end(),
address, [](const void* needle, const auto& pair) {
return needle < pair.first;
});
// Also checks emptiness as begin() == end() on empty maps.
if (upper_it == traced_node_bounds_.begin()) return;
const auto bounds = std::next(upper_it, -1);
if (address < bounds->second) {
auto object = GlobalHandles::MarkTracedConservatively(
const_cast<Address*>(reinterpret_cast<const Address*>(address)),
const_cast<Address*>(
reinterpret_cast<const Address*>(bounds->first)));
marking_state_.MarkAndPush(object);
if (global_handle_marking_visitor_) {
global_handle_marking_visitor_->VisitPointer(address);
}
}
GlobalHandles::NodeBounds traced_node_bounds_;
UnifiedHeapMarkingState& marking_state_;
private:
std::unique_ptr<GlobalHandleMarkingVisitor> global_handle_marking_visitor_ =
nullptr;
};
} // namespace
@@ -344,8 +329,7 @@ UnifiedHeapMarker::UnifiedHeapMarker(Heap* v8_heap,
heap, mutator_marking_state_,
mutator_unified_heap_marking_state_)),
conservative_marking_visitor_(heap, mutator_marking_state_,
*marking_visitor_,
mutator_unified_heap_marking_state_) {
*marking_visitor_) {
concurrent_marker_ = std::make_unique<UnifiedHeapConcurrentMarker>(
heap_, v8_heap, marking_worklists_, schedule_, platform_,
mutator_unified_heap_marking_state_, config.collection_type);
@@ -531,7 +515,7 @@ void CppHeap::AttachIsolate(Isolate* isolate) {
&CppGraphBuilder::Run, this);
}
SetMetricRecorder(std::make_unique<MetricRecorderAdapter>(*this));
isolate_->global_handles()->SetStackStart(base::Stack::GetStackStart());
isolate_->heap()->SetStackStart(base::Stack::GetStackStart());
oom_handler().SetCustomHandler(&FatalOutOfMemoryHandlerImpl);
no_gc_scope_--;
}
@@ -701,8 +685,11 @@ void CppHeap::EnterFinalPause(cppgc::EmbedderStackState stack_state) {
auto& marker = marker_.get()->To<UnifiedHeapMarker>();
// Scan global handles conservatively in case we are attached to an Isolate.
if (isolate_) {
marker.conservative_visitor().SetTracedNodeBounds(
isolate()->global_handles()->GetTracedNodeBounds());
auto& heap = *isolate()->heap();
marker.conservative_visitor().SetGlobalHandlesMarkingVisitor(
std::make_unique<GlobalHandleMarkingVisitor>(
heap, *heap.mark_compact_collector()->marking_state(),
*heap.mark_compact_collector()->local_marking_worklists()));
}
marker.EnterAtomicPause(stack_state);
if (isolate_ && *collection_type_ == CollectionType::kMinor) {
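
The wiring in `EnterFinalPause()` above is a plain optional-delegate pattern: the conservative visitor owns an optional `GlobalHandleMarkingVisitor` and forwards every conservatively discovered address to it. Reduced to its essentials under illustrative names (these are not the V8 classes):

#include <memory>
#include <utility>

// Minimal interface for anything that can inspect a stack address.
class PointerVisitor {
 public:
  virtual ~PointerVisitor() = default;
  virtual void VisitPointer(const void* address) = 0;
};

class ConservativeVisitorLike {
 public:
  void SetDelegate(std::unique_ptr<PointerVisitor> delegate) {
    delegate_ = std::move(delegate);
  }
  void TraceConservativelyIfNeeded(const void* address) {
    // ... own conservative handling of `address` would happen here ...
    if (delegate_) delegate_->VisitPointer(address);
  }

 private:
  std::unique_ptr<PointerVisitor> delegate_;
};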

src/heap/cppgc-js/unified-heap-marking-state-inl.h

@@ -43,10 +43,6 @@ void UnifiedHeapMarkingState::MarkAndPush(
// non-empty `TracedReferenceBase` when `CppHeap` is in detached mode.
Object object = BasicTracedReferenceExtractor::GetObjectForMarking(reference);
MarkAndPush(object);
}
void UnifiedHeapMarkingState::MarkAndPush(Object object) {
if (!object.IsHeapObject()) {
// The embedder is not aware of whether numbers are materialized as heap
// objects or just passed around as Smis. This branch also filters out

src/heap/cppgc-js/unified-heap-marking-state.h

@@ -25,7 +25,6 @@ class UnifiedHeapMarkingState final {
void Update(MarkingWorklists::Local*);
V8_INLINE void MarkAndPush(const TracedReferenceBase&);
V8_INLINE void MarkAndPush(v8::internal::Object);
private:
Heap* const heap_;

src/heap/embedder-tracing.cc

@@ -175,16 +175,6 @@ void LocalEmbedderHeapTracer::StartIncrementalMarkingIfNeeded() {
}
}
void LocalEmbedderHeapTracer::NotifyEmptyEmbedderStack() {
auto* overriden_stack_state = isolate_->heap()->overriden_stack_state();
if (overriden_stack_state &&
(*overriden_stack_state ==
cppgc::EmbedderStackState::kMayContainHeapPointers))
return;
isolate_->global_handles()->NotifyEmptyEmbedderStack();
}
void LocalEmbedderHeapTracer::EmbedderWriteBarrier(Heap* heap,
JSObject js_object) {
DCHECK(InUse());

src/heap/embedder-tracing.h

@@ -152,8 +152,6 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
return default_embedder_roots_handler_;
}
void NotifyEmptyEmbedderStack();
EmbedderHeapTracer::EmbedderStackState embedder_stack_state() const {
return embedder_stack_state_;
}

src/heap/global-handle-marking-visitor.cc

@@ -0,0 +1,51 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/global-handle-marking-visitor.h"
#include "src/heap/marking-worklist-inl.h"
namespace v8 {
namespace internal {
GlobalHandleMarkingVisitor::GlobalHandleMarkingVisitor(
Heap& heap, MarkingState& marking_state,
MarkingWorklists::Local& local_marking_worklist)
: heap_(heap),
marking_state_(marking_state),
local_marking_worklist_(local_marking_worklist),
traced_node_bounds_(
heap.isolate()->global_handles()->GetTracedNodeBounds()) {}
void GlobalHandleMarkingVisitor::VisitPointer(const void* address) {
const auto upper_it = std::upper_bound(
traced_node_bounds_.begin(), traced_node_bounds_.end(), address,
[](const void* needle, const auto& pair) { return needle < pair.first; });
// Also checks emptiness as begin() == end() on empty bounds.
if (upper_it == traced_node_bounds_.begin()) return;
const auto bounds = std::next(upper_it, -1);
if (address < bounds->second) {
auto object = GlobalHandles::MarkTracedConservatively(
const_cast<Address*>(reinterpret_cast<const Address*>(address)),
const_cast<Address*>(reinterpret_cast<const Address*>(bounds->first)));
if (!object.IsHeapObject()) {
// The embedder is not aware of whether numbers are materialized as heap
// objects or just passed around as Smis. This branch also filters out
// intentionally passed `Smi::zero()` that indicate that there's no
// object to mark.
return;
}
HeapObject heap_object = HeapObject::cast(object);
if (marking_state_.WhiteToGrey(heap_object)) {
local_marking_worklist_.Push(heap_object);
}
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_.AddRetainingRoot(Root::kWrapperTracing, heap_object);
}
}
}
} // namespace internal
} // namespace v8
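
A quick check of the bounds lookup in `VisitPointer()` above, on fabricated addresses; the values below are purely illustrative.

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>
#include <vector>

int main() {
  // Two disjoint traced-node blocks: [0x1000, 0x2000) and [0x5000, 0x6000).
  const std::vector<std::pair<uintptr_t, uintptr_t>> bounds = {
      {0x1000, 0x2000}, {0x5000, 0x6000}};
  const auto contains = [&](uintptr_t address) {
    const auto it = std::upper_bound(
        bounds.begin(), bounds.end(), address,
        [](uintptr_t needle, const auto& pair) { return needle < pair.first; });
    return it != bounds.begin() && address < std::prev(it)->second;
  };
  assert(contains(0x1800));   // inside the first block: would be marked
  assert(!contains(0x3000));  // in the gap between blocks: ignored
  assert(!contains(0x0800));  // below all blocks: ignored
  assert(!contains(0x6000));  // one past the last block: ignored
  return 0;
}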

src/heap/global-handle-marking-visitor.h

@@ -0,0 +1,36 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_GLOBAL_HANDLE_MARKING_VISITOR_H_
#define V8_HEAP_GLOBAL_HANDLE_MARKING_VISITOR_H_
#include "src/handles/global-handles.h"
#include "src/heap/base/stack.h"
#include "src/heap/heap.h"
#include "src/heap/mark-compact.h"
namespace v8 {
namespace internal {
// Root marking visitor for conservatively marking traced global handles.
// The visitor assumes that on-stack pointers may point into global handle
// nodes, which then have to be kept alive.
class GlobalHandleMarkingVisitor final : public ::heap::base::StackVisitor {
public:
GlobalHandleMarkingVisitor(Heap&, MarkingState&, MarkingWorklists::Local&);
~GlobalHandleMarkingVisitor() override = default;
void VisitPointer(const void*) override;
private:
Heap& heap_;
MarkingState& marking_state_;
MarkingWorklists::Local& local_marking_worklist_;
GlobalHandles::NodeBounds traced_node_bounds_;
};
} // namespace internal
} // namespace v8

#endif // V8_HEAP_GLOBAL_HANDLE_MARKING_VISITOR_H_

src/heap/heap.cc

@@ -36,6 +36,7 @@
#include "src/execution/vm-state-inl.h"
#include "src/handles/global-handles-inl.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/base/stack.h"
#include "src/heap/basic-memory-chunk.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/code-range.h"
@@ -1819,11 +1820,6 @@ bool Heap::CollectGarbage(AllocationSpace space,
this, IsYoungGenerationCollector(collector) ? "MinorGC" : "MajorGC",
GarbageCollectionReasonToString(gc_reason));
// Filter on-stack references below this method.
isolate()
->global_handles()
->CleanupOnStackReferencesBelowCurrentStackPosition();
if (collector == GarbageCollector::MARK_COMPACTOR && cpp_heap()) {
// CppHeap needs a stack marker at the top of all entry points to allow
// deterministic passes over the stack. E.g., a verifier that should only
@@ -5100,10 +5096,7 @@ void Heap::IterateBuiltins(RootVisitor* v) {
static_assert(Builtins::AllBuiltinsAreIsolateIndependent());
}
void Heap::IterateStackRoots(RootVisitor* v) {
isolate_->Iterate(v);
isolate_->global_handles()->IterateStrongStackRoots(v);
}
void Heap::IterateStackRoots(RootVisitor* v) { isolate_->Iterate(v); }
namespace {
size_t GlobalMemorySizeFromV8Size(size_t v8_size) {
@@ -5793,6 +5786,7 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
tracer_.reset(new GCTracer(this));
array_buffer_sweeper_.reset(new ArrayBufferSweeper(this));
gc_idle_time_handler_.reset(new GCIdleTimeHandler());
stack_ = std::make_unique<::heap::base::Stack>();
memory_measurement_.reset(new MemoryMeasurement(isolate()));
memory_reducer_.reset(new MemoryReducer(this));
if (V8_UNLIKELY(TracingFlags::is_gc_stats_enabled())) {
@@ -5993,6 +5987,12 @@ const cppgc::EmbedderStackState* Heap::overriden_stack_state() const {
return cpp_heap ? cpp_heap->override_stack_state() : nullptr;
}
void Heap::SetStackStart(void* stack_start) {
stack_->SetStackStart(stack_start);
}
::heap::base::Stack& Heap::stack() { return *stack_.get(); }
void Heap::RegisterExternallyReferencedObject(Address* location) {
GlobalHandles::MarkTraced(location);
Object object(*location);
@@ -6114,6 +6114,7 @@ void Heap::TearDown() {
concurrent_marking_.reset();
gc_idle_time_handler_.reset();
stack_.reset();
memory_measurement_.reset();
allocation_tracker_for_debugging_.reset();
@@ -7581,8 +7582,6 @@ EmbedderStackStateScope::EmbedderStackStateScope(
}
local_tracer_->embedder_stack_state_ = stack_state;
if (EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers == stack_state)
local_tracer_->NotifyEmptyEmbedderStack();
}
// static
@@ -7598,8 +7597,6 @@ EmbedderStackStateScope::EmbedderStackStateScope(
: local_tracer_(local_tracer),
old_stack_state_(local_tracer_->embedder_stack_state_) {
local_tracer_->embedder_stack_state_ = stack_state;
if (EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers == stack_state)
local_tracer_->NotifyEmptyEmbedderStack();
}
EmbedderStackStateScope::~EmbedderStackStateScope() {

src/heap/heap.h

@@ -28,6 +28,7 @@
#include "src/common/globals.h"
#include "src/heap/allocation-observer.h"
#include "src/heap/allocation-result.h"
#include "src/heap/base/stack.h"
#include "src/heap/heap-allocator.h"
#include "src/init/heap-symbols.h"
#include "src/objects/allocation-site.h"
@@ -51,6 +52,12 @@ class ClassNameAsHeapObjectNameScope;
} // namespace internal
} // namespace cppgc
namespace heap {
namespace base {
class Stack;
} // namespace base
} // namespace heap
namespace v8 {
namespace debug {
@@ -60,6 +67,7 @@ using OutOfMemoryCallback = void (*)(void* data);
namespace internal {
namespace heap {
class HeapTester;
class TestMemoryAllocatorScope;
} // namespace heap
@@ -1196,11 +1204,16 @@ class Heap {
const cppgc::EmbedderStackState* overriden_stack_state() const;
V8_EXPORT_PRIVATE void SetStackStart(void* stack_start);
::heap::base::Stack& stack();
// ===========================================================================
// Embedder roots optimizations. =============================================
// ===========================================================================
V8_EXPORT_PRIVATE void SetEmbedderRootsHandler(EmbedderRootsHandler* handler);
V8_EXPORT_PRIVATE
void SetEmbedderRootsHandler(EmbedderRootsHandler* handler);
EmbedderRootsHandler* GetEmbedderRootsHandler() const;
@@ -2336,6 +2349,7 @@ class Heap {
std::unique_ptr<LocalEmbedderHeapTracer> local_embedder_heap_tracer_;
std::unique_ptr<AllocationTrackerForDebugging>
allocation_tracker_for_debugging_;
std::unique_ptr<::heap::base::Stack> stack_;
// This object controls virtual space reserved for code on the V8 heap. This
// is only valid for 64-bit architectures where kRequiresCodeRange.
@@ -2455,6 +2469,7 @@ class Heap {
friend class EvacuateVisitorBase;
friend class GCCallbacksScope;
friend class GCTracer;
friend class GlobalHandleMarkingVisitor;
friend class HeapAllocator;
friend class HeapObjectIterator;
friend class ScavengeTaskObserver;

src/heap/mark-compact.cc

@@ -27,6 +27,7 @@
#include "src/heap/evacuation-allocator-inl.h"
#include "src/heap/gc-tracer-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/global-handle-marking-visitor.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/index-generator.h"
@@ -2082,6 +2083,20 @@ void MarkCompactCollector::MarkRoots(RootVisitor* root_visitor,
ProcessTopOptimizedFrame(custom_root_body_visitor, client);
});
}
if (!heap_->cpp_heap() && heap_->local_embedder_heap_tracer()->InUse()) {
// Conservative global handle scanning is necessary for keeping
// v8::TracedReference alive from the stack. This is only needed when using
// `EmbedderHeapTracer` and not using `CppHeap`.
auto& stack = heap()->stack();
if (stack.stack_start() &&
heap_->local_embedder_heap_tracer()->embedder_stack_state() ==
cppgc::EmbedderStackState::kMayContainHeapPointers) {
GlobalHandleMarkingVisitor global_handles_marker(
*heap_, marking_state_, *local_marking_worklists_);
stack.IteratePointers(&global_handles_marker);
}
}
}
#ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_MB

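For context on `stack.IteratePointers()` in the hunk above: conceptually, it hands every word between the current stack top and the recorded stack start to the visitor. A rough, self-contained sketch, assuming a downward-growing stack; the real `::heap::base::Stack` additionally spills and scans callee-saved registers and handles ASAN fake frames.

#include <cstdint>

// Minimal stand-in for ::heap::base::StackVisitor.
class StackVisitorLike {
 public:
  virtual ~StackVisitorLike() = default;
  virtual void VisitPointer(const void* address) = 0;
};

// Conceptual word-by-word scan: the live region is [stack_top, stack_start)
// because the stack grows down on the supported platforms.
void IterateStackWords(const void* stack_start, const void* stack_top,
                       StackVisitorLike* visitor) {
  for (const uintptr_t* slot = static_cast<const uintptr_t*>(stack_top);
       slot < static_cast<const uintptr_t*>(stack_start); ++slot) {
    visitor->VisitPointer(reinterpret_cast<const void*>(*slot));
  }
}
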
test/unittests/heap/traced-reference-unittest.cc

@@ -224,7 +224,7 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnConstruction) {
}
}
TEST_F(TracedReferenceTest, WriteBarrierOnHeapReset) {
TEST_F(TracedReferenceTest, WriteBarrierForOnHeapReset) {
if (!FLAG_incremental_marking)
GTEST_SKIP() << "Write barrier tests require incremental marking";
@@ -239,14 +239,15 @@ TEST_F(TracedReferenceTest, WriteBarrierOnHeapReset) {
MarkingState state(i_isolate());
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
ref->Reset(v8_isolate(), local);
EXPECT_TRUE(state.IsGrey(HeapObject::cast(*Utils::OpenHandle(*local))));
EXPECT_FALSE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
}
}
TEST_F(TracedReferenceTest, NoWriteBarrierOnStackReset) {
if (!FLAG_incremental_marking) return;
TEST_F(TracedReferenceTest, WriteBarrierForOnStackReset) {
if (!FLAG_incremental_marking)
GTEST_SKIP() << "Write barrier tests require incremental marking";
isolate()->global_handles()->SetStackStart(base::Stack::GetStackStart());
heap()->SetStackStart(base::Stack::GetStackStart());
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
@@ -259,7 +260,7 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnStackReset) {
MarkingState state(i_isolate());
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
ref.Reset(v8_isolate(), local);
EXPECT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
EXPECT_FALSE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
}
}
@@ -281,14 +282,15 @@ TEST_F(TracedReferenceTest, WriteBarrierOnHeapCopy) {
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
*ref_to = *ref_from;
EXPECT_TRUE(!ref_from->IsEmpty());
EXPECT_TRUE(state.IsGrey(HeapObject::cast(*Utils::OpenHandle(*local))));
EXPECT_FALSE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
}
}
TEST_F(TracedReferenceTest, NoWriteBarrierOnStackCopy) {
if (!FLAG_incremental_marking) return;
TEST_F(TracedReferenceTest, WriteBarrierForOnStackCopy) {
if (!FLAG_incremental_marking)
GTEST_SKIP() << "Write barrier tests require incremental marking";
isolate()->global_handles()->SetStackStart(base::Stack::GetStackStart());
heap()->SetStackStart(base::Stack::GetStackStart());
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
@@ -304,11 +306,11 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnStackCopy) {
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
ref_to = *ref_from;
EXPECT_TRUE(!ref_from->IsEmpty());
EXPECT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
EXPECT_FALSE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
}
}
TEST_F(TracedReferenceTest, WriteBarrierOnMove) {
TEST_F(TracedReferenceTest, WriteBarrierForOnHeapMove) {
if (!FLAG_incremental_marking)
GTEST_SKIP() << "Write barrier tests require incremental marking";
@@ -326,15 +328,15 @@ TEST_F(TracedReferenceTest, WriteBarrierOnMove) {
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
*ref_to = std::move(*ref_from);
ASSERT_TRUE(ref_from->IsEmpty());
EXPECT_TRUE(state.IsGrey(HeapObject::cast(*Utils::OpenHandle(*local))));
EXPECT_FALSE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
}
}
TEST_F(TracedReferenceTest, NoWriteBarrierOnStackMove) {
TEST_F(TracedReferenceTest, WriteBarrierForOnStackMove) {
if (!FLAG_incremental_marking)
GTEST_SKIP() << "Write barrier tests require incremental marking";
isolate()->global_handles()->SetStackStart(base::Stack::GetStackStart());
heap()->SetStackStart(base::Stack::GetStackStart());
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
@@ -350,7 +352,7 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnStackMove) {
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
ref_to = std::move(*ref_from);
ASSERT_TRUE(ref_from->IsEmpty());
EXPECT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
EXPECT_FALSE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
}
}

test/unittests/heap/embedder-tracing-unittest.cc

@@ -788,6 +788,8 @@ TEST_F(EmbedderTracingTest, BasicTracedReference) {
v8::HandleScope scope(v8_isolate());
TestEmbedderHeapTracer tracer;
heap::TemporaryEmbedderHeapTracerScope tracer_scope(v8_isolate(), &tracer);
tracer.SetStackStart(const_cast<void*>(
::heap::base::Stack::GetCurrentStackPointerForLocalVariables()));
i::GlobalHandles* global_handles = i_isolate()->global_handles();
const size_t initial_count = global_handles->handles_count();
@@ -804,8 +806,17 @@
}
traced->~TracedReference<v8::Value>();
EXPECT_EQ(initial_count + 1, global_handles->handles_count());
// GC should clear the handle.
FullGC();
{
// Conservative scanning may find stale pointers to on-stack handles.
// Disable scanning, assuming the slots are overwritten.
EmbedderStackStateScope scope =
EmbedderStackStateScope::ExplicitScopeForTesting(
reinterpret_cast<i::Isolate*>(v8_isolate())
->heap()
->local_embedder_heap_tracer(),
EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers);
FullGC();
}
EXPECT_EQ(initial_count, global_handles->handles_count());
delete[] memory;
}
@@ -929,14 +940,14 @@ enum class Operation {
};
template <typename T>
void PerformOperation(Operation op, T* lhs, T* rhs) {
V8_NOINLINE void PerformOperation(Operation op, T* target, T* source) {
switch (op) {
case Operation::kMove:
*lhs = std::move(*rhs);
*target = std::move(*source);
break;
case Operation::kCopy:
*lhs = *rhs;
rhs->Reset();
*target = *source;
source->Reset();
break;
}
}
@@ -980,12 +991,22 @@ V8_NOINLINE void StackToHeapTest(v8::Isolate* v8_isolate,
tracer->AddReferenceForTracing(heap_handle);
FullGC(v8_isolate);
EXPECT_FALSE(observer.IsEmpty());
tracer->AddReferenceForTracing(heap_handle);
PerformOperation(op, heap_handle, &stack_handle);
tracer->AddReferenceForTracing(heap_handle);
FullGC(v8_isolate);
EXPECT_FALSE(observer.IsEmpty());
FullGC(v8_isolate);
EXPECT_TRUE(observer.IsEmpty());
{
// Conservative scanning may find stale pointers to on-stack handles.
// Disable scanning, assuming the slots are overwritten.
EmbedderStackStateScope scope =
EmbedderStackStateScope::ExplicitScopeForTesting(
reinterpret_cast<i::Isolate*>(v8_isolate)
->heap()
->local_embedder_heap_tracer(),
EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers);
FullGC(v8_isolate);
}
ASSERT_TRUE(observer.IsEmpty());
delete heap_handle;
}
@@ -1070,46 +1091,23 @@ V8_NOINLINE void StackToStackTest(v8::Isolate* v8_isolate,
EXPECT_TRUE(observer.IsEmpty());
}
V8_NOINLINE void TracedReferenceCleanedTest(v8::Isolate* v8_isolate,
TestEmbedderHeapTracer* tracer) {
v8::HandleScope scope(v8_isolate);
v8::Local<v8::Object> object(ConstructTraceableJSApiObject(
v8_isolate->GetCurrentContext(), nullptr, nullptr));
const size_t before = reinterpret_cast<Isolate*>(v8_isolate)
->global_handles()
->NumberOfOnStackHandlesForTesting();
for (int i = 0; i < 100; i++) {
v8::TracedReference<v8::Value> stack_handle;
stack_handle.Reset(v8_isolate, object);
}
EXPECT_EQ(before + 1, reinterpret_cast<Isolate*>(v8_isolate)
->global_handles()
->NumberOfOnStackHandlesForTesting());
}
} // namespace
TEST_F(EmbedderTracingTest, TracedReferenceOnStack) {
ManualGCScope manual_gc(i_isolate());
TestEmbedderHeapTracer tracer;
heap::TemporaryEmbedderHeapTracerScope tracer_scope(v8_isolate(), &tracer);
tracer.SetStackStart(&manual_gc);
tracer.SetStackStart(const_cast<void*>(
::heap::base::Stack::GetCurrentStackPointerForLocalVariables()));
OnStackTest<v8::TracedReference<v8::Value>>(v8_isolate(), &tracer);
}
TEST_F(EmbedderTracingTest, TracedReferenceCleaned) {
ManualGCScope manual_gc(i_isolate());
TestEmbedderHeapTracer tracer;
heap::TemporaryEmbedderHeapTracerScope tracer_scope(v8_isolate(), &tracer);
tracer.SetStackStart(&manual_gc);
TracedReferenceCleanedTest(v8_isolate(), &tracer);
}
TEST_F(EmbedderTracingTest, TracedReferenceMove) {
ManualGCScope manual_gc(i_isolate());
TestEmbedderHeapTracer tracer;
heap::TemporaryEmbedderHeapTracerScope tracer_scope(v8_isolate(), &tracer);
tracer.SetStackStart(&manual_gc);
tracer.SetStackStart(const_cast<void*>(
::heap::base::Stack::GetCurrentStackPointerForLocalVariables()));
StackToHeapTest(v8_isolate(), &tracer, Operation::kMove,
TargetHandling::kNonInitialized);
StackToHeapTest(v8_isolate(), &tracer, Operation::kMove,
@@ -1134,7 +1132,8 @@ TEST_F(EmbedderTracingTest, TracedReferenceCopy) {
ManualGCScope manual_gc(i_isolate());
TestEmbedderHeapTracer tracer;
heap::TemporaryEmbedderHeapTracerScope tracer_scope(v8_isolate(), &tracer);
tracer.SetStackStart(&manual_gc);
tracer.SetStackStart(const_cast<void*>(
::heap::base::Stack::GetCurrentStackPointerForLocalVariables()));
StackToHeapTest(v8_isolate(), &tracer, Operation::kCopy,
TargetHandling::kNonInitialized);
StackToHeapTest(v8_isolate(), &tracer, Operation::kCopy,
@@ -1168,27 +1167,34 @@ V8_NOINLINE void CreateTracedReferenceInDeepStack(
observer->SetWeak();
}
V8_NOINLINE void TracedReferenceNotifyEmptyStackTest(
V8_NOINLINE void TracedReferenceOnStackReferencesAreTemporaryTest(
v8::Isolate* v8_isolate, TestEmbedderHeapTracer* tracer) {
v8::Global<v8::Object> observer;
CreateTracedReferenceInDeepStack(v8_isolate, &observer);
EXPECT_FALSE(observer.IsEmpty());
reinterpret_cast<i::Isolate*>(v8_isolate)
->heap()
->local_embedder_heap_tracer()
->NotifyEmptyEmbedderStack();
FullGC(v8_isolate);
{
// Conservative scanning may find stale pointers to on-stack handles.
// Disable scanning, assuming the slots are overwritten.
EmbedderStackStateScope scope =
EmbedderStackStateScope::ExplicitScopeForTesting(
reinterpret_cast<i::Isolate*>(v8_isolate)
->heap()
->local_embedder_heap_tracer(),
EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers);
FullGC(v8_isolate);
}
EXPECT_TRUE(observer.IsEmpty());
}
} // namespace
TEST_F(EmbedderTracingTest, NotifyEmptyStack) {
TEST_F(EmbedderTracingTest, OnStackReferencesAreTemporary) {
ManualGCScope manual_gc(i_isolate());
TestEmbedderHeapTracer tracer;
heap::TemporaryEmbedderHeapTracerScope tracer_scope(v8_isolate(), &tracer);
tracer.SetStackStart(&manual_gc);
TracedReferenceNotifyEmptyStackTest(v8_isolate(), &tracer);
tracer.SetStackStart(const_cast<void*>(
::heap::base::Stack::GetCurrentStackPointerForLocalVariables()));
TracedReferenceOnStackReferencesAreTemporaryTest(v8_isolate(), &tracer);
}
} // namespace heap