diff --git a/BUILD.bazel b/BUILD.bazel
index 9a47a2c883..1b7e7e1576 100644
--- a/BUILD.bazel
+++ b/BUILD.bazel
@@ -1382,6 +1382,8 @@ filegroup(
         "src/handles/global-handles-inl.h",
         "src/handles/global-handles.cc",
         "src/handles/global-handles.h",
+        "src/handles/traced-handles.cc",
+        "src/handles/traced-handles.h",
         "src/handles/handles-inl.h",
         "src/handles/handles.cc",
         "src/handles/handles.h",
diff --git a/BUILD.gn b/BUILD.gn
index 0bd85fc9dc..1dbfbc78c4 100644
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -3033,6 +3033,7 @@ v8_header_set("v8_internal_headers") {
     "src/handles/maybe-handles.h",
     "src/handles/persistent-handles.h",
     "src/handles/shared-object-conveyor-handles.h",
+    "src/handles/traced-handles.h",
     "src/heap/allocation-observer.h",
     "src/heap/allocation-result.h",
     "src/heap/allocation-stats.h",
@@ -4439,6 +4440,7 @@ v8_source_set("v8_base_without_compiler") {
     "src/handles/local-handles.cc",
     "src/handles/persistent-handles.cc",
     "src/handles/shared-object-conveyor-handles.cc",
+    "src/handles/traced-handles.cc",
     "src/heap/allocation-observer.cc",
     "src/heap/array-buffer-sweeper.cc",
     "src/heap/base-space.cc",
diff --git a/include/v8-internal.h b/include/v8-internal.h
index a52d066c83..e2477e85fe 100644
--- a/include/v8-internal.h
+++ b/include/v8-internal.h
@@ -581,6 +581,8 @@ class Internals {
   static const int kNodeStateMask = 0x3;
   static const int kNodeStateIsWeakValue = 2;
 
+  static const int kTracedNodeClassIdOffset = kApiSystemPointerSize + 2;
+
   static const int kFirstNonstringType = 0x80;
   static const int kOddballType = 0x83;
   static const int kForeignType = 0xcc;
diff --git a/include/v8-traced-handle.h b/include/v8-traced-handle.h
index 7719b9bc01..e0fd57c49d 100644
--- a/include/v8-traced-handle.h
+++ b/include/v8-traced-handle.h
@@ -403,7 +403,7 @@ void TracedReferenceBase::SetWrapperClassId(uint16_t class_id) {
   using I = internal::Internals;
   if (IsEmpty()) return;
   internal::Address* obj = reinterpret_cast<internal::Address*>(val_);
-  uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + I::kNodeClassIdOffset;
+  uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + I::kTracedNodeClassIdOffset;
   *reinterpret_cast<uint16_t*>(addr) = class_id;
 }
 
@@ -411,7 +411,7 @@ uint16_t TracedReferenceBase::WrapperClassId() const {
   using I = internal::Internals;
   if (IsEmpty()) return 0;
   internal::Address* obj = reinterpret_cast<internal::Address*>(val_);
-  uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + I::kNodeClassIdOffset;
+  uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + I::kTracedNodeClassIdOffset;
   return *reinterpret_cast<uint16_t*>(addr);
 }
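
The new kTracedNodeClassIdOffset encodes a layout assumption about the internal TracedNode introduced later in this patch: a pointer-sized object slot, then two single-byte fields (index and flags), then the 16-bit wrapper class id. A minimal standalone sketch of that assumed layout with the offset check spelled out; the names here are illustrative stand-ins, not the actual V8 declarations:

    #include <cstddef>
    #include <cstdint>

    using Address = uintptr_t;  // stand-in for internal::Address
    constexpr int kApiSystemPointerSize = sizeof(void*);

    struct TracedNodeLayout {
      Address object;     // offset 0
      uint8_t index;      // offset kApiSystemPointerSize
      uint8_t flags;      // offset kApiSystemPointerSize + 1
      uint16_t class_id;  // offset kApiSystemPointerSize + 2
    };

    static_assert(offsetof(TracedNodeLayout, class_id) ==
                      kApiSystemPointerSize + 2,
                  "class id must sit at the offset published in v8-internal.h");

The traced-handles.cc hunk further down backs this with the same static_assert against the real TracedNode.
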
diff --git a/src/api/api.cc b/src/api/api.cc
index f9bf060d04..f231865f80 100644
--- a/src/api/api.cc
+++ b/src/api/api.cc
@@ -63,6 +63,7 @@
 #include "src/handles/global-handles.h"
 #include "src/handles/persistent-handles.h"
 #include "src/handles/shared-object-conveyor-handles.h"
+#include "src/handles/traced-handles.h"
 #include "src/heap/embedder-tracing.h"
 #include "src/heap/heap-inl.h"
 #include "src/heap/heap-write-barrier.h"
@@ -792,8 +793,7 @@ i::Address* GlobalizeTracedReference(i::Isolate* i_isolate, i::Address* obj,
   Utils::ApiCheck((slot != nullptr), "v8::GlobalizeTracedReference",
                   "the address slot must be not null");
 #endif
-  i::Handle<i::Object> result =
-      i_isolate->global_handles()->CreateTraced(*obj, slot, store_mode);
+  auto result = i_isolate->traced_handles()->Create(*obj, slot, store_mode);
 #ifdef VERIFY_HEAP
   if (i::v8_flags.verify_heap) {
     i::Object(*obj).ObjectVerify(i_isolate);
   }
@@ -803,16 +803,16 @@
 }
 
 void MoveTracedReference(internal::Address** from, internal::Address** to) {
-  GlobalHandles::MoveTracedReference(from, to);
+  TracedHandles::Move(from, to);
 }
 
 void CopyTracedReference(const internal::Address* const* from,
                          internal::Address** to) {
-  GlobalHandles::CopyTracedReference(from, to);
+  TracedHandles::Copy(from, to);
 }
 
 void DisposeTracedReference(internal::Address* location) {
-  GlobalHandles::DestroyTracedReference(location);
+  TracedHandles::Destroy(location);
 }
 
 }  // namespace internal
@@ -10529,7 +10529,7 @@ void EmbedderHeapTracer::IterateTracedGlobalHandles(
     TracedGlobalHandleVisitor* visitor) {
   i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(v8_isolate_);
   i::DisallowGarbageCollection no_gc;
-  i_isolate->global_handles()->IterateTracedNodes(visitor);
+  i_isolate->traced_handles()->Iterate(visitor);
 }
 
 bool EmbedderHeapTracer::IsRootForNonTracingGC(
diff --git a/src/execution/isolate.cc b/src/execution/isolate.cc
index 6ddad0276c..41b9f56473 100644
--- a/src/execution/isolate.cc
+++ b/src/execution/isolate.cc
@@ -3409,6 +3409,7 @@ Isolate::Isolate(std::unique_ptr<IsolateAllocator> isolate_allocator,
       isolate_allocator_(std::move(isolate_allocator)),
       id_(isolate_counter.fetch_add(1, std::memory_order_relaxed)),
       allocator_(new TracingAccountingAllocator(this)),
+      traced_handles_(this),
       builtins_(this),
 #if defined(DEBUG) || defined(VERIFY_HEAP)
       num_active_deserializers_(0),
diff --git a/src/execution/isolate.h b/src/execution/isolate.h
index 7f1eb99c61..935594e1d8 100644
--- a/src/execution/isolate.h
+++ b/src/execution/isolate.h
@@ -31,6 +31,7 @@
 #include "src/execution/shared-mutex-guard-if-off-thread.h"
 #include "src/execution/stack-guard.h"
 #include "src/handles/handles.h"
+#include "src/handles/traced-handles.h"
 #include "src/heap/factory.h"
 #include "src/heap/heap.h"
 #include "src/heap/read-only-heap.h"
@@ -1289,6 +1290,8 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
 
   GlobalHandles* global_handles() const { return global_handles_; }
 
+  TracedHandles* traced_handles() { return &traced_handles_; }
+
   EternalHandles* eternal_handles() const { return eternal_handles_; }
 
   ThreadManager* thread_manager() const { return thread_manager_; }
@@ -2214,6 +2217,7 @@
   AccountingAllocator* allocator_ = nullptr;
   InnerPointerToCodeCache* inner_pointer_to_code_cache_ = nullptr;
   GlobalHandles* global_handles_ = nullptr;
+  TracedHandles traced_handles_;
   EternalHandles* eternal_handles_ = nullptr;
   ThreadManager* thread_manager_ = nullptr;
   bigint::Processor* bigint_processor_ = nullptr;
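
Note that the isolate now owns the TracedHandles instance by value (unlike global_handles_, which stays a pointer member), tying its lifetime to the isolate without a separate allocation. From the embedder's side nothing changes: a TracedReference reset still funnels through the API glue above, only now ending in TracedHandles::Create(). A rough usage sketch; the function name is illustrative:

    #include "include/v8.h"

    // Resetting a traced reference from embedder code. With this patch the
    // call chain is Reset -> GlobalizeTracedReference -> TracedHandles::Create.
    void MakeTracedRef(v8::Isolate* isolate, v8::Local<v8::Object> obj,
                       v8::TracedReference<v8::Object>& out) {
      out.Reset(isolate, obj);
    }
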
diff --git a/src/handles/global-handles-inl.h b/src/handles/global-handles-inl.h
index 449a10776e..1f86e2dcb4 100644
--- a/src/handles/global-handles-inl.h
+++ b/src/handles/global-handles-inl.h
@@ -27,12 +27,6 @@ T GlobalHandleVector<T>::Pop() {
   return obj;
 }
 
-// static
-Object GlobalHandles::Acquire(Address* location) {
-  return Object(reinterpret_cast<std::atomic<Address>*>(location)->load(
-      std::memory_order_acquire));
-}
-
 }  // namespace internal
 }  // namespace v8
 
diff --git a/src/handles/global-handles.cc b/src/handles/global-handles.cc
index 58c6c03c7c..66b9cff80e 100644
--- a/src/handles/global-handles.cc
+++ b/src/handles/global-handles.cc
@@ -10,7 +10,6 @@
 #include <cstdint>
 #include <map>
 
-#include "include/v8-traced-handle.h"
 #include "src/api/api-inl.h"
 #include "src/base/compiler-specific.h"
 #include "src/base/logging.h"
@@ -196,7 +195,6 @@ class GlobalHandles::NodeSpace final {
  public:
   using BlockType = NodeBlock<NodeType>;
   using iterator = NodeIterator<BlockType>;
-  using NodeBounds = GlobalHandles::NodeBounds;
 
   static NodeSpace* From(NodeType* node);
   static void Release(NodeType* node);
@@ -213,8 +211,6 @@ class GlobalHandles::NodeSpace final {
   size_t TotalSize() const { return blocks_ * sizeof(NodeType) * kBlockSize; }
   size_t handles_count() const { return handles_count_; }
 
-  NodeBounds GetNodeBlockBounds() const;
-
  private:
   void PutNodesOnFreeList(BlockType* block);
   V8_INLINE void Free(NodeType* node);
@@ -257,21 +253,6 @@ NodeType* GlobalHandles::NodeSpace<NodeType>::Allocate() {
   return node;
 }
 
-template <class NodeType>
-typename GlobalHandles::NodeSpace<NodeType>::NodeBounds
-GlobalHandles::NodeSpace<NodeType>::GetNodeBlockBounds() const {
-  NodeBounds block_bounds;
-  for (BlockType* current = first_used_block_; current;
-       current = current->next_used()) {
-    block_bounds.push_back({current->begin_address(), current->end_address()});
-  }
-  std::sort(block_bounds.begin(), block_bounds.end(),
-            [](const auto& pair1, const auto& pair2) {
-              return pair1.first < pair2.first;
-            });
-  return block_bounds;
-}
-
 template <class NodeType>
 void GlobalHandles::NodeSpace<NodeType>::PutNodesOnFreeList(BlockType* block) {
   for (int32_t i = kBlockSize - 1; i >= 0; --i) {
@@ -611,174 +592,19 @@ class GlobalHandles::Node final : public NodeBase<GlobalHandles::Node> {
   friend class NodeBase<Node>;
 };
 
-class GlobalHandles::TracedNode final
-    : public NodeBase<GlobalHandles::TracedNode> {
- public:
-  TracedNode() {
-    DCHECK(!is_in_young_list());
-    DCHECK(!markbit());
-  }
-
-  // Copy and move ctors are used when constructing a TracedNode when recording
-  // a node for on-stack data structures. (Older compilers may refer to copy
-  // instead of move ctor.)
-  TracedNode(TracedNode&& other) V8_NOEXCEPT = default;
-  TracedNode(const TracedNode& other) V8_NOEXCEPT = default;
-
-  enum State { FREE = 0, NORMAL, NEAR_DEATH };
-
-  State state() const { return NodeState::decode(flags_); }
-  void set_state(State state) { flags_ = NodeState::update(flags_, state); }
-
-  void MarkAsFree() { set_state(FREE); }
-  void MarkAsUsed() { set_state(NORMAL); }
-
-  template <AccessMode access_mode = AccessMode::NON_ATOMIC>
-  bool IsInUse() const {
-    if constexpr (access_mode == AccessMode::NON_ATOMIC) {
-      return NodeState::decode(flags_) != FREE;
-    }
-    const auto flags =
-        reinterpret_cast<const std::atomic<uint8_t>&>(flags_).load(
-            std::memory_order_relaxed);
-    return NodeState::decode(flags);
-  }
-
-  bool IsRetainer() const { return state() == NORMAL; }
-
-  bool is_in_young_list() const { return IsInYoungList::decode(flags_); }
-  void set_in_young_list(bool v) { flags_ = IsInYoungList::update(flags_, v); }
-
-  bool is_root() const { return IsRoot::decode(flags_); }
-  void set_root(bool v) { flags_ = IsRoot::update(flags_, v); }
-
-  template <AccessMode access_mode = AccessMode::NON_ATOMIC>
-  void set_markbit() {
-    if constexpr (access_mode == AccessMode::NON_ATOMIC) {
-      flags_ = Markbit::update(flags_, true);
-      return;
-    }
-    std::atomic<uint8_t>& atomic_flags =
-        reinterpret_cast<std::atomic<uint8_t>&>(flags_);
-    const uint8_t new_value =
-        Markbit::update(atomic_flags.load(std::memory_order_relaxed), true);
-    atomic_flags.fetch_or(new_value, std::memory_order_relaxed);
-  }
-
-  template <AccessMode access_mode = AccessMode::NON_ATOMIC>
-  bool markbit() const {
-    if constexpr (access_mode == AccessMode::NON_ATOMIC) {
-      return Markbit::decode(flags_);
-    }
-    const auto flags =
-        reinterpret_cast<const std::atomic<uint8_t>&>(flags_).load(
-            std::memory_order_relaxed);
-    return Markbit::decode(flags);
-  }
-
-  void clear_markbit() { flags_ = Markbit::update(flags_, false); }
-
-  void clear_object() {
-    reinterpret_cast<std::atomic<Address>*>(&object_)->store(
-        kNullAddress, std::memory_order_relaxed);
-  }
-
-  void CopyObjectReference(const TracedNode& other) {
-    reinterpret_cast<std::atomic<Address>*>(&object_)->store(
-        other.object_, std::memory_order_relaxed);
-  }
-
-  void ResetPhantomHandle() {
-    DCHECK(IsInUse());
-    NodeSpace<TracedNode>::Release(this);
-    DCHECK(!IsInUse());
-  }
-
-  static void Verify(const Address* const* slot);
-
- protected:
-  // Various state is managed in a bit field where some of the state is managed
-  // concurrently, whereas other state is managed only on the main thread when
-  // no concurrent thread has access to flags, e.g., in the atomic pause of the
-  // garbage collector. All state is made available to other threads using
-  // `Publish()`.
-  //
-  // The following state is modified only on the main thread.
-  using NodeState = base::BitField8<State, 0, 2>;
-  using IsInYoungList = NodeState::Next<bool, 1>;
-  using IsRoot = IsInYoungList::Next<bool, 1>;
-  // The markbit is the exception as it can be set from the main and marker
-  // threads at the same time.
-  using Markbit = IsRoot::Next<bool, 1>;
-
-  void ClearImplFields() { set_root(true); }
-
-  void CheckNodeIsFreeNodeImpl() const {
-    DCHECK(is_root());
-    DCHECK(!markbit());
-    DCHECK(!IsInUse());
-  }
-
-  friend class NodeBase;
-};
-
-// static
-void GlobalHandles::EnableMarkingBarrier(Isolate* isolate) {
-  auto* global_handles = isolate->global_handles();
-  DCHECK(!global_handles->is_marking_);
-  global_handles->is_marking_ = true;
-}
-
-// static
-void GlobalHandles::DisableMarkingBarrier(Isolate* isolate) {
-  auto* global_handles = isolate->global_handles();
-  DCHECK(global_handles->is_marking_);
-  global_handles->is_marking_ = false;
-}
-
-// static
-void GlobalHandles::TracedNode::Verify(const Address* const* slot) {
-#ifdef DEBUG
-  const TracedNode* node = FromLocation(*slot);
-  auto* global_handles = GlobalHandles::From(node);
-  DCHECK(node->IsInUse());
-  Heap* heap = global_handles->isolate()->heap();
-  auto* incremental_marking = heap->incremental_marking();
-  if (incremental_marking && incremental_marking->IsMarking()) {
-    Object object = node->object();
-    if (object.IsHeapObject()) {
-      DCHECK_IMPLIES(node->markbit(),
-                     !heap->marking_state()->IsWhite(HeapObject::cast(object)));
-    }
-  }
-  DCHECK_IMPLIES(ObjectInYoungGeneration(node->object()),
-                 node->is_in_young_list());
-  const bool in_young_list =
-      std::find(global_handles->traced_young_nodes_.begin(),
-                global_handles->traced_young_nodes_.end(),
-                node) != global_handles->traced_young_nodes_.end();
-  DCHECK_EQ(in_young_list, node->is_in_young_list());
-#endif  // DEBUG
-}
-
-size_t GlobalHandles::TotalSize() const {
-  return regular_nodes_->TotalSize() + traced_nodes_->TotalSize();
-}
+size_t GlobalHandles::TotalSize() const { return regular_nodes_->TotalSize(); }
 
 size_t GlobalHandles::UsedSize() const {
-  return regular_nodes_->handles_count() * sizeof(Node) +
-         traced_nodes_->handles_count() * sizeof(TracedNode);
+  return regular_nodes_->handles_count() * sizeof(Node);
 }
 
 size_t GlobalHandles::handles_count() const {
-  return regular_nodes_->handles_count() + traced_nodes_->handles_count();
+  return regular_nodes_->handles_count();
 }
 
 GlobalHandles::GlobalHandles(Isolate* isolate)
     : isolate_(isolate),
-      regular_nodes_(std::make_unique<NodeSpace<GlobalHandles::Node>>(this)),
-      traced_nodes_(
-          std::make_unique<NodeSpace<GlobalHandles::TracedNode>>(this)) {}
+      regular_nodes_(std::make_unique<NodeSpace<GlobalHandles::Node>>(this)) {}
 
 GlobalHandles::~GlobalHandles() = default;
 
@@ -804,25 +630,6 @@ Handle<Object> GlobalHandles::Create(Address value) {
   return Create(Object(value));
 }
 
-Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot,
-                                           GlobalHandleStoreMode store_mode) {
-  GlobalHandles::TracedNode* node = traced_nodes_->Allocate();
-  if (NeedsTrackingInYoungNodes(value, node)) {
-    traced_young_nodes_.push_back(node);
-    node->set_in_young_list(true);
-  }
-  if (is_marking_ && store_mode != GlobalHandleStoreMode::kInitializingStore) {
-    node->set_markbit();
-    WriteBarrier::MarkingFromGlobalHandle(value);
-  }
-  return node->Publish(value);
-}
-
-Handle<Object> GlobalHandles::CreateTraced(Address value, Address* slot,
-                                           GlobalHandleStoreMode store_mode) {
-  return CreateTraced(Object(value), slot, store_mode);
-}
-
 Handle<Object> GlobalHandles::CopyGlobal(Address* location) {
   DCHECK_NOT_NULL(location);
   GlobalHandles* global_handles =
@@ -835,35 +642,6 @@ Handle<Object> GlobalHandles::CopyGlobal(Address* location) {
   return global_handles->Create(*location);
 }
 
-namespace {
-void SetSlotThreadSafe(Address** slot, Address* val) {
-  reinterpret_cast<std::atomic<Address*>*>(slot)->store(
-      val, std::memory_order_relaxed);
-}
-}  // namespace
-
-// static
-void GlobalHandles::CopyTracedReference(const Address* const* from,
-                                        Address** to) {
-  DCHECK_NOT_NULL(*from);
-  DCHECK_NULL(*to);
-  const TracedNode* from_node = TracedNode::FromLocation(*from);
-  DCHECK_NE(kGlobalHandleZapValue, from_node->raw_object());
-  GlobalHandles* global_handles =
-      GlobalHandles::From(const_cast<TracedNode*>(from_node));
-  Handle<Object> o = global_handles->CreateTraced(
-      from_node->object(), reinterpret_cast<Address*>(to),
-      GlobalHandleStoreMode::kAssigningStore);
-  SetSlotThreadSafe(to, o.location());
-  TracedNode::Verify(from);
-  TracedNode::Verify(to);
-#ifdef VERIFY_HEAP
-  if (v8_flags.verify_heap) {
-    Object(**to).ObjectVerify(global_handles->isolate());
-  }
-#endif  // VERIFY_HEAP
-}
-
 // static
 void GlobalHandles::MoveGlobal(Address** from, Address** to) {
   DCHECK_NOT_NULL(*from);
@@ -876,111 +654,12 @@ void GlobalHandles::MoveGlobal(Address** from, Address** to) {
   // Strong handles do not require fixups.
 }
 
-// static
-void GlobalHandles::MoveTracedReference(Address** from, Address** to) {
-  // Fast path for moving from an empty reference.
-  if (!*from) {
-    DestroyTracedReference(*to);
-    SetSlotThreadSafe(to, nullptr);
-    return;
-  }
-
-  // Determining whether from or to are on stack.
-  TracedNode* from_node = TracedNode::FromLocation(*from);
-  DCHECK(from_node->IsInUse());
-  TracedNode* to_node = TracedNode::FromLocation(*to);
-  // Pure heap move.
-  DCHECK_IMPLIES(*to, to_node->IsInUse());
-  DCHECK_IMPLIES(*to, kGlobalHandleZapValue != to_node->raw_object());
-  DCHECK_NE(kGlobalHandleZapValue, from_node->raw_object());
-  DestroyTracedReference(*to);
-  SetSlotThreadSafe(to, *from);
-  to_node = from_node;
-  DCHECK_NOT_NULL(*from);
-  DCHECK_NOT_NULL(*to);
-  DCHECK_EQ(*from, *to);
-  if (GlobalHandles::From(to_node)->is_marking_) {
-    // Write barrier needs to cover node as well as object.
-    to_node->set_markbit();
-    WriteBarrier::MarkingFromGlobalHandle(to_node->object());
-  }
-  SetSlotThreadSafe(from, nullptr);
-  TracedNode::Verify(to);
-}
-
-// static
-GlobalHandles* GlobalHandles::From(const TracedNode* node) {
-  return NodeBlock<TracedNode>::From(node)->global_handles();
-}
-
-// static
-void GlobalHandles::MarkTraced(Address* location) {
-  TracedNode* node = TracedNode::FromLocation(location);
-  DCHECK(node->IsInUse());
-  node->set_markbit();
-}
-
-// static
-Object GlobalHandles::MarkTracedConservatively(
-    Address* inner_location, Address* traced_node_block_base) {
-  // Compute the `TracedNode` address based on its inner pointer.
-  const ptrdiff_t delta = reinterpret_cast<uintptr_t>(inner_location) -
-                          reinterpret_cast<uintptr_t>(traced_node_block_base);
-  const auto index = delta / sizeof(TracedNode);
-  TracedNode& node =
-      reinterpret_cast<TracedNode*>(traced_node_block_base)[index];
-  // `MarkTracedConservatively()` runs concurrently with marking code. Reading
-  // state concurrently to setting the markbit is safe.
-  if (!node.IsInUse()) return Smi::zero();
-  node.set_markbit();
-  return node.object();
-}
-
 void GlobalHandles::Destroy(Address* location) {
   if (location != nullptr) {
     NodeSpace<Node>::Release(Node::FromLocation(location));
   }
 }
 
-// static
-void GlobalHandles::DestroyTracedReference(Address* location) {
-  if (!location) return;
-
-  TracedNode* node = TracedNode::FromLocation(location);
-  auto* global_handles = GlobalHandles::From(node);
-  DCHECK_IMPLIES(global_handles->is_marking_,
-                 !global_handles->is_sweeping_on_mutator_thread_);
-  DCHECK_IMPLIES(global_handles->is_sweeping_on_mutator_thread_,
-                 !global_handles->is_marking_);
-
-  // If sweeping on the mutator thread is running then the handle destruction
-  // may be a result of a Reset() call from a destructor. The node will be
-  // reclaimed on the next cycle.
-  //
-  // This allows v8::TracedReference::Reset() calls from destructors on
-  // objects that may be used from stack and heap.
-  if (global_handles->is_sweeping_on_mutator_thread_) {
-    return;
-  }
-
-  if (global_handles->is_marking_) {
-    // Incremental marking is on. This also covers the scavenge case which
-    // prohibits eagerly reclaiming nodes when marking is on during a scavenge.
-    //
-    // On-heap traced nodes are released in the atomic pause in
-    // `IterateWeakRootsForPhantomHandles()` when they are discovered as not
-    // marked. Eagerly clear out the object here to avoid needlessly marking it
-    // from this point on. The node will be reclaimed on the next cycle.
-    node->clear_object();
-    return;
-  }
-
-  // In case marking and sweeping are off, the handle may be freed immediately.
-  // Note that this includes also the case when invoking the first pass
-  // callbacks during the atomic pause which requires releasing a node fully.
-  NodeSpace<TracedNode>::Release(node);
-}
-
 using GenericCallback = v8::WeakCallbackInfo<void>::Callback;
 
 void GlobalHandles::MakeWeak(Address* location, void* parameter,
@@ -1031,42 +710,6 @@ void GlobalHandles::IterateWeakRootsForPhantomHandles(
   for (Node* node : *regular_nodes_) {
     if (node->IsWeakRetainer()) ResetWeakNodeIfDead(node, should_reset_handle);
   }
-  for (TracedNode* node : *traced_nodes_) {
-    if (!node->IsInUse()) continue;
-    // Detect unreachable nodes first.
-    if (!node->markbit()) {
-      // The handle itself is unreachable. We can clear it even if the target V8
-      // object is alive.
-      node->ResetPhantomHandle();
-      continue;
-    }
-    // Clear the markbit for the next GC.
-    node->clear_markbit();
-    DCHECK(node->IsInUse());
-    // TODO(v8:13141): Turn into a DCHECK after some time.
-    CHECK(!should_reset_handle(isolate()->heap(), node->location()));
-  }
-}
-
-void GlobalHandles::ComputeWeaknessForYoungObjects(
-    WeakSlotCallback is_unmodified) {
-  if (!v8_flags.reclaim_unmodified_wrappers) return;
-
-  // Treat all objects as roots during incremental marking to avoid corrupting
-  // marking worklists.
-  if (isolate()->heap()->incremental_marking()->IsMarking()) return;
-
-  auto* const handler = isolate()->heap()->GetEmbedderRootsHandler();
-  for (TracedNode* node : traced_young_nodes_) {
-    if (node->IsInUse()) {
-      DCHECK(node->is_root());
-      if (is_unmodified(node->location())) {
-        v8::Value* value = ToApi<v8::Value>(node->handle());
-        node->set_root(handler->IsRoot(
-            *reinterpret_cast<v8::TracedReference<v8::Value>*>(&value)));
-      }
-    }
-  }
 }
 
 void GlobalHandles::IterateYoungStrongAndDependentRoots(RootVisitor* v) {
@@ -1076,11 +719,6 @@ void GlobalHandles::IterateYoungStrongAndDependentRoots(RootVisitor* v) {
       node->location());
     }
   }
-  for (TracedNode* node : traced_young_nodes_) {
-    if (node->IsInUse() && node->is_root()) {
-      v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
-    }
-  }
 }
 
 void GlobalHandles::ProcessWeakYoungObjects(
@@ -1095,29 +733,6 @@ void GlobalHandles::ProcessWeakYoungObjects(
       node->location());
     }
   }
-
-  if (!v8_flags.reclaim_unmodified_wrappers) return;
-
-  auto* const handler = isolate()->heap()->GetEmbedderRootsHandler();
-  for (TracedNode* node : traced_young_nodes_) {
-    if (!node->IsInUse()) continue;
-
-    DCHECK_IMPLIES(node->is_root(),
-                   !should_reset_handle(isolate_->heap(), node->location()));
-    if (should_reset_handle(isolate_->heap(), node->location())) {
-      v8::Value* value = ToApi<v8::Value>(node->handle());
-      handler->ResetRoot(
-          *reinterpret_cast<v8::TracedReference<v8::Value>*>(&value));
-      // We cannot check whether a node is in use here as the reset behavior
-      // depends on whether incremental marking is running when reclaiming
-      // young objects.
-    } else {
-      if (!node->is_root()) {
-        node->set_root(true);
-        v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
-      }
-    }
-  }
 }
 
 void GlobalHandles::InvokeSecondPassPhantomCallbacks() {
@@ -1188,12 +803,10 @@ void ClearListOfYoungNodesImpl(Isolate* isolate, std::vector<T*>* node_list) {
 
 void GlobalHandles::UpdateListOfYoungNodes() {
   UpdateListOfYoungNodesImpl(isolate_, &young_nodes_);
-  UpdateListOfYoungNodesImpl(isolate_, &traced_young_nodes_);
 }
 
 void GlobalHandles::ClearListOfYoungNodes() {
   ClearListOfYoungNodesImpl(isolate_, &young_nodes_);
-  ClearListOfYoungNodesImpl(isolate_, &traced_young_nodes_);
 }
 
 template <typename T>
@@ -1288,11 +901,6 @@ void GlobalHandles::IterateWeakRoots(RootVisitor* v) {
       node->location());
     }
   }
-  for (TracedNode* node : *traced_nodes_) {
-    if (node->IsInUse()) {
-      v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
-    }
-  }
 }
 
 DISABLE_CFI_PERF
@@ -1303,11 +911,6 @@ void GlobalHandles::IterateAllRoots(RootVisitor* v) {
       node->location());
     }
   }
-  for (TracedNode* node : *traced_nodes_) {
-    if (node->IsRetainer()) {
-      v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
-    }
-  }
 }
 
 DISABLE_CFI_PERF
@@ -1318,11 +921,6 @@ void GlobalHandles::IterateAllYoungRoots(RootVisitor* v) {
       node->location());
     }
   }
-  for (TracedNode* node : traced_young_nodes_) {
-    if (node->IsRetainer()) {
-      v->VisitRootPointer(Root::kGlobalHandles, nullptr, node->location());
-    }
-  }
 }
 
 DISABLE_CFI_PERF
@@ -1343,25 +941,6 @@ void GlobalHandles::IterateAllRootsForTesting(
   }
 }
 
-GlobalHandles::NodeBounds GlobalHandles::GetTracedNodeBounds() const {
-  return traced_nodes_->GetNodeBlockBounds();
-}
-
-START_ALLOW_USE_DEPRECATED()
-
-DISABLE_CFI_PERF void GlobalHandles::IterateTracedNodes(
-    v8::EmbedderHeapTracer::TracedGlobalHandleVisitor* visitor) {
-  for (TracedNode* node : *traced_nodes_) {
-    if (node->IsInUse()) {
-      v8::Value* value = ToApi<v8::Value>(node->handle());
-      visitor->VisitTracedReference(
-          *reinterpret_cast<v8::TracedReference<v8::Value>*>(&value));
-    }
-  }
-}
-
-END_ALLOW_USE_DEPRECATED()
-
 void GlobalHandles::RecordStats(HeapStats* stats) {
   *stats->global_handle_count = 0;
   *stats->weak_global_handle_count = 0;
diff --git a/src/handles/global-handles.h b/src/handles/global-handles.h
index 7ab929f9ca..e9fb843692 100644
--- a/src/handles/global-handles.h
+++ b/src/handles/global-handles.h
@@ -13,7 +13,6 @@
 #include "include/v8-callbacks.h"
 #include "include/v8-persistent-handle.h"
 #include "include/v8-profiler.h"
-#include "include/v8-traced-handle.h"
 #include "src/handles/handles.h"
 #include "src/heap/heap.h"
 #include "src/objects/heap-object.h"
@@ -30,9 +29,6 @@ class RootVisitor;
 // callbacks and finalizers attached to them.
 class V8_EXPORT_PRIVATE GlobalHandles final {
  public:
-  static void EnableMarkingBarrier(Isolate*);
-  static void DisableMarkingBarrier(Isolate*);
-
   GlobalHandles(const GlobalHandles&) = delete;
   GlobalHandles& operator=(const GlobalHandles&) = delete;
 
@@ -71,19 +67,6 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
   // Tells whether global handle is weak.
   static bool IsWeak(Address* location);
 
-  //
-  // API for traced handles.
-  //
-
-  static void MoveTracedReference(Address** from, Address** to);
-  static void CopyTracedReference(const Address* const* from, Address** to);
-  static void DestroyTracedReference(Address* location);
-  static void MarkTraced(Address* location);
-  static Object MarkTracedConservatively(Address* inner_location,
-                                         Address* traced_node_block_base);
-
-  V8_INLINE static Object Acquire(Address* location);
-
   explicit GlobalHandles(Isolate* isolate);
   ~GlobalHandles();
 
@@ -94,11 +77,6 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
   template <typename T>
   inline Handle<T> Create(T value);
 
-  Handle<Object> CreateTraced(Object value, Address* slot,
-                              GlobalHandleStoreMode store_mode);
-  Handle<Object> CreateTraced(Address value, Address* slot,
-                              GlobalHandleStoreMode store_mode);
-
   void RecordStats(HeapStats* stats);
 
   size_t InvokeFirstPassWeakCallbacks();
@@ -113,14 +91,6 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
   void IterateAllRoots(RootVisitor* v);
   void IterateAllYoungRoots(RootVisitor* v);
 
-  START_ALLOW_USE_DEPRECATED()
-
-  // Iterates over all traces handles represented by `v8::TracedReferenceBase`.
-  void IterateTracedNodes(
-      v8::EmbedderHeapTracer::TracedGlobalHandleVisitor* visitor);
-
-  END_ALLOW_USE_DEPRECATED()
-
   // Marks handles that are phantom or have callbacks based on the predicate
   // |should_reset_handle| as pending.
   void IterateWeakRootsForPhantomHandles(
@@ -146,13 +116,6 @@
   // empty.
   void ClearListOfYoungNodes();
 
-  // Computes whether young weak objects should be considered roots for young
-  // generation garbage collections or just be treated weakly. Per default
-  // objects are considered as roots. Objects are treated not as root when both
-  // - `is_unmodified()` returns true;
-  // - the `EmbedderRootsHandler` also does not consider them as roots;
-  void ComputeWeaknessForYoungObjects(WeakSlotCallback is_unmodified);
-
   Isolate* isolate() const { return isolate_; }
 
   size_t TotalSize() const;
@@ -160,18 +123,8 @@
   // Number of global handles.
   size_t handles_count() const;
 
-  using NodeBounds = std::vector<std::pair<const void*, const void*>>;
-  NodeBounds GetTracedNodeBounds() const;
-
   void IterateAllRootsForTesting(v8::PersistentHandleVisitor* v);
 
-  void NotifyStartSweepingOnMutatorThread() {
-    is_sweeping_on_mutator_thread_ = true;
-  }
-  void NotifyEndSweepingOnMutatorThread() {
-    is_sweeping_on_mutator_thread_ = false;
-  }
-
 #ifdef DEBUG
   void PrintStats();
   void Print();
@@ -185,9 +138,6 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
   template <class NodeType>
   class NodeSpace;
   class PendingPhantomCallback;
-  class TracedNode;
-
-  static GlobalHandles* From(const TracedNode*);
 
   template <typename T>
   size_t InvokeFirstPassWeakCallbacks(
@@ -204,17 +154,11 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
       WeakSlotCallbackWithHeap should_reset_node);
 
   Isolate* const isolate_;
-  bool is_marking_ = false;
-  bool is_sweeping_on_mutator_thread_ = false;
 
   std::unique_ptr<NodeSpace<Node>> regular_nodes_;
   // Contains all nodes holding young objects. Note: when the list
   // is accessed, some of the objects may have been promoted already.
   std::vector<Node*> young_nodes_;
-
-  std::unique_ptr<NodeSpace<TracedNode>> traced_nodes_;
-  std::vector<TracedNode*> traced_young_nodes_;
-
   std::vector<std::pair<Node*, PendingPhantomCallback>>
       regular_pending_phantom_callbacks_;
   std::vector<PendingPhantomCallback> second_pass_callbacks_;
diff --git a/src/handles/traced-handles.cc b/src/handles/traced-handles.cc
new file mode 100644
index 0000000000..d2d4bafcdf
--- /dev/null
+++ b/src/handles/traced-handles.cc
@@ -0,0 +1,883 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/handles/traced-handles.h"
+
+#include <limits>
+
+#include "include/v8-internal.h"
+#include "include/v8-traced-handle.h"
+#include "src/base/logging.h"
+#include "src/common/globals.h"
+#include "src/handles/handles.h"
+#include "src/heap/heap-write-barrier-inl.h"
+#include "src/objects/objects.h"
+#include "src/objects/visitors.h"
+
+namespace v8::internal {
+
+class TracedHandlesImpl;
+namespace {
+
+class TracedNodeBlock;
+
+constexpr size_t kBlockSize = 256;
+
+constexpr uint16_t kInvalidFreeListNodeIndex = -1;
+
+class TracedNode final {
+ public:
+  static TracedNode* FromLocation(Address* location) {
+    return reinterpret_cast<TracedNode*>(location);
+  }
+
+  static const TracedNode* FromLocation(const Address* location) {
+    return reinterpret_cast<const TracedNode*>(location);
+  }
+
+  TracedNode() = default;
+  void Initialize(uint8_t, uint16_t);
+
+  uint8_t index() const { return index_; }
+
+  bool is_root() const { return IsRoot::decode(flags_); }
+  void set_root(bool v) { flags_ = IsRoot::update(flags_, v); }
+
+  template <AccessMode access_mode = AccessMode::NON_ATOMIC>
+  bool is_in_use() const {
+    if constexpr (access_mode == AccessMode::NON_ATOMIC) {
+      return IsInUse::decode(flags_);
+    }
+    const auto flags =
+        reinterpret_cast<const std::atomic<uint8_t>&>(flags_).load(
+            std::memory_order_relaxed);
+    return IsInUse::decode(flags);
+  }
+  void set_is_in_use(bool v) { flags_ = IsInUse::update(flags_, v); }
+
+  bool is_in_young_list() const { return IsInYoungList::decode(flags_); }
+  void set_is_in_young_list(bool v) {
+    flags_ = IsInYoungList::update(flags_, v);
+  }
+
+  uint16_t next_free() const { return next_free_index_; }
+  void set_next_free(uint16_t next_free_index) {
+    next_free_index_ = next_free_index;
+  }
+  void set_class_id(uint16_t class_id) { class_id_ = class_id; }
+
+  template <AccessMode access_mode = AccessMode::NON_ATOMIC>
+  void set_markbit() {
+    if constexpr (access_mode == AccessMode::NON_ATOMIC) {
+      flags_ = Markbit::update(flags_, true);
+      return;
+    }
+    std::atomic<uint8_t>& atomic_flags =
+        reinterpret_cast<std::atomic<uint8_t>&>(flags_);
+    const uint8_t new_value =
+        Markbit::update(atomic_flags.load(std::memory_order_relaxed), true);
+    atomic_flags.fetch_or(new_value, std::memory_order_relaxed);
+  }
+
+  template <AccessMode access_mode = AccessMode::NON_ATOMIC>
+  bool markbit() const {
+    if constexpr (access_mode == AccessMode::NON_ATOMIC) {
+      return Markbit::decode(flags_);
+    }
+    const auto flags =
+        reinterpret_cast<const std::atomic<uint8_t>&>(flags_).load(
+            std::memory_order_relaxed);
+    return Markbit::decode(flags);
+  }
+
+  void clear_markbit() { flags_ = Markbit::update(flags_, false); }
+
+  void set_raw_object(Address value) { object_ = value; }
+  Address raw_object() const { return object_; }
+  Object object() const { return Object(object_); }
+  Handle<Object> handle() { return Handle<Object>(&object_); }
+  FullObjectSlot location() { return FullObjectSlot(&object_); }
+
+  TracedNodeBlock& GetNodeBlock();
+  const TracedNodeBlock& GetNodeBlock() const;
+
+  Handle<Object> Publish(Object object, bool needs_young_bit_update,
+                         bool needs_black_allocation);
+  void Release();
+
+ private:
+  using IsInUse = base::BitField8<bool, 0, 1>;
+  using IsInYoungList = IsInUse::Next<bool, 1>;
+  using IsRoot = IsInYoungList::Next<bool, 1>;
+  // The markbit is the exception as it can be set from the main and marker
+  // threads at the same time.
+  using Markbit = IsRoot::Next<bool, 1>;
+
+  Address object_ = kNullAddress;
+  uint8_t index_ = 0;
+  uint8_t flags_ = 0;
+  union {
+    // When a node is not in use, this index is used to build the free list.
+    uint16_t next_free_index_;
+    // When a node is in use, the user can specify a class id.
+    uint16_t class_id_;
+  };
+};
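
The four logical bits of TracedNode live in a single uint8_t, and only the markbit may be written concurrently (from marker threads), which is why it is set via fetch_or on an atomic view of flags_. A condensed, self-contained sketch of that scheme; the reinterpret_cast mirrors the one above and relies, as V8 does, on std::atomic<uint8_t> being layout-compatible with a plain uint8_t:

    #include <atomic>
    #include <cstdint>

    class FlagsSketch {
     public:
      bool is_in_use() const { return flags_ & kInUse; }
      void set_markbit_atomic() {
        // fetch_or leaves the other bits untouched and is safe against a
        // concurrent reader of the markbit.
        reinterpret_cast<std::atomic<uint8_t>&>(flags_).fetch_or(
            kMark, std::memory_order_relaxed);
      }

     private:
      static constexpr uint8_t kInUse = 1 << 0;
      static constexpr uint8_t kInYoungList = 1 << 1;
      static constexpr uint8_t kRoot = 1 << 2;
      static constexpr uint8_t kMark = 1 << 3;
      uint8_t flags_ = 0;
    };
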
+
+void TracedNode::Initialize(uint8_t index, uint16_t next_free_index) {
+  static_assert(offsetof(TracedNode, class_id_) ==
+                Internals::kTracedNodeClassIdOffset);
+  DCHECK(!is_in_use());
+  DCHECK(!is_in_young_list());
+  DCHECK(!is_root());
+  DCHECK(!markbit());
+  index_ = index;
+  next_free_index_ = next_free_index;
+}
+
+// Publishes all internal state to be consumed by other threads.
+Handle<Object> TracedNode::Publish(Object object, bool needs_young_bit_update,
+                                   bool needs_black_allocation) {
+  DCHECK(!is_in_use());
+  DCHECK(!is_root());
+  DCHECK(!markbit());
+  set_class_id(0);
+  if (needs_young_bit_update) {
+    set_is_in_young_list(true);
+  }
+  if (needs_black_allocation) {
+    set_markbit();
+  }
+  set_root(true);
+  set_is_in_use(true);
+  reinterpret_cast<std::atomic<Address>*>(&object_)->store(
+      object.ptr(), std::memory_order_release);
+  return Handle<Object>(&object_);
+}
+
+void TracedNode::Release() {
+  DCHECK(is_in_use());
+  // Only preserve the in-young-list bit which is used to avoid duplicates in
+  // TracedHandlesImpl::young_nodes_.
+  flags_ &= IsInYoungList::encode(true);
+  DCHECK(!is_in_use());
+  DCHECK(!is_root());
+  DCHECK(!markbit());
+  set_raw_object(kGlobalHandleZapValue);
+}
+
+template <typename T, typename NodeAccessor>
+class DoublyLinkedList final {
+  template <typename U>
+  class IteratorImpl final
+      : public base::iterator<std::forward_iterator_tag, U> {
+   public:
+    explicit IteratorImpl(U* object) : object_(object) {}
+    IteratorImpl(const IteratorImpl& other) V8_NOEXCEPT
+        : object_(other.object_) {}
+    U* operator*() { return object_; }
+    bool operator==(const IteratorImpl& rhs) const {
+      return rhs.object_ == object_;
+    }
+    bool operator!=(const IteratorImpl& rhs) const { return !(*this == rhs); }
+    inline IteratorImpl& operator++() {
+      object_ = ListNodeFor(object_)->next;
+      return *this;
+    }
+    inline IteratorImpl operator++(int) {
+      IteratorImpl tmp(*this);
+      operator++();
+      return tmp;
+    }
+
+   private:
+    U* object_;
+  };
+
+ public:
+  using Iterator = IteratorImpl<T>;
+  using ConstIterator = IteratorImpl<const T>;
+
+  struct ListNode {
+    T* prev = nullptr;
+    T* next = nullptr;
+  };
+
+  T* Front() { return front_; }
+
+  void PushFront(T* object) {
+    ListNodeFor(object)->next = front_;
+    if (front_) {
+      ListNodeFor(front_)->prev = object;
+    }
+    front_ = object;
+    size_++;
+  }
+
+  void PopFront() {
+    DCHECK(!Empty());
+
+    if (ListNodeFor(front_)->next) {
+      ListNodeFor(ListNodeFor(front_)->next)->prev = nullptr;
+    }
+    front_ = ListNodeFor(front_)->next;
+    size_--;
+  }
+
+  void Remove(T* object) {
+    if (front_ == object) {
+      front_ = ListNodeFor(object)->next;
+    }
+    // Capture the successor before the links are cleared below; otherwise the
+    // predecessor would be rewired to nullptr instead of the successor.
+    T* const next = ListNodeFor(object)->next;
+    if (next) {
+      ListNodeFor(next)->prev = ListNodeFor(object)->prev;
+    }
+    if (ListNodeFor(object)->prev) {
+      ListNodeFor(ListNodeFor(object)->prev)->next = next;
+      ListNodeFor(object)->prev = nullptr;
+    }
+    ListNodeFor(object)->next = nullptr;
+    size_--;
+  }
+
+  bool Contains(T* object) const {
+    if (front_ == object) return true;
+    auto* list_node = ListNodeFor(object);
+    return list_node->prev || list_node->next;
+  }
+
+  size_t Size() const { return size_; }
+  bool Empty() const { return size_ == 0; }
+
+  Iterator begin() { return Iterator(front_); }
+  Iterator end() { return Iterator(nullptr); }
+  ConstIterator begin() const { return ConstIterator(front_); }
+  ConstIterator end() const { return ConstIterator(nullptr); }
+
+ private:
+  static ListNode* ListNodeFor(T* object) {
+    return NodeAccessor::GetListNode(object);
+  }
+  static const ListNode* ListNodeFor(const T* object) {
+    return NodeAccessor::GetListNode(const_cast<T*>(object));
+  }
+
+  T* front_ = nullptr;
+  size_t size_ = 0;
+};
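
DoublyLinkedList is intrusive: it owns no nodes, and a trait type tells it where each element's ListNode lives, so one object can sit on several lists at once. That is exactly what TracedNodeBlock needs below (an overall list plus a usable list). A minimal sketch of the accessor pattern with hypothetical names:

    struct Block;
    struct BlockListNode {
      Block* prev = nullptr;
      Block* next = nullptr;
    };
    struct Block {
      BlockListNode overall;  // membership in an "all blocks" list
      BlockListNode usable;   // membership in a "blocks with free slots" list
    };
    struct OverallAccessor {
      static BlockListNode* GetListNode(Block* b) { return &b->overall; }
    };
    struct UsableAccessor {
      static BlockListNode* GetListNode(Block* b) { return &b->usable; }
    };
    // DoublyLinkedList<Block, OverallAccessor> and
    // DoublyLinkedList<Block, UsableAccessor> then track the same Block
    // independently.
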
+
+class TracedNodeBlock final {
+  struct OverallListNode {
+    static auto* GetListNode(TracedNodeBlock* block) {
+      return &block->overall_list_node_;
+    }
+  };
+
+  struct UsableListNode {
+    static auto* GetListNode(TracedNodeBlock* block) {
+      return &block->usable_list_node_;
+    }
+  };
+
+  class NodeIteratorImpl final
+      : public base::iterator<std::forward_iterator_tag, TracedNode> {
+   public:
+    explicit NodeIteratorImpl(TracedNodeBlock* block) : block_(block) {}
+    NodeIteratorImpl(TracedNodeBlock* block, size_t current_index)
+        : block_(block), current_index_(current_index) {}
+    NodeIteratorImpl(const NodeIteratorImpl& other) V8_NOEXCEPT
+        : block_(other.block_),
+          current_index_(other.current_index_) {}
+
+    TracedNode* operator*() { return block_->at(current_index_); }
+    bool operator==(const NodeIteratorImpl& rhs) const {
+      return rhs.block_ == block_ && rhs.current_index_ == current_index_;
+    }
+    bool operator!=(const NodeIteratorImpl& rhs) const {
+      return !(*this == rhs);
+    }
+    inline NodeIteratorImpl& operator++() {
+      if (current_index_ < kBlockSize) {
+        current_index_++;
+      }
+      return *this;
+    }
+    inline NodeIteratorImpl operator++(int) {
+      NodeIteratorImpl tmp(*this);
+      operator++();
+      return tmp;
+    }
+
+   private:
+    TracedNodeBlock* block_;
+    size_t current_index_ = 0;
+  };
+
+ public:
+  using OverallList = DoublyLinkedList<TracedNodeBlock, OverallListNode>;
+  using UsableList = DoublyLinkedList<TracedNodeBlock, UsableListNode>;
+  using Iterator = NodeIteratorImpl;
+
+  explicit TracedNodeBlock(TracedHandlesImpl&, OverallList&, UsableList&);
+
+  TracedNode* AllocateNode();
+  void FreeNode(TracedNode*);
+
+  const void* nodes_begin_address() const { return nodes_; }
+  const void* nodes_end_address() const { return &nodes_[kBlockSize]; }
+
+  TracedHandlesImpl& traced_handles() const { return traced_handles_; }
+
+  TracedNode* at(size_t index) { return &nodes_[index]; }
+
+  Iterator begin() { return Iterator(this); }
+  Iterator end() { return Iterator(this, kBlockSize); }
+
+  bool IsFull() const { return used_ == kBlockSize; }
+
+ private:
+  TracedNode nodes_[kBlockSize];
+  OverallList::ListNode overall_list_node_;
+  UsableList::ListNode usable_list_node_;
+  TracedHandlesImpl& traced_handles_;
+  uint16_t used_ = 0;
+  uint16_t first_free_node_ = 0;
+};
+
+TracedNodeBlock::TracedNodeBlock(TracedHandlesImpl& traced_handles,
+                                 OverallList& overall_list,
+                                 UsableList& usable_list)
+    : traced_handles_(traced_handles) {
+  for (size_t i = 0; i < (kBlockSize - 1); i++) {
+    nodes_[i].Initialize(i, i + 1);
+  }
+  nodes_[kBlockSize - 1].Initialize(kBlockSize - 1, kInvalidFreeListNodeIndex);
+  overall_list.PushFront(this);
+  usable_list.PushFront(this);
+}
+
+TracedNode* TracedNodeBlock::AllocateNode() {
+  if (used_ == kBlockSize) {
+    DCHECK_EQ(first_free_node_, kInvalidFreeListNodeIndex);
+    return nullptr;
+  }
+
+  DCHECK_NE(first_free_node_, kInvalidFreeListNodeIndex);
+  auto* node = &nodes_[first_free_node_];
+  first_free_node_ = node->next_free();
+  used_++;
+  DCHECK(!node->is_in_use());
+  return node;
+}
+
+void TracedNodeBlock::FreeNode(TracedNode* node) {
+  DCHECK(node->is_in_use());
+  node->Release();
+  DCHECK(!node->is_in_use());
+  node->set_next_free(first_free_node_);
+  first_free_node_ = node->index();
+  used_--;
+}
+
+TracedNodeBlock& TracedNode::GetNodeBlock() {
+  TracedNode* first_node = this - index_;
+  return *reinterpret_cast<TracedNodeBlock*>(first_node);
+}
+
+const TracedNodeBlock& TracedNode::GetNodeBlock() const {
+  const TracedNode* first_node = this - index_;
+  return *reinterpret_cast<const TracedNodeBlock*>(first_node);
+}
+
+bool NeedsTrackingInYoungNodes(Object value, TracedNode* node) {
+  return ObjectInYoungGeneration(value) && !node->is_in_young_list();
+}
+
+void SetSlotThreadSafe(Address** slot, Address* val) {
+  reinterpret_cast<std::atomic<Address*>*>(slot)->store(
+      val, std::memory_order_relaxed);
+}
+
+}  // namespace
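
The block's free list costs no extra memory: a free slot reuses the 16-bit class-id storage (via the union in TracedNode) as a "next free" index, and kInvalidFreeListNodeIndex (uint16_t(-1), i.e. 65535) terminates the chain. A standalone miniature of the same index-threaded free list:

    #include <cstdint>

    constexpr uint16_t kInvalid = UINT16_MAX;  // mirrors kInvalidFreeListNodeIndex

    struct Slot {
      uint16_t next_free;  // reuses otherwise-idle storage while the slot is free
    };

    struct MiniBlock {
      Slot slots[256];
      uint16_t first_free = 0;

      MiniBlock() {
        // Thread all slots into one chain: 0 -> 1 -> ... -> 255 -> kInvalid.
        for (uint16_t i = 0; i < 255; ++i) slots[i].next_free = i + 1;
        slots[255].next_free = kInvalid;
      }
      Slot* Allocate() {
        if (first_free == kInvalid) return nullptr;  // block full
        Slot* s = &slots[first_free];
        first_free = s->next_free;
        return s;
      }
      void Free(Slot* s) {
        s->next_free = first_free;
        first_free = static_cast<uint16_t>(s - slots);
      }
    };
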
+
+class TracedHandlesImpl final {
+ public:
+  explicit TracedHandlesImpl(Isolate*);
+  ~TracedHandlesImpl();
+
+  Handle<Object> Create(Address value, Address* slot,
+                        GlobalHandleStoreMode store_mode);
+  void Destroy(TracedNodeBlock& node_block, TracedNode& node);
+  void Copy(const TracedNode& from_node, Address** to);
+  void Move(TracedNode& from_node, Address** from, Address** to);
+
+  void SetIsMarking(bool);
+  void SetIsSweepingOnMutatorThread(bool);
+
+  TracedHandles::NodeBounds GetNodeBounds() const;
+
+  void UpdateListOfYoungNodes();
+  void ClearListOfYoungNodes();
+
+  void ResetDeadNodes(WeakSlotCallbackWithHeap should_reset_handle);
+
+  void ComputeWeaknessForYoungObjects(WeakSlotCallback is_unmodified);
+  void ProcessYoungObjects(RootVisitor* visitor,
+                           WeakSlotCallbackWithHeap should_reset_handle);
+
+  void Iterate(RootVisitor* visitor);
+  void IterateYoung(RootVisitor* visitor);
+  void IterateYoungRoots(RootVisitor* visitor);
+
+  size_t used_node_count() const { return used_; }
+  size_t total_size_bytes() const {
+    return sizeof(TracedNode) * kBlockSize * blocks_.Size();
+  }
+  size_t used_size_bytes() const { return sizeof(TracedNode) * used_; }
+
+  START_ALLOW_USE_DEPRECATED()
+
+  void Iterate(v8::EmbedderHeapTracer::TracedGlobalHandleVisitor* visitor);
+
+  END_ALLOW_USE_DEPRECATED()
+
+ private:
+  TracedNode* AllocateNode();
+  void FreeNode(TracedNode*);
+
+  TracedNodeBlock::OverallList blocks_;
+  TracedNodeBlock::UsableList usable_blocks_;
+  std::vector<TracedNode*> young_nodes_;
+  Isolate* isolate_;
+  bool is_marking_ = false;
+  bool is_sweeping_on_mutator_thread_ = false;
+  size_t used_ = 0;
+};
+
+TracedNode* TracedHandlesImpl::AllocateNode() {
+  auto* block = usable_blocks_.Front();
+  if (!block) {
+    block = new TracedNodeBlock(*this, blocks_, usable_blocks_);
+    DCHECK_EQ(block, usable_blocks_.Front());
+  }
+  auto* node = block->AllocateNode();
+  if (node) {
+    used_++;
+    return node;
+  }
+
+  usable_blocks_.Remove(block);
+  return AllocateNode();
+}
+
+void TracedHandlesImpl::FreeNode(TracedNode* node) {
+  auto& block = node->GetNodeBlock();
+  // TODO(v8:13372): Keep vector of empty blocks that could be freed after
+  // fixing up young nodes.
+  if (block.IsFull() && !usable_blocks_.Contains(&block)) {
+    usable_blocks_.PushFront(&block);
+  }
+  block.FreeNode(node);
+  used_--;
+}
+
+TracedHandlesImpl::TracedHandlesImpl(Isolate* isolate) : isolate_(isolate) {}
+
+TracedHandlesImpl::~TracedHandlesImpl() {
+  while (!blocks_.Empty()) {
+    auto* block = blocks_.Front();
+    blocks_.PopFront();
+    delete block;
+  }
+}
+
+Handle<Object> TracedHandlesImpl::Create(Address value, Address* slot,
+                                         GlobalHandleStoreMode store_mode) {
+  Object object(value);
+  auto* node = AllocateNode();
+  bool needs_young_bit_update = false;
+  if (NeedsTrackingInYoungNodes(object, node)) {
+    needs_young_bit_update = true;
+    young_nodes_.push_back(node);
+  }
+  bool needs_black_allocation = false;
+  if (is_marking_ && store_mode != GlobalHandleStoreMode::kInitializingStore) {
+    needs_black_allocation = true;
+    WriteBarrier::MarkingFromGlobalHandle(object);
+  }
+  return node->Publish(object, needs_young_bit_update, needs_black_allocation);
+}
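
AllocateNode() retries recursively after evicting a full block from the front of the usable list. The recursion is easier to see as a loop; a behavior-equivalent sketch (the method name is hypothetical, the member fields are the ones declared above):

    // Pop full blocks off the usable list until one with a free slot is found;
    // create a fresh block once the list runs empty, so the loop terminates.
    TracedNode* TracedHandlesImpl::AllocateNodeIteratively() {
      for (;;) {
        auto* block = usable_blocks_.Front();
        if (!block) {
          block = new TracedNodeBlock(*this, blocks_, usable_blocks_);
        }
        if (auto* node = block->AllocateNode()) {
          used_++;
          return node;
        }
        usable_blocks_.Remove(block);
      }
    }
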
+
+void TracedHandlesImpl::Destroy(TracedNodeBlock& node_block, TracedNode& node) {
+  DCHECK_IMPLIES(is_marking_, !is_sweeping_on_mutator_thread_);
+  DCHECK_IMPLIES(is_sweeping_on_mutator_thread_, !is_marking_);
+
+  // If sweeping on the mutator thread is running then the handle destruction
+  // may be a result of a Reset() call from a destructor. The node will be
+  // reclaimed on the next cycle.
+  //
+  // This allows v8::TracedReference::Reset() calls from destructors on
+  // objects that may be used from stack and heap.
+  if (is_sweeping_on_mutator_thread_) {
+    return;
+  }
+
+  if (is_marking_) {
+    // Incremental marking is on. This also covers the scavenge case which
+    // prohibits eagerly reclaiming nodes when marking is on during a scavenge.
+    //
+    // On-heap traced nodes are released in the atomic pause in
+    // `IterateWeakRootsForPhantomHandles()` when they are discovered as not
+    // marked. Eagerly clear out the object here to avoid needlessly marking it
+    // from this point on. The node will be reclaimed on the next cycle.
+    node.set_raw_object(kNullAddress);
+    return;
+  }
+
+  // In case marking and sweeping are off, the handle may be freed immediately.
+  // Note that this includes also the case when invoking the first pass
+  // callbacks during the atomic pause which requires releasing a node fully.
+  FreeNode(&node);
+}
+
+void TracedHandlesImpl::Copy(const TracedNode& from_node, Address** to) {
+  DCHECK_NE(kGlobalHandleZapValue, from_node.raw_object());
+  Handle<Object> o =
+      Create(from_node.raw_object(), reinterpret_cast<Address*>(to),
+             GlobalHandleStoreMode::kAssigningStore);
+  SetSlotThreadSafe(to, o.location());
+#ifdef VERIFY_HEAP
+  if (v8_flags.verify_heap) {
+    Object(**to).ObjectVerify(isolate_);
+  }
+#endif  // VERIFY_HEAP
+}
+
+void TracedHandlesImpl::Move(TracedNode& from_node, Address** from,
+                             Address** to) {
+  DCHECK(from_node.is_in_use());
+
+  // Deal with old "to".
+  auto* to_node = TracedNode::FromLocation(*to);
+  DCHECK_IMPLIES(*to, to_node->is_in_use());
+  DCHECK_IMPLIES(*to, kGlobalHandleZapValue != to_node->raw_object());
+  DCHECK_NE(kGlobalHandleZapValue, from_node.raw_object());
+  if (*to) {
+    auto& to_node_block = to_node->GetNodeBlock();
+    Destroy(to_node_block, *to_node);
+  }
+
+  // Set "to" to "from".
+  SetSlotThreadSafe(to, *from);
+  to_node = &from_node;
+
+  // Deal with new "to".
+  DCHECK_NOT_NULL(*to);
+  DCHECK_EQ(*from, *to);
+  if (is_marking_) {
+    // Write barrier needs to cover node as well as object.
+    to_node->set_markbit();
+    WriteBarrier::MarkingFromGlobalHandle(to_node->object());
+  }
+  SetSlotThreadSafe(from, nullptr);
+}
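
Move() is what backs move construction/assignment of v8::TracedReference: the destination's old node is destroyed, the source's node pointer is published into the destination slot, and under concurrent marking both node and object are re-marked so the marker cannot lose them. Roughly the observable contract at the API level (MoveExample is illustrative):

    #include <utility>

    #include "include/v8-traced-handle.h"

    void MoveExample(v8::Isolate* isolate, v8::TracedReference<v8::Value>& a) {
      v8::TracedReference<v8::Value> b;
      b = std::move(a);  // routed through TracedHandles::Move()
      // b now refers to what a referred to; a is empty afterwards.
    }
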
+
+void TracedHandlesImpl::SetIsMarking(bool value) {
+  DCHECK_EQ(is_marking_, !value);
+  is_marking_ = value;
+}
+
+void TracedHandlesImpl::SetIsSweepingOnMutatorThread(bool value) {
+  DCHECK_EQ(is_sweeping_on_mutator_thread_, !value);
+  is_sweeping_on_mutator_thread_ = value;
+}
+
+TracedHandles::NodeBounds TracedHandlesImpl::GetNodeBounds() const {
+  TracedHandles::NodeBounds block_bounds;
+  for (const auto* block : blocks_) {
+    block_bounds.push_back(
+        {block->nodes_begin_address(), block->nodes_end_address()});
+  }
+  std::sort(block_bounds.begin(), block_bounds.end(),
+            [](const auto& pair1, const auto& pair2) {
+              return pair1.first < pair2.first;
+            });
+  return block_bounds;
+}
+
+void TracedHandlesImpl::UpdateListOfYoungNodes() {
+  size_t last = 0;
+  for (auto* node : young_nodes_) {
+    DCHECK(node->is_in_young_list());
+    if (node->is_in_use()) {
+      if (ObjectInYoungGeneration(node->object())) {
+        young_nodes_[last++] = node;
+      } else {
+        node->set_is_in_young_list(false);
+      }
+    } else {
+      node->set_is_in_young_list(false);
+    }
+  }
+  DCHECK_LE(last, young_nodes_.size());
+  young_nodes_.resize(last);
+  young_nodes_.shrink_to_fit();
+}
+
+void TracedHandlesImpl::ClearListOfYoungNodes() {
+  for (auto* node : young_nodes_) {
+    DCHECK(node->is_in_young_list());
+    // Nodes in use and not in use can have this bit set to false.
+    node->set_is_in_young_list(false);
+  }
+  young_nodes_.clear();
+  young_nodes_.shrink_to_fit();
+}
+
+void TracedHandlesImpl::ResetDeadNodes(
+    WeakSlotCallbackWithHeap should_reset_handle) {
+  for (auto* block : blocks_) {
+    for (auto* node : *block) {
+      if (!node->is_in_use()) continue;
+
+      // Detect unreachable nodes first.
+      if (!node->markbit()) {
+        FreeNode(node);
+        continue;
+      }
+
+      // Node was reachable. Clear the markbit for the next GC.
+      node->clear_markbit();
+      // TODO(v8:13141): Turn into a DCHECK after some time.
+      CHECK(!should_reset_handle(isolate_->heap(), node->location()));
+    }
+  }
+}
+
+void TracedHandlesImpl::ComputeWeaknessForYoungObjects(
+    WeakSlotCallback is_unmodified) {
+  if (!v8_flags.reclaim_unmodified_wrappers) return;
+
+  // Treat all objects as roots during incremental marking to avoid corrupting
+  // marking worklists.
+  if (is_marking_) return;
+
+  auto* const handler = isolate_->heap()->GetEmbedderRootsHandler();
+  for (TracedNode* node : young_nodes_) {
+    if (node->is_in_use()) {
+      DCHECK(node->is_root());
+      if (is_unmodified(node->location())) {
+        v8::Value* value = ToApi<v8::Value>(node->handle());
+        bool r = handler->IsRoot(
+            *reinterpret_cast<v8::TracedReference<v8::Value>*>(&value));
+        node->set_root(r);
+      }
+    }
+  }
+}
+
+void TracedHandlesImpl::ProcessYoungObjects(
+    RootVisitor* visitor, WeakSlotCallbackWithHeap should_reset_handle) {
+  if (!v8_flags.reclaim_unmodified_wrappers) return;
+
+  auto* const handler = isolate_->heap()->GetEmbedderRootsHandler();
+  for (TracedNode* node : young_nodes_) {
+    if (!node->is_in_use()) continue;
+
+    DCHECK_IMPLIES(node->is_root(),
+                   !should_reset_handle(isolate_->heap(), node->location()));
+    if (should_reset_handle(isolate_->heap(), node->location())) {
+      v8::Value* value = ToApi<v8::Value>(node->handle());
+      handler->ResetRoot(
+          *reinterpret_cast<v8::TracedReference<v8::Value>*>(&value));
+      // We cannot check whether a node is in use here as the reset behavior
+      // depends on whether incremental marking is running when reclaiming
+      // young objects.
+    } else {
+      if (!node->is_root()) {
+        node->set_root(true);
+        visitor->VisitRootPointer(Root::kGlobalHandles, nullptr,
+                                  node->location());
+      }
+    }
+  }
+}
+
+void TracedHandlesImpl::Iterate(RootVisitor* visitor) {
+  for (auto* block : blocks_) {
+    for (auto* node : *block) {
+      if (!node->is_in_use()) continue;
+
+      visitor->VisitRootPointer(Root::kTracedHandles, nullptr,
+                                node->location());
+    }
+  }
+}
+
+void TracedHandlesImpl::IterateYoung(RootVisitor* visitor) {
+  for (auto* node : young_nodes_) {
+    if (!node->is_in_use()) continue;
+
+    visitor->VisitRootPointer(Root::kTracedHandles, nullptr, node->location());
+  }
+}
+
+void TracedHandlesImpl::IterateYoungRoots(RootVisitor* visitor) {
+  for (auto* node : young_nodes_) {
+    if (!node->is_in_use()) continue;
+
+    if (!node->is_root()) continue;
+
+    visitor->VisitRootPointer(Root::kTracedHandles, nullptr, node->location());
+  }
+}
+
+START_ALLOW_USE_DEPRECATED()
+
+void TracedHandlesImpl::Iterate(
+    v8::EmbedderHeapTracer::TracedGlobalHandleVisitor* visitor) {
+  for (auto* block : blocks_) {
+    for (auto* node : *block) {
+      if (node->is_in_use()) {
+        v8::Value* value = ToApi<v8::Value>(node->handle());
+        visitor->VisitTracedReference(
+            *reinterpret_cast<v8::TracedReference<v8::Value>*>(&value));
+      }
+    }
+  }
+}
+
+END_ALLOW_USE_DEPRECATED()
+
+TracedHandles::TracedHandles(Isolate* isolate)
+    : impl_(std::make_unique<TracedHandlesImpl>(isolate)) {}
+
+TracedHandles::~TracedHandles() = default;
+
+Handle<Object> TracedHandles::Create(Address value, Address* slot,
+                                     GlobalHandleStoreMode store_mode) {
+  return impl_->Create(value, slot, store_mode);
+}
+
+void TracedHandles::SetIsMarking(bool value) { impl_->SetIsMarking(value); }
+
+void TracedHandles::SetIsSweepingOnMutatorThread(bool value) {
+  impl_->SetIsSweepingOnMutatorThread(value);
+}
+
+TracedHandles::NodeBounds TracedHandles::GetNodeBounds() const {
+  return impl_->GetNodeBounds();
+}
+
+void TracedHandles::UpdateListOfYoungNodes() {
+  impl_->UpdateListOfYoungNodes();
+}
+
+void TracedHandles::ClearListOfYoungNodes() { impl_->ClearListOfYoungNodes(); }
+
+void TracedHandles::ResetDeadNodes(
+    WeakSlotCallbackWithHeap should_reset_handle) {
+  impl_->ResetDeadNodes(should_reset_handle);
+}
+
+void TracedHandles::ComputeWeaknessForYoungObjects(
+    WeakSlotCallback is_unmodified) {
+  impl_->ComputeWeaknessForYoungObjects(is_unmodified);
+}
+
+void TracedHandles::ProcessYoungObjects(
+    RootVisitor* visitor, WeakSlotCallbackWithHeap should_reset_handle) {
+  impl_->ProcessYoungObjects(visitor, should_reset_handle);
+}
+
+void TracedHandles::Iterate(RootVisitor* visitor) { impl_->Iterate(visitor); }
+
+void TracedHandles::IterateYoung(RootVisitor* visitor) {
+  impl_->IterateYoung(visitor);
+}
+
+void TracedHandles::IterateYoungRoots(RootVisitor* visitor) {
+  impl_->IterateYoungRoots(visitor);
+}
+
+size_t TracedHandles::used_node_count() const {
+  return impl_->used_node_count();
+}
+
+size_t TracedHandles::total_size_bytes() const {
+  return impl_->total_size_bytes();
+}
+
+size_t TracedHandles::used_size_bytes() const {
+  return impl_->used_size_bytes();
+}
+
+START_ALLOW_USE_DEPRECATED()
+
+void TracedHandles::Iterate(
+    v8::EmbedderHeapTracer::TracedGlobalHandleVisitor* visitor) {
+  impl_->Iterate(visitor);
+}
+
+END_ALLOW_USE_DEPRECATED()
+
+// static
+void TracedHandles::Destroy(Address* location) {
+  if (!location) return;
+
+  auto* node = TracedNode::FromLocation(location);
+  auto& node_block = node->GetNodeBlock();
+  auto& traced_handles = node_block.traced_handles();
+  traced_handles.Destroy(node_block, *node);
+}
+
+// static
+void TracedHandles::Copy(const Address* const* from, Address** to) {
+  DCHECK_NOT_NULL(*from);
+  DCHECK_NULL(*to);
+
+  const TracedNode* from_node = TracedNode::FromLocation(*from);
+  const auto& node_block = from_node->GetNodeBlock();
+  auto& traced_handles = node_block.traced_handles();
+  traced_handles.Copy(*from_node, to);
+}
+
+// static
+void TracedHandles::Move(Address** from, Address** to) {
+  // Fast path for moving from an empty reference.
+  if (!*from) {
+    Destroy(*to);
+    SetSlotThreadSafe(to, nullptr);
+    return;
+  }
+
+  TracedNode* from_node = TracedNode::FromLocation(*from);
+  auto& node_block = from_node->GetNodeBlock();
+  auto& traced_handles = node_block.traced_handles();
+  traced_handles.Move(*from_node, from, to);
+}
+
+// static
+void TracedHandles::Mark(Address* location) {
+  auto* node = TracedNode::FromLocation(location);
+  DCHECK(node->is_in_use());
+  node->set_markbit();
+}
+
+// static
+Object TracedHandles::MarkConservatively(Address* inner_location,
+                                         Address* traced_node_block_base) {
+  // Compute the `TracedNode` address based on its inner pointer.
+  const ptrdiff_t delta = reinterpret_cast<uintptr_t>(inner_location) -
+                          reinterpret_cast<uintptr_t>(traced_node_block_base);
+  const auto index = delta / sizeof(TracedNode);
+  TracedNode& node =
+      reinterpret_cast<TracedNode*>(traced_node_block_base)[index];
+  // `MarkConservatively()` runs concurrently with marking code. Reading
+  // state concurrently to setting the markbit is safe.
+  if (!node.is_in_use()) return Smi::zero();
+  node.set_markbit();
+  return node.object();
+}
+
+}  // namespace v8::internal
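
MarkConservatively() maps an arbitrary inner pointer back to its node with pure integer arithmetic. This only works because TracedNodeBlock stores its nodes as a contiguous array at the very start of the block (nodes_ is the first member). The arithmetic in isolation, with a stand-in node type:

    #include <cstddef>
    #include <cstdint>

    struct Node { uint64_t payload[2]; };  // stand-in for TracedNode

    // Any address inside the block maps to the enclosing node by integer
    // division on the byte distance from the block base.
    Node* NodeFromInnerPointer(void* inner, Node* block_base) {
      const ptrdiff_t delta = reinterpret_cast<uintptr_t>(inner) -
                              reinterpret_cast<uintptr_t>(block_base);
      return block_base + delta / sizeof(Node);
    }
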
diff --git a/src/handles/traced-handles.h b/src/handles/traced-handles.h
new file mode 100644
index 0000000000..c3e1492d98
--- /dev/null
+++ b/src/handles/traced-handles.h
@@ -0,0 +1,93 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_HANDLES_TRACED_HANDLES_H_
+#define V8_HANDLES_TRACED_HANDLES_H_
+
+#include "include/v8-embedder-heap.h"
+#include "include/v8-traced-handle.h"
+#include "src/base/macros.h"
+#include "src/common/globals.h"
+#include "src/handles/handles.h"
+#include "src/objects/objects.h"
+#include "src/objects/visitors.h"
+
+namespace v8::internal {
+
+class Isolate;
+class TracedHandlesImpl;
+
+// TracedHandles hold handles that must go through cppgc's tracing methods.
+// The handles do not otherwise keep their pointees alive.
+class V8_EXPORT_PRIVATE TracedHandles final {
+ public:
+  static void Destroy(Address* location);
+  static void Copy(const Address* const* from, Address** to);
+  static void Move(Address** from, Address** to);
+
+  static void Mark(Address* location);
+  static Object MarkConservatively(Address* inner_location,
+                                   Address* traced_node_block_base);
+
+  V8_INLINE static Object Acquire(Address* location) {
+    return Object(reinterpret_cast<std::atomic<Address>*>(location)->load(
+        std::memory_order_acquire));
+  }
+
+  explicit TracedHandles(Isolate*);
+  ~TracedHandles();
+
+  TracedHandles(const TracedHandles&) = delete;
+  TracedHandles& operator=(const TracedHandles&) = delete;
+
+  Handle<Object> Create(Address value, Address* slot,
+                        GlobalHandleStoreMode store_mode);
+
+  using NodeBounds = std::vector<std::pair<const void*, const void*>>;
+  NodeBounds GetNodeBounds() const;
+
+  void SetIsMarking(bool);
+  void SetIsSweepingOnMutatorThread(bool);
+
+  // Updates the list of young nodes that is maintained separately.
+  void UpdateListOfYoungNodes();
+  // Clears the list of young nodes, assuming that the young generation is
+  // empty.
+  void ClearListOfYoungNodes();
+
+  void ResetDeadNodes(WeakSlotCallbackWithHeap should_reset_handle);
+
+  // Computes whether young weak objects should be considered roots for young
+  // generation garbage collections or just be treated weakly. By default,
+  // objects are considered roots. Objects are not treated as roots when both
+  // - `is_unmodified()` returns true;
+  // - the `EmbedderRootsHandler` also does not consider them as roots;
+  void ComputeWeaknessForYoungObjects(WeakSlotCallback is_unmodified);
+
+  void ProcessYoungObjects(RootVisitor* v,
+                           WeakSlotCallbackWithHeap should_reset_handle);
+
+  void Iterate(RootVisitor*);
+  void IterateYoung(RootVisitor*);
+  void IterateYoungRoots(RootVisitor*);
+
+  START_ALLOW_USE_DEPRECATED()
+
+  // Iterates over all traced handles represented by
+  // `v8::TracedReferenceBase`.
+  void Iterate(v8::EmbedderHeapTracer::TracedGlobalHandleVisitor* visitor);
+
+  END_ALLOW_USE_DEPRECATED()
+
+  size_t used_node_count() const;
+  size_t total_size_bytes() const;
+  size_t used_size_bytes() const;
+
+ private:
+  std::unique_ptr<TracedHandlesImpl> impl_;
+};
+
+}  // namespace v8::internal
+
+#endif  // V8_HANDLES_TRACED_HANDLES_H_
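
GetNodeBounds() returns the blocks' [begin, end) address pairs sorted by begin so a stack scanner can binary-search them. The marking visitor further down uses exactly the upper_bound-then-step-back pattern sketched here (FindBlockBase is a stand-in name):

    #include <algorithm>
    #include <iterator>
    #include <utility>
    #include <vector>

    using NodeBounds = std::vector<std::pair<const void*, const void*>>;

    // Returns the base of the block containing `address`, or nullptr if the
    // address lies in no block.
    const void* FindBlockBase(const NodeBounds& bounds, const void* address) {
      auto upper = std::upper_bound(
          bounds.begin(), bounds.end(), address,
          [](const void* addr, const auto& pair) { return addr < pair.first; });
      if (upper == bounds.begin()) return nullptr;  // before the first block
      auto candidate = std::prev(upper);
      return address < candidate->second ? candidate->first : nullptr;
    }
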
diff --git a/src/heap/cppgc-js/cpp-heap.cc b/src/heap/cppgc-js/cpp-heap.cc
index 9a34eb7d57..299edc2d34 100644
--- a/src/heap/cppgc-js/cpp-heap.cc
+++ b/src/heap/cppgc-js/cpp-heap.cc
@@ -20,8 +20,8 @@
 #include "src/base/platform/time.h"
 #include "src/execution/isolate-inl.h"
 #include "src/flags/flags.h"
-#include "src/handles/global-handles.h"
 #include "src/handles/handles.h"
+#include "src/handles/traced-handles.h"
 #include "src/heap/base/stack.h"
 #include "src/heap/cppgc-js/cpp-marking-state.h"
 #include "src/heap/cppgc-js/cpp-snapshot.h"
@@ -152,7 +152,7 @@ void TraceV8ToCppGCReferences(
   DCHECK(isolate);
   V8ToCppGCReferencesVisitor forwarding_visitor(marking_state, isolate,
                                                 wrapper_descriptor);
-  isolate->global_handles()->IterateTracedNodes(&forwarding_visitor);
+  isolate->traced_handles()->Iterate(&forwarding_visitor);
 }
 
 }  // namespace
@@ -561,34 +561,32 @@ namespace {
 
 class SweepingOnMutatorThreadForGlobalHandlesScope final {
  public:
   explicit SweepingOnMutatorThreadForGlobalHandlesScope(
-      GlobalHandles& global_handles)
-      : global_handles_(global_handles) {
-    global_handles_.NotifyStartSweepingOnMutatorThread();
+      TracedHandles& traced_handles)
+      : traced_handles_(traced_handles) {
+    traced_handles_.SetIsSweepingOnMutatorThread(true);
   }
   ~SweepingOnMutatorThreadForGlobalHandlesScope() {
-    global_handles_.NotifyEndSweepingOnMutatorThread();
+    traced_handles_.SetIsSweepingOnMutatorThread(false);
   }
 
-  GlobalHandles& global_handles_;
+  TracedHandles& traced_handles_;
 };
 
 class SweepingOnMutatorThreadForGlobalHandlesObserver final
     : public cppgc::internal::Sweeper::SweepingOnMutatorThreadObserver {
  public:
   SweepingOnMutatorThreadForGlobalHandlesObserver(CppHeap& cpp_heap,
-                                                  GlobalHandles& global_handles)
+                                                  TracedHandles& traced_handles)
       : cppgc::internal::Sweeper::SweepingOnMutatorThreadObserver(
             cpp_heap.sweeper()),
-        global_handles_(global_handles) {}
+        traced_handles_(traced_handles) {}
 
-  void Start() override {
-    global_handles_.NotifyStartSweepingOnMutatorThread();
-  }
+  void Start() override { traced_handles_.SetIsSweepingOnMutatorThread(true); }
 
-  void End() override { global_handles_.NotifyEndSweepingOnMutatorThread(); }
+  void End() override { traced_handles_.SetIsSweepingOnMutatorThread(false); }
 
  private:
-  GlobalHandles& global_handles_;
+  TracedHandles& traced_handles_;
 };
 
 }  // namespace
@@ -608,7 +606,7 @@ void CppHeap::AttachIsolate(Isolate* isolate) {
   ReduceGCCapabilititesFromFlags();
   sweeping_on_mutator_thread_observer_ =
       std::make_unique<SweepingOnMutatorThreadForGlobalHandlesObserver>(
-          *this, *isolate_->global_handles());
+          *this, *isolate_->traced_handles());
 
   no_gc_scope_--;
 }
@@ -850,7 +848,7 @@ void CppHeap::TraceEpilogue() {
     base::Optional<SweepingOnMutatorThreadForGlobalHandlesScope>
         global_handles_scope;
     if (isolate_) {
-      global_handles_scope.emplace(*isolate_->global_handles());
+      global_handles_scope.emplace(*isolate_->traced_handles());
     }
     compactable_space_handling = compactor_.CompactSpacesIfEnabled();
   }
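Reviewer note (not part of the patch): the scope and observer classes above exist so the sweeping flag cannot be left set on an early exit path. A self-contained sketch of the same RAII pattern; `HandlesLike` and `SweepingScope` are illustrative stand-ins, not V8 classes:

```cpp
#include <cassert>

// Stand-in for TracedHandles; only the flag matters for the sketch.
class HandlesLike {
 public:
  void SetIsSweepingOnMutatorThread(bool value) { sweeping_ = value; }
  bool sweeping() const { return sweeping_; }

 private:
  bool sweeping_ = false;
};

// Mirrors the scope class above: the flag flips back on every exit path.
class SweepingScope {
 public:
  explicit SweepingScope(HandlesLike& handles) : handles_(handles) {
    handles_.SetIsSweepingOnMutatorThread(true);
  }
  ~SweepingScope() { handles_.SetIsSweepingOnMutatorThread(false); }
  SweepingScope(const SweepingScope&) = delete;
  SweepingScope& operator=(const SweepingScope&) = delete;

 private:
  HandlesLike& handles_;
};

int main() {
  HandlesLike handles;
  {
    SweepingScope scope(handles);
    assert(handles.sweeping());  // handle operations can consult the flag here
  }
  assert(!handles.sweeping());  // reset on scope exit
}
```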
diff --git a/src/heap/cppgc-js/unified-heap-marking-state-inl.h b/src/heap/cppgc-js/unified-heap-marking-state-inl.h
index 212b41ed1d..beaaa9c407 100644
--- a/src/heap/cppgc-js/unified-heap-marking-state-inl.h
+++ b/src/heap/cppgc-js/unified-heap-marking-state-inl.h
@@ -9,8 +9,7 @@
 
 #include "include/v8-traced-handle.h"
 #include "src/base/logging.h"
-#include "src/handles/global-handles-inl.h"
-#include "src/handles/global-handles.h"
+#include "src/handles/traced-handles.h"
 #include "src/heap/cppgc-js/unified-heap-marking-state.h"
 #include "src/heap/heap.h"
 #include "src/heap/mark-compact.h"
@@ -23,17 +22,17 @@ namespace internal {
 class BasicTracedReferenceExtractor {
  public:
   static Object GetObjectForMarking(const TracedReferenceBase& ref) {
-    Address* global_handle_location = const_cast<Address*>(
+    Address* traced_handle_location = const_cast<Address*>(
         reinterpret_cast<const Address*>(ref.GetSlotThreadSafe()));
     // We cannot assume that the reference is non-null as we may get here by
     // tracing an ephemeron which doesn't have early bailouts, see
     // `cppgc::Visitor::TraceEphemeron()` for non-Member values.
-    if (!global_handle_location) return Object();
+    if (!traced_handle_location) return Object();
     // The load synchronizes internal bitfields that are also read atomically
     // from the concurrent marker.
-    Object object = GlobalHandles::Acquire(global_handle_location);
-    GlobalHandles::MarkTraced(global_handle_location);
+    Object object = TracedHandles::Acquire(traced_handle_location);
+    TracedHandles::Mark(traced_handle_location);
     return object;
   }
 };
diff --git a/src/heap/global-handle-marking-visitor.cc b/src/heap/global-handle-marking-visitor.cc
index b466051380..46d81cfea2 100644
--- a/src/heap/global-handle-marking-visitor.cc
+++ b/src/heap/global-handle-marking-visitor.cc
@@ -15,8 +15,7 @@ GlobalHandleMarkingVisitor::GlobalHandleMarkingVisitor(
     : heap_(heap),
       marking_state_(*heap_.marking_state()),
       local_marking_worklist_(local_marking_worklist),
-      traced_node_bounds_(
-          heap.isolate()->global_handles()->GetTracedNodeBounds()) {}
+      traced_node_bounds_(heap.isolate()->traced_handles()->GetNodeBounds()) {}
 
 void GlobalHandleMarkingVisitor::VisitPointer(const void* address) {
   const auto upper_it = std::upper_bound(
@@ -27,7 +26,7 @@ void GlobalHandleMarkingVisitor::VisitPointer(const void* address) {
   const auto bounds = std::next(upper_it, -1);
   if (address < bounds->second) {
-    auto object = GlobalHandles::MarkTracedConservatively(
+    auto object = TracedHandles::MarkConservatively(
         const_cast<Address*>(reinterpret_cast<const Address*>(address)),
         const_cast<Address*>(reinterpret_cast<const Address*>(bounds->first)));
     if (!object.IsHeapObject()) {
diff --git a/src/heap/global-handle-marking-visitor.h b/src/heap/global-handle-marking-visitor.h
index 71e805810e..8b9f1d1a82 100644
--- a/src/heap/global-handle-marking-visitor.h
+++ b/src/heap/global-handle-marking-visitor.h
@@ -5,7 +5,7 @@
 #ifndef V8_HEAP_GLOBAL_HANDLE_MARKING_VISITOR_H_
 #define V8_HEAP_GLOBAL_HANDLE_MARKING_VISITOR_H_
 
-#include "src/handles/global-handles.h"
+#include "src/handles/traced-handles.h"
 #include "src/heap/base/stack.h"
 #include "src/heap/heap.h"
 #include "src/heap/mark-compact.h"
@@ -27,7 +27,7 @@ class GlobalHandleMarkingVisitor final : public ::heap::base::StackVisitor {
   Heap& heap_;
   MarkingState& marking_state_;
   MarkingWorklists::Local& local_marking_worklist_;
-  GlobalHandles::NodeBounds traced_node_bounds_;
+  TracedHandles::NodeBounds traced_node_bounds_;
 };
 
 #endif  // V8_HEAP_GLOBAL_HANDLE_MARKING_VISITOR_H_
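Reviewer note (not part of the patch): `VisitPointer()` above finds the node block that may contain a conservatively discovered address by binary search over `NodeBounds`, the sorted list of `[begin, end)` pairs. A standalone sketch of that lookup (names are illustrative):

```cpp
#include <algorithm>
#include <cassert>
#include <iterator>
#include <utility>
#include <vector>

using Bounds = std::vector<std::pair<const void*, const void*>>;

// Returns the base of the block containing `address`, or nullptr on a miss.
const void* FindBlockBase(const Bounds& sorted_bounds, const void* address) {
  // First block whose base lies strictly above the address...
  const auto upper_it = std::upper_bound(
      sorted_bounds.begin(), sorted_bounds.end(), address,
      [](const void* addr, const auto& pair) { return addr < pair.first; });
  // ...so the only candidate is the block immediately before it.
  if (upper_it == sorted_bounds.begin()) return nullptr;
  const auto bounds = std::next(upper_it, -1);
  // Only a hit if the address also lies below that block's end.
  return address < bounds->second ? bounds->first : nullptr;
}

int main() {
  static char arena[128];
  const Bounds bounds = {{arena, arena + 64}, {arena + 64, arena + 128}};
  assert(FindBlockBase(bounds, arena + 10) == arena);
  assert(FindBlockBase(bounds, arena + 70) == arena + 64);
}
```

On a hit, the block base is handed to `TracedHandles::MarkConservatively()` together with the raw address, which then performs the index computation shown earlier.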
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index d453fee7f5..c125db07ed 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -36,6 +36,7 @@
 #include "src/execution/vm-state-inl.h"
 #include "src/flags/flags.h"
 #include "src/handles/global-handles-inl.h"
+#include "src/handles/traced-handles.h"
 #include "src/heap/array-buffer-sweeper.h"
 #include "src/heap/base/stack.h"
 #include "src/heap/basic-memory-chunk.h"
@@ -1119,11 +1120,13 @@ size_t Heap::SizeOfObjects() {
 }
 
 size_t Heap::TotalGlobalHandlesSize() {
-  return isolate_->global_handles()->TotalSize();
+  return isolate_->global_handles()->TotalSize() +
+         isolate_->traced_handles()->total_size_bytes();
 }
 
 size_t Heap::UsedGlobalHandlesSize() {
-  return isolate_->global_handles()->UsedSize();
+  return isolate_->global_handles()->UsedSize() +
+         isolate_->traced_handles()->used_size_bytes();
 }
 
 void Heap::AddAllocationObserversToAllSpaces(
@@ -4629,6 +4632,7 @@ void Heap::IterateRoots(RootVisitor* v, base::EnumSet<SkipRoot> options) {
     if (options.contains(SkipRoot::kOldGeneration)) {
       // Skip handles that are either weak or old.
       isolate_->global_handles()->IterateYoungStrongAndDependentRoots(v);
+      isolate_->traced_handles()->IterateYoungRoots(v);
     } else {
       // Skip handles that are weak.
       isolate_->global_handles()->IterateStrongRoots(v);
@@ -4638,9 +4642,11 @@ void Heap::IterateRoots(RootVisitor* v, base::EnumSet<SkipRoot> options) {
     if (options.contains(SkipRoot::kOldGeneration)) {
       // Skip handles that are old.
       isolate_->global_handles()->IterateAllYoungRoots(v);
+      isolate_->traced_handles()->IterateYoung(v);
     } else {
       // Do not skip any handles.
       isolate_->global_handles()->IterateAllRoots(v);
+      isolate_->traced_handles()->Iterate(v);
     }
   }
 }
@@ -4788,6 +4794,7 @@ void Heap::IterateRootsFromStackIncludingClient(RootVisitor* v) {
 
 void Heap::IterateWeakGlobalHandles(RootVisitor* v) {
   isolate_->global_handles()->IterateWeakRoots(v);
+  isolate_->traced_handles()->Iterate(v);
 }
 
 void Heap::IterateBuiltins(RootVisitor* v) {
@@ -5768,7 +5775,7 @@ void Heap::SetStackStart(void* stack_start) {
 }
 
 void Heap::RegisterExternallyReferencedObject(Address* location) {
-  GlobalHandles::MarkTraced(location);
+  TracedHandles::Mark(location);
   Object object(*location);
   if (!object.IsHeapObject()) {
    // The embedder is not aware of whether numbers are materialized as heap
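Reviewer note (not part of the patch): with this change, `Heap::TotalGlobalHandlesSize()` and `Heap::UsedGlobalHandlesSize()` report the sum of two pools, so embedders monitoring these numbers now see regular global handles and traced handles combined. A trivial model of the new accounting; the struct names and numbers are made-up stand-ins:

```cpp
#include <cstddef>
#include <cstdio>

// Hypothetical stand-ins for the two handle containers' statistics.
struct GlobalHandlesStats {
  size_t total_size = 0;
  size_t used_size = 0;
};
struct TracedHandlesStats {
  size_t total_size_bytes = 0;
  size_t used_size_bytes = 0;
};

// Mirrors the summing in Heap::TotalGlobalHandlesSize() above.
size_t TotalHandlesSize(const GlobalHandlesStats& g,
                        const TracedHandlesStats& t) {
  return g.total_size + t.total_size_bytes;
}

// Mirrors the summing in Heap::UsedGlobalHandlesSize() above.
size_t UsedHandlesSize(const GlobalHandlesStats& g,
                       const TracedHandlesStats& t) {
  return g.used_size + t.used_size_bytes;
}

int main() {
  GlobalHandlesStats g{4096, 1024};
  TracedHandlesStats t{2048, 512};
  std::printf("total=%zu used=%zu\n", TotalHandlesSize(g, t),
              UsedHandlesSize(g, t));  // prints total=6144 used=1536
}
```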
diff --git a/src/heap/incremental-marking.cc b/src/heap/incremental-marking.cc
index 400b8d2aa6..9ad8a2da6a 100644
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -272,6 +272,7 @@ void IncrementalMarking::MarkRoots() {
 
   heap()->isolate()->global_handles()->IterateYoungStrongAndDependentRoots(
       &visitor);
+  heap()->isolate()->traced_handles()->IterateYoungRoots(&visitor);
 
   std::vector<PageMarkingItem> marking_items;
   RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(
@@ -333,7 +334,7 @@ void IncrementalMarking::StartMarkingMajor() {
 
   MarkingBarrier::ActivateAll(heap(), is_compacting_,
                               MarkingBarrierType::kMajor);
-  GlobalHandles::EnableMarkingBarrier(heap()->isolate());
+  heap()->isolate()->traced_handles()->SetIsMarking(true);
 
   heap_->isolate()->compilation_cache()->MarkCompactPrologue();
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index eb2a0a83ab..f14693d9fa 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -2740,7 +2740,7 @@ void MarkCompactCollector::MarkLiveObjects() {
     // finished as it will reset page flags that share the same bitmap as
     // the evacuation candidate bit.
     MarkingBarrier::DeactivateAll(heap());
-    GlobalHandles::DisableMarkingBarrier(heap()->isolate());
+    heap()->isolate()->traced_handles()->SetIsMarking(false);
   }
 
   epoch_++;
@@ -2936,6 +2936,7 @@ void MarkCompactCollector::ClearNonLiveReferences() {
     // CPU profiler.
     heap()->isolate()->global_handles()->IterateWeakRootsForPhantomHandles(
         &IsUnmarkedHeapObject);
+    heap()->isolate()->traced_handles()->ResetDeadNodes(&IsUnmarkedHeapObject);
   }
 
   {
@@ -6163,7 +6164,7 @@ void MinorMarkCompactCollector::MarkRootSetInParallel(
   // Seed the root set (roots + old->new set).
   {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_SEED);
-    isolate()->global_handles()->ComputeWeaknessForYoungObjects(
+    isolate()->traced_handles()->ComputeWeaknessForYoungObjects(
        &JSObject::IsUnmodifiedApiObject);
    // MinorMC treats all weak roots except for global handles as strong.
    // That is why we don't set skip_weak = true here and instead visit
    // global handles separately.
@@ -6174,6 +6175,7 @@ void MinorMarkCompactCollector::MarkRootSetInParallel(
                              SkipRoot::kOldGeneration});
     isolate()->global_handles()->IterateYoungStrongAndDependentRoots(
         root_visitor);
+    isolate()->traced_handles()->IterateYoungRoots(root_visitor);
 
     if (!was_marked_incrementally) {
       // Create items for each page.
@@ -6241,6 +6243,8 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_GLOBAL_HANDLES);
     isolate()->global_handles()->ProcessWeakYoungObjects(
         &root_visitor, &IsUnmarkedObjectForYoungGeneration);
+    isolate()->traced_handles()->ProcessYoungObjects(
+        &root_visitor, &IsUnmarkedObjectForYoungGeneration);
     DrainMarkingWorklist();
   }
diff --git a/src/heap/scavenger.cc b/src/heap/scavenger.cc
index 569bdda2c2..5169b8b256 100644
--- a/src/heap/scavenger.cc
+++ b/src/heap/scavenger.cc
@@ -364,7 +364,7 @@ void ScavengerCollector::CollectGarbage() {
       TRACE_GC(
           heap_->tracer(),
           GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY);
-      isolate_->global_handles()->ComputeWeaknessForYoungObjects(
+      isolate_->traced_handles()->ComputeWeaknessForYoungObjects(
          &JSObject::IsUnmodifiedApiObject);
    }
    {
@@ -382,6 +382,7 @@ void ScavengerCollector::CollectGarbage() {
       heap_->IterateRoots(&root_scavenge_visitor, options);
       isolate_->global_handles()->IterateYoungStrongAndDependentRoots(
           &root_scavenge_visitor);
+      isolate_->traced_handles()->IterateYoungRoots(&root_scavenge_visitor);
       scavengers[kMainThreadId]->Publish();
     }
     {
@@ -411,6 +412,8 @@ void ScavengerCollector::CollectGarbage() {
       GlobalHandlesWeakRootsUpdatingVisitor visitor;
       isolate_->global_handles()->ProcessWeakYoungObjects(
           &visitor, &IsUnscavengedHeapObjectSlot);
+      isolate_->traced_handles()->ProcessYoungObjects(
+          &visitor, &IsUnscavengedHeapObjectSlot);
     }
     {
diff --git a/src/objects/visitors.h b/src/objects/visitors.h
index 742188fd8f..6f23a869cb 100644
--- a/src/objects/visitors.h
+++ b/src/objects/visitors.h
@@ -30,6 +30,7 @@ class CodeDataContainer;
   V(kHandleScope, "(Handle scope)")                  \
   V(kBuiltins, "(Builtins)")                         \
   V(kGlobalHandles, "(Global handles)")              \
+  V(kTracedHandles, "(Traced handles)")              \
   V(kEternalHandles, "(Eternal handles)")            \
   V(kThreadManager, "(Thread manager)")              \
   V(kStrongRoots, "(Strong roots)")                  \
diff --git a/src/profiler/heap-snapshot-generator.cc b/src/profiler/heap-snapshot-generator.cc
index edbdb4c91a..d542a174b1 100644
--- a/src/profiler/heap-snapshot-generator.cc
+++ b/src/profiler/heap-snapshot-generator.cc
@@ -2450,6 +2450,7 @@ void V8HeapExplorer::CollectGlobalObjectsTags() {
   Isolate* isolate = Isolate::FromHeap(heap_);
   GlobalObjectsEnumerator enumerator(isolate);
   isolate->global_handles()->IterateAllRoots(&enumerator);
+  isolate->traced_handles()->Iterate(&enumerator);
   for (int i = 0, l = enumerator.count(); i < l; ++i) {
     Handle<JSGlobalObject> obj = enumerator.at(i);
     const char* tag = global_object_name_resolver_->GetName(
diff --git a/src/snapshot/startup-serializer.cc b/src/snapshot/startup-serializer.cc
index 5b2d0faa42..2432d9e48b 100644
--- a/src/snapshot/startup-serializer.cc
+++ b/src/snapshot/startup-serializer.cc
@@ -241,6 +241,7 @@ void SerializedHandleChecker::VisitRootPointers(Root root,
 
 bool SerializedHandleChecker::CheckGlobalAndEternalHandles() {
   isolate_->global_handles()->IterateAllRoots(this);
+  isolate_->traced_handles()->Iterate(this);
   isolate_->eternal_handles()->IterateAllRoots(this);
   return ok_;
 }
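Reviewer note (not part of the patch): `SetIsMarking(true)` in `StartMarkingMajor()` and `SetIsMarking(false)` at the end of `MarkLiveObjects()` replace the old `EnableMarkingBarrier`/`DisableMarkingBarrier` pair. A simplified, self-contained sketch of what such a flag typically guards; this is a conceptual model with made-up names, not V8's actual barrier:

```cpp
#include <atomic>
#include <cassert>

// Global marking flag, analogous to the state SetIsMarking() toggles.
std::atomic<bool> g_is_marking{false};

struct Node {
  std::atomic<bool> markbit{false};
};

// Barrier-like helper: while marking is active, touching a handle also sets
// the markbit so a concurrently running marker cannot miss the node.
void OnHandleWrite(Node& node) {
  if (g_is_marking.load(std::memory_order_relaxed)) {
    node.markbit.store(true, std::memory_order_relaxed);
  }
}

int main() {
  Node before, during;
  OnHandleWrite(before);
  assert(!before.markbit);  // outside a marking cycle: no-op

  g_is_marking = true;      // StartMarkingMajor() analogue
  OnHandleWrite(during);
  assert(during.markbit);   // barrier keeps the node visible to the marker

  g_is_marking = false;     // end-of-MarkLiveObjects() analogue
}
```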
diff --git a/test/unittests/heap/embedder-tracing-unittest.cc b/test/unittests/heap/embedder-tracing-unittest.cc
index 5c60de51ce..30c1ed64d0 100644
--- a/test/unittests/heap/embedder-tracing-unittest.cc
+++ b/test/unittests/heap/embedder-tracing-unittest.cc
@@ -545,9 +545,9 @@ TEST_F(EmbedderTracingTest, TracedReferenceReset) {
 TEST_F(EmbedderTracingTest, TracedReferenceCopyReferences) {
   ManualGCScope manual_gc(i_isolate());
   v8::HandleScope outer_scope(v8_isolate());
-  i::GlobalHandles* global_handles = i_isolate()->global_handles();
+  auto* traced_handles = i_isolate()->traced_handles();
 
-  const size_t initial_count = global_handles->handles_count();
+  const size_t initial_count = traced_handles->used_node_count();
   auto handle1 = std::make_unique<v8::TracedReference<v8::Value>>();
   {
     v8::HandleScope scope(v8_isolate());
@@ -556,7 +556,7 @@ TEST_F(EmbedderTracingTest, TracedReferenceCopyReferences) {
     auto handle2 = std::make_unique<v8::TracedReference<v8::Value>>(*handle1);
     auto handle3 = std::make_unique<v8::TracedReference<v8::Value>>();
     *handle3 = *handle2;
-    EXPECT_EQ(initial_count + 3, global_handles->handles_count());
+    EXPECT_EQ(initial_count + 3, traced_handles->used_node_count());
     EXPECT_FALSE(handle1->IsEmpty());
     EXPECT_EQ(*handle1, *handle2);
     EXPECT_EQ(*handle2, *handle3);
@@ -574,7 +574,7 @@ TEST_F(EmbedderTracingTest, TracedReferenceCopyReferences) {
         EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers);
     FullGC();
   }
-  EXPECT_EQ(initial_count, global_handles->handles_count());
+  EXPECT_EQ(initial_count, traced_handles->used_node_count());
 }
 
 TEST_F(EmbedderTracingTest, TracedReferenceToUnmodifiedJSObjectDiesOnFullGC) {
@@ -681,12 +681,12 @@ TEST_F(EmbedderTracingTest, TracedReferenceHandlesMarking) {
   auto dead = std::make_unique<v8::TracedReference<v8::Value>>();
   live->Reset(v8_isolate(), v8::Undefined(v8_isolate()));
   dead->Reset(v8_isolate(), v8::Undefined(v8_isolate()));
-  i::GlobalHandles* global_handles = i_isolate()->global_handles();
+  auto* traced_handles = i_isolate()->traced_handles();
   {
     TestEmbedderHeapTracer tracer;
     heap::TemporaryEmbedderHeapTracerScope tracer_scope(v8_isolate(), &tracer);
     tracer.AddReferenceForTracing(live.get());
-    const size_t initial_count = global_handles->handles_count();
+    const size_t initial_count = traced_handles->used_node_count();
     {
       // Conservative scanning may find stale pointers to on-stack handles.
       // Disable scanning, assuming the slots are overwritten.
@@ -698,7 +698,7 @@ TEST_F(EmbedderTracingTest, TracedReferenceHandlesMarking) {
          EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers);
      FullGC();
    }
-    const size_t final_count = global_handles->handles_count();
+    const size_t final_count = traced_handles->used_node_count();
    // Handles are not black allocated, so `dead` is immediately reclaimed.
    EXPECT_EQ(initial_count, final_count + 1);
  }
@@ -712,12 +712,12 @@ TEST_F(EmbedderTracingTest, TracedReferenceHandlesDoNotLeak) {
   v8::HandleScope scope(v8_isolate());
   auto ref = std::make_unique<v8::TracedReference<v8::Value>>();
   ref->Reset(v8_isolate(), v8::Undefined(v8_isolate()));
-  i::GlobalHandles* global_handles = i_isolate()->global_handles();
-  const size_t initial_count = global_handles->handles_count();
+  auto* traced_handles = i_isolate()->traced_handles();
+  const size_t initial_count = traced_handles->used_node_count();
   // We need two GCs because handles are black allocated.
   FullGC();
   FullGC();
-  const size_t final_count = global_handles->handles_count();
+  const size_t final_count = traced_handles->used_node_count();
   EXPECT_EQ(initial_count, final_count + 1);
 }
 
@@ -792,9 +792,9 @@ TEST_F(EmbedderTracingTest, BasicTracedReference) {
   heap::TemporaryEmbedderHeapTracerScope tracer_scope(v8_isolate(), &tracer);
   tracer.SetStackStart(
       static_cast<void*>(base::Stack::GetCurrentFrameAddress()));
-  i::GlobalHandles* global_handles = i_isolate()->global_handles();
+  auto* traced_handles = i_isolate()->traced_handles();
 
-  const size_t initial_count = global_handles->handles_count();
+  const size_t initial_count = traced_handles->used_node_count();
   char* memory = new char[sizeof(v8::TracedReference<v8::Value>)];
   auto* traced = new (memory) v8::TracedReference<v8::Value>();
   {
@@ -804,10 +804,10 @@ TEST_F(EmbedderTracingTest, BasicTracedReference) {
     EXPECT_TRUE(traced->IsEmpty());
     *traced = v8::TracedReference<v8::Value>(v8_isolate(), object);
     EXPECT_FALSE(traced->IsEmpty());
-    EXPECT_EQ(initial_count + 1, global_handles->handles_count());
+    EXPECT_EQ(initial_count + 1, traced_handles->used_node_count());
   }
   traced->~TracedReference();
-  EXPECT_EQ(initial_count + 1, global_handles->handles_count());
+  EXPECT_EQ(initial_count + 1, traced_handles->used_node_count());
   {
     // Conservative scanning may find stale pointers to on-stack handles.
     // Disable scanning, assuming the slots are overwritten.
@@ -819,7 +819,7 @@ TEST_F(EmbedderTracingTest, BasicTracedReference) {
        EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers);
    FullGC();
  }
-  EXPECT_EQ(initial_count, global_handles->handles_count());
+  EXPECT_EQ(initial_count, traced_handles->used_node_count());
   delete[] memory;
 }
 
@@ -902,22 +902,22 @@ TEST_F(EmbedderTracingTest, TracedReferenceNoDestructorReclaimedOnScavenge) {
   constexpr uint16_t kClassIdToOptimize = 23;
   EmbedderHeapTracerNoDestructorNonTracingClearing tracer(kClassIdToOptimize);
   heap::TemporaryEmbedderHeapTracerScope tracer_scope(v8_isolate(), &tracer);
-  i::GlobalHandles* global_handles = i_isolate()->global_handles();
+  auto* traced_handles = i_isolate()->traced_handles();
 
-  const size_t initial_count = global_handles->handles_count();
+  const size_t initial_count = traced_handles->used_node_count();
   auto* optimized_handle = new v8::TracedReference<v8::Object>();
   auto* non_optimized_handle = new v8::TracedReference<v8::Object>();
   SetupOptimizedAndNonOptimizedHandle(v8_isolate(), kClassIdToOptimize,
                                       optimized_handle, non_optimized_handle);
-  EXPECT_EQ(initial_count + 2, global_handles->handles_count());
+  EXPECT_EQ(initial_count + 2, traced_handles->used_node_count());
   YoungGC();
-  EXPECT_EQ(initial_count + 1, global_handles->handles_count());
+  EXPECT_EQ(initial_count + 1, traced_handles->used_node_count());
   EXPECT_TRUE(optimized_handle->IsEmpty());
   delete optimized_handle;
   EXPECT_FALSE(non_optimized_handle->IsEmpty());
   non_optimized_handle->Reset();
   delete non_optimized_handle;
-  EXPECT_EQ(initial_count, global_handles->handles_count());
+  EXPECT_EQ(initial_count, traced_handles->used_node_count());
 }
 
 namespace {
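Reviewer note (not part of the patch): the `TotalSizeTracedNode` test below relies on used bytes dropping to zero while total bytes stay positive after the handle dies, presumably because the node's backing block is retained rather than freed. A minimal pool model illustrating that invariant; the class, sizes, and methods are made-up stand-ins, not the real allocator:

```cpp
#include <cassert>
#include <cstddef>

class BlockPool {
 public:
  static constexpr size_t kNodeSize = 32;
  static constexpr size_t kNodesPerBlock = 128;

  void Allocate() {
    if (blocks_ == 0 || used_ == blocks_ * kNodesPerBlock) ++blocks_;
    ++used_;
  }
  // The node returns to a free list; the block itself stays allocated.
  void Free() { --used_; }

  size_t total_size_bytes() const {
    return blocks_ * kNodesPerBlock * kNodeSize;
  }
  size_t used_size_bytes() const { return used_ * kNodeSize; }

 private:
  size_t blocks_ = 0;
  size_t used_ = 0;
};

int main() {
  BlockPool pool;
  assert(pool.total_size_bytes() == 0 && pool.used_size_bytes() == 0);
  pool.Allocate();
  assert(pool.total_size_bytes() > 0 && pool.used_size_bytes() > 0);
  pool.Free();
  assert(pool.total_size_bytes() > 0);  // capacity retained (CHECK_GT below)
  assert(pool.used_size_bytes() == 0);  // no live nodes (CHECK_EQ below)
}
```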
diff --git a/test/unittests/heap/global-handles-unittest.cc b/test/unittests/heap/global-handles-unittest.cc
index 99e9f23d98..a7eda52f7e 100644
--- a/test/unittests/heap/global-handles-unittest.cc
+++ b/test/unittests/heap/global-handles-unittest.cc
@@ -606,15 +606,15 @@ TEST_F(GlobalHandlesTest, TotalSizeTracedNode) {
   v8::HandleScope scope(isolate);
   v8::TracedReference<v8::Object>* handle = new TracedReference<v8::Object>();
-  CHECK_EQ(i_isolate()->global_handles()->TotalSize(), 0);
-  CHECK_EQ(i_isolate()->global_handles()->UsedSize(), 0);
+  CHECK_EQ(i_isolate()->traced_handles()->total_size_bytes(), 0);
+  CHECK_EQ(i_isolate()->traced_handles()->used_size_bytes(), 0);
   ConstructJSObject(isolate, handle);
-  CHECK_GT(i_isolate()->global_handles()->TotalSize(), 0);
-  CHECK_GT(i_isolate()->global_handles()->UsedSize(), 0);
+  CHECK_GT(i_isolate()->traced_handles()->total_size_bytes(), 0);
+  CHECK_GT(i_isolate()->traced_handles()->used_size_bytes(), 0);
   delete handle;
   CollectAllGarbage();
-  CHECK_GT(i_isolate()->global_handles()->TotalSize(), 0);
-  CHECK_EQ(i_isolate()->global_handles()->UsedSize(), 0);
+  CHECK_GT(i_isolate()->traced_handles()->total_size_bytes(), 0);
+  CHECK_EQ(i_isolate()->traced_handles()->used_size_bytes(), 0);
 }
 
 }  // namespace internal