unified-young-gen: Implement generational barrier for TracedHandles
If unified young generation is enabled, we don't record all young nodes, but only ones that have old host. The same std::vector<TracedHandle*> is reused for the remembered set implementation.

The barrier is added to TracedHandle creation, i.e.
- v8::TracedReference::Reset(),
- v8::TracedReference::operator=(const TracedReference&),
and to moving between TracedHandles, i.e.
- v8::TracedReference::operator=(TracedReference&&).

Bug: v8:13475
Change-Id: I2dc236e21c05f797687344c5745896f0bb8b0a0a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4057070
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#84513}
This commit is contained in:
parent
7317006be8
commit
d164f933b8
@@ -463,8 +463,21 @@ void TracedNodeBlock::FreeNode(TracedNode* node) {
|
||||
used_--;
|
||||
}
|
||||
|
||||
bool NeedsTrackingInYoungNodes(Object value, TracedNode* node) {
|
||||
return ObjectInYoungGeneration(value) && !node->is_in_young_list();
|
||||
CppHeap* GetCppHeapIfUnifiedYoungGC(Isolate* isolate) {
|
||||
// TODO(v8:13475) Consider removing this check when unified-young-gen becomes
|
||||
// default.
|
||||
if (!v8_flags.cppgc_young_generation) return nullptr;
|
||||
auto* cpp_heap = CppHeap::From(isolate->heap()->cpp_heap());
|
||||
if (cpp_heap && cpp_heap->generational_gc_supported()) return cpp_heap;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
bool IsCppGCHostOld(CppHeap& cpp_heap, Address host) {
|
||||
DCHECK(host);
|
||||
DCHECK(cpp_heap.generational_gc_supported());
|
||||
auto* host_ptr = reinterpret_cast<void*>(host);
|
||||
auto* page = cppgc::internal::BasePage::FromInnerAddress(&cpp_heap, host_ptr);
|
||||
return !page->ObjectHeaderFromInnerAddress(host_ptr).IsYoung();
|
||||
}
|
||||
|
||||
void SetSlotThreadSafe(Address** slot, Address* val) {
|
||||
@@ -521,10 +534,14 @@ class TracedHandlesImpl final {
|
||||
TracedNode* AllocateNode();
|
||||
void FreeNode(TracedNode*);
|
||||
|
||||
bool NeedsTrackingInYoungNodes(Object value, TracedNode* node, Address* slot,
|
||||
GlobalHandleStoreMode store_mode) const;
|
||||
|
||||
TracedNodeBlock::OverallList blocks_;
|
||||
TracedNodeBlock::UsableList usable_blocks_;
|
||||
// List of young nodes. May refer to nodes in `blocks_`, `usable_blocks_`, and
|
||||
// `empty_block_candidates_`.
|
||||
// `empty_block_candidates_`. In case unified young GC is enabled, this serves
|
||||
// as an old-to-young remembered set for cppgc-to-V8 references.
|
||||
std::vector<TracedNode*> young_nodes_;
|
||||
// Empty blocks that are still referred to from `young_nodes_`.
|
||||
std::vector<TracedNodeBlock*> empty_block_candidates_;
|
||||
@@ -604,15 +621,37 @@ TracedHandlesImpl::~TracedHandlesImpl() {
|
||||
DCHECK_EQ(block_size_bytes, block_size_bytes_);
|
||||
}
|
||||
|
||||
bool TracedHandlesImpl::NeedsTrackingInYoungNodes(
|
||||
Object object, TracedNode* node, Address* slot,
|
||||
GlobalHandleStoreMode store_mode) const {
|
||||
// If unified young generation is supported, we don't want to treat all young
|
||||
// nodes as roots, but rather trace them normally. With unified GC we only
|
||||
// track old-to-young nodes in the vector.
|
||||
if (auto* cpp_heap = GetCppHeapIfUnifiedYoungGC(isolate_)) {
|
||||
if (store_mode == GlobalHandleStoreMode::kInitializingStore) {
|
||||
// Don't record initializing stores.
|
||||
return false;
|
||||
} else if (is_marking_) {
|
||||
// If marking is in progress, the marking barrier will be issued.
|
||||
return false;
|
||||
} else if (!IsCppGCHostOld(*cpp_heap, reinterpret_cast<Address>(slot))) {
|
||||
// Otherwise, if the host object is young, we don't need to record.
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return ObjectInYoungGeneration(object) && !node->is_in_young_list();
|
||||
}
|
||||
|
||||
Handle<Object> TracedHandlesImpl::Create(Address value, Address* slot,
|
||||
GlobalHandleStoreMode store_mode) {
|
||||
Object object(value);
|
||||
auto* node = AllocateNode();
|
||||
bool needs_young_bit_update = false;
|
||||
if (NeedsTrackingInYoungNodes(object, node)) {
|
||||
if (NeedsTrackingInYoungNodes(object, node, slot, store_mode)) {
|
||||
needs_young_bit_update = true;
|
||||
young_nodes_.push_back(node);
|
||||
}
|
||||
|
||||
bool needs_black_allocation = false;
|
||||
if (is_marking_ && store_mode != GlobalHandleStoreMode::kInitializingStore) {
|
||||
needs_black_allocation = true;
|
||||
@@ -692,6 +731,17 @@ void TracedHandlesImpl::Move(TracedNode& from_node, Address** from,
|
||||
// Write barrier needs to cover node as well as object.
|
||||
to_node->set_markbit<AccessMode::ATOMIC>();
|
||||
WriteBarrier::MarkingFromGlobalHandle(to_node->object());
|
||||
} else if (auto* cpp_heap = GetCppHeapIfUnifiedYoungGC(isolate_)) {
|
||||
const bool object_is_young_and_not_yet_recorded =
|
||||
!from_node.is_in_young_list() &&
|
||||
ObjectInYoungGeneration(from_node.object());
|
||||
if (object_is_young_and_not_yet_recorded &&
|
||||
IsCppGCHostOld(*cpp_heap, reinterpret_cast<Address>(to))) {
|
||||
DCHECK_EQ(std::find(young_nodes_.begin(), young_nodes_.end(), &from_node),
|
||||
young_nodes_.end());
|
||||
from_node.set_is_in_young_list(true);
|
||||
young_nodes_.push_back(&from_node);
|
||||
}
|
||||
}
|
||||
SetSlotThreadSafe(from, nullptr);
|
||||
}
|
||||
|
@@ -4,10 +4,13 @@
|
||||
|
||||
#if defined(CPPGC_YOUNG_GENERATION)
|
||||
|
||||
#include <algorithm>
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
|
||||
#include "include/cppgc/allocation.h"
|
||||
#include "include/cppgc/garbage-collected.h"
|
||||
#include "include/cppgc/persistent.h"
|
||||
#include "include/cppgc/testing.h"
|
||||
#include "include/v8-context.h"
|
||||
#include "include/v8-cppgc.h"
|
||||
@@ -28,10 +31,26 @@ namespace internal {
|
||||
|
||||
namespace {
|
||||
|
||||
bool IsHeapObjectYoung(void* obj) {
|
||||
return cppgc::internal::HeapObjectHeader::FromObject(obj).IsYoung();
|
||||
}
|
||||
|
||||
bool IsHeapObjectOld(void* obj) { return !IsHeapObjectYoung(obj); }
|
||||
|
||||
class Wrappable final : public cppgc::GarbageCollected<Wrappable> {
|
||||
public:
|
||||
static size_t destructor_callcount;
|
||||
|
||||
Wrappable() = default;
|
||||
Wrappable(v8::Isolate* isolate, v8::Local<v8::Object> local)
|
||||
: wrapper_(isolate, local) {}
|
||||
|
||||
Wrappable(const Wrappable&) = default;
|
||||
Wrappable(Wrappable&&) = default;
|
||||
|
||||
Wrappable& operator=(const Wrappable&) = default;
|
||||
Wrappable& operator=(Wrappable&&) = default;
|
||||
|
||||
~Wrappable() { destructor_callcount++; }
|
||||
|
||||
void Trace(cppgc::Visitor* visitor) const { visitor->Trace(wrapper_); }
|
||||
@@ -59,6 +78,81 @@ class MinorMCEnabler {
|
||||
FlagScope<bool> cppgc_young_generation_;
|
||||
};
|
||||
|
||||
class YoungWrapperCollector : public RootVisitor {
|
||||
public:
|
||||
using YoungWrappers = std::set<Address>;
|
||||
|
||||
void VisitRootPointers(Root root, const char*, FullObjectSlot start,
|
||||
FullObjectSlot end) override {
|
||||
for (FullObjectSlot p = start; p < end; ++p) {
|
||||
all_young_wrappers_.insert(reinterpret_cast<Address>(*p.location()));
|
||||
}
|
||||
}
|
||||
|
||||
YoungWrappers get_wrappers() { return std::move(all_young_wrappers_); }
|
||||
|
||||
private:
|
||||
YoungWrappers all_young_wrappers_;
|
||||
};
|
||||
|
||||
class ExpectCppGCToV8GenerationalBarrierToFire {
|
||||
public:
|
||||
ExpectCppGCToV8GenerationalBarrierToFire(
|
||||
v8::Isolate& isolate, std::initializer_list<Address> expected_wrappers)
|
||||
: isolate_(reinterpret_cast<Isolate&>(isolate)),
|
||||
expected_wrappers_(expected_wrappers) {
|
||||
YoungWrapperCollector visitor;
|
||||
isolate_.traced_handles()->IterateYoung(&visitor);
|
||||
young_wrappers_before_ = visitor.get_wrappers();
|
||||
|
||||
std::vector<Address> diff;
|
||||
std::set_intersection(young_wrappers_before_.begin(),
|
||||
young_wrappers_before_.end(),
|
||||
expected_wrappers_.begin(), expected_wrappers_.end(),
|
||||
std::back_inserter(diff));
|
||||
EXPECT_TRUE(diff.empty());
|
||||
}
|
||||
|
||||
~ExpectCppGCToV8GenerationalBarrierToFire() {
|
||||
YoungWrapperCollector visitor;
|
||||
isolate_.traced_handles()->IterateYoung(&visitor);
|
||||
const auto young_wrappers_after = visitor.get_wrappers();
|
||||
EXPECT_GE(young_wrappers_after.size(), young_wrappers_before_.size());
|
||||
|
||||
EXPECT_TRUE(
|
||||
std::includes(young_wrappers_after.begin(), young_wrappers_after.end(),
|
||||
expected_wrappers_.begin(), expected_wrappers_.end()));
|
||||
EXPECT_EQ(expected_wrappers_.size(),
|
||||
young_wrappers_after.size() - young_wrappers_before_.size());
|
||||
}
|
||||
|
||||
private:
|
||||
Isolate& isolate_;
|
||||
YoungWrapperCollector::YoungWrappers expected_wrappers_;
|
||||
YoungWrapperCollector::YoungWrappers young_wrappers_before_;
|
||||
};
|
||||
|
||||
class ExpectCppGCToV8NoGenerationalBarrier {
|
||||
public:
|
||||
explicit ExpectCppGCToV8NoGenerationalBarrier(v8::Isolate& isolate)
|
||||
: isolate_(reinterpret_cast<Isolate&>(isolate)) {
|
||||
YoungWrapperCollector visitor;
|
||||
isolate_.traced_handles()->IterateYoung(&visitor);
|
||||
young_wrappers_before_ = visitor.get_wrappers();
|
||||
}
|
||||
|
||||
~ExpectCppGCToV8NoGenerationalBarrier() {
|
||||
YoungWrapperCollector visitor;
|
||||
isolate_.traced_handles()->IterateYoung(&visitor);
|
||||
const auto young_wrappers_after = visitor.get_wrappers();
|
||||
EXPECT_EQ(young_wrappers_before_, young_wrappers_after);
|
||||
}
|
||||
|
||||
private:
|
||||
Isolate& isolate_;
|
||||
YoungWrapperCollector::YoungWrappers young_wrappers_before_;
|
||||
};
|
||||
|
||||
} // namespace
|
||||
|
||||
class YoungUnifiedHeapTest : public MinorMCEnabler, public UnifiedHeapTest {
|
||||
@@ -154,6 +248,144 @@ TEST_F(YoungUnifiedHeapTest, GenerationalBarrierV8ToCppGCReference) {
|
||||
EXPECT_EQ(0u, Wrappable::destructor_callcount);
|
||||
}
|
||||
|
||||
TEST_F(YoungUnifiedHeapTest,
|
||||
GenerationalBarrierCppGCToV8NoInitializingStoreBarrier) {
|
||||
if (i::v8_flags.single_generation) return;
|
||||
|
||||
v8::HandleScope handle_scope(v8_isolate());
|
||||
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
|
||||
v8::Context::Scope context_scope(context);
|
||||
|
||||
auto local = v8::Object::New(v8_isolate());
|
||||
{
|
||||
ExpectCppGCToV8NoGenerationalBarrier expect_no_barrier(*v8_isolate());
|
||||
auto* wrappable = cppgc::MakeGarbageCollected<Wrappable>(
|
||||
allocation_handle(), v8_isolate(), local);
|
||||
auto* copied_wrappable =
|
||||
cppgc::MakeGarbageCollected<Wrappable>(allocation_handle(), *wrappable);
|
||||
auto* moved_wrappable = cppgc::MakeGarbageCollected<Wrappable>(
|
||||
allocation_handle(), std::move(*wrappable));
|
||||
USE(moved_wrappable);
|
||||
USE(copied_wrappable);
|
||||
USE(wrappable);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_F(YoungUnifiedHeapTest, GenerationalBarrierCppGCToV8ReferenceReset) {
|
||||
if (i::v8_flags.single_generation) return;
|
||||
|
||||
v8::HandleScope handle_scope(v8_isolate());
|
||||
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
|
||||
v8::Context::Scope context_scope(context);
|
||||
|
||||
cppgc::Persistent<Wrappable> wrappable_object =
|
||||
cppgc::MakeGarbageCollected<Wrappable>(allocation_handle());
|
||||
|
||||
EXPECT_TRUE(IsHeapObjectYoung(wrappable_object.Get()));
|
||||
CollectAllAvailableGarbage();
|
||||
EXPECT_EQ(0u, Wrappable::destructor_callcount);
|
||||
EXPECT_TRUE(IsHeapObjectOld(wrappable_object.Get()));
|
||||
|
||||
{
|
||||
v8::HandleScope inner_handle_scope(v8_isolate());
|
||||
auto local = v8::Object::New(v8_isolate());
|
||||
EXPECT_TRUE(local->IsObject());
|
||||
{
|
||||
ExpectCppGCToV8GenerationalBarrierToFire expect_barrier(
|
||||
*v8_isolate(), {*reinterpret_cast<Address*>(*local)});
|
||||
wrappable_object->SetWrapper(v8_isolate(), local);
|
||||
}
|
||||
}
|
||||
|
||||
CollectYoungGarbageWithoutEmbedderStack(cppgc::Heap::SweepingType::kAtomic);
|
||||
auto local = wrappable_object->wrapper().Get(v8_isolate());
|
||||
EXPECT_TRUE(local->IsObject());
|
||||
}
|
||||
|
||||
TEST_F(YoungUnifiedHeapTest, GenerationalBarrierCppGCToV8ReferenceCopy) {
|
||||
if (i::v8_flags.single_generation) return;
|
||||
|
||||
v8::HandleScope handle_scope(v8_isolate());
|
||||
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
|
||||
v8::Context::Scope context_scope(context);
|
||||
|
||||
cppgc::Persistent<Wrappable> wrappable_object =
|
||||
cppgc::MakeGarbageCollected<Wrappable>(allocation_handle());
|
||||
|
||||
EXPECT_TRUE(IsHeapObjectYoung(wrappable_object.Get()));
|
||||
CollectAllAvailableGarbage();
|
||||
EXPECT_EQ(0u, Wrappable::destructor_callcount);
|
||||
EXPECT_TRUE(IsHeapObjectOld(wrappable_object.Get()));
|
||||
|
||||
{
|
||||
v8::HandleScope inner_handle_scope(v8_isolate());
|
||||
auto local = v8::Object::New(v8_isolate());
|
||||
EXPECT_TRUE(local->IsObject());
|
||||
|
||||
Wrappable* another_wrappable_object = nullptr;
|
||||
{
|
||||
// Assign to young host and expect no barrier.
|
||||
ExpectCppGCToV8NoGenerationalBarrier expect_no_barrier(*v8_isolate());
|
||||
another_wrappable_object =
|
||||
cppgc::MakeGarbageCollected<Wrappable>(allocation_handle());
|
||||
another_wrappable_object->SetWrapper(v8_isolate(), local);
|
||||
}
|
||||
{
|
||||
// Assign to old object using TracedReference::operator= and expect
|
||||
// the barrier to trigger.
|
||||
ExpectCppGCToV8GenerationalBarrierToFire expect_barrier(
|
||||
*v8_isolate(), {*reinterpret_cast<Address*>(*local)});
|
||||
*wrappable_object = *another_wrappable_object;
|
||||
}
|
||||
}
|
||||
|
||||
CollectYoungGarbageWithoutEmbedderStack(cppgc::Heap::SweepingType::kAtomic);
|
||||
auto local = wrappable_object->wrapper().Get(v8_isolate());
|
||||
EXPECT_TRUE(local->IsObject());
|
||||
}
|
||||
|
||||
TEST_F(YoungUnifiedHeapTest, GenerationalBarrierCppGCToV8ReferenceMove) {
|
||||
if (i::v8_flags.single_generation) return;
|
||||
|
||||
v8::HandleScope handle_scope(v8_isolate());
|
||||
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
|
||||
v8::Context::Scope context_scope(context);
|
||||
|
||||
cppgc::Persistent<Wrappable> wrappable_object =
|
||||
cppgc::MakeGarbageCollected<Wrappable>(allocation_handle());
|
||||
|
||||
EXPECT_TRUE(IsHeapObjectYoung(wrappable_object.Get()));
|
||||
CollectAllAvailableGarbage();
|
||||
EXPECT_EQ(0u, Wrappable::destructor_callcount);
|
||||
EXPECT_TRUE(IsHeapObjectOld(wrappable_object.Get()));
|
||||
|
||||
{
|
||||
v8::HandleScope inner_handle_scope(v8_isolate());
|
||||
auto local = v8::Object::New(v8_isolate());
|
||||
EXPECT_TRUE(local->IsObject());
|
||||
|
||||
Wrappable* another_wrappable_object = nullptr;
|
||||
{
|
||||
// Assign to young host and expect no barrier.
|
||||
ExpectCppGCToV8NoGenerationalBarrier expect_no_barrier(*v8_isolate());
|
||||
another_wrappable_object =
|
||||
cppgc::MakeGarbageCollected<Wrappable>(allocation_handle());
|
||||
another_wrappable_object->SetWrapper(v8_isolate(), local);
|
||||
}
|
||||
{
|
||||
// Assign to old object using TracedReference::operator= and expect
|
||||
// the barrier to trigger.
|
||||
ExpectCppGCToV8GenerationalBarrierToFire expect_barrier(
|
||||
*v8_isolate(), {*reinterpret_cast<Address*>(*local)});
|
||||
*wrappable_object = std::move(*another_wrappable_object);
|
||||
}
|
||||
}
|
||||
|
||||
CollectYoungGarbageWithoutEmbedderStack(cppgc::Heap::SweepingType::kAtomic);
|
||||
auto local = wrappable_object->wrapper().Get(v8_isolate());
|
||||
EXPECT_TRUE(local->IsObject());
|
||||
}
|
||||
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user