[heap] Decouple the lifetime of a TracedNode from the target object
Currently a TracedNode of a TracedReference is freed only if its target V8 object is unreachable. This is problematic for TracedNodes created for long-living (or immortal) V8 objects and leads to memory leaks. This CL adds logic for collecting unreachable TracedNodes: 1) Each TracedNode gets a markbit. Initially the markbit is set (i.e. we have black allocation for TracedNodes). 2) During marking RegisterEmbedderReference sets the markbit of the corresponding TracedNode. 3) In the atomic pause of Mark-Compact when TracedNodes are iterated, we check the markbits and free TracedNodes with cleared markbits. After this processing all markbits are cleared for the next GC. Note that the new logic does not apply to TracedNodes that have callbacks and/or destructors. Bug: chromium:1029738 Change-Id: I38e76a8b4a84170793998988b1a7962e40874428 Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1948722 Commit-Queue: Ulan Degenbaev <ulan@chromium.org> Reviewed-by: Omer Katz <omerkatz@chromium.org> Reviewed-by: Hannes Payer <hpayer@chromium.org> Cr-Commit-Position: refs/heads/master@{#65347}
This commit is contained in:
parent
4bb495f420
commit
d716056050
@ -637,6 +637,10 @@ class GlobalHandles::TracedNode final
|
||||
// Whether a destructor is associated with this node; such nodes may be
// accessed by that destructor and are excluded from unmarked-node collection.
bool has_destructor() const { return HasDestructor::decode(flags_); }
// Records whether the owning handle has a destructor for this node.
void set_has_destructor(bool v) { flags_ = HasDestructor::update(flags_, v); }
// Liveness markbit: set during marking (via GlobalHandles::MarkTraced) and
// inspected in the atomic pause to detect unreachable TracedNodes.
bool markbit() const { return Markbit::decode(flags_); }
// Clears the markbit in preparation for the next GC cycle.
void clear_markbit() { flags_ = Markbit::update(flags_, false); }
// Marks the node as reachable for the current GC cycle.
void set_markbit() { flags_ = Markbit::update(flags_, true); }
void SetFinalizationCallback(void* parameter,
|
||||
WeakCallbackInfo<void>::Callback callback) {
|
||||
set_parameter(parameter);
|
||||
@ -678,14 +682,18 @@ class GlobalHandles::TracedNode final
|
||||
// Bitfields packed into |flags_|; each occupies one bit after NodeState.
using IsInYoungList = NodeState::Next<bool, 1>;
using IsRoot = IsInYoungList::Next<bool, 1>;
using HasDestructor = IsRoot::Next<bool, 1>;
// Markbit used to detect TracedNodes that were not reported as reachable by
// the embedder during marking.
using Markbit = HasDestructor::Next<bool, 1>;
// Puts the TracedNode-specific fields into their initial state.
void ClearImplFields() {
  callback_ = nullptr;
  // Nodes are black allocated for simplicity, i.e. a freshly created node
  // starts out marked.
  set_markbit();
  set_root(true);
}
// Debug check that the node is in the state produced by ClearImplFields():
// root, marked (black allocation), and without a finalization callback.
void CheckImplFieldsAreCleared() const {
  DCHECK(is_root());
  DCHECK(markbit());
  DCHECK_NULL(callback_);
}
@ -793,6 +801,12 @@ void GlobalHandles::MoveTracedGlobal(Address** from, Address** to) {
|
||||
}
|
||||
}
|
||||
|
||||
// Marks the TracedNode at |location| as reachable for the current GC cycle.
// Called during marking via Heap::RegisterExternallyReferencedObject.
void GlobalHandles::MarkTraced(Address* location) {
  TracedNode* node = TracedNode::FromLocation(location);
  // Check the precondition before mutating the node, so a debug failure
  // reports the node in its unmodified state.
  DCHECK(node->IsInUse());
  node->set_markbit();
}
|
||||
void GlobalHandles::Destroy(Address* location) {
|
||||
if (location != nullptr) {
|
||||
NodeSpace<Node>::Release(Node::FromLocation(location));
|
||||
@ -867,8 +881,26 @@ void GlobalHandles::IterateWeakRootsForPhantomHandles(
|
||||
}
|
||||
}
|
||||
for (TracedNode* node : *traced_nodes_) {
|
||||
if (node->IsInUse() &&
|
||||
should_reset_handle(isolate()->heap(), node->location())) {
|
||||
if (!node->IsInUse()) continue;
|
||||
// Detect unreachable nodes first.
|
||||
if (!node->markbit() && node->IsPhantomResetHandle() &&
|
||||
!node->has_destructor()) {
|
||||
// The handle is unreachable and does not have a callback and a
|
||||
// destructor associated with it. We can clear it even if the target V8
|
||||
// object is alive. Note that the destructor and the callback may
|
||||
// access the handle, that is why we avoid clearing it.
|
||||
node->ResetPhantomHandle(HandleHolder::kDead);
|
||||
++number_of_phantom_handle_resets_;
|
||||
continue;
|
||||
} else if (node->markbit()) {
|
||||
// Clear the markbit for the next GC.
|
||||
node->clear_markbit();
|
||||
}
|
||||
DCHECK(node->IsInUse());
|
||||
// Detect nodes with unreachable target objects.
|
||||
if (should_reset_handle(isolate()->heap(), node->location())) {
|
||||
// If the node allows eager resetting, then reset it here. Otherwise,
|
||||
// collect its callback that will reset it.
|
||||
if (node->IsPhantomResetHandle()) {
|
||||
node->ResetPhantomHandle(node->has_destructor() ? HandleHolder::kLive
|
||||
: HandleHolder::kDead);
|
||||
|
@ -87,6 +87,7 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
|
||||
static void SetFinalizationCallbackForTraced(
|
||||
Address* location, void* parameter,
|
||||
WeakCallbackInfo<void>::Callback callback);
|
||||
static void MarkTraced(Address* location);
|
||||
|
||||
explicit GlobalHandles(Isolate* isolate);
|
||||
~GlobalHandles();
|
||||
|
@ -5222,10 +5222,13 @@ EmbedderHeapTracer::TraceFlags Heap::flags_for_embedder_tracer() const {
|
||||
}
|
||||
|
||||
void Heap::RegisterExternallyReferencedObject(Address* location) {
|
||||
// The embedder is not aware of whether numbers are materialized as heap
|
||||
// objects or are just passed around as Smis.
|
||||
GlobalHandles::MarkTraced(location);
|
||||
Object object(*location);
|
||||
if (!object.IsHeapObject()) return;
|
||||
if (!object.IsHeapObject()) {
|
||||
// The embedder is not aware of whether numbers are materialized as heap
|
||||
// objects or are just passed around as Smis.
|
||||
return;
|
||||
}
|
||||
HeapObject heap_object = HeapObject::cast(object);
|
||||
DCHECK(IsValidHeapObject(this, heap_object));
|
||||
if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) {
|
||||
|
@ -60,11 +60,19 @@ class TestEmbedderHeapTracer final : public v8::EmbedderHeapTracer {
|
||||
to_register_with_v8_.push_back(global);
|
||||
}
|
||||
|
||||
// Queues |ref| to be reported to V8 as an embedder reference during the next
// AdvanceTracing() step.
void AddReferenceForTracing(v8::TracedReference<v8::Value>* ref) {
  to_register_with_v8_references_.push_back(ref);
}
|
||||
bool AdvanceTracing(double deadline_in_ms) final {
|
||||
for (auto global : to_register_with_v8_) {
|
||||
RegisterEmbedderReference(global->As<v8::Data>());
|
||||
}
|
||||
to_register_with_v8_.clear();
|
||||
for (auto ref : to_register_with_v8_references_) {
|
||||
RegisterEmbedderReference(ref->As<v8::Data>());
|
||||
}
|
||||
to_register_with_v8_references_.clear();
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -99,6 +107,7 @@ class TestEmbedderHeapTracer final : public v8::EmbedderHeapTracer {
|
||||
private:
|
||||
std::vector<std::pair<void*, void*>> registered_from_v8_;
|
||||
std::vector<v8::TracedGlobal<v8::Object>*> to_register_with_v8_;
|
||||
std::vector<v8::TracedReference<v8::Value>*> to_register_with_v8_references_;
|
||||
bool consider_traced_global_as_root_ = true;
|
||||
TracePrologueBehavior prologue_behavior_ = TracePrologueBehavior::kNoop;
|
||||
v8::Global<v8::Array> array_;
|
||||
@ -520,6 +529,74 @@ TEST(TracedGlobalWrapperClassId) {
|
||||
CHECK_EQ(17, traced.WrapperClassId());
|
||||
}
|
||||
|
||||
// Checks that a TracedReference reported as reachable by the embedder tracer
// (|live|) is retained, while an unreported one (|dead|) is collected on the
// second GC — the first GC keeps it alive because nodes are black allocated.
TEST(TracedReferenceHandlesMarking) {
  ManualGCScope manual_gc;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::TracedReference<v8::Value> live;
  v8::TracedReference<v8::Value> dead;
  live.Reset(isolate, v8::Undefined(isolate));
  dead.Reset(isolate, v8::Undefined(isolate));
  i::GlobalHandles* global_handles = CcTest::i_isolate()->global_handles();
  {
    TestEmbedderHeapTracer tracer;
    heap::TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
    // Only |live| is reported to V8; |dead| is never marked.
    tracer.AddReferenceForTracing(&live);
    const size_t initial_count = global_handles->handles_count();
    InvokeMarkSweep();
    const size_t final_count = global_handles->handles_count();
    // Handles are black allocated, so the first GC does not collect them.
    CHECK_EQ(initial_count, final_count);
  }

  {
    TestEmbedderHeapTracer tracer;
    heap::TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
    tracer.AddReferenceForTracing(&live);
    const size_t initial_count = global_handles->handles_count();
    InvokeMarkSweep();
    const size_t final_count = global_handles->handles_count();
    // The first GC cleared |dead|'s markbit, so the second GC frees its node
    // even though the target (undefined) is still alive.
    CHECK_EQ(initial_count, final_count + 1);
  }
}
|
||||
// Checks that an unmarked TracedReference node is freed even though its
// target object (undefined) stays alive, i.e. the node does not leak.
TEST(TracedReferenceHandlesDoNotLeak) {
  // TracedReference handles are not cleared by the destructor of the embedder
  // object. To avoid leaks we need to mark these handles during GC.
  // This test checks that unmarked handles do not leak.
  ManualGCScope manual_gc;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::TracedReference<v8::Value> ref;
  ref.Reset(isolate, v8::Undefined(isolate));
  i::GlobalHandles* global_handles = CcTest::i_isolate()->global_handles();
  const size_t initial_count = global_handles->handles_count();
  // We need two GCs because handles are black allocated.
  InvokeMarkSweep();
  InvokeMarkSweep();
  const size_t final_count = global_handles->handles_count();
  // Exactly the node backing |ref| has been freed.
  CHECK_EQ(initial_count, final_count + 1);
}
|
||||
// Checks that an unmarked TracedGlobal node is NOT freed — presumably because
// its node has a destructor, which exempts it from unmarked-node collection
// (confirm against the has_destructor() handling in global-handles.cc).
TEST(TracedGlobalHandlesAreRetained) {
  // TracedGlobal handles are cleared by the destructor of the embedder object.
  ManualGCScope manual_gc;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::TracedGlobal<v8::Value> global;
  global.Reset(isolate, v8::Undefined(isolate));
  i::GlobalHandles* global_handles = CcTest::i_isolate()->global_handles();
  const size_t initial_count = global_handles->handles_count();
  // We need two GCs because handles are black allocated.
  InvokeMarkSweep();
  InvokeMarkSweep();
  const size_t final_count = global_handles->handles_count();
  // The handle count is unchanged: the TracedGlobal node survived both GCs.
  CHECK_EQ(initial_count, final_count);
}
|
||||
namespace {
|
||||
|
||||
class TracedGlobalVisitor final
|
||||
|
Loading…
Reference in New Issue
Block a user