Revert "[heap] Remove independent handles"
This reverts 667555c6b8
.
This is a short-term fix for NodeJS regression caused by Scavenger
not collecting weak handles that are marked as independent.
Bug: chromium:847863, chromium:780749
Change-Id: Ia1c02e042d0e593c6f5badb82c4ef20b923d3806
Reviewed-on: https://chromium-review.googlesource.com/1082442
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53502}
commit aaa700bda4
parent 66f3e8f64d
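For context, the sketch below is not code from this CL; it shows the embedder
pattern that regressed, using the public V8 API of this era (the same calls the
re-added tests further down use). The names Wrapper, OnGone, and Track are
illustrative only. MarkIndependent() tells the GC that the handle may be
reclaimed when only the handle itself keeps the object alive, which lets the
Scavenger collect the object during a minor GC instead of waiting for a full
mark-compact:

// Illustrative embedder-side sketch; not part of this CL.
struct Wrapper {
  v8::Global<v8::Object> handle;
};

// kParameter weak callbacks must reset the handle themselves.
static void OnGone(const v8::WeakCallbackInfo<Wrapper>& info) {
  info.GetParameter()->handle.Reset();
  delete info.GetParameter();
}

static void Track(v8::Isolate* isolate, v8::Local<v8::Object> obj,
                  Wrapper* wrapper) {
  wrapper->handle.Reset(isolate, obj);
  wrapper->handle.SetWeak(wrapper, OnGone, v8::WeakCallbackType::kParameter);
  // After 667555c6b8, the Scavenger stopped collecting handles like this
  // one; this revert restores minor-GC collection for them.
  wrapper->handle.MarkIndependent();
}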
@@ -672,7 +672,8 @@ void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback should_reset_handle) {
 void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(RootVisitor* v) {
   for (Node* node : new_space_nodes_) {
     if (node->IsStrongRetainer() ||
-        (node->IsWeakRetainer() && node->is_active())) {
+        (node->IsWeakRetainer() && !node->is_independent() &&
+         node->is_active())) {
       v->VisitRootPointer(Root::kGlobalHandles, node->label(),
                           node->location());
     }
@@ -687,7 +688,8 @@ void GlobalHandles::IterateNewSpaceStrongAndDependentRootsAndIdentifyUnmodified(
       node->set_active(true);
     }
     if (node->IsStrongRetainer() ||
-        (node->IsWeakRetainer() && node->is_active())) {
+        (node->IsWeakRetainer() && !node->is_independent() &&
+         node->is_active())) {
       v->VisitRootPointer(Root::kGlobalHandles, node->label(),
                           node->location());
     }
@@ -707,8 +709,8 @@ void GlobalHandles::MarkNewSpaceWeakUnmodifiedObjectsPending(
     WeakSlotCallbackWithHeap is_dead) {
   for (Node* node : new_space_nodes_) {
     DCHECK(node->is_in_new_space_list());
-    if (node->IsWeak() && is_dead(isolate_->heap(), node->location())) {
-      DCHECK(!node->is_active());
+    if ((node->is_independent() || !node->is_active()) && node->IsWeak() &&
+        is_dead(isolate_->heap(), node->location())) {
       if (!node->IsPhantomCallback() && !node->IsPhantomResetHandle()) {
         node->MarkPending();
       }
@@ -720,8 +722,8 @@ void GlobalHandles::IterateNewSpaceWeakUnmodifiedRootsForFinalizers(
     RootVisitor* v) {
   for (Node* node : new_space_nodes_) {
     DCHECK(node->is_in_new_space_list());
-    if (!node->is_active() && node->IsWeakRetainer() &&
-        (node->state() == Node::PENDING)) {
+    if ((node->is_independent() || !node->is_active()) &&
+        node->IsWeakRetainer() && (node->state() == Node::PENDING)) {
       DCHECK(!node->IsPhantomCallback());
       DCHECK(!node->IsPhantomResetHandle());
       // Finalizers need to survive.
@@ -735,8 +737,8 @@ void GlobalHandles::IterateNewSpaceWeakUnmodifiedRootsForPhantomHandles(
     RootVisitor* v, WeakSlotCallbackWithHeap should_reset_handle) {
   for (Node* node : new_space_nodes_) {
     DCHECK(node->is_in_new_space_list());
-    if (!node->is_active() && node->IsWeakRetainer() &&
-        (node->state() != Node::PENDING)) {
+    if ((node->is_independent() || !node->is_active()) &&
+        node->IsWeakRetainer() && (node->state() != Node::PENDING)) {
       DCHECK(node->IsPhantomResetHandle() || node->IsPhantomCallback());
       if (should_reset_handle(isolate_->heap(), node->location())) {
         if (node->IsPhantomResetHandle()) {
@@ -782,12 +784,15 @@ int GlobalHandles::PostScavengeProcessing(
       // the freed_nodes.
       continue;
     }
 
-    // Active nodes are kept alive, so no further processing is requires.
-    if (node->is_active()) {
+    // Skip dependent or unmodified handles. Their weak callbacks might expect
+    // to be
+    // called between two global garbage collection callbacks which
+    // are not called for minor collections.
+    if (!node->is_independent() && (node->is_active())) {
+      node->set_active(false);
       continue;
     }
-    node->set_active(false);
 
     if (node->PostGarbageCollectionProcessing(isolate_)) {
       if (initial_post_gc_processing_count != post_gc_processing_count_) {
@@ -798,7 +803,6 @@ int GlobalHandles::PostScavengeProcessing(
         return freed_nodes;
       }
     }
-
     if (!node->IsRetainer()) {
       freed_nodes++;
     }
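The global-handles hunks above all restore the same filter: during a scavenge,
a new-space node acts as a GC root only if it is a strong retainer, or a weak
retainer that is active and not independent. As a standalone predicate (a
readability paraphrase, not code from this CL; the helper name is made up):

// Paraphrase of the restored scavenge-root filter for new-space nodes.
static bool TreatAsRootDuringScavenge(GlobalHandles::Node* node) {
  if (node->IsStrongRetainer()) return true;
  // Independent handles opt out of the "active implies root" rule, so the
  // Scavenger may process them even if they were recently modified.
  return node->IsWeakRetainer() && !node->is_independent() &&
         node->is_active();
}

The PostScavengeProcessing hunks are the other half: independent nodes are no
longer unconditionally skipped after a minor GC, so their weak callbacks
actually run.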
@@ -99,6 +99,7 @@ void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
   Sample* sample = new Sample(size, node, loc, this);
   samples_.emplace(sample);
   sample->global.SetWeak(sample, OnWeakCallback, WeakCallbackType::kParameter);
+  sample->global.MarkIndependent();
 }
 
 void SamplingHeapProfiler::OnWeakCallback(
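The one-line profiler change mirrors the embedder pattern sketched near the
top: each sample's global handle is marked independent again, so samples whose
objects die young can be reclaimed by the Scavenger instead of accumulating
until the next full GC.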
@@ -7788,6 +7788,80 @@ struct FlagAndPersistent {
   v8::Global<v8::Object> handle;
 };
 
+static void SetFlag(const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
+  data.GetParameter()->flag = true;
+  data.GetParameter()->handle.Reset();
+}
+
+static void IndependentWeakHandle(bool global_gc, bool interlinked) {
+  i::FLAG_stress_incremental_marking = false;
+  // Parallel scavenge introduces too much fragmentation.
+  i::FLAG_parallel_scavenge = false;
+  v8::Isolate* iso = CcTest::isolate();
+  v8::HandleScope scope(iso);
+  v8::Local<Context> context = Context::New(iso);
+  Context::Scope context_scope(context);
+
+  FlagAndPersistent object_a, object_b;
+
+  size_t big_heap_size = 0;
+  size_t big_array_size = 0;
+
+  {
+    v8::HandleScope handle_scope(iso);
+    Local<Object> a(v8::Object::New(iso));
+    Local<Object> b(v8::Object::New(iso));
+    object_a.handle.Reset(iso, a);
+    object_b.handle.Reset(iso, b);
+    if (interlinked) {
+      a->Set(context, v8_str("x"), b).FromJust();
+      b->Set(context, v8_str("x"), a).FromJust();
+    }
+    if (global_gc) {
+      CcTest::CollectAllGarbage();
+    } else {
+      CcTest::CollectGarbage(i::NEW_SPACE);
+    }
+    v8::Local<Value> big_array = v8::Array::New(CcTest::isolate(), 5000);
+    // Verify that we created an array where the space was reserved up front.
+    big_array_size =
+        v8::internal::JSArray::cast(*v8::Utils::OpenHandle(*big_array))
+            ->elements()
+            ->Size();
+    CHECK_LE(20000, big_array_size);
+    a->Set(context, v8_str("y"), big_array).FromJust();
+    big_heap_size = CcTest::heap()->SizeOfObjects();
+  }
+
+  object_a.flag = false;
+  object_b.flag = false;
+  object_a.handle.SetWeak(&object_a, &SetFlag,
+                          v8::WeakCallbackType::kParameter);
+  object_b.handle.SetWeak(&object_b, &SetFlag,
+                          v8::WeakCallbackType::kParameter);
+  CHECK(!object_b.handle.IsIndependent());
+  object_a.handle.MarkIndependent();
+  object_b.handle.MarkIndependent();
+  CHECK(object_b.handle.IsIndependent());
+  if (global_gc) {
+    CcTest::CollectAllGarbage();
+  } else {
+    CcTest::CollectGarbage(i::NEW_SPACE);
+  }
+  // A single GC should be enough to reclaim the memory, since we are using
+  // phantom handles.
+  CHECK_GT(big_heap_size - big_array_size, CcTest::heap()->SizeOfObjects());
+  CHECK(object_a.flag);
+  CHECK(object_b.flag);
+}
+
+TEST(IndependentWeakHandle) {
+  IndependentWeakHandle(false, false);
+  IndependentWeakHandle(false, true);
+  IndependentWeakHandle(true, false);
+  IndependentWeakHandle(true, true);
+}
+
 class Trivial {
  public:
  explicit Trivial(int x) : x_(x) {}
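For reference (invocation assumed; the output path depends on the build
configuration): the re-added test above can be run in isolation with the
cctest binary, e.g. out.gn/x64.release/cctest test-api/IndependentWeakHandle.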
@@ -7880,6 +7954,125 @@ THREADED_TEST(InternalFieldCallback) {
   InternalFieldCallback(true);
 }
 
+static void ResetUseValueAndSetFlag(
+    const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
+  // Blink will reset the handle, and then use the other handle, so they
+  // can't use the same backing slot.
+  data.GetParameter()->handle.Reset();
+  data.GetParameter()->flag = true;
+}
+
+void v8::internal::heap::HeapTester::ResetWeakHandle(bool global_gc) {
+  using v8::Context;
+  using v8::Local;
+  using v8::Object;
+
+  v8::Isolate* iso = CcTest::isolate();
+  v8::HandleScope scope(iso);
+  v8::Local<Context> context = Context::New(iso);
+  Context::Scope context_scope(context);
+
+  FlagAndPersistent object_a, object_b;
+
+  {
+    v8::HandleScope handle_scope(iso);
+    Local<Object> a(v8::Object::New(iso));
+    Local<Object> b(v8::Object::New(iso));
+    object_a.handle.Reset(iso, a);
+    object_b.handle.Reset(iso, b);
+    if (global_gc) {
+      CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+    } else {
+      CcTest::CollectGarbage(i::NEW_SPACE);
+    }
+  }
+
+  object_a.flag = false;
+  object_b.flag = false;
+  object_a.handle.SetWeak(&object_a, &ResetUseValueAndSetFlag,
+                          v8::WeakCallbackType::kParameter);
+  object_b.handle.SetWeak(&object_b, &ResetUseValueAndSetFlag,
+                          v8::WeakCallbackType::kParameter);
+  if (!global_gc) {
+    object_a.handle.MarkIndependent();
+    object_b.handle.MarkIndependent();
+    CHECK(object_b.handle.IsIndependent());
+  }
+  if (global_gc) {
+    CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+  } else {
+    CcTest::CollectGarbage(i::NEW_SPACE);
+  }
+  CHECK(object_a.flag);
+  CHECK(object_b.flag);
+}
+
+THREADED_HEAP_TEST(ResetWeakHandle) {
+  v8::internal::heap::HeapTester::ResetWeakHandle(false);
+  v8::internal::heap::HeapTester::ResetWeakHandle(true);
+}
+
+static void InvokeScavenge() { CcTest::CollectGarbage(i::NEW_SPACE); }
+
+static void InvokeMarkSweep() { CcTest::CollectAllGarbage(); }
+
+static void ForceScavenge2(
+    const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
+  data.GetParameter()->flag = true;
+  InvokeScavenge();
+}
+
+static void ForceScavenge1(
+    const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
+  data.GetParameter()->handle.Reset();
+  data.SetSecondPassCallback(ForceScavenge2);
+}
+
+static void ForceMarkSweep2(
+    const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
+  data.GetParameter()->flag = true;
+  InvokeMarkSweep();
+}
+
+static void ForceMarkSweep1(
+    const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
+  data.GetParameter()->handle.Reset();
+  data.SetSecondPassCallback(ForceMarkSweep2);
+}
+
+THREADED_TEST(GCFromWeakCallbacks) {
+  v8::Isolate* isolate = CcTest::isolate();
+  v8::Locker locker(CcTest::isolate());
+  v8::HandleScope scope(isolate);
+  v8::Local<Context> context = Context::New(isolate);
+  Context::Scope context_scope(context);
+
+  static const int kNumberOfGCTypes = 2;
+  typedef v8::WeakCallbackInfo<FlagAndPersistent>::Callback Callback;
+  Callback gc_forcing_callback[kNumberOfGCTypes] = {&ForceScavenge1,
+                                                    &ForceMarkSweep1};
+
+  typedef void (*GCInvoker)();
+  GCInvoker invoke_gc[kNumberOfGCTypes] = {&InvokeScavenge, &InvokeMarkSweep};
+
+  for (int outer_gc = 0; outer_gc < kNumberOfGCTypes; outer_gc++) {
+    for (int inner_gc = 0; inner_gc < kNumberOfGCTypes; inner_gc++) {
+      FlagAndPersistent object;
+      {
+        v8::HandleScope handle_scope(isolate);
+        object.handle.Reset(isolate, v8::Object::New(isolate));
+      }
+      object.flag = false;
+      object.handle.SetWeak(&object, gc_forcing_callback[inner_gc],
+                            v8::WeakCallbackType::kParameter);
+      object.handle.MarkIndependent();
+      invoke_gc[outer_gc]();
+      EmptyMessageQueues(isolate);
+      CHECK(object.flag);
+    }
+  }
+}
+
 v8::Local<Function> args_fun;