[heap] Handle old-to-new slot promotion to shared heap
The GC might promote an in-place internalizable string from new space directly into the shared heap. This means that the GC might need to create OLD_TO_SHARED slots when updating OLD_TO_NEW slots. This CL implements this for both minor and full GCs.

Bug: v8:11708
Change-Id: I6102b9024d1dd5dd602d654b006ea5897ab5baa6
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3804604
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82298}
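For context, the mechanism can be sketched in a few lines of standalone C++. This is a simplified illustration, not the real V8 API: every type below is a hypothetical stand-in, and only the helper's role mirrors the CL's CheckOldToNewSlotForSharedUntyped. While visiting an OLD_TO_NEW slot after evacuation, the (possibly relocated) target is re-checked, and if it now lives in the shared writable heap, the slot is also recorded in the OLD_TO_SHARED remembered set.

#include <cstdint>
#include <set>

// Simplified stand-ins for V8's heap machinery; purely illustrative.
enum RememberedSetType { OLD_TO_NEW = 0, OLD_TO_SHARED = 1 };

struct HeapObject {
  bool in_shared_writable_heap;
  bool InSharedWritableHeap() const { return in_shared_writable_heap; }
};

struct Slot {
  std::uintptr_t address;
  HeapObject* target;  // nullptr models a cleared or non-heap slot value
};

struct MemoryChunk {
  // One slot set per remembered-set type (real V8 uses bucketed SlotSets).
  std::set<std::uintptr_t> slots[2];
  void Insert(RememberedSetType type, std::uintptr_t addr) {
    slots[type].insert(addr);
  }
};

// After the usual OLD_TO_NEW handling of a slot, re-check its target and
// record an OLD_TO_SHARED entry if the GC promoted the target into the
// shared heap.
void CheckOldToNewSlotForShared(MemoryChunk* chunk, const Slot& slot) {
  if (slot.target != nullptr && slot.target->InSharedWritableHeap()) {
    chunk->Insert(OLD_TO_SHARED, slot.address);
  }
}

int main() {
  MemoryChunk chunk;
  HeapObject promoted_string{true};  // pretend the GC moved it to the shared heap
  Slot slot{0x40, &promoted_string};
  chunk.Insert(OLD_TO_NEW, slot.address);    // pre-existing old-to-new entry
  CheckOldToNewSlotForShared(&chunk, slot);  // now also tracked as old-to-shared
  return chunk.slots[OLD_TO_SHARED].count(slot.address) == 1 ? 0 : 1;
}

In the actual CL this check runs inside the OLD_TO_NEW iteration callbacks of both the scavenger and the full GC's remembered-set updating phase, and only when the isolate has a shared isolate attached, as the hunks below show.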
@@ -47,6 +47,7 @@
#include "src/heap/parallel-work-item.h"
#include "src/heap/read-only-heap.h"
#include "src/heap/read-only-spaces.h"
#include "src/heap/remembered-set.h"
#include "src/heap/safepoint.h"
#include "src/heap/slot-set.h"
#include "src/heap/spaces-inl.h"
@@ -4645,6 +4646,44 @@ class RememberedSetUpdatingItem : public UpdatingItem {
  }

 private:
  template <typename TSlot>
  inline void CheckOldToNewSlotForSharedUntyped(MemoryChunk* chunk,
                                                TSlot slot) {
    HeapObject heap_object;

    if (!(*slot).GetHeapObject(&heap_object)) {
      return;
    }

    if (heap_object.InSharedWritableHeap()) {
      RememberedSet<OLD_TO_SHARED>::Insert<AccessMode::NON_ATOMIC>(
          chunk, slot.address());
    }
  }

  inline void CheckOldToNewSlotForSharedTyped(MemoryChunk* chunk,
                                              SlotType slot_type,
                                              Address addr) {
    HeapObject heap_object =
        UpdateTypedSlotHelper::GetTargetObject(chunk->heap(), slot_type, addr);

#if DEBUG
    UpdateTypedSlotHelper::UpdateTypedSlot(
        chunk->heap(), slot_type, addr,
        [heap_object](FullMaybeObjectSlot slot) {
          DCHECK_EQ((*slot).GetHeapObjectAssumeStrong(), heap_object);
          return KEEP_SLOT;
        });
#endif  // DEBUG

    if (heap_object.InSharedWritableHeap()) {
      const uintptr_t offset = addr - chunk->address();
      DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset));
      RememberedSet<OLD_TO_SHARED>::InsertTyped(chunk, slot_type,
                                                static_cast<uint32_t>(offset));
    }
  }

  template <typename TSlot>
  inline SlotCallbackResult CheckAndUpdateOldToNewSlot(TSlot slot) {
    static_assert(
@@ -4705,11 +4744,18 @@ class RememberedSetUpdatingItem : public UpdatingItem {
            : InvalidatedSlotsFilter::LivenessCheck::kNo;
    InvalidatedSlotsFilter filter =
        InvalidatedSlotsFilter::OldToNew(chunk_, liveness_check);
    const bool has_shared_isolate = this->heap_->isolate()->shared_isolate();
    int slots = RememberedSet<OLD_TO_NEW>::Iterate(
        chunk_,
        [this, &filter](MaybeObjectSlot slot) {
        [this, &filter, has_shared_isolate](MaybeObjectSlot slot) {
          if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
          return CheckAndUpdateOldToNewSlot(slot);
          SlotCallbackResult result = CheckAndUpdateOldToNewSlot(slot);
          // A new space string might have been promoted into the shared heap
          // during GC.
          if (has_shared_isolate) {
            CheckOldToNewSlotForSharedUntyped(chunk_, slot);
          }
          return result;
        },
        SlotSet::FREE_EMPTY_BUCKETS);

@@ -4800,15 +4846,24 @@ class RememberedSetUpdatingItem : public UpdatingItem {
  void UpdateTypedPointers() {
    if (chunk_->typed_slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() !=
        nullptr) {
      const bool has_shared_isolate = heap_->isolate()->shared_isolate();
      CHECK_NE(chunk_->owner(), heap_->map_space());
      const auto check_and_update_old_to_new_slot_fn =
          [this](FullMaybeObjectSlot slot) {
            return CheckAndUpdateOldToNewSlot(slot);
          };
      RememberedSet<OLD_TO_NEW>::IterateTyped(
          chunk_, [=](SlotType slot_type, Address slot) {
            return UpdateTypedSlotHelper::UpdateTypedSlot(
          chunk_,
          [this, has_shared_isolate, &check_and_update_old_to_new_slot_fn](
              SlotType slot_type, Address slot) {
            SlotCallbackResult result = UpdateTypedSlotHelper::UpdateTypedSlot(
                heap_, slot_type, slot, check_and_update_old_to_new_slot_fn);
            // A new space string might have been promoted into the shared heap
            // during GC.
            if (has_shared_isolate) {
              CheckOldToNewSlotForSharedTyped(chunk_, slot_type, slot);
            }
            return result;
          });
    }
    if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
@@ -5,8 +5,10 @@
#ifndef V8_HEAP_REMEMBERED_SET_INL_H_
#define V8_HEAP_REMEMBERED_SET_INL_H_

#include "src/codegen/assembler-inl.h"
#include "src/common/ptr-compr-inl.h"
#include "src/heap/remembered-set.h"
#include "src/objects/heap-object.h"

namespace v8 {
namespace internal {
@@ -56,6 +58,44 @@ SlotCallbackResult UpdateTypedSlotHelper::UpdateTypedSlot(Heap* heap,
  UNREACHABLE();
}

HeapObject UpdateTypedSlotHelper::GetTargetObject(Heap* heap,
                                                  SlotType slot_type,
                                                  Address addr) {
  switch (slot_type) {
    case SlotType::kCodeEntry: {
      RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, Code());
      return Code::GetCodeFromTargetAddress(rinfo.target_address());
    }
    case SlotType::kConstPoolCodeEntry: {
      return Code::GetObjectFromEntryAddress(addr);
    }
    case SlotType::kEmbeddedObjectCompressed: {
      RelocInfo rinfo(addr, RelocInfo::COMPRESSED_EMBEDDED_OBJECT, 0, Code());
      return rinfo.target_object(heap->isolate());
    }
    case SlotType::kEmbeddedObjectFull: {
      RelocInfo rinfo(addr, RelocInfo::FULL_EMBEDDED_OBJECT, 0, Code());
      return rinfo.target_object(heap->isolate());
    }
    case SlotType::kEmbeddedObjectData: {
      RelocInfo rinfo(addr, RelocInfo::DATA_EMBEDDED_OBJECT, 0, Code());
      return rinfo.target_object(heap->isolate());
    }
    case SlotType::kConstPoolEmbeddedObjectCompressed: {
      Address full =
          DecompressTaggedAny(heap->isolate(), base::Memory<Tagged_t>(addr));
      return HeapObject::cast(Object(full));
    }
    case SlotType::kConstPoolEmbeddedObjectFull: {
      FullHeapObjectSlot slot(addr);
      return (*slot).GetHeapObjectAssumeStrong(heap->isolate());
    }
    case SlotType::kCleared:
      break;
  }
  UNREACHABLE();
}

}  // namespace internal
}  // namespace v8
#endif  // V8_HEAP_REMEMBERED_SET_INL_H_
@@ -299,6 +299,10 @@ class UpdateTypedSlotHelper {
  static SlotCallbackResult UpdateTypedSlot(Heap* heap, SlotType slot_type,
                                            Address addr, Callback callback);

  // Returns the HeapObject referenced by the given typed slot entry.
  inline static HeapObject GetTargetObject(Heap* heap, SlotType slot_type,
                                           Address addr);

 private:
  // Updates a code entry slot using an untyped slot callback.
  // The callback accepts FullMaybeObjectSlot and returns SlotCallbackResult.
@@ -20,6 +20,7 @@
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/remembered-set-inl.h"
#include "src/heap/scavenger-inl.h"
#include "src/heap/slot-set.h"
#include "src/heap/sweeper.h"
#include "src/objects/data-handler-inl.h"
#include "src/objects/embedder-data-array-inl.h"
@@ -661,15 +662,20 @@ void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {

void Scavenger::ScavengePage(MemoryChunk* page) {
  CodePageMemoryModificationScope memory_modification_scope(page);
  const bool has_shared_isolate = heap_->isolate()->shared_isolate();

  if (page->slot_set<OLD_TO_NEW, AccessMode::ATOMIC>() != nullptr) {
    InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew(
        page, InvalidatedSlotsFilter::LivenessCheck::kNo);
    RememberedSet<OLD_TO_NEW>::IterateAndTrackEmptyBuckets(
        page,
        [this, &filter](MaybeObjectSlot slot) {
        [this, page, has_shared_isolate, &filter](MaybeObjectSlot slot) {
          if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
          return CheckAndScavengeObject(heap_, slot);
          SlotCallbackResult result = CheckAndScavengeObject(heap_, slot);
          // A new space string might have been promoted into the shared heap
          // during GC.
          if (has_shared_isolate) CheckOldToNewSlotForSharedUntyped(page, slot);
          return result;
        },
        &empty_chunks_local_);
  }
@@ -681,10 +687,18 @@ void Scavenger::ScavengePage(MemoryChunk* page) {
  }

  RememberedSet<OLD_TO_NEW>::IterateTyped(
      page, [=](SlotType type, Address addr) {
      page, [=](SlotType slot_type, Address slot_address) {
        return UpdateTypedSlotHelper::UpdateTypedSlot(
            heap_, type, addr, [this](FullMaybeObjectSlot slot) {
              return CheckAndScavengeObject(heap(), slot);
            heap_, slot_type, slot_address,
            [this, page, slot_type, slot_address,
             has_shared_isolate](FullMaybeObjectSlot slot) {
              SlotCallbackResult result = CheckAndScavengeObject(heap(), slot);
              // A new space string might have been promoted into the shared
              // heap during GC.
              if (has_shared_isolate) {
                CheckOldToNewSlotForSharedTyped(page, slot_type, slot_address);
              }
              return result;
            });
      });

@@ -813,6 +827,44 @@ void Scavenger::AddEphemeronHashTable(EphemeronHashTable table) {
  ephemeron_table_list_local_.Push(table);
}

template <typename TSlot>
void Scavenger::CheckOldToNewSlotForSharedUntyped(MemoryChunk* chunk,
                                                  TSlot slot) {
  MaybeObject object = *slot;
  HeapObject heap_object;

  if (object.GetHeapObject(&heap_object) &&
      heap_object.InSharedWritableHeap()) {
    RememberedSet<OLD_TO_SHARED>::Insert<AccessMode::ATOMIC>(chunk,
                                                             slot.address());
  }
}

void Scavenger::CheckOldToNewSlotForSharedTyped(MemoryChunk* chunk,
                                                SlotType slot_type,
                                                Address slot_address) {
  HeapObject heap_object = UpdateTypedSlotHelper::GetTargetObject(
      chunk->heap(), slot_type, slot_address);

#if DEBUG
  UpdateTypedSlotHelper::UpdateTypedSlot(
      chunk->heap(), slot_type, slot_address,
      [heap_object](FullMaybeObjectSlot slot) {
        DCHECK_EQ((*slot).GetHeapObjectAssumeStrong(), heap_object);
        return KEEP_SLOT;
      });
#endif  // DEBUG

  if (heap_object.InSharedWritableHeap()) {
    const uintptr_t offset = slot_address - chunk->address();
    DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset));

    base::MutexGuard guard(chunk->mutex());
    RememberedSet<OLD_TO_SHARED>::InsertTyped(chunk, slot_type,
                                              static_cast<uint32_t>(offset));
  }
}

void RootScavengeVisitor::VisitRootPointer(Root root, const char* description,
                                           FullObjectSlot p) {
  DCHECK(!HasWeakHeapObjectTag(*p));
@@ -9,6 +9,7 @@
#include "src/heap/base/worklist.h"
#include "src/heap/evacuation-allocator.h"
#include "src/heap/index-generator.h"
#include "src/heap/memory-chunk.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/parallel-work-item.h"
#include "src/heap/slot-set.h"
@@ -128,6 +129,11 @@ class Scavenger {
  template <typename TSlot>
  inline SlotCallbackResult CheckAndScavengeObject(Heap* heap, TSlot slot);

  template <typename TSlot>
  inline void CheckOldToNewSlotForSharedUntyped(MemoryChunk* chunk, TSlot slot);
  inline void CheckOldToNewSlotForSharedTyped(MemoryChunk* chunk,
                                              SlotType slot_type, Address slot);

  // Scavenges an object |object| referenced from slot |p|. |object| is required
  // to be in from space.
  template <typename THeapObjectSlot>
@@ -1660,6 +1660,14 @@ RUNTIME_FUNCTION(Runtime_IsSharedString) {
                                   Handle<String>::cast(obj)->IsShared());
}

RUNTIME_FUNCTION(Runtime_IsInPlaceInternalizableString) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<HeapObject> obj = args.at<HeapObject>(0);
  return isolate->heap()->ToBoolean(
      obj->IsString() && String::IsInPlaceInternalizable(String::cast(*obj)));
}

RUNTIME_FUNCTION(Runtime_IsInternalizedString) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
@@ -538,6 +538,7 @@ namespace internal {
  F(IsConcatSpreadableProtector, 0, 1)        \
  F(IsConcurrentRecompilationSupported, 0, 1) \
  F(IsDictPropertyConstTrackingEnabled, 0, 1) \
  F(IsInPlaceInternalizableString, 1, 1)      \
  F(IsInternalizedString, 1, 1)               \
  F(IsSameHeapObject, 2, 1)                   \
  F(IsSharedString, 1, 1)                     \
@@ -4,9 +4,14 @@

#include "include/v8-initialization.h"
#include "src/base/strings.h"
#include "src/common/globals.h"
#include "src/heap/factory.h"
#include "src/heap/heap-inl.h"
#include "src/heap/memory-chunk-layout.h"
#include "src/heap/memory-chunk.h"
#include "src/heap/parked-scope.h"
#include "src/heap/remembered-set.h"
#include "src/objects/fixed-array.h"
#include "src/objects/objects-inl.h"
#include "test/cctest/cctest.h"

@@ -750,6 +755,109 @@ UNINITIALIZED_TEST(PromotionScavenge) {
  }
}

UNINITIALIZED_TEST(PromotionScavengeOldToShared) {
  if (FLAG_single_generation) return;
  if (!ReadOnlyHeap::IsReadOnlySpaceShared()) return;
  if (!COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL) return;
  if (FLAG_stress_concurrent_allocation) return;

  FLAG_shared_string_table = true;

  MultiClientIsolateTest test;
  IsolateWrapper isolate_wrapper(test.NewClientIsolate());
  v8::Isolate* isolate = isolate_wrapper.isolate;
  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
  Factory* factory = i_isolate->factory();
  Heap* heap = i_isolate->heap();
  ManualGCScope manual_gc(i_isolate);

  const char raw_one_byte[] = "foo";

  {
    HandleScope scope(i_isolate);

    Handle<FixedArray> old_object =
        factory->NewFixedArray(1, AllocationType::kOld);
    MemoryChunk* old_object_chunk = MemoryChunk::FromHeapObject(*old_object);
    CHECK(!old_object_chunk->InYoungGeneration());

    Handle<String> one_byte_seq = factory->NewStringFromAsciiChecked(
        raw_one_byte, AllocationType::kYoung);
    CHECK(String::IsInPlaceInternalizable(*one_byte_seq));
    CHECK(MemoryChunk::FromHeapObject(*one_byte_seq)->InYoungGeneration());

    old_object->set(0, *one_byte_seq);
    ObjectSlot slot = old_object->GetFirstElementAddress();
    CHECK(
        RememberedSet<OLD_TO_NEW>::Contains(old_object_chunk, slot.address()));

    for (int i = 0; i < 2; i++) {
      heap->CollectGarbage(NEW_SPACE, GarbageCollectionReason::kTesting);
    }

    // In-place-internalizable strings are promoted into the shared heap when
    // sharing.
    CHECK(!heap->Contains(*one_byte_seq));
    CHECK(heap->SharedHeapContains(*one_byte_seq));

    // Since the GC promoted that string into shared heap, it also needs to
    // create an OLD_TO_SHARED slot.
    CHECK(RememberedSet<OLD_TO_SHARED>::Contains(old_object_chunk,
                                                 slot.address()));
  }
}

UNINITIALIZED_TEST(PromotionMarkCompactOldToShared) {
  if (FLAG_single_generation) return;
  if (!ReadOnlyHeap::IsReadOnlySpaceShared()) return;
  if (!COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL) return;
  if (FLAG_stress_concurrent_allocation) return;

  FLAG_shared_string_table = true;
  FLAG_manual_evacuation_candidates_selection = true;

  MultiClientIsolateTest test;
  IsolateWrapper isolate_wrapper(test.NewClientIsolate());
  v8::Isolate* isolate = isolate_wrapper.isolate;
  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
  Factory* factory = i_isolate->factory();
  Heap* heap = i_isolate->heap();
  ManualGCScope manual_gc(i_isolate);

  const char raw_one_byte[] = "foo";

  {
    HandleScope scope(i_isolate);

    Handle<FixedArray> old_object =
        factory->NewFixedArray(1, AllocationType::kOld);
    MemoryChunk* old_object_chunk = MemoryChunk::FromHeapObject(*old_object);
    CHECK(!old_object_chunk->InYoungGeneration());

    Handle<String> one_byte_seq = factory->NewStringFromAsciiChecked(
        raw_one_byte, AllocationType::kYoung);
    CHECK(String::IsInPlaceInternalizable(*one_byte_seq));
    CHECK(MemoryChunk::FromHeapObject(*one_byte_seq)->InYoungGeneration());

    old_object->set(0, *one_byte_seq);
    ObjectSlot slot = old_object->GetFirstElementAddress();
    CHECK(
        RememberedSet<OLD_TO_NEW>::Contains(old_object_chunk, slot.address()));

    heap->CollectGarbage(OLD_SPACE, GarbageCollectionReason::kTesting);

    // In-place-internalizable strings are promoted into the shared heap when
    // sharing.
    CHECK(!heap->Contains(*one_byte_seq));
    CHECK(heap->SharedHeapContains(*one_byte_seq));

    // Since the GC promoted that string into shared heap, it also needs to
    // create an OLD_TO_SHARED slot.
    CHECK(RememberedSet<OLD_TO_SHARED>::Contains(old_object_chunk,
                                                 slot.address()));
  }
}

UNINITIALIZED_TEST(SharedStringsTransitionDuringGC) {
  if (!ReadOnlyHeap::IsReadOnlySpaceShared()) return;
  if (!COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL) return;
test/mjsunit/shared-memory/shared-string-promotion-major.js (new file, 27 lines)
@@ -0,0 +1,27 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Flags: --expose-gc --allow-natives-syntax --verify-heap --shared-string-table

const old = {};
old.bar = 100;

gc();
assertFalse(%InYoungGeneration(old));

const foo = 'a'.repeat(9);
assertTrue(%InYoungGeneration(foo));
assertTrue(%IsInPlaceInternalizableString(foo));

// Create old-to-new reference.
old.foo = foo;

// A full GC would usually promote that string into old space; with
// --shared-string-table it is promoted into the shared heap instead. This
// should create an old-to-shared reference from an old-to-new slot.
gc();

// An additional full GC for heap verification.
gc();
test/mjsunit/shared-memory/shared-string-promotion-minor.js (new file, 28 lines)
@@ -0,0 +1,28 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Flags: --expose-gc --allow-natives-syntax --verify-heap --shared-string-table

const old = {};
old.bar = 100;

gc();
assertFalse(%InYoungGeneration(old));

const foo = 'a'.repeat(9);
assertTrue(%InYoungGeneration(foo));
assertTrue(%IsInPlaceInternalizableString(foo));

// Create old-to-new reference.
old.foo = foo;

// The second minor GC would usually promote that string into old space; with
// --shared-string-table it is promoted into the shared heap instead. This
// should create an old-to-shared reference from an old-to-new slot.
gc({type: "minor"});
gc({type: "minor"});

// An additional full GC for heap verification.
gc();