Revert "Use list of invalidated objects for old-to-new refs"

This reverts commit 60843b426b.

Reason for revert: TSAN detected a data race between Scavenger tasks.
One task could invoke RefillFreeList() while another task iterates the remembered set of a swept page.
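
For context, the reported race is the classic pattern of one thread mutating a container while another iterates it without synchronization. A minimal standalone sketch (plain C++, not V8 code; names are illustrative) that ThreadSanitizer flags the same way:

#include <map>
#include <thread>

int main() {
  // Stand-in for a page's invalidated-objects map (object start -> size).
  std::map<int, int> invalidated_objects = {{1, 16}, {2, 32}};

  // Stand-in for the task calling RefillFreeList(), which may adjust
  // entry sizes for a swept page.
  std::thread refill([&] {
    for (auto& pair : invalidated_objects) pair.second -= 8;
  });

  // Stand-in for the Scavenger task iterating the same page's entries.
  std::thread scavenge([&] {
    long sum = 0;
    for (const auto& pair : invalidated_objects) sum += pair.second;
    (void)sum;
  });

  refill.join();
  scavenge.join();
  return 0;
}

Building with -fsanitize=thread and running reports a data race on the map entries; this is the shape of the failure that motivated the revert.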

Original change's description:
> Use list of invalidated objects for old-to-new refs
> 
> Instead of inserting "deletion" entries into the store buffer, keep a
> list of invalidated objects to filter out invalid old-to-new slots.
> 
> The first CL https://crrev.com/c/1704109 got reverted because both the
> sweeper and the main task were modifying the invalidated slots data
> structure concurrently. This CL changes this, such that the sweeper
> only modifies the invalidated slots during the final atomic pause when
> the main thread is not running. The sweeper does not need to clean this
> data structure after the pause, since the "update pointers" phase
> already removed all invalidated slots.
> 
> The second CL https://crrev.com/c/1733081 got reverted because the
> sweeper might find more free space than the full GC before it. If an
> object shrinks after the pause but before the sweep, the invalidated
> object might span free memory and potentially newly allocated objects.
> Therefore shrink invalidated objects when processing swept pages on
> the main thread. Also clean recorded slots in the gap.
> 
> TBR=petermarshall@chromium.org
> 
> Bug: v8:9454
> Change-Id: I80d1fa3bbc24e97f7c97a373aaad66f105456f12
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1751795
> Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#63239}
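
To make the quoted approach concrete: the reverted CL kept, per page, a map from each invalidated object's start address to its size, and slot iteration skipped recorded slots that fall inside such an object. A simplified standalone model of that range lookup (hypothetical names, not the V8 implementation, which additionally asks the object's map whether a given offset is a valid slot):

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <map>

using Address = uintptr_t;

struct InvalidatedObjects {
  // Invalidated object start address -> object size in bytes.
  std::map<Address, int> objects;

  void Register(Address start, int size) {
    auto it = objects.find(start);
    if (it != objects.end()) {
      it->second = std::max(it->second, size);  // keep the larger extent
    } else {
      objects.emplace(start, size);
    }
  }

  // True if the recorded slot lies inside some invalidated object and
  // therefore must not be used as-is.
  bool Contains(Address slot) const {
    auto it = objects.upper_bound(slot);  // first object starting after slot
    if (it == objects.begin()) return false;
    --it;                                 // last object starting at or before slot
    return slot < it->first + it->second;
  }
};

int main() {
  InvalidatedObjects inv;
  inv.Register(0x1000, 0x40);      // 64-byte object whose layout changed
  assert(inv.Contains(0x1008));    // slot inside it is filtered out
  assert(!inv.Contains(0x2000));   // unrelated slots pass through
  return 0;
}

Compared with writing a deletion entry into the store buffer for every cleared slot, one map entry covers arbitrarily many slots inside the object, at the cost of an extra lookup while iterating the remembered set.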

TBR=ulan@chromium.org,hpayer@chromium.org,dinfuehr@chromium.org

Change-Id: I9c6a371ebe36a1873acbe0d6c6a75dd2f5a55f4e
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: v8:9454
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1760817
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63253}
Dominik Inführ, 2019-08-19 16:22:01 +00:00 (committed by Commit Bot)
parent 612cb2cb65
commit 9a9ba762bf
35 changed files with 561 additions and 768 deletions

View File

@ -291,7 +291,7 @@ Handle<HeapObject> Factory::NewFillerObject(int size, bool double_align,
Heap* heap = isolate()->heap();
HeapObject result =
heap->AllocateRawWithRetryOrFail(size, allocation, origin, alignment);
heap->CreateFillerObjectAt(result.address(), size);
heap->CreateFillerObjectAt(result.address(), size, ClearRecordedSlots::kNo);
return Handle<HeapObject>(result, isolate());
}

View File

@ -1108,15 +1108,6 @@ void Heap::GarbageCollectionEpilogue() {
AllowHeapAllocation for_the_rest_of_the_epilogue;
#ifdef DEBUG
// Old-to-new slot sets must be empty after each collection.
for (SpaceIterator it(this); it.HasNext();) {
Space* space = it.Next();
for (MemoryChunk* chunk = space->first_page(); chunk != space->last_page();
chunk = chunk->list_node().next())
DCHECK_NULL(chunk->invalidated_slots<OLD_TO_NEW>());
}
if (FLAG_print_global_handles) isolate_->global_handles()->Print();
if (FLAG_print_handles) PrintHandles();
if (FLAG_gc_verbose) Print();
@ -1490,7 +1481,7 @@ void Heap::EnsureFillerObjectAtTop() {
Page* page = Page::FromAddress(to_top - kTaggedSize);
if (page->Contains(to_top)) {
int remaining_in_page = static_cast<int>(page->area_end() - to_top);
CreateFillerObjectAt(to_top, remaining_in_page);
CreateFillerObjectAt(to_top, remaining_in_page, ClearRecordedSlots::kNo);
}
}
@ -1821,7 +1812,8 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
// Mark with a free list node, in case we have a GC before
// deserializing.
Address free_space_address = free_space.address();
CreateFillerObjectAt(free_space_address, Map::kSize);
CreateFillerObjectAt(free_space_address, Map::kSize,
ClearRecordedSlots::kNo);
maps->push_back(free_space_address);
} else {
perform_gc = true;
@ -1852,7 +1844,8 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
// Mark with a free list node, in case we have a GC before
// deserializing.
Address free_space_address = free_space.address();
CreateFillerObjectAt(free_space_address, size);
CreateFillerObjectAt(free_space_address, size,
ClearRecordedSlots::kNo);
DCHECK(IsPreAllocatedSpace(static_cast<SnapshotSpace>(space)));
chunk.start = free_space_address;
chunk.end = free_space_address + size;
@ -2757,7 +2750,7 @@ size_t Heap::GetCodeRangeReservedAreaSize() {
}
HeapObject Heap::PrecedeWithFiller(HeapObject object, int filler_size) {
CreateFillerObjectAt(object.address(), filler_size);
CreateFillerObjectAt(object.address(), filler_size, ClearRecordedSlots::kNo);
return HeapObject::FromAddress(object.address() + filler_size);
}
@ -2772,7 +2765,8 @@ HeapObject Heap::AlignWithFiller(HeapObject object, int object_size,
filler_size -= pre_filler;
}
if (filler_size) {
CreateFillerObjectAt(object.address() + object_size, filler_size);
CreateFillerObjectAt(object.address() + object_size, filler_size,
ClearRecordedSlots::kNo);
}
return object;
}
@ -2822,6 +2816,7 @@ void Heap::FlushNumberStringCache() {
}
HeapObject Heap::CreateFillerObjectAt(Address addr, int size,
ClearRecordedSlots clear_slots_mode,
ClearFreedMemoryMode clear_memory_mode) {
if (size == 0) return HeapObject();
HeapObject filler = HeapObject::FromAddress(addr);
@ -2848,6 +2843,9 @@ HeapObject Heap::CreateFillerObjectAt(Address addr, int size,
(size / kTaggedSize) - 2);
}
}
if (clear_slots_mode == ClearRecordedSlots::kYes) {
ClearRecordedSlotRange(addr, addr + size);
}
// At this point, we may be deserializing the heap from a snapshot, and
// none of the maps have been created yet and are nullptr.
@ -2984,7 +2982,8 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
// Technically in new space this write might be omitted (except for
// debug mode which iterates through the heap), but to play safer
// we still do it.
HeapObject filler = CreateFillerObjectAt(old_start, bytes_to_trim);
HeapObject filler =
CreateFillerObjectAt(old_start, bytes_to_trim, ClearRecordedSlots::kYes);
// Initialize header of the trimmed array. Since left trimming is only
// performed on pages which are not concurrently swept creating a filler
@ -2996,9 +2995,10 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
FixedArrayBase new_object =
FixedArrayBase::cast(HeapObject::FromAddress(new_start));
// Move corresponding invalidated object to the right
MemoryChunk::FromHeapObject(new_object)
->MoveObjectWithInvalidatedSlots<OLD_TO_NEW>(filler, new_object);
// Remove recorded slots for the new map and length offset.
ClearRecordedSlot(new_object, new_object.RawField(0));
ClearRecordedSlot(new_object,
new_object.RawField(FixedArrayBase::kLengthOffset));
// Handle invalidated old-to-old slots.
if (incremental_marking()->IsCompacting() &&
@ -3006,7 +3006,7 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
// If the array was right-trimmed before, then it is registered in
// the invalidated_slots.
MemoryChunk::FromHeapObject(new_object)
->MoveObjectWithInvalidatedSlots<OLD_TO_OLD>(filler, new_object);
->MoveObjectWithInvalidatedSlots(filler, new_object);
// We have to clear slots in the free space to avoid stale old-to-old slots.
// Note we cannot use ClearFreedMemoryMode of CreateFillerObjectAt because
// we need pointer granularity writes to avoid race with the concurrent
@ -3094,8 +3094,8 @@ void Heap::CreateFillerForArray(T object, int elements_to_trim,
// Ensure that the object survives because the InvalidatedSlotsFilter will
// compute its size from its map during pointers updating phase.
incremental_marking()->WhiteToGreyAndPush(object);
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
chunk->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object, old_size);
MemoryChunk::FromHeapObject(object)->RegisterObjectWithInvalidatedSlots(
object, old_size);
}
// Technically in new space this write might be omitted (except for
@ -3103,7 +3103,8 @@ void Heap::CreateFillerForArray(T object, int elements_to_trim,
// we still do it.
// We do not create a filler for objects in a large object space.
if (!IsLargeObject(object)) {
HeapObject filler = CreateFillerObjectAt(new_end, bytes_to_trim);
HeapObject filler =
CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kNo);
DCHECK(!filler.is_null());
// Clear the mark bits of the black area that belongs now to the filler.
// This is an optimization. The sweeper will release black fillers anyway.
@ -3386,8 +3387,8 @@ void Heap::NotifyObjectLayoutChange(HeapObject object, int size,
incremental_marking()->MarkBlackAndVisitObjectDueToLayoutChange(object);
if (incremental_marking()->IsCompacting() &&
MayContainRecordedSlots(object)) {
MemoryChunk::FromHeapObject(object)
->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object, size);
MemoryChunk::FromHeapObject(object)->RegisterObjectWithInvalidatedSlots(
object, size);
}
}
#ifdef VERIFY_HEAP
@ -4861,7 +4862,8 @@ HeapObject Heap::EnsureImmovableCode(HeapObject heap_object, int object_size) {
} else {
// Discard the first code allocation, which was on a page where it could
// be moved.
CreateFillerObjectAt(heap_object.address(), object_size);
CreateFillerObjectAt(heap_object.address(), object_size,
ClearRecordedSlots::kNo);
heap_object = AllocateRawCodeInLargeObjectSpace(object_size);
UnprotectAndRegisterMemoryChunk(heap_object);
ZapCodeObject(heap_object.address(), object_size);
@ -5517,6 +5519,15 @@ Address Heap::store_buffer_overflow_function_address() {
return FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow);
}
void Heap::ClearRecordedSlot(HeapObject object, ObjectSlot slot) {
DCHECK(!IsLargeObject(object));
Page* page = Page::FromAddress(slot.address());
if (!page->InYoungGeneration()) {
DCHECK_EQ(page->owner_identity(), OLD_SPACE);
store_buffer()->DeleteEntry(slot.address());
}
}
#ifdef DEBUG
void Heap::VerifyClearedSlot(HeapObject object, ObjectSlot slot) {
DCHECK(!IsLargeObject(object));
@ -5524,14 +5535,22 @@ void Heap::VerifyClearedSlot(HeapObject object, ObjectSlot slot) {
Page* page = Page::FromAddress(slot.address());
DCHECK_EQ(page->owner_identity(), OLD_SPACE);
store_buffer()->MoveAllEntriesToRememberedSet();
CHECK_IMPLIES(RememberedSet<OLD_TO_NEW>::Contains(page, slot.address()),
page->RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(object));
CHECK(!RememberedSet<OLD_TO_NEW>::Contains(page, slot.address()));
// Old to old slots are filtered with invalidated slots.
CHECK_IMPLIES(RememberedSet<OLD_TO_OLD>::Contains(page, slot.address()),
page->RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(object));
page->RegisteredObjectWithInvalidatedSlots(object));
}
#endif
void Heap::ClearRecordedSlotRange(Address start, Address end) {
Page* page = Page::FromAddress(start);
DCHECK(!page->IsLargePage());
if (!page->InYoungGeneration()) {
DCHECK_EQ(page->owner_identity(), OLD_SPACE);
store_buffer()->DeleteEntry(start, end);
}
}
PagedSpace* PagedSpaceIterator::Next() {
switch (counter_++) {
case RO_SPACE:

View File

@ -84,6 +84,8 @@ enum ArrayStorageAllocationMode {
INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};
enum class ClearRecordedSlots { kYes, kNo };
enum class ClearFreedMemoryMode { kClearFreedMemory, kDontClearFreedMemory };
enum ExternalBackingStoreType { kArrayBuffer, kExternalString, kNumTypes };
@ -409,13 +411,15 @@ class Heap {
WriteBarrierMode mode);
// Initialize a filler object to keep the ability to iterate over the heap
// when introducing gaps within pages. If the memory after the object header
// of the filler should be cleared, pass in kClearFreedMemory. The default is
// when introducing gaps within pages. If slots could have been recorded in
// the freed area, then pass ClearRecordedSlots::kYes as the mode. Otherwise,
// pass ClearRecordedSlots::kNo. If the memory after the object header of
// the filler should be cleared, pass in kClearFreedMemory. The default is
// kDontClearFreedMemory.
V8_EXPORT_PRIVATE HeapObject
CreateFillerObjectAt(Address addr, int size,
ClearFreedMemoryMode clear_memory_mode =
ClearFreedMemoryMode::kDontClearFreedMemory);
V8_EXPORT_PRIVATE HeapObject CreateFillerObjectAt(
Address addr, int size, ClearRecordedSlots clear_slots_mode,
ClearFreedMemoryMode clear_memory_mode =
ClearFreedMemoryMode::kDontClearFreedMemory);
template <typename T>
void CreateFillerForArray(T object, int elements_to_trim, int bytes_to_trim);
@ -839,6 +843,9 @@ class Heap {
static intptr_t store_buffer_mask_constant();
static Address store_buffer_overflow_function_address();
void ClearRecordedSlot(HeapObject object, ObjectSlot slot);
void ClearRecordedSlotRange(Address start, Address end);
#ifdef DEBUG
void VerifyClearedSlot(HeapObject object, ObjectSlot slot);
#endif

View File

@ -62,53 +62,6 @@ bool InvalidatedSlotsFilter::IsValid(Address slot) {
return invalidated_object_.IsValidSlot(invalidated_object_.map(), offset);
}
void InvalidatedSlotsCleanup::Free(Address free_start, Address free_end) {
#ifdef DEBUG
DCHECK_LT(free_start, free_end);
// Free regions should come in increasing order and do not overlap
DCHECK_LE(last_free_, free_start);
last_free_ = free_start;
#endif
if (iterator_ == iterator_end_) return;
// Ignore invalidated objects before free region
while (free_start >= invalidated_end_) {
++iterator_;
NextInvalidatedObject();
}
// Loop here: Free region might contain multiple invalidated objects
while (free_end > invalidated_start_) {
// Case: Free region starts before current invalidated object
if (free_start <= invalidated_start_) {
CHECK(invalidated_end_ <= free_end);
iterator_ = invalidated_slots_->erase(iterator_);
} else {
// Case: Free region starts within current invalidated object
// (Can happen for right-trimmed objects)
iterator_->second =
static_cast<int>(free_start - iterator_->first.address());
CHECK(free_end >= invalidated_end_);
iterator_++;
}
NextInvalidatedObject();
}
}
void InvalidatedSlotsCleanup::NextInvalidatedObject() {
if (iterator_ != iterator_end_) {
invalidated_start_ = iterator_->first.address();
invalidated_end_ = invalidated_start_ + iterator_->second;
} else {
invalidated_start_ = sentinel_;
invalidated_end_ = sentinel_;
}
}
} // namespace internal
} // namespace v8
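
The removed Free() above relies on free regions arriving in increasing address order, with an invalidated object either fully inside a free region or overlapping it only at its tail (the right-trimmed case). A condensed standalone model of that erase-or-trim step (hypothetical names, same map shape as in the sketch above):

#include <cassert>
#include <cstdint>
#include <map>

using Address = uintptr_t;
using InvalidatedSlots = std::map<Address, int>;  // object start -> size

// Erase entries fully covered by [free_start, free_end); right-trim an
// entry whose tail reaches into the free region. Assumes no entry starts
// before free_start and ends after free_end.
void Free(InvalidatedSlots& slots, Address free_start, Address free_end) {
  auto it = slots.begin();
  while (it != slots.end()) {
    Address start = it->first;
    Address end = start + it->second;
    if (end <= free_start) { ++it; continue; }  // entirely before the region
    if (start >= free_end) break;               // entirely after; map is sorted
    if (free_start <= start) {
      it = slots.erase(it);                     // fully freed object
    } else {
      it->second = static_cast<int>(free_start - start);  // trim the tail
      ++it;
    }
  }
}

int main() {
  InvalidatedSlots slots{{0x1000, 0x40}, {0x2000, 0x40}};
  Free(slots, 0x1020, 0x3000);  // trims the first entry, erases the second
  assert(slots.at(0x1000) == 0x20);
  assert(slots.count(0x2000) == 0);
  return 0;
}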

View File

@ -8,18 +8,9 @@
namespace v8 {
namespace internal {
InvalidatedSlotsFilter InvalidatedSlotsFilter::OldToOld(MemoryChunk* chunk) {
return InvalidatedSlotsFilter(chunk, chunk->invalidated_slots<OLD_TO_OLD>());
}
InvalidatedSlotsFilter InvalidatedSlotsFilter::OldToNew(MemoryChunk* chunk) {
return InvalidatedSlotsFilter(chunk, chunk->invalidated_slots<OLD_TO_NEW>());
}
InvalidatedSlotsFilter::InvalidatedSlotsFilter(
MemoryChunk* chunk, InvalidatedSlots* invalidated_slots) {
InvalidatedSlotsFilter::InvalidatedSlotsFilter(MemoryChunk* chunk) {
// Adjust slots_in_free_space_are_valid_ if more spaces are added.
DCHECK_IMPLIES(invalidated_slots != nullptr,
DCHECK_IMPLIES(chunk->invalidated_slots() != nullptr,
chunk->InOldSpace() || chunk->InLargeObjectSpace());
// The sweeper removes invalid slots and makes free space available for
// allocation. Slots for new objects can be recorded in the free space.
@ -27,8 +18,8 @@ InvalidatedSlotsFilter::InvalidatedSlotsFilter(
// object space are not swept but have SweepingDone() == true.
slots_in_free_space_are_valid_ = chunk->SweepingDone() && chunk->InOldSpace();
invalidated_slots = invalidated_slots ? invalidated_slots : &empty_;
InvalidatedSlots* invalidated_slots =
chunk->invalidated_slots() ? chunk->invalidated_slots() : &empty_;
iterator_ = invalidated_slots->begin();
iterator_end_ = invalidated_slots->end();
sentinel_ = chunk->area_end();
@ -46,37 +37,5 @@ InvalidatedSlotsFilter::InvalidatedSlotsFilter(
#endif
}
InvalidatedSlotsCleanup InvalidatedSlotsCleanup::OldToOld(MemoryChunk* chunk) {
return InvalidatedSlotsCleanup(chunk, chunk->invalidated_slots<OLD_TO_OLD>());
}
InvalidatedSlotsCleanup InvalidatedSlotsCleanup::OldToNew(MemoryChunk* chunk) {
return InvalidatedSlotsCleanup(chunk, chunk->invalidated_slots<OLD_TO_NEW>());
}
InvalidatedSlotsCleanup InvalidatedSlotsCleanup::NoCleanup(MemoryChunk* chunk) {
return InvalidatedSlotsCleanup(chunk, nullptr);
}
InvalidatedSlotsCleanup::InvalidatedSlotsCleanup(
MemoryChunk* chunk, InvalidatedSlots* invalidated_slots) {
invalidated_slots_ = invalidated_slots ? invalidated_slots : &empty_;
iterator_ = invalidated_slots_->begin();
iterator_end_ = invalidated_slots_->end();
sentinel_ = chunk->area_end();
if (iterator_ != iterator_end_) {
invalidated_start_ = iterator_->first.address();
invalidated_end_ = invalidated_start_ + iterator_->second;
} else {
invalidated_start_ = sentinel_;
invalidated_end_ = sentinel_;
}
#ifdef DEBUG
last_free_ = chunk->area_start();
#endif
}
} // namespace internal
} // namespace v8

View File

@ -30,11 +30,7 @@ using InvalidatedSlots = std::map<HeapObject, int, Object::Comparer>;
// n is the number of IsValid queries.
class V8_EXPORT_PRIVATE InvalidatedSlotsFilter {
public:
static InvalidatedSlotsFilter OldToOld(MemoryChunk* chunk);
static InvalidatedSlotsFilter OldToNew(MemoryChunk* chunk);
explicit InvalidatedSlotsFilter(MemoryChunk* chunk,
InvalidatedSlots* invalidated_slots);
explicit InvalidatedSlotsFilter(MemoryChunk* chunk);
inline bool IsValid(Address slot);
private:
@ -52,33 +48,6 @@ class V8_EXPORT_PRIVATE InvalidatedSlotsFilter {
#endif
};
class V8_EXPORT_PRIVATE InvalidatedSlotsCleanup {
public:
static InvalidatedSlotsCleanup OldToOld(MemoryChunk* chunk);
static InvalidatedSlotsCleanup OldToNew(MemoryChunk* chunk);
static InvalidatedSlotsCleanup NoCleanup(MemoryChunk* chunk);
explicit InvalidatedSlotsCleanup(MemoryChunk* chunk,
InvalidatedSlots* invalidated_slots);
inline void Free(Address free_start, Address free_end);
private:
InvalidatedSlots::iterator iterator_;
InvalidatedSlots::iterator iterator_end_;
InvalidatedSlots* invalidated_slots_;
InvalidatedSlots empty_;
Address sentinel_;
Address invalidated_start_;
Address invalidated_end_;
inline void NextInvalidatedObject();
#ifdef DEBUG
Address last_free_;
#endif
};
} // namespace internal
} // namespace v8

View File

@ -48,14 +48,16 @@ void LocalAllocator::FreeLast(AllocationSpace space, HeapObject object,
void LocalAllocator::FreeLastInNewSpace(HeapObject object, int object_size) {
if (!new_space_lab_.TryFreeLast(object, object_size)) {
// We couldn't free the last object so we have to write a proper filler.
heap_->CreateFillerObjectAt(object.address(), object_size);
heap_->CreateFillerObjectAt(object.address(), object_size,
ClearRecordedSlots::kNo);
}
}
void LocalAllocator::FreeLastInOldSpace(HeapObject object, int object_size) {
if (!compaction_spaces_.Get(OLD_SPACE)->TryFreeLast(object, object_size)) {
// We couldn't free the last object so we have to write a proper filler.
heap_->CreateFillerObjectAt(object.address(), object_size);
heap_->CreateFillerObjectAt(object.address(), object_size,
ClearRecordedSlots::kNo);
}
}

View File

@ -306,7 +306,8 @@ void EvacuationVerifier::VerifyEvacuation(PagedSpace* space) {
if (p->Contains(space->top())) {
CodePageMemoryModificationScope memory_modification_scope(p);
heap_->CreateFillerObjectAt(
space->top(), static_cast<int>(space->limit() - space->top()));
space->top(), static_cast<int>(space->limit() - space->top()),
ClearRecordedSlots::kNo);
}
VerifyEvacuationOnPage(p->area_start(), p->area_end());
}
@ -2096,7 +2097,8 @@ void MarkCompactCollector::FlushBytecodeFromSFI(
if (!heap()->IsLargeObject(compiled_data)) {
heap()->CreateFillerObjectAt(
compiled_data.address() + UncompiledDataWithoutPreparseData::kSize,
compiled_data_size - UncompiledDataWithoutPreparseData::kSize);
compiled_data_size - UncompiledDataWithoutPreparseData::kSize,
ClearRecordedSlots::kNo);
}
// Initialize the uncompiled data.
@ -2237,7 +2239,8 @@ void MarkCompactCollector::RightTrimDescriptorArray(DescriptorArray array,
RememberedSet<OLD_TO_OLD>::RemoveRange(MemoryChunk::FromHeapObject(array),
start, end,
SlotSet::PREFREE_EMPTY_BUCKETS);
heap()->CreateFillerObjectAt(start, static_cast<int>(end - start));
heap()->CreateFillerObjectAt(start, static_cast<int>(end - start),
ClearRecordedSlots::kNo);
array.set_number_of_all_descriptors(new_nof_all_descriptors);
}
@ -2686,8 +2689,7 @@ void MarkCompactCollector::EvacuateEpilogue() {
for (Page* p : *heap()->old_space()) {
DCHECK_NULL((p->slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));
DCHECK_NULL((p->typed_slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));
DCHECK_NULL(p->invalidated_slots<OLD_TO_OLD>());
DCHECK_NULL(p->invalidated_slots<OLD_TO_NEW>());
DCHECK_NULL(p->invalidated_slots());
}
#endif
}
@ -3408,32 +3410,16 @@ class RememberedSetUpdatingItem : public UpdatingItem {
void UpdateUntypedPointers() {
if (chunk_->slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() != nullptr) {
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew(chunk_);
RememberedSet<OLD_TO_NEW>::Iterate(
chunk_,
[this, &filter](MaybeObjectSlot slot) {
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
[this](MaybeObjectSlot slot) {
return CheckAndUpdateOldToNewSlot(slot);
},
SlotSet::PREFREE_EMPTY_BUCKETS);
}
if (chunk_->invalidated_slots<OLD_TO_NEW>() != nullptr) {
#ifdef DEBUG
for (auto object_size : *chunk_->invalidated_slots<OLD_TO_NEW>()) {
HeapObject object = object_size.first;
int size = object_size.second;
DCHECK_LE(object.SizeFromMap(object.map()), size);
}
#endif
// The invalidated slots are not needed after old-to-new slots were
// processed.
chunk_->ReleaseInvalidatedSlots<OLD_TO_NEW>();
}
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
(chunk_->slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() != nullptr)) {
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(chunk_);
InvalidatedSlotsFilter filter(chunk_);
RememberedSet<OLD_TO_OLD>::Iterate(
chunk_,
[&filter](MaybeObjectSlot slot) {
@ -3443,9 +3429,9 @@ class RememberedSetUpdatingItem : public UpdatingItem {
SlotSet::PREFREE_EMPTY_BUCKETS);
}
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
chunk_->invalidated_slots<OLD_TO_OLD>() != nullptr) {
chunk_->invalidated_slots() != nullptr) {
#ifdef DEBUG
for (auto object_size : *chunk_->invalidated_slots<OLD_TO_OLD>()) {
for (auto object_size : *chunk_->invalidated_slots()) {
HeapObject object = object_size.first;
int size = object_size.second;
DCHECK_LE(object.SizeFromMap(object.map()), size);
@ -3453,7 +3439,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
#endif
// The invalidated slots are not needed after old-to-old slots were
// processed.
chunk_->ReleaseInvalidatedSlots<OLD_TO_OLD>();
chunk_->ReleaseInvalidatedSlots();
}
}
@ -3567,17 +3553,13 @@ int MarkCompactCollectorBase::CollectRememberedSetUpdatingItems(
const bool contains_old_to_new_slots =
chunk->slot_set<OLD_TO_NEW>() != nullptr ||
chunk->typed_slot_set<OLD_TO_NEW>() != nullptr;
const bool contains_old_to_old_invalidated_slots =
chunk->invalidated_slots<OLD_TO_OLD>() != nullptr;
const bool contains_old_to_new_invalidated_slots =
chunk->invalidated_slots<OLD_TO_NEW>() != nullptr;
const bool contains_invalidated_slots =
chunk->invalidated_slots() != nullptr;
if (!contains_old_to_new_slots && !contains_old_to_old_slots &&
!contains_old_to_old_invalidated_slots &&
!contains_old_to_new_invalidated_slots)
!contains_invalidated_slots)
continue;
if (mode == RememberedSetUpdatingMode::ALL || contains_old_to_new_slots ||
contains_old_to_old_invalidated_slots ||
contains_old_to_new_invalidated_slots) {
contains_invalidated_slots) {
job->AddItem(CreateRememberedSetUpdatingItem(chunk, mode));
pages++;
}
@ -4393,7 +4375,8 @@ void MinorMarkCompactCollector::MakeIterable(
if (free_space_mode == ZAP_FREE_SPACE) {
ZapCode(free_start, size);
}
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size));
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
ClearRecordedSlots::kNo);
}
Map map = object.synchronized_map();
int size = object.SizeFromMap(map);
@ -4409,7 +4392,8 @@ void MinorMarkCompactCollector::MakeIterable(
if (free_space_mode == ZAP_FREE_SPACE) {
ZapCode(free_start, size);
}
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size));
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
ClearRecordedSlots::kNo);
}
if (marking_mode == MarkingTreatmentMode::CLEAR) {
@ -4652,14 +4636,11 @@ class PageMarkingItem : public MarkingItem {
inline Heap* heap() { return chunk_->heap(); }
void MarkUntypedPointers(YoungGenerationMarkingTask* task) {
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew(chunk_);
RememberedSet<OLD_TO_NEW>::Iterate(
chunk_,
[this, task, &filter](MaybeObjectSlot slot) {
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
return CheckAndMarkObject(task, slot);
},
SlotSet::PREFREE_EMPTY_BUCKETS);
RememberedSet<OLD_TO_NEW>::Iterate(chunk_,
[this, task](MaybeObjectSlot slot) {
return CheckAndMarkObject(task, slot);
},
SlotSet::PREFREE_EMPTY_BUCKETS);
}
void MarkTypedPointers(YoungGenerationMarkingTask* task) {

View File

@ -122,7 +122,7 @@ class RememberedSet : public AllStatic {
SlotSet* slots = chunk->slot_set<type>();
TypedSlotSet* typed_slots = chunk->typed_slot_set<type>();
if (slots != nullptr || typed_slots != nullptr ||
chunk->invalidated_slots<type>() != nullptr) {
chunk->invalidated_slots() != nullptr) {
callback(chunk);
}
}
@ -256,7 +256,7 @@ class RememberedSet : public AllStatic {
while ((chunk = it.next()) != nullptr) {
chunk->ReleaseSlotSet<OLD_TO_OLD>();
chunk->ReleaseTypedSlotSet<OLD_TO_OLD>();
chunk->ReleaseInvalidatedSlots<OLD_TO_OLD>();
chunk->ReleaseInvalidatedSlots();
}
}

View File

@ -8,7 +8,6 @@
#include "src/heap/barrier.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/invalidated-slots-inl.h"
#include "src/heap/item-parallel-job.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/objects-visiting-inl.h"
@ -432,15 +431,12 @@ void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {
void Scavenger::ScavengePage(MemoryChunk* page) {
CodePageMemoryModificationScope memory_modification_scope(page);
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew(page);
RememberedSet<OLD_TO_NEW>::Iterate(
page,
[this, &filter](MaybeObjectSlot addr) {
if (!filter.IsValid(addr.address())) return REMOVE_SLOT;
return CheckAndScavengeObject(heap_, addr);
},
SlotSet::KEEP_EMPTY_BUCKETS);
page->ReleaseInvalidatedSlots<OLD_TO_NEW>();
RememberedSet<OLD_TO_NEW>::Iterate(page,
[this](MaybeObjectSlot addr) {
return CheckAndScavengeObject(heap_,
addr);
},
SlotSet::KEEP_EMPTY_BUCKETS);
RememberedSet<OLD_TO_NEW>::IterateTyped(
page, [=](SlotType type, Address addr) {
return UpdateTypedSlotHelper::UpdateTypedSlot(

View File

@ -703,8 +703,7 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
nullptr);
base::AsAtomicPointer::Release_Store(&chunk->typed_slot_set_[OLD_TO_OLD],
nullptr);
chunk->invalidated_slots_[OLD_TO_NEW] = nullptr;
chunk->invalidated_slots_[OLD_TO_OLD] = nullptr;
chunk->invalidated_slots_ = nullptr;
chunk->progress_bar_ = 0;
chunk->high_water_mark_ = static_cast<intptr_t>(area_start - base);
chunk->set_concurrent_sweeping_state(kSweepingDone);
@ -846,28 +845,6 @@ void Page::ReleaseFreeListCategories() {
}
}
template V8_EXPORT_PRIVATE void
Page::AdjustSizeOfInvalidatedObjects<OLD_TO_NEW>();
template <RememberedSetType type>
void Page::AdjustSizeOfInvalidatedObjects() {
if (!invalidated_slots<type>()) return;
for (auto& pair : *invalidated_slots<type>()) {
int old_size = pair.second;
int new_size = pair.first.Size();
DCHECK_LE(new_size, old_size);
if (new_size < old_size) {
Address free_start = pair.first.address() + new_size;
Address free_end = pair.first.address() + old_size;
RememberedSet<type>::RemoveRange(this, free_start, free_end,
SlotSet::KEEP_EMPTY_BUCKETS);
pair.second = new_size;
}
}
}
Page* Page::ConvertNewToOld(Page* old_page) {
DCHECK(old_page);
DCHECK(old_page->InNewSpace());
@ -1106,7 +1083,8 @@ size_t Page::ShrinkToHighWaterMark() {
}
heap()->CreateFillerObjectAt(
filler.address(),
static_cast<int>(area_end() - filler.address() - unused));
static_cast<int>(area_end() - filler.address() - unused),
ClearRecordedSlots::kNo);
heap()->memory_allocator()->PartialFreeMemory(
this, address() + size() - unused, unused, area_end() - unused);
if (filler.address() != area_end()) {
@ -1401,8 +1379,7 @@ void MemoryChunk::ReleaseAllocatedMemoryNeededForWritableChunk() {
ReleaseSlotSet<OLD_TO_OLD>();
ReleaseTypedSlotSet<OLD_TO_NEW>();
ReleaseTypedSlotSet<OLD_TO_OLD>();
ReleaseInvalidatedSlots<OLD_TO_NEW>();
ReleaseInvalidatedSlots<OLD_TO_OLD>();
ReleaseInvalidatedSlots();
if (local_tracker_ != nullptr) ReleaseLocalTracker();
if (young_generation_bitmap_ != nullptr) ReleaseYoungGenerationBitmap();
@ -1484,89 +1461,53 @@ void MemoryChunk::ReleaseTypedSlotSet() {
}
}
template InvalidatedSlots* MemoryChunk::AllocateInvalidatedSlots<OLD_TO_NEW>();
template InvalidatedSlots* MemoryChunk::AllocateInvalidatedSlots<OLD_TO_OLD>();
template <RememberedSetType type>
InvalidatedSlots* MemoryChunk::AllocateInvalidatedSlots() {
DCHECK_NULL(invalidated_slots_[type]);
invalidated_slots_[type] = new InvalidatedSlots();
return invalidated_slots_[type];
DCHECK_NULL(invalidated_slots_);
invalidated_slots_ = new InvalidatedSlots();
return invalidated_slots_;
}
template void MemoryChunk::ReleaseInvalidatedSlots<OLD_TO_NEW>();
template void MemoryChunk::ReleaseInvalidatedSlots<OLD_TO_OLD>();
template <RememberedSetType type>
void MemoryChunk::ReleaseInvalidatedSlots() {
if (invalidated_slots_[type]) {
delete invalidated_slots_[type];
invalidated_slots_[type] = nullptr;
if (invalidated_slots_) {
delete invalidated_slots_;
invalidated_slots_ = nullptr;
}
}
template V8_EXPORT_PRIVATE void
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(HeapObject object,
int size);
template V8_EXPORT_PRIVATE void
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(HeapObject object,
int size);
template <RememberedSetType type>
void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object,
int size) {
if (!ShouldSkipEvacuationSlotRecording()) {
if (invalidated_slots<type>() == nullptr) {
AllocateInvalidatedSlots<type>();
}
InvalidatedSlots::iterator it = invalidated_slots<type>()->find(object);
if (it != invalidated_slots<type>()->end()) {
int old_size = it->second;
it->second = std::max(old_size, size);
} else {
invalidated_slots<type>()->insert(it, std::make_pair(object, size));
if (invalidated_slots() == nullptr) {
AllocateInvalidatedSlots();
}
int old_size = (*invalidated_slots())[object];
(*invalidated_slots())[object] = std::max(old_size, size);
}
}
template bool MemoryChunk::RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(
HeapObject object);
template bool MemoryChunk::RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(
HeapObject object);
template <RememberedSetType type>
bool MemoryChunk::RegisteredObjectWithInvalidatedSlots(HeapObject object) {
if (ShouldSkipEvacuationSlotRecording()) {
// Invalidated slots do not matter if we are not recording slots.
return true;
}
if (invalidated_slots<type>() == nullptr) {
if (invalidated_slots() == nullptr) {
return false;
}
return invalidated_slots<type>()->find(object) !=
invalidated_slots<type>()->end();
return invalidated_slots()->find(object) != invalidated_slots()->end();
}
template void MemoryChunk::MoveObjectWithInvalidatedSlots<OLD_TO_NEW>(
HeapObject old_start, HeapObject new_start);
template void MemoryChunk::MoveObjectWithInvalidatedSlots<OLD_TO_OLD>(
HeapObject old_start, HeapObject new_start);
template <RememberedSetType type>
void MemoryChunk::MoveObjectWithInvalidatedSlots(HeapObject old_start,
HeapObject new_start) {
DCHECK_LT(old_start, new_start);
DCHECK_EQ(MemoryChunk::FromHeapObject(old_start),
MemoryChunk::FromHeapObject(new_start));
if (!ShouldSkipEvacuationSlotRecording() && invalidated_slots<type>()) {
auto it = invalidated_slots<type>()->find(old_start);
if (it != invalidated_slots<type>()->end()) {
if (!ShouldSkipEvacuationSlotRecording() && invalidated_slots()) {
auto it = invalidated_slots()->find(old_start);
if (it != invalidated_slots()->end()) {
int old_size = it->second;
int delta = static_cast<int>(new_start.address() - old_start.address());
invalidated_slots<type>()->erase(it);
(*invalidated_slots<type>())[new_start] = old_size - delta;
invalidated_slots()->erase(it);
(*invalidated_slots())[new_start] = old_size - delta;
}
}
}
@ -1618,7 +1559,7 @@ void Space::AllocationStep(int bytes_since_last, Address soon_object,
DCHECK(!heap()->allocation_step_in_progress());
heap()->set_allocation_step_in_progress(true);
heap()->CreateFillerObjectAt(soon_object, size);
heap()->CreateFillerObjectAt(soon_object, size, ClearRecordedSlots::kNo);
for (AllocationObserver* observer : allocation_observers_) {
observer->AllocationStep(bytes_since_last, soon_object, size);
}
@ -1670,13 +1611,6 @@ void PagedSpace::RefillFreeList() {
p->ForAllFreeListCategories(
[](FreeListCategory* category) { category->Reset(); });
}
// Between "sweeping pending" and the actual sweep, objects might shrink
// or get right-trimmed. The sweeper would detect these gaps as free
// memory and reuse it for allocations. Update size of invalidated objects
// such that they do not contain these gaps anymore.
p->AdjustSizeOfInvalidatedObjects<OLD_TO_NEW>();
// Only during compaction pages can actually change ownership. This is
// safe because there exists no other competing action on the page links
// during compaction.
@ -2357,7 +2291,8 @@ bool SemiSpace::EnsureCurrentCapacity() {
current_page->SetFlags(first_page()->GetFlags(),
static_cast<uintptr_t>(Page::kCopyAllFlags));
heap()->CreateFillerObjectAt(current_page->area_start(),
static_cast<int>(current_page->area_size()));
static_cast<int>(current_page->area_size()),
ClearRecordedSlots::kNo);
}
}
return true;
@ -2367,7 +2302,8 @@ LinearAllocationArea LocalAllocationBuffer::Close() {
if (IsValid()) {
heap_->CreateFillerObjectAt(
allocation_info_.top(),
static_cast<int>(allocation_info_.limit() - allocation_info_.top()));
static_cast<int>(allocation_info_.limit() - allocation_info_.top()),
ClearRecordedSlots::kNo);
const LinearAllocationArea old_info = allocation_info_;
allocation_info_ = LinearAllocationArea(kNullAddress, kNullAddress);
return old_info;
@ -2382,7 +2318,8 @@ LocalAllocationBuffer::LocalAllocationBuffer(
if (IsValid()) {
heap_->CreateFillerObjectAt(
allocation_info_.top(),
static_cast<int>(allocation_info_.limit() - allocation_info_.top()));
static_cast<int>(allocation_info_.limit() - allocation_info_.top()),
ClearRecordedSlots::kNo);
}
}
@ -2462,7 +2399,7 @@ bool NewSpace::AddFreshPage() {
// Clear remainder of current page.
Address limit = Page::FromAllocationAreaAddress(top)->area_end();
int remaining_in_page = static_cast<int>(limit - top);
heap()->CreateFillerObjectAt(top, remaining_in_page);
heap()->CreateFillerObjectAt(top, remaining_in_page, ClearRecordedSlots::kNo);
UpdateLinearAllocationArea();
return true;
@ -3554,17 +3491,9 @@ bool PagedSpace::RawSlowRefillLinearAllocationArea(int size_in_bytes) {
static_cast<size_t>(size_in_bytes)))
return true;
// Cleanup invalidated old-to-new refs for compaction space in the
// final atomic pause.
Sweeper::FreeSpaceMayContainInvalidatedSlots
invalidated_slots_in_free_space =
is_local() ? Sweeper::FreeSpaceMayContainInvalidatedSlots::kYes
: Sweeper::FreeSpaceMayContainInvalidatedSlots::kNo;
// If sweeping is still in progress try to sweep pages.
int max_freed = collector->sweeper()->ParallelSweepSpace(
identity(), size_in_bytes, kMaxPagesToSweep,
invalidated_slots_in_free_space);
identity(), size_in_bytes, kMaxPagesToSweep);
RefillFreeList();
if (max_freed >= size_in_bytes) {
if (RefillLinearAllocationAreaFromFreeList(
@ -3694,7 +3623,7 @@ void ReadOnlySpace::RepairFreeListsAfterDeserialization() {
start += filler.Size();
}
CHECK_EQ(size, static_cast<int>(end - start));
heap()->CreateFillerObjectAt(start, size);
heap()->CreateFillerObjectAt(start, size, ClearRecordedSlots::kNo);
}
}
@ -3840,7 +3769,8 @@ LargePage* LargeObjectSpace::AllocateLargePage(int object_size,
HeapObject object = page->GetObject();
heap()->CreateFillerObjectAt(object.address(), object_size);
heap()->CreateFillerObjectAt(object.address(), object_size,
ClearRecordedSlots::kNo);
return page;
}

View File

@ -632,8 +632,7 @@ class MemoryChunk : public BasicMemoryChunk {
+ kSystemPointerSize * NUMBER_OF_REMEMBERED_SET_TYPES // SlotSet* array
+ kSystemPointerSize *
NUMBER_OF_REMEMBERED_SET_TYPES // TypedSlotSet* array
+ kSystemPointerSize *
NUMBER_OF_REMEMBERED_SET_TYPES // InvalidatedSlots* array
+ kSystemPointerSize // InvalidatedSlots* invalidated_slots_
+ kSystemPointerSize // std::atomic<intptr_t> high_water_mark_
+ kSystemPointerSize // base::Mutex* mutex_
+ kSystemPointerSize // std::atomic<ConcurrentSweepingState>
@ -719,7 +718,7 @@ class MemoryChunk : public BasicMemoryChunk {
template <RememberedSetType type>
bool ContainsSlots() {
return slot_set<type>() != nullptr || typed_slot_set<type>() != nullptr ||
invalidated_slots<type>() != nullptr;
invalidated_slots() != nullptr;
}
template <RememberedSetType type, AccessMode access_mode = AccessMode::ATOMIC>
@ -747,23 +746,15 @@ class MemoryChunk : public BasicMemoryChunk {
template <RememberedSetType type>
void ReleaseTypedSlotSet();
template <RememberedSetType type>
InvalidatedSlots* AllocateInvalidatedSlots();
template <RememberedSetType type>
void ReleaseInvalidatedSlots();
template <RememberedSetType type>
V8_EXPORT_PRIVATE void RegisterObjectWithInvalidatedSlots(HeapObject object,
int size);
// Updates invalidated_slots after array left-trimming.
template <RememberedSetType type>
void MoveObjectWithInvalidatedSlots(HeapObject old_start,
HeapObject new_start);
template <RememberedSetType type>
bool RegisteredObjectWithInvalidatedSlots(HeapObject object);
template <RememberedSetType type>
InvalidatedSlots* invalidated_slots() {
return invalidated_slots_[type];
}
InvalidatedSlots* invalidated_slots() { return invalidated_slots_; }
void ReleaseLocalTracker();
@ -939,7 +930,7 @@ class MemoryChunk : public BasicMemoryChunk {
// is ceil(size() / kPageSize).
SlotSet* slot_set_[NUMBER_OF_REMEMBERED_SET_TYPES];
TypedSlotSet* typed_slot_set_[NUMBER_OF_REMEMBERED_SET_TYPES];
InvalidatedSlots* invalidated_slots_[NUMBER_OF_REMEMBERED_SET_TYPES];
InvalidatedSlots* invalidated_slots_;
// Assuming the initial allocation on a page is sequential,
// count highest number of bytes ever allocated on the page.
@ -1051,9 +1042,6 @@ class Page : public MemoryChunk {
inline void MarkEvacuationCandidate();
inline void ClearEvacuationCandidate();
template <RememberedSetType type>
void AdjustSizeOfInvalidatedObjects();
Page* next_page() { return static_cast<Page*>(list_node_.next()); }
Page* prev_page() { return static_cast<Page*>(list_node_.prev()); }
@ -2254,7 +2242,8 @@ class V8_EXPORT_PRIVATE PagedSpace
size_t Free(Address start, size_t size_in_bytes, SpaceAccountingMode mode) {
if (size_in_bytes == 0) return 0;
heap()->CreateFillerObjectAt(start, static_cast<int>(size_in_bytes));
heap()->CreateFillerObjectAt(start, static_cast<int>(size_in_bytes),
ClearRecordedSlots::kNo);
if (mode == SpaceAccountingMode::kSpaceAccounted) {
return AccountedFree(start, size_in_bytes);
} else {

View File

@ -12,6 +12,16 @@
namespace v8 {
namespace internal {
void StoreBuffer::InsertDeletionIntoStoreBuffer(Address start, Address end) {
if (top_ + sizeof(Address) * 2 > limit_[current_]) {
StoreBufferOverflow(heap_->isolate());
}
*top_ = MarkDeletionAddress(start);
top_++;
*top_ = end;
top_++;
}
void StoreBuffer::InsertIntoStoreBuffer(Address slot) {
if (top_ + sizeof(Address) > limit_[current_]) {
StoreBufferOverflow(heap_->isolate());

View File

@ -28,6 +28,7 @@ StoreBuffer::StoreBuffer(Heap* heap)
}
task_running_ = false;
insertion_callback = &InsertDuringRuntime;
deletion_callback = &DeleteDuringRuntime;
}
void StoreBuffer::SetUp() {
@ -90,11 +91,22 @@ void StoreBuffer::TearDown() {
}
}
void StoreBuffer::DeleteDuringRuntime(StoreBuffer* store_buffer, Address start,
Address end) {
DCHECK(store_buffer->mode() == StoreBuffer::NOT_IN_GC);
store_buffer->InsertDeletionIntoStoreBuffer(start, end);
}
void StoreBuffer::InsertDuringRuntime(StoreBuffer* store_buffer, Address slot) {
DCHECK(store_buffer->mode() == StoreBuffer::NOT_IN_GC);
store_buffer->InsertIntoStoreBuffer(slot);
}
void StoreBuffer::DeleteDuringGarbageCollection(StoreBuffer* store_buffer,
Address start, Address end) {
UNREACHABLE();
}
void StoreBuffer::InsertDuringGarbageCollection(StoreBuffer* store_buffer,
Address slot) {
DCHECK(store_buffer->mode() != StoreBuffer::NOT_IN_GC);
@ -105,8 +117,10 @@ void StoreBuffer::SetMode(StoreBufferMode mode) {
mode_ = mode;
if (mode == NOT_IN_GC) {
insertion_callback = &InsertDuringRuntime;
deletion_callback = &DeleteDuringRuntime;
} else {
insertion_callback = &InsertDuringGarbageCollection;
deletion_callback = &DeleteDuringGarbageCollection;
}
}
@ -146,9 +160,24 @@ void StoreBuffer::MoveEntriesToRememberedSet(int index) {
MemoryChunk::BaseAddress(addr) != chunk->address()) {
chunk = MemoryChunk::FromAnyPointerAddress(addr);
}
if (addr != last_inserted_addr) {
RememberedSet<OLD_TO_NEW>::Insert(chunk, addr);
last_inserted_addr = addr;
if (IsDeletionAddress(addr)) {
last_inserted_addr = kNullAddress;
current++;
Address end = *current;
DCHECK(!IsDeletionAddress(end));
addr = UnmarkDeletionAddress(addr);
if (end) {
RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, addr, end,
SlotSet::PREFREE_EMPTY_BUCKETS);
} else {
RememberedSet<OLD_TO_NEW>::Remove(chunk, addr);
}
} else {
DCHECK(!IsDeletionAddress(addr));
if (addr != last_inserted_addr) {
RememberedSet<OLD_TO_NEW>::Insert(chunk, addr);
last_inserted_addr = addr;
}
}
}
lazy_top_[index] = nullptr;

View File

@ -33,11 +33,17 @@ class StoreBuffer {
Max(static_cast<int>(kMinExpectedOSPageSize / kStoreBuffers),
1 << (11 + kSystemPointerSizeLog2));
static const int kStoreBufferMask = kStoreBufferSize - 1;
static const intptr_t kDeletionTag = 1;
V8_EXPORT_PRIVATE static int StoreBufferOverflow(Isolate* isolate);
static void DeleteDuringGarbageCollection(StoreBuffer* store_buffer,
Address start, Address end);
static void InsertDuringGarbageCollection(StoreBuffer* store_buffer,
Address slot);
static void DeleteDuringRuntime(StoreBuffer* store_buffer, Address start,
Address end);
static void InsertDuringRuntime(StoreBuffer* store_buffer, Address slot);
explicit StoreBuffer(Heap* heap);
@ -55,6 +61,19 @@ class StoreBuffer {
// the remembered set.
void MoveAllEntriesToRememberedSet();
inline bool IsDeletionAddress(Address address) const {
return address & kDeletionTag;
}
inline Address MarkDeletionAddress(Address address) {
return address | kDeletionTag;
}
inline Address UnmarkDeletionAddress(Address address) {
return address & ~kDeletionTag;
}
inline void InsertDeletionIntoStoreBuffer(Address start, Address end);
inline void InsertIntoStoreBuffer(Address slot);
void InsertEntry(Address slot) {
@ -64,6 +83,16 @@ class StoreBuffer {
insertion_callback(this, slot);
}
// If we only want to delete a single slot, end should be set to null which
// will be written into the second field. When processing the store buffer
// the more efficient Remove method will be called in this case.
void DeleteEntry(Address start, Address end = kNullAddress) {
// Deletions coming from the GC are directly deleted from the remembered
// set. Deletions coming from the runtime are added to the store buffer
// to allow concurrent processing.
deletion_callback(this, start, end);
}
void SetMode(StoreBufferMode mode);
// Used by the concurrent processing thread to transfer entries from the
@ -145,6 +174,7 @@ class StoreBuffer {
// Callbacks are more efficient than reading out the gc state for every
// store buffer operation.
void (*insertion_callback)(StoreBuffer*, Address);
void (*deletion_callback)(StoreBuffer*, Address, Address);
};
} // namespace internal
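
The deletion entries restored in this header are distinguished in-band: slot addresses are pointer-aligned, so bit 0 is always clear and can carry kDeletionTag. A deletion then occupies two consecutive Address cells in the buffer (the tagged start, then the end, or kNullAddress for a single slot). A standalone sketch of just the tagging arithmetic (illustrative names, assuming at least 2-byte alignment):

#include <cassert>
#include <cstdint>

using Address = uintptr_t;
constexpr Address kDeletionTag = 1;

constexpr Address MarkDeletion(Address addr) { return addr | kDeletionTag; }
constexpr Address UnmarkDeletion(Address addr) { return addr & ~kDeletionTag; }
constexpr bool IsDeletion(Address addr) { return (addr & kDeletionTag) != 0; }

int main() {
  Address slot = 0x5008;  // aligned address: bit 0 is guaranteed zero
  assert(!IsDeletion(slot));

  Address marked = MarkDeletion(slot);
  assert(IsDeletion(marked));
  assert(UnmarkDeletion(marked) == slot);  // round-trips losslessly
  return 0;
}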

View File

@ -8,7 +8,6 @@
#include "src/execution/vm-state-inl.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/invalidated-slots-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/remembered-set.h"
#include "src/objects/objects-inl.h"
@ -251,10 +250,8 @@ void Sweeper::EnsureCompleted() {
bool Sweeper::AreSweeperTasksRunning() { return num_sweeping_tasks_ != 0; }
int Sweeper::RawSweep(
Page* p, FreeListRebuildingMode free_list_mode,
FreeSpaceTreatmentMode free_space_mode,
FreeSpaceMayContainInvalidatedSlots invalidated_slots_in_free_space) {
int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
FreeSpaceTreatmentMode free_space_mode) {
Space* space = p->owner();
DCHECK_NOT_NULL(space);
DCHECK(free_list_mode == IGNORE_FREE_LIST || space->identity() == OLD_SPACE ||
@ -277,15 +274,6 @@ int Sweeper::RawSweep(
ArrayBufferTracker::FreeDead(p, marking_state_);
Address free_start = p->area_start();
InvalidatedSlotsCleanup old_to_new_cleanup =
InvalidatedSlotsCleanup::NoCleanup(p);
// Clean invalidated slots during the final atomic pause. After resuming
// execution this isn't necessary, invalid old-to-new refs were already
// removed by mark compact's update pointers phase.
if (invalidated_slots_in_free_space ==
FreeSpaceMayContainInvalidatedSlots::kYes)
old_to_new_cleanup = InvalidatedSlotsCleanup::OldToNew(p);
intptr_t live_bytes = 0;
intptr_t freed_bytes = 0;
@ -317,7 +305,7 @@ int Sweeper::RawSweep(
max_freed_bytes = Max(freed_bytes, max_freed_bytes);
} else {
p->heap()->CreateFillerObjectAt(
free_start, static_cast<int>(size),
free_start, static_cast<int>(size), ClearRecordedSlots::kNo,
ClearFreedMemoryMode::kClearFreedMemory);
}
if (should_reduce_memory_) p->DiscardUnusedMemory(free_start, size);
@ -330,8 +318,6 @@ int Sweeper::RawSweep(
static_cast<uint32_t>(free_start - p->address()),
static_cast<uint32_t>(free_end - p->address())));
}
old_to_new_cleanup.Free(free_start, free_end);
}
Map map = object.synchronized_map();
int size = object.SizeFromMap(map);
@ -351,6 +337,7 @@ int Sweeper::RawSweep(
max_freed_bytes = Max(freed_bytes, max_freed_bytes);
} else {
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
ClearRecordedSlots::kNo,
ClearFreedMemoryMode::kClearFreedMemory);
}
if (should_reduce_memory_) p->DiscardUnusedMemory(free_start, size);
@ -363,8 +350,6 @@ int Sweeper::RawSweep(
static_cast<uint32_t>(free_start - p->address()),
static_cast<uint32_t>(p->area_end() - p->address())));
}
old_to_new_cleanup.Free(free_start, p->area_end());
}
// Clear invalid typed slots after collection all free ranges.
@ -414,15 +399,13 @@ bool Sweeper::SweepSpaceIncrementallyFromTask(AllocationSpace identity) {
return sweeping_list_[GetSweepSpaceIndex(identity)].empty();
}
int Sweeper::ParallelSweepSpace(
AllocationSpace identity, int required_freed_bytes, int max_pages,
FreeSpaceMayContainInvalidatedSlots invalidated_slots_in_free_space) {
int Sweeper::ParallelSweepSpace(AllocationSpace identity,
int required_freed_bytes, int max_pages) {
int max_freed = 0;
int pages_freed = 0;
Page* page = nullptr;
while ((page = GetSweepingPageSafe(identity)) != nullptr) {
int freed =
ParallelSweepPage(page, identity, invalidated_slots_in_free_space);
int freed = ParallelSweepPage(page, identity);
if (page->IsFlagSet(Page::NEVER_ALLOCATE_ON_PAGE)) {
// Free list of a never-allocate page will be dropped later on.
continue;
@ -436,9 +419,7 @@ int Sweeper::ParallelSweepSpace(
return max_freed;
}
int Sweeper::ParallelSweepPage(
Page* page, AllocationSpace identity,
FreeSpaceMayContainInvalidatedSlots invalidated_slots_in_free_space) {
int Sweeper::ParallelSweepPage(Page* page, AllocationSpace identity) {
// Early bailout for pages that are swept outside of the regular sweeping
// path. This check here avoids taking the lock first, avoiding deadlocks.
if (page->SweepingDone()) return 0;
@ -458,8 +439,7 @@ int Sweeper::ParallelSweepPage(
page->set_concurrent_sweeping_state(Page::kSweepingInProgress);
const FreeSpaceTreatmentMode free_space_mode =
Heap::ShouldZapGarbage() ? ZAP_FREE_SPACE : IGNORE_FREE_SPACE;
max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode,
invalidated_slots_in_free_space);
max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode);
DCHECK(page->SweepingDone());
// After finishing sweeping of a page we clean up its remembered set.
@ -615,8 +595,7 @@ void Sweeper::MakeIterable(Page* page) {
DCHECK(IsValidIterabilitySpace(page->owner_identity()));
const FreeSpaceTreatmentMode free_space_mode =
Heap::ShouldZapGarbage() ? ZAP_FREE_SPACE : IGNORE_FREE_SPACE;
RawSweep(page, IGNORE_FREE_LIST, free_space_mode,
FreeSpaceMayContainInvalidatedSlots::kNo);
RawSweep(page, IGNORE_FREE_LIST, free_space_mode);
}
} // namespace internal

View File

@ -70,8 +70,12 @@ class Sweeper {
};
enum FreeListRebuildingMode { REBUILD_FREE_LIST, IGNORE_FREE_LIST };
enum ClearOldToNewSlotsMode {
DO_NOT_CLEAR,
CLEAR_REGULAR_SLOTS,
CLEAR_TYPED_SLOTS
};
enum AddPageMode { REGULAR, READD_TEMPORARY_REMOVED_PAGE };
enum class FreeSpaceMayContainInvalidatedSlots { kYes, kNo };
Sweeper(Heap* heap, MajorNonAtomicMarkingState* marking_state);
@ -79,21 +83,14 @@ class Sweeper {
void AddPage(AllocationSpace space, Page* page, AddPageMode mode);
int ParallelSweepSpace(
AllocationSpace identity, int required_freed_bytes, int max_pages = 0,
FreeSpaceMayContainInvalidatedSlots invalidated_slots_in_free_space =
FreeSpaceMayContainInvalidatedSlots::kNo);
int ParallelSweepPage(
Page* page, AllocationSpace identity,
FreeSpaceMayContainInvalidatedSlots invalidated_slots_in_free_space =
FreeSpaceMayContainInvalidatedSlots::kNo);
int ParallelSweepSpace(AllocationSpace identity, int required_freed_bytes,
int max_pages = 0);
int ParallelSweepPage(Page* page, AllocationSpace identity);
void ScheduleIncrementalSweepingTask();
int RawSweep(
Page* p, FreeListRebuildingMode free_list_mode,
FreeSpaceTreatmentMode free_space_mode,
FreeSpaceMayContainInvalidatedSlots invalidated_slots_in_free_space);
int RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
FreeSpaceTreatmentMode free_space_mode);
// After calling this function sweeping is considered to be in progress
// and the main thread can sweep lazily, but the background sweeper tasks

View File

@ -391,7 +391,7 @@ void MutableBigInt::Canonicalize(MutableBigInt result) {
// We do not create a filler for objects in large object space.
// TODO(hpayer): We should shrink the large object page if the size
// of the object changed significantly.
heap->CreateFillerObjectAt(new_end, size_delta);
heap->CreateFillerObjectAt(new_end, size_delta, ClearRecordedSlots::kNo);
}
result.synchronized_set_length(new_length);
@ -2222,7 +2222,8 @@ MaybeHandle<String> MutableBigInt::ToStringGeneric(Isolate* isolate,
int needed_size = SeqOneByteString::SizeFor(pos);
if (needed_size < string_size) {
Address new_end = result->address() + needed_size;
heap->CreateFillerObjectAt(new_end, string_size - needed_size);
heap->CreateFillerObjectAt(new_end, (string_size - needed_size),
ClearRecordedSlots::kNo);
}
}
// Reverse the string.

View File

@ -2784,10 +2784,6 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
heap->NotifyObjectLayoutChange(*object, old_instance_size, no_allocation);
MemoryChunk::FromHeapObject(*object)
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(*object,
old_instance_size);
// Copy (real) inobject properties. If necessary, stop at number_of_fields to
// avoid overwriting |one_pointer_filler_map|.
int limit = Min(inobject, number_of_fields);
@ -2801,9 +2797,14 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
// Ensure that all bits of the double value are preserved.
object->RawFastDoublePropertyAsBitsAtPut(
index, MutableHeapNumber::cast(value).value_as_bits());
if (i < old_number_of_fields && !old_map->IsUnboxedDoubleField(index)) {
// Transition from tagged to untagged slot.
heap->ClearRecordedSlot(*object, object->RawField(index.offset()));
} else {
#ifdef DEBUG
heap->VerifyClearedSlot(*object, object->RawField(index.offset()));
#endif
}
} else {
object->RawFastPropertyAtPut(index, value);
}
@ -2818,8 +2819,8 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
if (instance_size_delta > 0) {
Address address = object->address();
heap->CreateFillerObjectAt(address + new_instance_size,
instance_size_delta);
heap->CreateFillerObjectAt(address + new_instance_size, instance_size_delta,
ClearRecordedSlots::kYes);
}
// We are storing the new map using release store after creating a filler for
@ -2897,10 +2898,6 @@ void MigrateFastToSlow(Isolate* isolate, Handle<JSObject> object,
int old_instance_size = map->instance_size();
heap->NotifyObjectLayoutChange(*object, old_instance_size, no_allocation);
MemoryChunk::FromHeapObject(*object)
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(*object,
old_instance_size);
// Resize the object in the heap if necessary.
int new_instance_size = new_map->instance_size();
int instance_size_delta = old_instance_size - new_instance_size;
@ -2908,7 +2905,7 @@ void MigrateFastToSlow(Isolate* isolate, Handle<JSObject> object,
if (instance_size_delta > 0) {
heap->CreateFillerObjectAt(object->address() + new_instance_size,
instance_size_delta);
instance_size_delta, ClearRecordedSlots::kYes);
}
// We are storing the new map using release store after creating a filler for
@ -2921,6 +2918,11 @@ void MigrateFastToSlow(Isolate* isolate, Handle<JSObject> object,
// garbage.
int inobject_properties = new_map->GetInObjectProperties();
if (inobject_properties) {
Heap* heap = isolate->heap();
heap->ClearRecordedSlotRange(
object->address() + map->GetInObjectPropertyOffset(0),
object->address() + new_instance_size);
for (int i = 0; i < inobject_properties; i++) {
FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
object->RawFastPropertyAtPut(index, Smi::kZero);

View File

@ -627,7 +627,8 @@ void SharedFunctionInfo::ClearPreparseData() {
heap->CreateFillerObjectAt(
data.address() + UncompiledDataWithoutPreparseData::kSize,
UncompiledDataWithPreparseData::kSize -
UncompiledDataWithoutPreparseData::kSize);
UncompiledDataWithoutPreparseData::kSize,
ClearRecordedSlots::kNo);
// Ensure that the clear was successful.
DCHECK(HasUncompiledDataWithoutPreparseData());

View File

@ -123,7 +123,7 @@ void String::MakeThin(Isolate* isolate, String internalized) {
int size_delta = old_size - ThinString::kSize;
if (size_delta != 0) {
Heap* heap = isolate->heap();
heap->CreateFillerObjectAt(thin_end, size_delta);
heap->CreateFillerObjectAt(thin_end, size_delta, ClearRecordedSlots::kNo);
}
}
@ -155,10 +155,6 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
if (has_pointers) {
isolate->heap()->NotifyObjectLayoutChange(*this, size, no_allocation);
MemoryChunk::FromAddress(this->address())
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(
HeapObject::FromAddress(this->address()), size);
}
// Morph the string to an external string by replacing the map and
// reinitializing the fields. This won't work if the space the existing
@ -181,8 +177,12 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
// Byte size of the external String object.
int new_size = this->SizeFromMap(new_map);
isolate->heap()->CreateFillerObjectAt(this->address() + new_size,
size - new_size);
isolate->heap()->CreateFillerObjectAt(
this->address() + new_size, size - new_size, ClearRecordedSlots::kNo);
if (has_pointers) {
isolate->heap()->ClearRecordedSlotRange(this->address(),
this->address() + new_size);
}
// We are storing the new map using release store after creating a filler for
// the left-over space to avoid races with the sweeper thread.
@ -228,10 +228,6 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
if (has_pointers) {
isolate->heap()->NotifyObjectLayoutChange(*this, size, no_allocation);
MemoryChunk::FromAddress(this->address())
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(
HeapObject::FromAddress(this->address()), size);
}
// Morph the string to an external string by replacing the map and
// reinitializing the fields. This won't work if the space the existing
@ -253,8 +249,12 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
// Byte size of the external String object.
int new_size = this->SizeFromMap(new_map);
isolate->heap()->CreateFillerObjectAt(this->address() + new_size,
size - new_size);
isolate->heap()->CreateFillerObjectAt(
this->address() + new_size, size - new_size, ClearRecordedSlots::kNo);
if (has_pointers) {
isolate->heap()->ClearRecordedSlotRange(this->address(),
this->address() + new_size);
}
// We are storing the new map using release store after creating a filler for
// the left-over space to avoid races with the sweeper thread.
@ -1405,7 +1405,8 @@ Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
Heap* heap = Heap::FromWritableHeapObject(*string);
// Sizes are pointer size aligned, so that we can use filler objects
// that are a multiple of pointer size.
heap->CreateFillerObjectAt(start_of_string + new_size, delta);
heap->CreateFillerObjectAt(start_of_string + new_size, delta,
ClearRecordedSlots::kNo);
// We are storing the new length using release store after creating a filler
// for the left-over space to avoid races with the sweeper thread.
string->synchronized_set_length(new_length);

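The MakeThin/MakeExternal/Truncate hunks above all restore the same two-step publication pattern: write the filler over the leftover tail first, then publish the new map (or length) with a release store, so a concurrent sweeper that observes the new, smaller shape also observes the filler covering the tail. A sketch of that ordering constraint, using std::atomic in place of V8's synchronized accessors (illustrative, not the real object layout):

#include <atomic>
#include <cstdint>

struct ObjectSketch {
  std::atomic<std::uintptr_t> map_word;  // published with release/acquire
  // ...payload, including the soon-to-be-trimmed tail...
};

// Shrink in two ordered steps. A sweeper that loads map_word with
// std::memory_order_acquire either sees the old map (and thus the old
// size), or the new map plus the already-written filler, so the page
// stays iterable in both cases.
inline void ShrinkAndPublish(ObjectSketch* o, std::uintptr_t new_map_word,
                             void (*write_filler_over_tail)(ObjectSketch*)) {
  write_filler_over_tail(o);  // step 1: plain stores over the tail
  o->map_word.store(new_map_word, std::memory_order_release);  // step 2
}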
View File

@ -205,7 +205,7 @@ void AllocationTracker::AllocationEvent(Address addr, int size) {
// Mark the new block as FreeSpace to make sure the heap is iterable
// while we are capturing the stack trace.
heap->CreateFillerObjectAt(addr, size);
heap->CreateFillerObjectAt(addr, size, ClearRecordedSlots::kNo);
Isolate* isolate = Isolate::FromHeap(heap);
int length = 0;

View File

@ -81,7 +81,8 @@ void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
// Mark the new block as FreeSpace to make sure the heap is iterable while we
// are taking the sample.
heap_->CreateFillerObjectAt(soon_object, static_cast<int>(size));
heap_->CreateFillerObjectAt(soon_object, static_cast<int>(size),
ClearRecordedSlots::kNo);
Local<v8::Value> loc = v8::Utils::ToLocal(obj);

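Both profiler call sites above pass the restored ClearRecordedSlots::kNo argument for the same reason their comments give: the block only has to look like a heap object so iteration can step over it. A sketch of why that matters, assuming a simplified size-prefixed object walk (illustrative layout, not V8's):

#include <cstdint>

using Address = std::uintptr_t;

struct HeaderSketch {
  std::uint32_t size_in_bytes;  // a filler carries a valid size like any object
};

// Heap iteration advances object by object using each header's size, so
// every byte of the page must be covered by some well-formed object. An
// uninitialized allocation would derail this walk; formatting it as a
// filler keeps the page iterable while the profiler captures its sample.
inline void WalkPageSketch(Address start, Address end,
                           void (*visit)(HeaderSketch*)) {
  for (Address cur = start; cur < end;) {
    HeaderSketch* h = reinterpret_cast<HeaderSketch*>(cur);
    visit(h);
    cur += h->size_in_bytes;
  }
}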
View File

@ -134,17 +134,8 @@ bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver,
// for properties stored in the descriptor array.
if (details.location() == kField) {
DisallowHeapAllocation no_allocation;
int receiver_size = receiver_map->instance_size();
isolate->heap()->NotifyObjectLayoutChange(*receiver, receiver_size,
no_allocation);
// We need to invalidate the object because subsequent object modifications
// might put a raw double into the deleted property.
// Slot clearing is the reason why this entire function cannot currently
// be implemented in the DeleteProperty stub.
MemoryChunk::FromHeapObject(*receiver)
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(*receiver,
receiver_size);
isolate->heap()->NotifyObjectLayoutChange(
*receiver, receiver_map->instance_size(), no_allocation);
FieldIndex index =
FieldIndex::ForPropertyIndex(*receiver_map, details.field_index());
// Special case deleting the last out-of object property.
@ -155,6 +146,14 @@ bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver,
} else {
Object filler = ReadOnlyRoots(isolate).one_pointer_filler_map();
JSObject::cast(*receiver).RawFastPropertyAtPut(index, filler);
// We must clear any recorded slot for the deleted property, because
// subsequent object modifications might put a raw double there.
// Slot clearing is the reason why this entire function cannot currently
// be implemented in the DeleteProperty stub.
if (index.is_inobject() && !receiver_map->IsUnboxedDoubleField(index)) {
isolate->heap()->ClearRecordedSlot(*receiver,
receiver->RawField(index.offset()));
}
}
}
// If the {receiver_map} was marked stable before, then there could be

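The restored block above is the eager-clearing counterpart of the RememberedSetSketch shown earlier; the hazard it prevents is a stale slot surviving until the field holds raw double bits. A sketch of the timeline, reusing that illustrative type (the address is made up):

// t0: the field held a young-generation pointer, so a slot was recorded.
// t1: the property is deleted; the field may later be reused for the raw
//     bits of a double.
// t2: without clearing, a scavenge would read those bits through the stale
//     slot and treat them as a tagged pointer.
RememberedSetSketch rs;
Address field = 0x1000;                         // hypothetical slot address
rs.Insert(field);                               // t0
rs.ClearRange(field, field + sizeof(Address));  // t1: the restored behavior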
View File

@ -793,7 +793,7 @@ V8_WARN_UNUSED_RESULT static Object StringReplaceGlobalRegExpWithEmptyString(
// TODO(hpayer): We should shrink the large object page if the size
// of the object changed significantly.
if (!heap->IsLargeObject(*answer)) {
heap->CreateFillerObjectAt(end_of_string, delta);
heap->CreateFillerObjectAt(end_of_string, delta, ClearRecordedSlots::kNo);
}
return *answer;
}

View File

@ -19,10 +19,6 @@
V(CompactionSpaceDivideSinglePage) \
V(InvalidatedSlotsAfterTrimming) \
V(InvalidatedSlotsAllInvalidatedRanges) \
V(InvalidatedSlotsCleanupEachObject) \
V(InvalidatedSlotsCleanupFull) \
V(InvalidatedSlotsCleanupRightTrim) \
V(InvalidatedSlotsCleanupOverlapRight) \
V(InvalidatedSlotsEvacuationCandidate) \
V(InvalidatedSlotsNoInvalidatedRanges) \
V(InvalidatedSlotsResetObjectRegression) \

View File

@ -101,7 +101,8 @@ std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
// Not enough room to create another fixed array. Let's create a filler.
if (free_memory > (2 * kTaggedSize)) {
heap->CreateFillerObjectAt(
*heap->old_space()->allocation_top_address(), free_memory);
*heap->old_space()->allocation_top_address(), free_memory,
ClearRecordedSlots::kNo);
}
break;
}
@ -218,7 +219,8 @@ void ForceEvacuationCandidate(Page* page) {
if (top < limit && Page::FromAllocationAreaAddress(top) == page) {
// Create a filler object to keep the page iterable if it was iterable.
int remaining = static_cast<int>(limit - top);
space->heap()->CreateFillerObjectAt(top, remaining);
space->heap()->CreateFillerObjectAt(top, remaining,
ClearRecordedSlots::kNo);
space->FreeLinearAllocationArea();
}
}

View File

@ -54,12 +54,12 @@ Handle<Object> HeapTester::TestAllocateAfterFailures() {
heap->AllocateRaw(size, AllocationType::kYoung).ToObjectChecked();
// In order to pass heap verification on Isolate teardown, mark the
// allocated area as a filler.
heap->CreateFillerObjectAt(obj.address(), size);
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
// Old generation.
heap::SimulateFullSpace(heap->old_space());
obj = heap->AllocateRaw(size, AllocationType::kOld).ToObjectChecked();
heap->CreateFillerObjectAt(obj.address(), size);
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
// Large object space.
static const size_t kLargeObjectSpaceFillerLength =
@ -71,22 +71,23 @@ Handle<Object> HeapTester::TestAllocateAfterFailures() {
while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) {
obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, AllocationType::kOld)
.ToObjectChecked();
heap->CreateFillerObjectAt(obj.address(), size);
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
}
obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, AllocationType::kOld)
.ToObjectChecked();
heap->CreateFillerObjectAt(obj.address(), size);
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
// Map space.
heap::SimulateFullSpace(heap->map_space());
obj = heap->AllocateRaw(Map::kSize, AllocationType::kMap).ToObjectChecked();
heap->CreateFillerObjectAt(obj.address(), Map::kSize);
heap->CreateFillerObjectAt(obj.address(), Map::kSize,
ClearRecordedSlots::kNo);
// Code space.
heap::SimulateFullSpace(heap->code_space());
size = CcTest::i_isolate()->builtins()->builtin(Builtins::kIllegal).Size();
obj = heap->AllocateRaw(size, AllocationType::kCode).ToObjectChecked();
heap->CreateFillerObjectAt(obj.address(), size);
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
return CcTest::i_isolate()->factory()->true_value();
}

View File

@ -1637,7 +1637,7 @@ static HeapObject NewSpaceAllocateAligned(int size,
heap->new_space()->AllocateRawAligned(size, alignment);
HeapObject obj;
allocation.To(&obj);
heap->CreateFillerObjectAt(obj.address(), size);
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
return obj;
}
@ -1702,7 +1702,7 @@ static HeapObject OldSpaceAllocateAligned(int size,
heap->old_space()->AllocateRawAligned(size, alignment);
HeapObject obj;
allocation.To(&obj);
heap->CreateFillerObjectAt(obj.address(), size);
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
return obj;
}
@ -1731,7 +1731,8 @@ TEST(TestAlignedOverAllocation) {
// Allocate a dummy object to properly set up the linear allocation info.
AllocationResult dummy = heap->old_space()->AllocateRawUnaligned(kTaggedSize);
CHECK(!dummy.IsRetry());
heap->CreateFillerObjectAt(dummy.ToObjectChecked().address(), kTaggedSize);
heap->CreateFillerObjectAt(dummy.ToObjectChecked().address(), kTaggedSize,
ClearRecordedSlots::kNo);
// Double misalignment is 4 on 32-bit platforms or when pointer compression
// is enabled, 0 on 64-bit ones when pointer compression is disabled.
@ -3604,7 +3605,8 @@ TEST(Regress169928) {
CHECK(allocation.To(&obj));
Address addr_obj = obj.address();
CcTest::heap()->CreateFillerObjectAt(addr_obj,
AllocationMemento::kSize + kTaggedSize);
AllocationMemento::kSize + kTaggedSize,
ClearRecordedSlots::kNo);
// Give the array a name, making sure not to allocate strings.
v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);

View File

@ -44,7 +44,7 @@ Page* HeapTester::AllocateByteArraysOnPage(
CHECK_EQ(page, Page::FromHeapObject(byte_array));
}
}
CHECK_NULL(page->invalidated_slots<OLD_TO_OLD>());
CHECK_NULL(page->invalidated_slots());
return page;
}
@ -53,7 +53,7 @@ HEAP_TEST(InvalidatedSlotsNoInvalidatedRanges) {
Heap* heap = CcTest::heap();
std::vector<ByteArray> byte_arrays;
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
InvalidatedSlotsFilter filter(page);
for (ByteArray byte_array : byte_arrays) {
Address start = byte_array.address() + ByteArray::kHeaderSize;
Address end = byte_array.address() + byte_array.Size();
@ -70,10 +70,10 @@ HEAP_TEST(InvalidatedSlotsSomeInvalidatedRanges) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register every second byte array as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i += 2) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
byte_arrays[i].Size());
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
byte_arrays[i].Size());
}
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
InvalidatedSlotsFilter filter(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
ByteArray byte_array = byte_arrays[i];
Address start = byte_array.address() + ByteArray::kHeaderSize;
@ -95,10 +95,10 @@ HEAP_TEST(InvalidatedSlotsAllInvalidatedRanges) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
byte_arrays[i].Size());
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
byte_arrays[i].Size());
}
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
InvalidatedSlotsFilter filter(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
ByteArray byte_array = byte_arrays[i];
Address start = byte_array.address() + ByteArray::kHeaderSize;
@ -117,12 +117,12 @@ HEAP_TEST(InvalidatedSlotsAfterTrimming) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
byte_arrays[i].Size());
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
byte_arrays[i].Size());
}
// Trim byte arrays and check that the slots outside the byte arrays are
// considered invalid if the old space page was swept.
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
InvalidatedSlotsFilter filter(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
ByteArray byte_array = byte_arrays[i];
Address start = byte_array.address() + ByteArray::kHeaderSize;
@ -145,11 +145,11 @@ HEAP_TEST(InvalidatedSlotsEvacuationCandidate) {
// This should be a no-op because the page is marked as an evacuation
// candidate.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
byte_arrays[i].Size());
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
byte_arrays[i].Size());
}
// All slots must still be valid.
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
InvalidatedSlotsFilter filter(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
ByteArray byte_array = byte_arrays[i];
Address start = byte_array.address() + ByteArray::kHeaderSize;
@ -169,11 +169,11 @@ HEAP_TEST(InvalidatedSlotsResetObjectRegression) {
heap->RightTrimFixedArray(byte_arrays[0], byte_arrays[0].length() - 8);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
byte_arrays[i].Size());
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
byte_arrays[i].Size());
}
// All slots must still be invalid.
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
InvalidatedSlotsFilter filter(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
ByteArray byte_array = byte_arrays[i];
Address start = byte_array.address() + ByteArray::kHeaderSize;
@ -351,77 +351,6 @@ HEAP_TEST(InvalidatedSlotsFastToSlow) {
CcTest::CollectGarbage(i::OLD_SPACE);
}
HEAP_TEST(InvalidatedSlotsCleanupFull) {
ManualGCScope manual_gc_scope;
CcTest::InitializeVM();
Heap* heap = CcTest::heap();
std::vector<ByteArray> byte_arrays;
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i],
byte_arrays[i].Size());
}
// Mark the full page as free
InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);
cleanup.Free(page->area_start(), page->area_end());
// After cleanup there should be no invalidated objects left on the page
CHECK(page->invalidated_slots<OLD_TO_NEW>()->empty());
}
HEAP_TEST(InvalidatedSlotsCleanupEachObject) {
ManualGCScope manual_gc_scope;
CcTest::InitializeVM();
Heap* heap = CcTest::heap();
std::vector<ByteArray> byte_arrays;
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i],
byte_arrays[i].Size());
}
// Mark each object on the page as free
InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
Address free_start = byte_arrays[i].address();
Address free_end = free_start + byte_arrays[i].Size();
cleanup.Free(free_start, free_end);
}
// After cleanup there should be no invalidated objects left on the page
CHECK(page->invalidated_slots<OLD_TO_NEW>()->empty());
}
HEAP_TEST(InvalidatedSlotsCleanupRightTrim) {
ManualGCScope manual_gc_scope;
CcTest::InitializeVM();
Heap* heap = CcTest::heap();
std::vector<ByteArray> byte_arrays;
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
CHECK_GT(byte_arrays.size(), 1);
ByteArray& invalidated = byte_arrays[1];
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(invalidated,
invalidated.Size());
heap->RightTrimFixedArray(invalidated, invalidated.length() - 8);
// Free the memory at the end of the invalidated object
InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);
Address free_start = invalidated.address() + invalidated.Size();
cleanup.Free(free_start, page->area_end());
// After cleanup the invalidated object should be smaller
InvalidatedSlots* invalidated_slots = page->invalidated_slots<OLD_TO_NEW>();
CHECK_EQ((*invalidated_slots)[HeapObject::FromAddress(invalidated.address())],
invalidated.Size());
CHECK_EQ(invalidated_slots->size(), 1);
}
} // namespace heap
} // namespace internal
} // namespace v8

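The three deleted tests covered InvalidatedSlotsCleanup, the helper the original CL used to keep OLD_TO_NEW invalidated-object entries consistent with memory freed by the sweeper; with this revert the helper and the template parameter on invalidated_slots() go away. A sketch of the Free() contract those tests checked, building on the map-based InvalidatedObjectsSketch above (simplified, not the real class):

#include <cstdint>
#include <iterator>
#include <map>

using Address = std::uintptr_t;  // as in the earlier sketch

struct InvalidatedSlotsCleanupSketch {
  std::map<Address, size_t>* invalidated_;  // object start -> size in bytes

  // Called for each freed range. CleanupFull/CleanupEachObject expect
  // entries fully inside the range to vanish; CleanupRightTrim expects an
  // entry straddling free_start to shrink to its surviving prefix.
  void Free(Address free_start, Address free_end) {
    auto it = invalidated_->lower_bound(free_start);
    if (it != invalidated_->begin()) {
      auto prev = std::prev(it);
      if (prev->first + prev->second > free_start)
        prev->second = free_start - prev->first;  // right-trim the survivor
    }
    while (it != invalidated_->end() && it->first < free_end)
      it = invalidated_->erase(it);  // object swallowed by free memory
  }
};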
View File

@ -46,7 +46,8 @@ static bool AllocateFromLab(Heap* heap, LocalAllocationBuffer* lab,
AllocationResult result =
lab->AllocateRawAligned(static_cast<int>(size_in_bytes), alignment);
if (result.To(&obj)) {
heap->CreateFillerObjectAt(obj.address(), static_cast<int>(size_in_bytes));
heap->CreateFillerObjectAt(obj.address(), static_cast<int>(size_in_bytes),
ClearRecordedSlots::kNo);
return true;
}
return false;

View File

@ -363,7 +363,8 @@ TEST(Regress5829) {
Address old_end = array->address() + array->Size();
// Right trim the array without clearing the mark bits.
array->set_length(9);
heap->CreateFillerObjectAt(old_end - kTaggedSize, kTaggedSize);
heap->CreateFillerObjectAt(old_end - kTaggedSize, kTaggedSize,
ClearRecordedSlots::kNo);
heap->old_space()->FreeLinearAllocationArea();
Page* page = Page::FromAddress(array->address());
IncrementalMarking::MarkingState* marking_state = marking->marking_state();

View File

@ -391,7 +391,8 @@ static HeapObject AllocateUnaligned(NewSpace* space, int size) {
CHECK(!allocation.IsRetry());
HeapObject filler;
CHECK(allocation.To(&filler));
space->heap()->CreateFillerObjectAt(filler.address(), size);
space->heap()->CreateFillerObjectAt(filler.address(), size,
ClearRecordedSlots::kNo);
return filler;
}
@ -400,7 +401,8 @@ static HeapObject AllocateUnaligned(PagedSpace* space, int size) {
CHECK(!allocation.IsRetry());
HeapObject filler;
CHECK(allocation.To(&filler));
space->heap()->CreateFillerObjectAt(filler.address(), size);
space->heap()->CreateFillerObjectAt(filler.address(), size,
ClearRecordedSlots::kNo);
return filler;
}
@ -570,7 +572,8 @@ HEAP_TEST(Regress777177) {
heap::SimulateFullSpace(old_space);
AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
HeapObject obj = result.ToObjectChecked();
heap->CreateFillerObjectAt(obj.address(), filler_size);
heap->CreateFillerObjectAt(obj.address(), filler_size,
ClearRecordedSlots::kNo);
}
{
@ -587,7 +590,8 @@ HEAP_TEST(Regress777177) {
// This triggers assert in crbug.com/777177.
AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
HeapObject obj = result.ToObjectChecked();
heap->CreateFillerObjectAt(obj.address(), filler_size);
heap->CreateFillerObjectAt(obj.address(), filler_size,
ClearRecordedSlots::kNo);
}
old_space->RemoveAllocationObserver(&observer);
}
@ -618,7 +622,8 @@ HEAP_TEST(Regress791582) {
AllocationResult result =
new_space->AllocateRaw(until_page_end, kWordAligned);
HeapObject obj = result.ToObjectChecked();
heap->CreateFillerObjectAt(obj.address(), until_page_end);
heap->CreateFillerObjectAt(obj.address(), until_page_end,
ClearRecordedSlots::kNo);
// Simulate allocation folding moving the top pointer back.
*new_space->allocation_top_address() = obj.address();
}
@ -627,7 +632,7 @@ HEAP_TEST(Regress791582) {
// This triggers assert in crbug.com/791582
AllocationResult result = new_space->AllocateRaw(256, kWordAligned);
HeapObject obj = result.ToObjectChecked();
heap->CreateFillerObjectAt(obj.address(), 256);
heap->CreateFillerObjectAt(obj.address(), 256, ClearRecordedSlots::kNo);
}
new_space->RemoveAllocationObserver(&observer);
}

View File

@ -39,7 +39,8 @@ TEST_F(SpacesTest, CompactionSpaceMerge) {
HeapObject object =
compaction_space->AllocateRawUnaligned(kMaxRegularHeapObjectSize)
.ToObjectChecked();
heap->CreateFillerObjectAt(object.address(), kMaxRegularHeapObjectSize);
heap->CreateFillerObjectAt(object.address(), kMaxRegularHeapObjectSize,
ClearRecordedSlots::kNo);
}
int pages_in_old_space = old_space->CountTotalPages();
int pages_in_compaction_space = compaction_space->CountTotalPages();

View File

@ -191,238 +191,238 @@ INSTANCE_TYPES = {
# List of known V8 maps.
KNOWN_MAPS = {
("read_only_space", 0x00119): (74, "FreeSpaceMap"),
("read_only_space", 0x00169): (68, "MetaMap"),
("read_only_space", 0x001e9): (67, "NullMap"),
("read_only_space", 0x00251): (154, "DescriptorArrayMap"),
("read_only_space", 0x002b1): (149, "WeakFixedArrayMap"),
("read_only_space", 0x00301): (77, "OnePointerFillerMap"),
("read_only_space", 0x00351): (77, "TwoPointerFillerMap"),
("read_only_space", 0x003d1): (67, "UninitializedMap"),
("read_only_space", 0x00441): (8, "OneByteInternalizedStringMap"),
("read_only_space", 0x004e1): (67, "UndefinedMap"),
("read_only_space", 0x00541): (65, "HeapNumberMap"),
("read_only_space", 0x005c1): (67, "TheHoleMap"),
("read_only_space", 0x00669): (67, "BooleanMap"),
("read_only_space", 0x00741): (72, "ByteArrayMap"),
("read_only_space", 0x00791): (124, "FixedArrayMap"),
("read_only_space", 0x007e1): (124, "FixedCOWArrayMap"),
("read_only_space", 0x00831): (127, "HashTableMap"),
("read_only_space", 0x00881): (64, "SymbolMap"),
("read_only_space", 0x008d1): (40, "OneByteStringMap"),
("read_only_space", 0x00921): (137, "ScopeInfoMap"),
("read_only_space", 0x00971): (161, "SharedFunctionInfoMap"),
("read_only_space", 0x009c1): (69, "CodeMap"),
("read_only_space", 0x00a11): (144, "FunctionContextMap"),
("read_only_space", 0x00a61): (152, "CellMap"),
("read_only_space", 0x00ab1): (160, "GlobalPropertyCellMap"),
("read_only_space", 0x00b01): (71, "ForeignMap"),
("read_only_space", 0x00b51): (150, "TransitionArrayMap"),
("read_only_space", 0x00ba1): (156, "FeedbackVectorMap"),
("read_only_space", 0x00c41): (67, "ArgumentsMarkerMap"),
("read_only_space", 0x00ce1): (67, "ExceptionMap"),
("read_only_space", 0x00d81): (67, "TerminationExceptionMap"),
("read_only_space", 0x00e29): (67, "OptimizedOutMap"),
("read_only_space", 0x00ec9): (67, "StaleRegisterMap"),
("read_only_space", 0x00f39): (146, "NativeContextMap"),
("read_only_space", 0x00f89): (145, "ModuleContextMap"),
("read_only_space", 0x00fd9): (143, "EvalContextMap"),
("read_only_space", 0x01029): (147, "ScriptContextMap"),
("read_only_space", 0x01079): (139, "AwaitContextMap"),
("read_only_space", 0x010c9): (140, "BlockContextMap"),
("read_only_space", 0x01119): (141, "CatchContextMap"),
("read_only_space", 0x01169): (148, "WithContextMap"),
("read_only_space", 0x011b9): (142, "DebugEvaluateContextMap"),
("read_only_space", 0x01209): (138, "ScriptContextTableMap"),
("read_only_space", 0x01259): (126, "ClosureFeedbackCellArrayMap"),
("read_only_space", 0x012a9): (76, "FeedbackMetadataArrayMap"),
("read_only_space", 0x012f9): (124, "ArrayListMap"),
("read_only_space", 0x01349): (66, "BigIntMap"),
("read_only_space", 0x01399): (125, "ObjectBoilerplateDescriptionMap"),
("read_only_space", 0x013e9): (73, "BytecodeArrayMap"),
("read_only_space", 0x01439): (153, "CodeDataContainerMap"),
("read_only_space", 0x01489): (75, "FixedDoubleArrayMap"),
("read_only_space", 0x014d9): (132, "GlobalDictionaryMap"),
("read_only_space", 0x01529): (155, "ManyClosuresCellMap"),
("read_only_space", 0x01579): (124, "ModuleInfoMap"),
("read_only_space", 0x015c9): (70, "MutableHeapNumberMap"),
("read_only_space", 0x01619): (131, "NameDictionaryMap"),
("read_only_space", 0x01669): (155, "NoClosuresCellMap"),
("read_only_space", 0x016b9): (133, "NumberDictionaryMap"),
("read_only_space", 0x01709): (155, "OneClosureCellMap"),
("read_only_space", 0x01759): (128, "OrderedHashMapMap"),
("read_only_space", 0x017a9): (129, "OrderedHashSetMap"),
("read_only_space", 0x017f9): (130, "OrderedNameDictionaryMap"),
("read_only_space", 0x01849): (158, "PreparseDataMap"),
("read_only_space", 0x01899): (159, "PropertyArrayMap"),
("read_only_space", 0x018e9): (151, "SideEffectCallHandlerInfoMap"),
("read_only_space", 0x01939): (151, "SideEffectFreeCallHandlerInfoMap"),
("read_only_space", 0x01989): (151, "NextCallSideEffectFreeCallHandlerInfoMap"),
("read_only_space", 0x019d9): (134, "SimpleNumberDictionaryMap"),
("read_only_space", 0x01a29): (124, "SloppyArgumentsElementsMap"),
("read_only_space", 0x01a79): (162, "SmallOrderedHashMapMap"),
("read_only_space", 0x01ac9): (163, "SmallOrderedHashSetMap"),
("read_only_space", 0x01b19): (164, "SmallOrderedNameDictionaryMap"),
("read_only_space", 0x01b69): (120, "SourceTextModuleMap"),
("read_only_space", 0x01bb9): (135, "StringTableMap"),
("read_only_space", 0x01c09): (121, "SyntheticModuleMap"),
("read_only_space", 0x01c59): (166, "UncompiledDataWithoutPreparseDataMap"),
("read_only_space", 0x01ca9): (167, "UncompiledDataWithPreparseDataMap"),
("read_only_space", 0x01cf9): (168, "WeakArrayListMap"),
("read_only_space", 0x01d49): (136, "EphemeronHashTableMap"),
("read_only_space", 0x01d99): (123, "EmbedderDataArrayMap"),
("read_only_space", 0x01de9): (169, "WeakCellMap"),
("read_only_space", 0x01e39): (58, "NativeSourceStringMap"),
("read_only_space", 0x01e89): (32, "StringMap"),
("read_only_space", 0x01ed9): (41, "ConsOneByteStringMap"),
("read_only_space", 0x01f29): (33, "ConsStringMap"),
("read_only_space", 0x01f79): (45, "ThinOneByteStringMap"),
("read_only_space", 0x01fc9): (37, "ThinStringMap"),
("read_only_space", 0x02019): (35, "SlicedStringMap"),
("read_only_space", 0x02069): (43, "SlicedOneByteStringMap"),
("read_only_space", 0x020b9): (34, "ExternalStringMap"),
("read_only_space", 0x02109): (42, "ExternalOneByteStringMap"),
("read_only_space", 0x02159): (50, "UncachedExternalStringMap"),
("read_only_space", 0x021a9): (0, "InternalizedStringMap"),
("read_only_space", 0x021f9): (2, "ExternalInternalizedStringMap"),
("read_only_space", 0x02249): (10, "ExternalOneByteInternalizedStringMap"),
("read_only_space", 0x02299): (18, "UncachedExternalInternalizedStringMap"),
("read_only_space", 0x022e9): (26, "UncachedExternalOneByteInternalizedStringMap"),
("read_only_space", 0x02339): (58, "UncachedExternalOneByteStringMap"),
("read_only_space", 0x02389): (67, "SelfReferenceMarkerMap"),
("read_only_space", 0x023f1): (88, "EnumCacheMap"),
("read_only_space", 0x02491): (83, "ArrayBoilerplateDescriptionMap"),
("read_only_space", 0x02681): (91, "InterceptorInfoMap"),
("read_only_space", 0x04f01): (78, "AccessCheckInfoMap"),
("read_only_space", 0x04f51): (79, "AccessorInfoMap"),
("read_only_space", 0x04fa1): (80, "AccessorPairMap"),
("read_only_space", 0x04ff1): (81, "AliasedArgumentsEntryMap"),
("read_only_space", 0x05041): (82, "AllocationMementoMap"),
("read_only_space", 0x05091): (84, "AsmWasmDataMap"),
("read_only_space", 0x050e1): (85, "AsyncGeneratorRequestMap"),
("read_only_space", 0x05131): (86, "ClassPositionsMap"),
("read_only_space", 0x05181): (87, "DebugInfoMap"),
("read_only_space", 0x051d1): (89, "FunctionTemplateInfoMap"),
("read_only_space", 0x05221): (90, "FunctionTemplateRareDataMap"),
("read_only_space", 0x05271): (92, "InterpreterDataMap"),
("read_only_space", 0x052c1): (93, "ObjectTemplateInfoMap"),
("read_only_space", 0x05311): (94, "PromiseCapabilityMap"),
("read_only_space", 0x05361): (95, "PromiseReactionMap"),
("read_only_space", 0x053b1): (96, "PrototypeInfoMap"),
("read_only_space", 0x05401): (97, "ScriptMap"),
("read_only_space", 0x05451): (98, "SourcePositionTableWithFrameCacheMap"),
("read_only_space", 0x054a1): (99, "SourceTextModuleInfoEntryMap"),
("read_only_space", 0x054f1): (100, "StackFrameInfoMap"),
("read_only_space", 0x05541): (101, "StackTraceFrameMap"),
("read_only_space", 0x05591): (102, "TemplateObjectDescriptionMap"),
("read_only_space", 0x055e1): (103, "Tuple2Map"),
("read_only_space", 0x05631): (104, "Tuple3Map"),
("read_only_space", 0x05681): (105, "WasmCapiFunctionDataMap"),
("read_only_space", 0x056d1): (106, "WasmDebugInfoMap"),
("read_only_space", 0x05721): (107, "WasmExceptionTagMap"),
("read_only_space", 0x05771): (108, "WasmExportedFunctionDataMap"),
("read_only_space", 0x057c1): (109, "WasmIndirectFunctionTableMap"),
("read_only_space", 0x05811): (110, "WasmJSFunctionDataMap"),
("read_only_space", 0x05861): (111, "CallableTaskMap"),
("read_only_space", 0x058b1): (112, "CallbackTaskMap"),
("read_only_space", 0x05901): (113, "PromiseFulfillReactionJobTaskMap"),
("read_only_space", 0x05951): (114, "PromiseRejectReactionJobTaskMap"),
("read_only_space", 0x059a1): (115, "PromiseResolveThenableJobTaskMap"),
("read_only_space", 0x059f1): (116, "InternalClassMap"),
("read_only_space", 0x05a41): (117, "SmiPairMap"),
("read_only_space", 0x05a91): (118, "SmiBoxMap"),
("read_only_space", 0x05ae1): (119, "SortStateMap"),
("read_only_space", 0x05b31): (122, "AllocationSiteWithWeakNextMap"),
("read_only_space", 0x05b81): (122, "AllocationSiteWithoutWeakNextMap"),
("read_only_space", 0x05bd1): (157, "LoadHandler1Map"),
("read_only_space", 0x05c21): (157, "LoadHandler2Map"),
("read_only_space", 0x05c71): (157, "LoadHandler3Map"),
("read_only_space", 0x05cc1): (165, "StoreHandler0Map"),
("read_only_space", 0x05d11): (165, "StoreHandler1Map"),
("read_only_space", 0x05d61): (165, "StoreHandler2Map"),
("read_only_space", 0x05db1): (165, "StoreHandler3Map"),
("map_space", 0x00119): (1057, "ExternalMap"),
("map_space", 0x00169): (1073, "JSMessageObjectMap"),
("read_only_space", 0x00111): (74, "FreeSpaceMap"),
("read_only_space", 0x00161): (68, "MetaMap"),
("read_only_space", 0x001e1): (67, "NullMap"),
("read_only_space", 0x00249): (154, "DescriptorArrayMap"),
("read_only_space", 0x002a9): (149, "WeakFixedArrayMap"),
("read_only_space", 0x002f9): (77, "OnePointerFillerMap"),
("read_only_space", 0x00349): (77, "TwoPointerFillerMap"),
("read_only_space", 0x003c9): (67, "UninitializedMap"),
("read_only_space", 0x00439): (8, "OneByteInternalizedStringMap"),
("read_only_space", 0x004d9): (67, "UndefinedMap"),
("read_only_space", 0x00539): (65, "HeapNumberMap"),
("read_only_space", 0x005b9): (67, "TheHoleMap"),
("read_only_space", 0x00661): (67, "BooleanMap"),
("read_only_space", 0x00739): (72, "ByteArrayMap"),
("read_only_space", 0x00789): (124, "FixedArrayMap"),
("read_only_space", 0x007d9): (124, "FixedCOWArrayMap"),
("read_only_space", 0x00829): (127, "HashTableMap"),
("read_only_space", 0x00879): (64, "SymbolMap"),
("read_only_space", 0x008c9): (40, "OneByteStringMap"),
("read_only_space", 0x00919): (137, "ScopeInfoMap"),
("read_only_space", 0x00969): (161, "SharedFunctionInfoMap"),
("read_only_space", 0x009b9): (69, "CodeMap"),
("read_only_space", 0x00a09): (144, "FunctionContextMap"),
("read_only_space", 0x00a59): (152, "CellMap"),
("read_only_space", 0x00aa9): (160, "GlobalPropertyCellMap"),
("read_only_space", 0x00af9): (71, "ForeignMap"),
("read_only_space", 0x00b49): (150, "TransitionArrayMap"),
("read_only_space", 0x00b99): (156, "FeedbackVectorMap"),
("read_only_space", 0x00c39): (67, "ArgumentsMarkerMap"),
("read_only_space", 0x00cd9): (67, "ExceptionMap"),
("read_only_space", 0x00d79): (67, "TerminationExceptionMap"),
("read_only_space", 0x00e21): (67, "OptimizedOutMap"),
("read_only_space", 0x00ec1): (67, "StaleRegisterMap"),
("read_only_space", 0x00f31): (146, "NativeContextMap"),
("read_only_space", 0x00f81): (145, "ModuleContextMap"),
("read_only_space", 0x00fd1): (143, "EvalContextMap"),
("read_only_space", 0x01021): (147, "ScriptContextMap"),
("read_only_space", 0x01071): (139, "AwaitContextMap"),
("read_only_space", 0x010c1): (140, "BlockContextMap"),
("read_only_space", 0x01111): (141, "CatchContextMap"),
("read_only_space", 0x01161): (148, "WithContextMap"),
("read_only_space", 0x011b1): (142, "DebugEvaluateContextMap"),
("read_only_space", 0x01201): (138, "ScriptContextTableMap"),
("read_only_space", 0x01251): (126, "ClosureFeedbackCellArrayMap"),
("read_only_space", 0x012a1): (76, "FeedbackMetadataArrayMap"),
("read_only_space", 0x012f1): (124, "ArrayListMap"),
("read_only_space", 0x01341): (66, "BigIntMap"),
("read_only_space", 0x01391): (125, "ObjectBoilerplateDescriptionMap"),
("read_only_space", 0x013e1): (73, "BytecodeArrayMap"),
("read_only_space", 0x01431): (153, "CodeDataContainerMap"),
("read_only_space", 0x01481): (75, "FixedDoubleArrayMap"),
("read_only_space", 0x014d1): (132, "GlobalDictionaryMap"),
("read_only_space", 0x01521): (155, "ManyClosuresCellMap"),
("read_only_space", 0x01571): (124, "ModuleInfoMap"),
("read_only_space", 0x015c1): (70, "MutableHeapNumberMap"),
("read_only_space", 0x01611): (131, "NameDictionaryMap"),
("read_only_space", 0x01661): (155, "NoClosuresCellMap"),
("read_only_space", 0x016b1): (133, "NumberDictionaryMap"),
("read_only_space", 0x01701): (155, "OneClosureCellMap"),
("read_only_space", 0x01751): (128, "OrderedHashMapMap"),
("read_only_space", 0x017a1): (129, "OrderedHashSetMap"),
("read_only_space", 0x017f1): (130, "OrderedNameDictionaryMap"),
("read_only_space", 0x01841): (158, "PreparseDataMap"),
("read_only_space", 0x01891): (159, "PropertyArrayMap"),
("read_only_space", 0x018e1): (151, "SideEffectCallHandlerInfoMap"),
("read_only_space", 0x01931): (151, "SideEffectFreeCallHandlerInfoMap"),
("read_only_space", 0x01981): (151, "NextCallSideEffectFreeCallHandlerInfoMap"),
("read_only_space", 0x019d1): (134, "SimpleNumberDictionaryMap"),
("read_only_space", 0x01a21): (124, "SloppyArgumentsElementsMap"),
("read_only_space", 0x01a71): (162, "SmallOrderedHashMapMap"),
("read_only_space", 0x01ac1): (163, "SmallOrderedHashSetMap"),
("read_only_space", 0x01b11): (164, "SmallOrderedNameDictionaryMap"),
("read_only_space", 0x01b61): (120, "SourceTextModuleMap"),
("read_only_space", 0x01bb1): (135, "StringTableMap"),
("read_only_space", 0x01c01): (121, "SyntheticModuleMap"),
("read_only_space", 0x01c51): (166, "UncompiledDataWithoutPreparseDataMap"),
("read_only_space", 0x01ca1): (167, "UncompiledDataWithPreparseDataMap"),
("read_only_space", 0x01cf1): (168, "WeakArrayListMap"),
("read_only_space", 0x01d41): (136, "EphemeronHashTableMap"),
("read_only_space", 0x01d91): (123, "EmbedderDataArrayMap"),
("read_only_space", 0x01de1): (169, "WeakCellMap"),
("read_only_space", 0x01e31): (58, "NativeSourceStringMap"),
("read_only_space", 0x01e81): (32, "StringMap"),
("read_only_space", 0x01ed1): (41, "ConsOneByteStringMap"),
("read_only_space", 0x01f21): (33, "ConsStringMap"),
("read_only_space", 0x01f71): (45, "ThinOneByteStringMap"),
("read_only_space", 0x01fc1): (37, "ThinStringMap"),
("read_only_space", 0x02011): (35, "SlicedStringMap"),
("read_only_space", 0x02061): (43, "SlicedOneByteStringMap"),
("read_only_space", 0x020b1): (34, "ExternalStringMap"),
("read_only_space", 0x02101): (42, "ExternalOneByteStringMap"),
("read_only_space", 0x02151): (50, "UncachedExternalStringMap"),
("read_only_space", 0x021a1): (0, "InternalizedStringMap"),
("read_only_space", 0x021f1): (2, "ExternalInternalizedStringMap"),
("read_only_space", 0x02241): (10, "ExternalOneByteInternalizedStringMap"),
("read_only_space", 0x02291): (18, "UncachedExternalInternalizedStringMap"),
("read_only_space", 0x022e1): (26, "UncachedExternalOneByteInternalizedStringMap"),
("read_only_space", 0x02331): (58, "UncachedExternalOneByteStringMap"),
("read_only_space", 0x02381): (67, "SelfReferenceMarkerMap"),
("read_only_space", 0x023e9): (88, "EnumCacheMap"),
("read_only_space", 0x02489): (83, "ArrayBoilerplateDescriptionMap"),
("read_only_space", 0x02679): (91, "InterceptorInfoMap"),
("read_only_space", 0x04ef9): (78, "AccessCheckInfoMap"),
("read_only_space", 0x04f49): (79, "AccessorInfoMap"),
("read_only_space", 0x04f99): (80, "AccessorPairMap"),
("read_only_space", 0x04fe9): (81, "AliasedArgumentsEntryMap"),
("read_only_space", 0x05039): (82, "AllocationMementoMap"),
("read_only_space", 0x05089): (84, "AsmWasmDataMap"),
("read_only_space", 0x050d9): (85, "AsyncGeneratorRequestMap"),
("read_only_space", 0x05129): (86, "ClassPositionsMap"),
("read_only_space", 0x05179): (87, "DebugInfoMap"),
("read_only_space", 0x051c9): (89, "FunctionTemplateInfoMap"),
("read_only_space", 0x05219): (90, "FunctionTemplateRareDataMap"),
("read_only_space", 0x05269): (92, "InterpreterDataMap"),
("read_only_space", 0x052b9): (93, "ObjectTemplateInfoMap"),
("read_only_space", 0x05309): (94, "PromiseCapabilityMap"),
("read_only_space", 0x05359): (95, "PromiseReactionMap"),
("read_only_space", 0x053a9): (96, "PrototypeInfoMap"),
("read_only_space", 0x053f9): (97, "ScriptMap"),
("read_only_space", 0x05449): (98, "SourcePositionTableWithFrameCacheMap"),
("read_only_space", 0x05499): (99, "SourceTextModuleInfoEntryMap"),
("read_only_space", 0x054e9): (100, "StackFrameInfoMap"),
("read_only_space", 0x05539): (101, "StackTraceFrameMap"),
("read_only_space", 0x05589): (102, "TemplateObjectDescriptionMap"),
("read_only_space", 0x055d9): (103, "Tuple2Map"),
("read_only_space", 0x05629): (104, "Tuple3Map"),
("read_only_space", 0x05679): (105, "WasmCapiFunctionDataMap"),
("read_only_space", 0x056c9): (106, "WasmDebugInfoMap"),
("read_only_space", 0x05719): (107, "WasmExceptionTagMap"),
("read_only_space", 0x05769): (108, "WasmExportedFunctionDataMap"),
("read_only_space", 0x057b9): (109, "WasmIndirectFunctionTableMap"),
("read_only_space", 0x05809): (110, "WasmJSFunctionDataMap"),
("read_only_space", 0x05859): (111, "CallableTaskMap"),
("read_only_space", 0x058a9): (112, "CallbackTaskMap"),
("read_only_space", 0x058f9): (113, "PromiseFulfillReactionJobTaskMap"),
("read_only_space", 0x05949): (114, "PromiseRejectReactionJobTaskMap"),
("read_only_space", 0x05999): (115, "PromiseResolveThenableJobTaskMap"),
("read_only_space", 0x059e9): (116, "InternalClassMap"),
("read_only_space", 0x05a39): (117, "SmiPairMap"),
("read_only_space", 0x05a89): (118, "SmiBoxMap"),
("read_only_space", 0x05ad9): (119, "SortStateMap"),
("read_only_space", 0x05b29): (122, "AllocationSiteWithWeakNextMap"),
("read_only_space", 0x05b79): (122, "AllocationSiteWithoutWeakNextMap"),
("read_only_space", 0x05bc9): (157, "LoadHandler1Map"),
("read_only_space", 0x05c19): (157, "LoadHandler2Map"),
("read_only_space", 0x05c69): (157, "LoadHandler3Map"),
("read_only_space", 0x05cb9): (165, "StoreHandler0Map"),
("read_only_space", 0x05d09): (165, "StoreHandler1Map"),
("read_only_space", 0x05d59): (165, "StoreHandler2Map"),
("read_only_space", 0x05da9): (165, "StoreHandler3Map"),
("map_space", 0x00111): (1057, "ExternalMap"),
("map_space", 0x00161): (1073, "JSMessageObjectMap"),
}
# List of known V8 objects.
KNOWN_OBJECTS = {
("read_only_space", 0x001b9): "NullValue",
("read_only_space", 0x00239): "EmptyDescriptorArray",
("read_only_space", 0x002a1): "EmptyWeakFixedArray",
("read_only_space", 0x003a1): "UninitializedValue",
("read_only_space", 0x004b1): "UndefinedValue",
("read_only_space", 0x00531): "NanValue",
("read_only_space", 0x00591): "TheHoleValue",
("read_only_space", 0x00629): "HoleNanValue",
("read_only_space", 0x00639): "TrueValue",
("read_only_space", 0x006e9): "FalseValue",
("read_only_space", 0x00731): "empty_string",
("read_only_space", 0x00bf1): "EmptyScopeInfo",
("read_only_space", 0x00c01): "EmptyFixedArray",
("read_only_space", 0x00c11): "ArgumentsMarker",
("read_only_space", 0x00cb1): "Exception",
("read_only_space", 0x00d51): "TerminationException",
("read_only_space", 0x00df9): "OptimizedOut",
("read_only_space", 0x00e99): "StaleRegister",
("read_only_space", 0x023d9): "EmptyEnumCache",
("read_only_space", 0x02441): "EmptyPropertyArray",
("read_only_space", 0x02451): "EmptyByteArray",
("read_only_space", 0x02461): "EmptyObjectBoilerplateDescription",
("read_only_space", 0x02479): "EmptyArrayBoilerplateDescription",
("read_only_space", 0x024e1): "EmptyClosureFeedbackCellArray",
("read_only_space", 0x024f1): "EmptySloppyArgumentsElements",
("read_only_space", 0x02511): "EmptySlowElementDictionary",
("read_only_space", 0x02559): "EmptyOrderedHashMap",
("read_only_space", 0x02581): "EmptyOrderedHashSet",
("read_only_space", 0x025a9): "EmptyFeedbackMetadata",
("read_only_space", 0x025b9): "EmptyPropertyCell",
("read_only_space", 0x025e1): "EmptyPropertyDictionary",
("read_only_space", 0x02631): "NoOpInterceptorInfo",
("read_only_space", 0x026d1): "EmptyWeakArrayList",
("read_only_space", 0x026e9): "InfinityValue",
("read_only_space", 0x026f9): "MinusZeroValue",
("read_only_space", 0x02709): "MinusInfinityValue",
("read_only_space", 0x02719): "SelfReferenceMarker",
("read_only_space", 0x02771): "OffHeapTrampolineRelocationInfo",
("read_only_space", 0x02789): "TrampolineTrivialCodeDataContainer",
("read_only_space", 0x027a1): "TrampolinePromiseRejectionCodeDataContainer",
("read_only_space", 0x027b9): "GlobalThisBindingScopeInfo",
("read_only_space", 0x02821): "EmptyFunctionScopeInfo",
("read_only_space", 0x02871): "HashSeed",
("old_space", 0x00119): "ArgumentsIteratorAccessor",
("old_space", 0x00189): "ArrayLengthAccessor",
("old_space", 0x001f9): "BoundFunctionLengthAccessor",
("old_space", 0x00269): "BoundFunctionNameAccessor",
("old_space", 0x002d9): "ErrorStackAccessor",
("old_space", 0x00349): "FunctionArgumentsAccessor",
("old_space", 0x003b9): "FunctionCallerAccessor",
("old_space", 0x00429): "FunctionNameAccessor",
("old_space", 0x00499): "FunctionLengthAccessor",
("old_space", 0x00509): "FunctionPrototypeAccessor",
("old_space", 0x00579): "StringLengthAccessor",
("old_space", 0x005e9): "InvalidPrototypeValidityCell",
("old_space", 0x005f9): "EmptyScript",
("old_space", 0x00679): "ManyClosuresCell",
("old_space", 0x00691): "ArrayConstructorProtector",
("old_space", 0x006a1): "NoElementsProtector",
("old_space", 0x006c9): "IsConcatSpreadableProtector",
("old_space", 0x006d9): "ArraySpeciesProtector",
("old_space", 0x00701): "TypedArraySpeciesProtector",
("old_space", 0x00729): "PromiseSpeciesProtector",
("old_space", 0x00751): "StringLengthProtector",
("old_space", 0x00761): "ArrayIteratorProtector",
("old_space", 0x00789): "ArrayBufferDetachingProtector",
("old_space", 0x007b1): "PromiseHookProtector",
("old_space", 0x007d9): "PromiseResolveProtector",
("old_space", 0x007e9): "MapIteratorProtector",
("old_space", 0x00811): "PromiseThenProtector",
("old_space", 0x00839): "SetIteratorProtector",
("old_space", 0x00861): "StringIteratorProtector",
("old_space", 0x00889): "SingleCharacterStringCache",
("old_space", 0x01099): "StringSplitCache",
("old_space", 0x018a9): "RegExpMultipleCache",
("old_space", 0x020b9): "BuiltinsConstantsTable",
("read_only_space", 0x001b1): "NullValue",
("read_only_space", 0x00231): "EmptyDescriptorArray",
("read_only_space", 0x00299): "EmptyWeakFixedArray",
("read_only_space", 0x00399): "UninitializedValue",
("read_only_space", 0x004a9): "UndefinedValue",
("read_only_space", 0x00529): "NanValue",
("read_only_space", 0x00589): "TheHoleValue",
("read_only_space", 0x00621): "HoleNanValue",
("read_only_space", 0x00631): "TrueValue",
("read_only_space", 0x006e1): "FalseValue",
("read_only_space", 0x00729): "empty_string",
("read_only_space", 0x00be9): "EmptyScopeInfo",
("read_only_space", 0x00bf9): "EmptyFixedArray",
("read_only_space", 0x00c09): "ArgumentsMarker",
("read_only_space", 0x00ca9): "Exception",
("read_only_space", 0x00d49): "TerminationException",
("read_only_space", 0x00df1): "OptimizedOut",
("read_only_space", 0x00e91): "StaleRegister",
("read_only_space", 0x023d1): "EmptyEnumCache",
("read_only_space", 0x02439): "EmptyPropertyArray",
("read_only_space", 0x02449): "EmptyByteArray",
("read_only_space", 0x02459): "EmptyObjectBoilerplateDescription",
("read_only_space", 0x02471): "EmptyArrayBoilerplateDescription",
("read_only_space", 0x024d9): "EmptyClosureFeedbackCellArray",
("read_only_space", 0x024e9): "EmptySloppyArgumentsElements",
("read_only_space", 0x02509): "EmptySlowElementDictionary",
("read_only_space", 0x02551): "EmptyOrderedHashMap",
("read_only_space", 0x02579): "EmptyOrderedHashSet",
("read_only_space", 0x025a1): "EmptyFeedbackMetadata",
("read_only_space", 0x025b1): "EmptyPropertyCell",
("read_only_space", 0x025d9): "EmptyPropertyDictionary",
("read_only_space", 0x02629): "NoOpInterceptorInfo",
("read_only_space", 0x026c9): "EmptyWeakArrayList",
("read_only_space", 0x026e1): "InfinityValue",
("read_only_space", 0x026f1): "MinusZeroValue",
("read_only_space", 0x02701): "MinusInfinityValue",
("read_only_space", 0x02711): "SelfReferenceMarker",
("read_only_space", 0x02769): "OffHeapTrampolineRelocationInfo",
("read_only_space", 0x02781): "TrampolineTrivialCodeDataContainer",
("read_only_space", 0x02799): "TrampolinePromiseRejectionCodeDataContainer",
("read_only_space", 0x027b1): "GlobalThisBindingScopeInfo",
("read_only_space", 0x02819): "EmptyFunctionScopeInfo",
("read_only_space", 0x02869): "HashSeed",
("old_space", 0x00111): "ArgumentsIteratorAccessor",
("old_space", 0x00181): "ArrayLengthAccessor",
("old_space", 0x001f1): "BoundFunctionLengthAccessor",
("old_space", 0x00261): "BoundFunctionNameAccessor",
("old_space", 0x002d1): "ErrorStackAccessor",
("old_space", 0x00341): "FunctionArgumentsAccessor",
("old_space", 0x003b1): "FunctionCallerAccessor",
("old_space", 0x00421): "FunctionNameAccessor",
("old_space", 0x00491): "FunctionLengthAccessor",
("old_space", 0x00501): "FunctionPrototypeAccessor",
("old_space", 0x00571): "StringLengthAccessor",
("old_space", 0x005e1): "InvalidPrototypeValidityCell",
("old_space", 0x005f1): "EmptyScript",
("old_space", 0x00671): "ManyClosuresCell",
("old_space", 0x00689): "ArrayConstructorProtector",
("old_space", 0x00699): "NoElementsProtector",
("old_space", 0x006c1): "IsConcatSpreadableProtector",
("old_space", 0x006d1): "ArraySpeciesProtector",
("old_space", 0x006f9): "TypedArraySpeciesProtector",
("old_space", 0x00721): "PromiseSpeciesProtector",
("old_space", 0x00749): "StringLengthProtector",
("old_space", 0x00759): "ArrayIteratorProtector",
("old_space", 0x00781): "ArrayBufferDetachingProtector",
("old_space", 0x007a9): "PromiseHookProtector",
("old_space", 0x007d1): "PromiseResolveProtector",
("old_space", 0x007e1): "MapIteratorProtector",
("old_space", 0x00809): "PromiseThenProtector",
("old_space", 0x00831): "SetIteratorProtector",
("old_space", 0x00859): "StringIteratorProtector",
("old_space", 0x00881): "SingleCharacterStringCache",
("old_space", 0x01091): "StringSplitCache",
("old_space", 0x018a1): "RegExpMultipleCache",
("old_space", 0x020b1): "BuiltinsConstantsTable",
}
# List of known V8 Frame Markers.