Revert "Use list of invalidated objects for old-to-new refs"
This reverts commit dcac02ee81.
Reason for revert: TSAN discovered an issue with cleaning invalidated slots in sweeper threads while inserting new slots in the main thread.
Original change's description:
> Use list of invalidated objects for old-to-new refs
>
> Instead of inserting "deletion" entries into the store buffer, keep
> a list of invalidated objects to filter out invalid old-to-new slots.
>
> Bug: v8:9454
> Change-Id: Icd21d8cb2159190457f54d0f8b56742ecc820419
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1695474
> Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Reviewed-by: Peter Marshall <petermarshall@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#62744}
TBR=ulan@chromium.org,petermarshall@chromium.org,dinfuehr@chromium.org
Change-Id: I4278e9100c76657663e0a6a62f5d86bb3a343c0e
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: v8:9454
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1704109
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#62746}
This commit is contained in:
parent
40b9bb669a
commit
da5a277ade
@ -290,7 +290,7 @@ Handle<HeapObject> Factory::NewFillerObject(int size, bool double_align,
|
||||
Heap* heap = isolate()->heap();
|
||||
HeapObject result =
|
||||
heap->AllocateRawWithRetryOrFail(size, allocation, alignment);
|
||||
heap->CreateFillerObjectAt(result.address(), size);
|
||||
heap->CreateFillerObjectAt(result.address(), size, ClearRecordedSlots::kNo);
|
||||
return Handle<HeapObject>(result, isolate());
|
||||
}
|
||||
|
||||
|
@ -1107,15 +1107,6 @@ void Heap::GarbageCollectionEpilogue() {
|
||||
AllowHeapAllocation for_the_rest_of_the_epilogue;
|
||||
|
||||
#ifdef DEBUG
|
||||
// Old-to-new slot sets must be empty after each collection.
|
||||
for (SpaceIterator it(this); it.HasNext();) {
|
||||
Space* space = it.Next();
|
||||
|
||||
for (MemoryChunk* chunk = space->first_page(); chunk != space->last_page();
|
||||
chunk = chunk->list_node().next())
|
||||
DCHECK_NULL(chunk->invalidated_slots<OLD_TO_NEW>());
|
||||
}
|
||||
|
||||
if (FLAG_print_global_handles) isolate_->global_handles()->Print();
|
||||
if (FLAG_print_handles) PrintHandles();
|
||||
if (FLAG_gc_verbose) Print();
|
||||
@ -1511,7 +1502,7 @@ void Heap::EnsureFillerObjectAtTop() {
|
||||
Page* page = Page::FromAddress(to_top - kTaggedSize);
|
||||
if (page->Contains(to_top)) {
|
||||
int remaining_in_page = static_cast<int>(page->area_end() - to_top);
|
||||
CreateFillerObjectAt(to_top, remaining_in_page);
|
||||
CreateFillerObjectAt(to_top, remaining_in_page, ClearRecordedSlots::kNo);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1842,7 +1833,8 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
|
||||
// Mark with a free list node, in case we have a GC before
|
||||
// deserializing.
|
||||
Address free_space_address = free_space.address();
|
||||
CreateFillerObjectAt(free_space_address, Map::kSize);
|
||||
CreateFillerObjectAt(free_space_address, Map::kSize,
|
||||
ClearRecordedSlots::kNo);
|
||||
maps->push_back(free_space_address);
|
||||
} else {
|
||||
perform_gc = true;
|
||||
@ -1873,7 +1865,8 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
|
||||
// Mark with a free list node, in case we have a GC before
|
||||
// deserializing.
|
||||
Address free_space_address = free_space.address();
|
||||
CreateFillerObjectAt(free_space_address, size);
|
||||
CreateFillerObjectAt(free_space_address, size,
|
||||
ClearRecordedSlots::kNo);
|
||||
DCHECK(IsPreAllocatedSpace(static_cast<SnapshotSpace>(space)));
|
||||
chunk.start = free_space_address;
|
||||
chunk.end = free_space_address + size;
|
||||
@ -2778,7 +2771,7 @@ size_t Heap::GetCodeRangeReservedAreaSize() {
|
||||
}
|
||||
|
||||
HeapObject Heap::PrecedeWithFiller(HeapObject object, int filler_size) {
|
||||
CreateFillerObjectAt(object.address(), filler_size);
|
||||
CreateFillerObjectAt(object.address(), filler_size, ClearRecordedSlots::kNo);
|
||||
return HeapObject::FromAddress(object.address() + filler_size);
|
||||
}
|
||||
|
||||
@ -2793,7 +2786,8 @@ HeapObject Heap::AlignWithFiller(HeapObject object, int object_size,
|
||||
filler_size -= pre_filler;
|
||||
}
|
||||
if (filler_size) {
|
||||
CreateFillerObjectAt(object.address() + object_size, filler_size);
|
||||
CreateFillerObjectAt(object.address() + object_size, filler_size,
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
return object;
|
||||
}
|
||||
@ -2843,6 +2837,7 @@ void Heap::FlushNumberStringCache() {
|
||||
}
|
||||
|
||||
HeapObject Heap::CreateFillerObjectAt(Address addr, int size,
|
||||
ClearRecordedSlots clear_slots_mode,
|
||||
ClearFreedMemoryMode clear_memory_mode) {
|
||||
if (size == 0) return HeapObject();
|
||||
HeapObject filler = HeapObject::FromAddress(addr);
|
||||
@ -2869,6 +2864,9 @@ HeapObject Heap::CreateFillerObjectAt(Address addr, int size,
|
||||
(size / kTaggedSize) - 2);
|
||||
}
|
||||
}
|
||||
if (clear_slots_mode == ClearRecordedSlots::kYes) {
|
||||
ClearRecordedSlotRange(addr, addr + size);
|
||||
}
|
||||
|
||||
// At this point, we may be deserializing the heap from a snapshot, and
|
||||
// none of the maps have been created yet and are nullptr.
|
||||
@ -3005,7 +3003,8 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
|
||||
// Technically in new space this write might be omitted (except for
|
||||
// debug mode which iterates through the heap), but to play safer
|
||||
// we still do it.
|
||||
HeapObject filler = CreateFillerObjectAt(old_start, bytes_to_trim);
|
||||
HeapObject filler =
|
||||
CreateFillerObjectAt(old_start, bytes_to_trim, ClearRecordedSlots::kYes);
|
||||
|
||||
// Initialize header of the trimmed array. Since left trimming is only
|
||||
// performed on pages which are not concurrently swept creating a filler
|
||||
@ -3017,9 +3016,10 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
|
||||
FixedArrayBase new_object =
|
||||
FixedArrayBase::cast(HeapObject::FromAddress(new_start));
|
||||
|
||||
// Move corresponding invalidated object to the right
|
||||
MemoryChunk::FromHeapObject(new_object)
|
||||
->MoveObjectWithInvalidatedSlots<OLD_TO_NEW>(filler, new_object);
|
||||
// Remove recorded slots for the new map and length offset.
|
||||
ClearRecordedSlot(new_object, new_object.RawField(0));
|
||||
ClearRecordedSlot(new_object,
|
||||
new_object.RawField(FixedArrayBase::kLengthOffset));
|
||||
|
||||
// Handle invalidated old-to-old slots.
|
||||
if (incremental_marking()->IsCompacting() &&
|
||||
@ -3027,7 +3027,7 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
|
||||
// If the array was right-trimmed before, then it is registered in
|
||||
// the invalidated_slots.
|
||||
MemoryChunk::FromHeapObject(new_object)
|
||||
->MoveObjectWithInvalidatedSlots<OLD_TO_OLD>(filler, new_object);
|
||||
->MoveObjectWithInvalidatedSlots(filler, new_object);
|
||||
// We have to clear slots in the free space to avoid stale old-to-old slots.
|
||||
// Note we cannot use ClearFreedMemoryMode of CreateFillerObjectAt because
|
||||
// we need pointer granularity writes to avoid race with the concurrent
|
||||
@ -3115,8 +3115,8 @@ void Heap::CreateFillerForArray(T object, int elements_to_trim,
|
||||
// Ensure that the object survives because the InvalidatedSlotsFilter will
|
||||
// compute its size from its map during pointers updating phase.
|
||||
incremental_marking()->WhiteToGreyAndPush(object);
|
||||
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
|
||||
chunk->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object, old_size);
|
||||
MemoryChunk::FromHeapObject(object)->RegisterObjectWithInvalidatedSlots(
|
||||
object, old_size);
|
||||
}
|
||||
|
||||
// Technically in new space this write might be omitted (except for
|
||||
@ -3124,7 +3124,8 @@ void Heap::CreateFillerForArray(T object, int elements_to_trim,
|
||||
// we still do it.
|
||||
// We do not create a filler for objects in a large object space.
|
||||
if (!IsLargeObject(object)) {
|
||||
HeapObject filler = CreateFillerObjectAt(new_end, bytes_to_trim);
|
||||
HeapObject filler =
|
||||
CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kNo);
|
||||
DCHECK(!filler.is_null());
|
||||
// Clear the mark bits of the black area that belongs now to the filler.
|
||||
// This is an optimization. The sweeper will release black fillers anyway.
|
||||
@ -3407,8 +3408,8 @@ void Heap::NotifyObjectLayoutChange(HeapObject object, int size,
|
||||
incremental_marking()->MarkBlackAndVisitObjectDueToLayoutChange(object);
|
||||
if (incremental_marking()->IsCompacting() &&
|
||||
MayContainRecordedSlots(object)) {
|
||||
MemoryChunk::FromHeapObject(object)
|
||||
->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object, size);
|
||||
MemoryChunk::FromHeapObject(object)->RegisterObjectWithInvalidatedSlots(
|
||||
object, size);
|
||||
}
|
||||
}
|
||||
#ifdef VERIFY_HEAP
|
||||
@ -4828,7 +4829,8 @@ HeapObject Heap::EnsureImmovableCode(HeapObject heap_object, int object_size) {
|
||||
} else {
|
||||
// Discard the first code allocation, which was on a page where it could
|
||||
// be moved.
|
||||
CreateFillerObjectAt(heap_object.address(), object_size);
|
||||
CreateFillerObjectAt(heap_object.address(), object_size,
|
||||
ClearRecordedSlots::kNo);
|
||||
heap_object = AllocateRawCodeInLargeObjectSpace(object_size);
|
||||
UnprotectAndRegisterMemoryChunk(heap_object);
|
||||
ZapCodeObject(heap_object.address(), object_size);
|
||||
@ -5500,6 +5502,15 @@ Address Heap::store_buffer_overflow_function_address() {
|
||||
return FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow);
|
||||
}
|
||||
|
||||
void Heap::ClearRecordedSlot(HeapObject object, ObjectSlot slot) {
|
||||
DCHECK(!IsLargeObject(object));
|
||||
Page* page = Page::FromAddress(slot.address());
|
||||
if (!page->InYoungGeneration()) {
|
||||
DCHECK_EQ(page->owner_identity(), OLD_SPACE);
|
||||
store_buffer()->DeleteEntry(slot.address());
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
void Heap::VerifyClearedSlot(HeapObject object, ObjectSlot slot) {
|
||||
DCHECK(!IsLargeObject(object));
|
||||
@ -5507,14 +5518,22 @@ void Heap::VerifyClearedSlot(HeapObject object, ObjectSlot slot) {
|
||||
Page* page = Page::FromAddress(slot.address());
|
||||
DCHECK_EQ(page->owner_identity(), OLD_SPACE);
|
||||
store_buffer()->MoveAllEntriesToRememberedSet();
|
||||
CHECK_IMPLIES(RememberedSet<OLD_TO_NEW>::Contains(page, slot.address()),
|
||||
page->RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(object));
|
||||
CHECK(!RememberedSet<OLD_TO_NEW>::Contains(page, slot.address()));
|
||||
// Old to old slots are filtered with invalidated slots.
|
||||
CHECK_IMPLIES(RememberedSet<OLD_TO_OLD>::Contains(page, slot.address()),
|
||||
page->RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(object));
|
||||
page->RegisteredObjectWithInvalidatedSlots(object));
|
||||
}
|
||||
#endif
|
||||
|
||||
void Heap::ClearRecordedSlotRange(Address start, Address end) {
|
||||
Page* page = Page::FromAddress(start);
|
||||
DCHECK(!page->IsLargePage());
|
||||
if (!page->InYoungGeneration()) {
|
||||
DCHECK_EQ(page->owner_identity(), OLD_SPACE);
|
||||
store_buffer()->DeleteEntry(start, end);
|
||||
}
|
||||
}
|
||||
|
||||
PagedSpace* PagedSpaceIterator::Next() {
|
||||
switch (counter_++) {
|
||||
case RO_SPACE:
|
||||
|
@ -84,6 +84,8 @@ enum ArrayStorageAllocationMode {
|
||||
INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
|
||||
};
|
||||
|
||||
enum class ClearRecordedSlots { kYes, kNo };
|
||||
|
||||
enum class ClearFreedMemoryMode { kClearFreedMemory, kDontClearFreedMemory };
|
||||
|
||||
enum ExternalBackingStoreType { kArrayBuffer, kExternalString, kNumTypes };
|
||||
@ -401,13 +403,15 @@ class Heap {
|
||||
WriteBarrierMode mode);
|
||||
|
||||
// Initialize a filler object to keep the ability to iterate over the heap
|
||||
// when introducing gaps within pages. If the memory after the object header
|
||||
// of the filler should be cleared, pass in kClearFreedMemory. The default is
|
||||
// when introducing gaps within pages. If slots could have been recorded in
|
||||
// the freed area, then pass ClearRecordedSlots::kYes as the mode. Otherwise,
|
||||
// pass ClearRecordedSlots::kNo. If the memory after the object header of
|
||||
// the filler should be cleared, pass in kClearFreedMemory. The default is
|
||||
// kDontClearFreedMemory.
|
||||
V8_EXPORT_PRIVATE HeapObject
|
||||
CreateFillerObjectAt(Address addr, int size,
|
||||
ClearFreedMemoryMode clear_memory_mode =
|
||||
ClearFreedMemoryMode::kDontClearFreedMemory);
|
||||
V8_EXPORT_PRIVATE HeapObject CreateFillerObjectAt(
|
||||
Address addr, int size, ClearRecordedSlots clear_slots_mode,
|
||||
ClearFreedMemoryMode clear_memory_mode =
|
||||
ClearFreedMemoryMode::kDontClearFreedMemory);
|
||||
|
||||
template <typename T>
|
||||
void CreateFillerForArray(T object, int elements_to_trim, int bytes_to_trim);
|
||||
@ -840,6 +844,9 @@ class Heap {
|
||||
static intptr_t store_buffer_mask_constant();
|
||||
static Address store_buffer_overflow_function_address();
|
||||
|
||||
void ClearRecordedSlot(HeapObject object, ObjectSlot slot);
|
||||
void ClearRecordedSlotRange(Address start, Address end);
|
||||
|
||||
#ifdef DEBUG
|
||||
void VerifyClearedSlot(HeapObject object, ObjectSlot slot);
|
||||
#endif
|
||||
|
@ -62,53 +62,6 @@ bool InvalidatedSlotsFilter::IsValid(Address slot) {
|
||||
return invalidated_object_.IsValidSlot(invalidated_object_.map(), offset);
|
||||
}
|
||||
|
||||
void InvalidatedSlotsCleanup::Free(Address free_start, Address free_end) {
|
||||
#ifdef DEBUG
|
||||
DCHECK_LT(free_start, free_end);
|
||||
// Free regions should come in increasing order and do not overlap
|
||||
DCHECK_LE(last_free_, free_start);
|
||||
last_free_ = free_start;
|
||||
#endif
|
||||
|
||||
if (iterator_ == iterator_end_) return;
|
||||
|
||||
// Ignore invalidated objects before free region
|
||||
while (free_start >= invalidated_end_) {
|
||||
++iterator_;
|
||||
NextInvalidatedObject();
|
||||
}
|
||||
|
||||
// Loop here: Free region might contain multiple invalidated objects
|
||||
while (free_end > invalidated_start_) {
|
||||
// Case: Free region starts before current invalidated object
|
||||
if (free_start <= invalidated_start_) {
|
||||
CHECK(invalidated_end_ <= free_end);
|
||||
iterator_ = invalidated_slots_->erase(iterator_);
|
||||
|
||||
} else {
|
||||
// Case: Free region starts within current invalidated object
|
||||
// (Can happen for right-trimmed objects)
|
||||
iterator_->second =
|
||||
static_cast<int>(free_start - iterator_->first.address());
|
||||
|
||||
CHECK(free_end >= invalidated_end_);
|
||||
iterator_++;
|
||||
}
|
||||
|
||||
NextInvalidatedObject();
|
||||
}
|
||||
}
|
||||
|
||||
void InvalidatedSlotsCleanup::NextInvalidatedObject() {
|
||||
if (iterator_ != iterator_end_) {
|
||||
invalidated_start_ = iterator_->first.address();
|
||||
invalidated_end_ = invalidated_start_ + iterator_->second;
|
||||
} else {
|
||||
invalidated_start_ = sentinel_;
|
||||
invalidated_end_ = sentinel_;
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
||||
|
@ -8,18 +8,9 @@
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
InvalidatedSlotsFilter InvalidatedSlotsFilter::OldToOld(MemoryChunk* chunk) {
|
||||
return InvalidatedSlotsFilter(chunk, chunk->invalidated_slots<OLD_TO_OLD>());
|
||||
}
|
||||
|
||||
InvalidatedSlotsFilter InvalidatedSlotsFilter::OldToNew(MemoryChunk* chunk) {
|
||||
return InvalidatedSlotsFilter(chunk, chunk->invalidated_slots<OLD_TO_NEW>());
|
||||
}
|
||||
|
||||
InvalidatedSlotsFilter::InvalidatedSlotsFilter(
|
||||
MemoryChunk* chunk, InvalidatedSlots* invalidated_slots) {
|
||||
InvalidatedSlotsFilter::InvalidatedSlotsFilter(MemoryChunk* chunk) {
|
||||
// Adjust slots_in_free_space_are_valid_ if more spaces are added.
|
||||
DCHECK_IMPLIES(invalidated_slots != nullptr,
|
||||
DCHECK_IMPLIES(chunk->invalidated_slots() != nullptr,
|
||||
chunk->InOldSpace() || chunk->InLargeObjectSpace());
|
||||
// The sweeper removes invalid slots and makes free space available for
|
||||
// allocation. Slots for new objects can be recorded in the free space.
|
||||
@ -27,8 +18,8 @@ InvalidatedSlotsFilter::InvalidatedSlotsFilter(
|
||||
// object space are not swept but have SweepingDone() == true.
|
||||
slots_in_free_space_are_valid_ = chunk->SweepingDone() && chunk->InOldSpace();
|
||||
|
||||
invalidated_slots = invalidated_slots ? invalidated_slots : &empty_;
|
||||
|
||||
InvalidatedSlots* invalidated_slots =
|
||||
chunk->invalidated_slots() ? chunk->invalidated_slots() : &empty_;
|
||||
iterator_ = invalidated_slots->begin();
|
||||
iterator_end_ = invalidated_slots->end();
|
||||
sentinel_ = chunk->area_end();
|
||||
@ -46,33 +37,5 @@ InvalidatedSlotsFilter::InvalidatedSlotsFilter(
|
||||
#endif
|
||||
}
|
||||
|
||||
InvalidatedSlotsCleanup InvalidatedSlotsCleanup::OldToOld(MemoryChunk* chunk) {
|
||||
return InvalidatedSlotsCleanup(chunk, chunk->invalidated_slots<OLD_TO_OLD>());
|
||||
}
|
||||
|
||||
InvalidatedSlotsCleanup InvalidatedSlotsCleanup::OldToNew(MemoryChunk* chunk) {
|
||||
return InvalidatedSlotsCleanup(chunk, chunk->invalidated_slots<OLD_TO_NEW>());
|
||||
}
|
||||
|
||||
InvalidatedSlotsCleanup::InvalidatedSlotsCleanup(
|
||||
MemoryChunk* chunk, InvalidatedSlots* invalidated_slots) {
|
||||
invalidated_slots_ = invalidated_slots ? invalidated_slots : &empty_;
|
||||
iterator_ = invalidated_slots_->begin();
|
||||
iterator_end_ = invalidated_slots_->end();
|
||||
sentinel_ = chunk->area_end();
|
||||
|
||||
if (iterator_ != iterator_end_) {
|
||||
invalidated_start_ = iterator_->first.address();
|
||||
invalidated_end_ = invalidated_start_ + iterator_->second;
|
||||
} else {
|
||||
invalidated_start_ = sentinel_;
|
||||
invalidated_end_ = sentinel_;
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
last_free_ = chunk->area_start();
|
||||
#endif
|
||||
}
|
||||
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
@ -30,11 +30,7 @@ using InvalidatedSlots = std::map<HeapObject, int, Object::Comparer>;
|
||||
// n is the number of IsValid queries.
|
||||
class V8_EXPORT_PRIVATE InvalidatedSlotsFilter {
|
||||
public:
|
||||
static InvalidatedSlotsFilter OldToOld(MemoryChunk* chunk);
|
||||
static InvalidatedSlotsFilter OldToNew(MemoryChunk* chunk);
|
||||
|
||||
explicit InvalidatedSlotsFilter(MemoryChunk* chunk,
|
||||
InvalidatedSlots* invalidated_slots);
|
||||
explicit InvalidatedSlotsFilter(MemoryChunk* chunk);
|
||||
inline bool IsValid(Address slot);
|
||||
|
||||
private:
|
||||
@ -52,32 +48,6 @@ class V8_EXPORT_PRIVATE InvalidatedSlotsFilter {
|
||||
#endif
|
||||
};
|
||||
|
||||
class V8_EXPORT_PRIVATE InvalidatedSlotsCleanup {
|
||||
public:
|
||||
static InvalidatedSlotsCleanup OldToOld(MemoryChunk* chunk);
|
||||
static InvalidatedSlotsCleanup OldToNew(MemoryChunk* chunk);
|
||||
|
||||
explicit InvalidatedSlotsCleanup(MemoryChunk* chunk,
|
||||
InvalidatedSlots* invalidated_slots);
|
||||
|
||||
inline void Free(Address free_start, Address free_end);
|
||||
|
||||
private:
|
||||
InvalidatedSlots::iterator iterator_;
|
||||
InvalidatedSlots::iterator iterator_end_;
|
||||
InvalidatedSlots* invalidated_slots_;
|
||||
InvalidatedSlots empty_;
|
||||
|
||||
Address sentinel_;
|
||||
Address invalidated_start_;
|
||||
Address invalidated_end_;
|
||||
|
||||
inline void NextInvalidatedObject();
|
||||
#ifdef DEBUG
|
||||
Address last_free_;
|
||||
#endif
|
||||
};
|
||||
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
||||
|
@ -47,14 +47,16 @@ void LocalAllocator::FreeLast(AllocationSpace space, HeapObject object,
|
||||
void LocalAllocator::FreeLastInNewSpace(HeapObject object, int object_size) {
|
||||
if (!new_space_lab_.TryFreeLast(object, object_size)) {
|
||||
// We couldn't free the last object so we have to write a proper filler.
|
||||
heap_->CreateFillerObjectAt(object.address(), object_size);
|
||||
heap_->CreateFillerObjectAt(object.address(), object_size,
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
}
|
||||
|
||||
void LocalAllocator::FreeLastInOldSpace(HeapObject object, int object_size) {
|
||||
if (!compaction_spaces_.Get(OLD_SPACE)->TryFreeLast(object, object_size)) {
|
||||
// We couldn't free the last object so we have to write a proper filler.
|
||||
heap_->CreateFillerObjectAt(object.address(), object_size);
|
||||
heap_->CreateFillerObjectAt(object.address(), object_size,
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -306,7 +306,8 @@ void EvacuationVerifier::VerifyEvacuation(PagedSpace* space) {
|
||||
if (p->Contains(space->top())) {
|
||||
CodePageMemoryModificationScope memory_modification_scope(p);
|
||||
heap_->CreateFillerObjectAt(
|
||||
space->top(), static_cast<int>(space->limit() - space->top()));
|
||||
space->top(), static_cast<int>(space->limit() - space->top()),
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
VerifyEvacuationOnPage(p->area_start(), p->area_end());
|
||||
}
|
||||
@ -2095,7 +2096,8 @@ void MarkCompactCollector::FlushBytecodeFromSFI(
|
||||
if (!heap()->IsLargeObject(compiled_data)) {
|
||||
heap()->CreateFillerObjectAt(
|
||||
compiled_data.address() + UncompiledDataWithoutPreparseData::kSize,
|
||||
compiled_data_size - UncompiledDataWithoutPreparseData::kSize);
|
||||
compiled_data_size - UncompiledDataWithoutPreparseData::kSize,
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
|
||||
// Initialize the uncompiled data.
|
||||
@ -2236,7 +2238,8 @@ void MarkCompactCollector::RightTrimDescriptorArray(DescriptorArray array,
|
||||
RememberedSet<OLD_TO_OLD>::RemoveRange(MemoryChunk::FromHeapObject(array),
|
||||
start, end,
|
||||
SlotSet::PREFREE_EMPTY_BUCKETS);
|
||||
heap()->CreateFillerObjectAt(start, static_cast<int>(end - start));
|
||||
heap()->CreateFillerObjectAt(start, static_cast<int>(end - start),
|
||||
ClearRecordedSlots::kNo);
|
||||
array.set_number_of_all_descriptors(new_nof_all_descriptors);
|
||||
}
|
||||
|
||||
@ -2685,8 +2688,7 @@ void MarkCompactCollector::EvacuateEpilogue() {
|
||||
for (Page* p : *heap()->old_space()) {
|
||||
DCHECK_NULL((p->slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));
|
||||
DCHECK_NULL((p->typed_slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));
|
||||
DCHECK_NULL(p->invalidated_slots<OLD_TO_OLD>());
|
||||
DCHECK_NULL(p->invalidated_slots<OLD_TO_NEW>());
|
||||
DCHECK_NULL(p->invalidated_slots());
|
||||
}
|
||||
#endif
|
||||
}
|
||||
@ -3383,32 +3385,16 @@ class RememberedSetUpdatingItem : public UpdatingItem {
|
||||
|
||||
void UpdateUntypedPointers() {
|
||||
if (chunk_->slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() != nullptr) {
|
||||
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew(chunk_);
|
||||
RememberedSet<OLD_TO_NEW>::Iterate(
|
||||
chunk_,
|
||||
[this, &filter](MaybeObjectSlot slot) {
|
||||
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
|
||||
[this](MaybeObjectSlot slot) {
|
||||
return CheckAndUpdateOldToNewSlot(slot);
|
||||
},
|
||||
SlotSet::PREFREE_EMPTY_BUCKETS);
|
||||
}
|
||||
|
||||
if (chunk_->invalidated_slots<OLD_TO_NEW>() != nullptr) {
|
||||
#ifdef DEBUG
|
||||
for (auto object_size : *chunk_->invalidated_slots<OLD_TO_NEW>()) {
|
||||
HeapObject object = object_size.first;
|
||||
int size = object_size.second;
|
||||
DCHECK_LE(object.SizeFromMap(object.map()), size);
|
||||
}
|
||||
#endif
|
||||
// The invalidated slots are not needed after old-to-new slots were
|
||||
// processed.
|
||||
chunk_->ReleaseInvalidatedSlots<OLD_TO_NEW>();
|
||||
}
|
||||
|
||||
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
|
||||
(chunk_->slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() != nullptr)) {
|
||||
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(chunk_);
|
||||
InvalidatedSlotsFilter filter(chunk_);
|
||||
RememberedSet<OLD_TO_OLD>::Iterate(
|
||||
chunk_,
|
||||
[&filter](MaybeObjectSlot slot) {
|
||||
@ -3418,9 +3404,9 @@ class RememberedSetUpdatingItem : public UpdatingItem {
|
||||
SlotSet::PREFREE_EMPTY_BUCKETS);
|
||||
}
|
||||
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
|
||||
chunk_->invalidated_slots<OLD_TO_OLD>() != nullptr) {
|
||||
chunk_->invalidated_slots() != nullptr) {
|
||||
#ifdef DEBUG
|
||||
for (auto object_size : *chunk_->invalidated_slots<OLD_TO_OLD>()) {
|
||||
for (auto object_size : *chunk_->invalidated_slots()) {
|
||||
HeapObject object = object_size.first;
|
||||
int size = object_size.second;
|
||||
DCHECK_LE(object.SizeFromMap(object.map()), size);
|
||||
@ -3428,7 +3414,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
|
||||
#endif
|
||||
// The invalidated slots are not needed after old-to-old slots were
|
||||
// processsed.
|
||||
chunk_->ReleaseInvalidatedSlots<OLD_TO_OLD>();
|
||||
chunk_->ReleaseInvalidatedSlots();
|
||||
}
|
||||
}
|
||||
|
||||
@ -3542,17 +3528,13 @@ int MarkCompactCollectorBase::CollectRememberedSetUpdatingItems(
|
||||
const bool contains_old_to_new_slots =
|
||||
chunk->slot_set<OLD_TO_NEW>() != nullptr ||
|
||||
chunk->typed_slot_set<OLD_TO_NEW>() != nullptr;
|
||||
const bool contains_old_to_old_invalidated_slots =
|
||||
chunk->invalidated_slots<OLD_TO_OLD>() != nullptr;
|
||||
const bool contains_old_to_new_invalidated_slots =
|
||||
chunk->invalidated_slots<OLD_TO_NEW>() != nullptr;
|
||||
const bool contains_invalidated_slots =
|
||||
chunk->invalidated_slots() != nullptr;
|
||||
if (!contains_old_to_new_slots && !contains_old_to_old_slots &&
|
||||
!contains_old_to_old_invalidated_slots &&
|
||||
!contains_old_to_new_invalidated_slots)
|
||||
!contains_invalidated_slots)
|
||||
continue;
|
||||
if (mode == RememberedSetUpdatingMode::ALL || contains_old_to_new_slots ||
|
||||
contains_old_to_old_invalidated_slots ||
|
||||
contains_old_to_new_invalidated_slots) {
|
||||
contains_invalidated_slots) {
|
||||
job->AddItem(CreateRememberedSetUpdatingItem(chunk, mode));
|
||||
pages++;
|
||||
}
|
||||
@ -4367,7 +4349,8 @@ void MinorMarkCompactCollector::MakeIterable(
|
||||
if (free_space_mode == ZAP_FREE_SPACE) {
|
||||
ZapCode(free_start, size);
|
||||
}
|
||||
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size));
|
||||
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
Map map = object.synchronized_map();
|
||||
int size = object.SizeFromMap(map);
|
||||
@ -4383,7 +4366,8 @@ void MinorMarkCompactCollector::MakeIterable(
|
||||
if (free_space_mode == ZAP_FREE_SPACE) {
|
||||
ZapCode(free_start, size);
|
||||
}
|
||||
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size));
|
||||
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
|
||||
if (marking_mode == MarkingTreatmentMode::CLEAR) {
|
||||
@ -4616,14 +4600,11 @@ class PageMarkingItem : public MarkingItem {
|
||||
inline Heap* heap() { return chunk_->heap(); }
|
||||
|
||||
void MarkUntypedPointers(YoungGenerationMarkingTask* task) {
|
||||
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew(chunk_);
|
||||
RememberedSet<OLD_TO_NEW>::Iterate(
|
||||
chunk_,
|
||||
[this, task, &filter](MaybeObjectSlot slot) {
|
||||
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
|
||||
return CheckAndMarkObject(task, slot);
|
||||
},
|
||||
SlotSet::PREFREE_EMPTY_BUCKETS);
|
||||
RememberedSet<OLD_TO_NEW>::Iterate(chunk_,
|
||||
[this, task](MaybeObjectSlot slot) {
|
||||
return CheckAndMarkObject(task, slot);
|
||||
},
|
||||
SlotSet::PREFREE_EMPTY_BUCKETS);
|
||||
}
|
||||
|
||||
void MarkTypedPointers(YoungGenerationMarkingTask* task) {
|
||||
|
@ -122,7 +122,7 @@ class RememberedSet : public AllStatic {
|
||||
SlotSet* slots = chunk->slot_set<type>();
|
||||
TypedSlotSet* typed_slots = chunk->typed_slot_set<type>();
|
||||
if (slots != nullptr || typed_slots != nullptr ||
|
||||
chunk->invalidated_slots<type>() != nullptr) {
|
||||
chunk->invalidated_slots() != nullptr) {
|
||||
callback(chunk);
|
||||
}
|
||||
}
|
||||
@ -256,7 +256,7 @@ class RememberedSet : public AllStatic {
|
||||
while ((chunk = it.next()) != nullptr) {
|
||||
chunk->ReleaseSlotSet<OLD_TO_OLD>();
|
||||
chunk->ReleaseTypedSlotSet<OLD_TO_OLD>();
|
||||
chunk->ReleaseInvalidatedSlots<OLD_TO_OLD>();
|
||||
chunk->ReleaseInvalidatedSlots();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -8,7 +8,6 @@
|
||||
#include "src/heap/barrier.h"
|
||||
#include "src/heap/gc-tracer.h"
|
||||
#include "src/heap/heap-inl.h"
|
||||
#include "src/heap/invalidated-slots-inl.h"
|
||||
#include "src/heap/item-parallel-job.h"
|
||||
#include "src/heap/mark-compact-inl.h"
|
||||
#include "src/heap/objects-visiting-inl.h"
|
||||
@ -422,15 +421,12 @@ void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {
|
||||
|
||||
void Scavenger::ScavengePage(MemoryChunk* page) {
|
||||
CodePageMemoryModificationScope memory_modification_scope(page);
|
||||
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew(page);
|
||||
RememberedSet<OLD_TO_NEW>::Iterate(
|
||||
page,
|
||||
[this, &filter](MaybeObjectSlot addr) {
|
||||
if (!filter.IsValid(addr.address())) return REMOVE_SLOT;
|
||||
return CheckAndScavengeObject(heap_, addr);
|
||||
},
|
||||
SlotSet::KEEP_EMPTY_BUCKETS);
|
||||
page->ReleaseInvalidatedSlots<OLD_TO_NEW>();
|
||||
RememberedSet<OLD_TO_NEW>::Iterate(page,
|
||||
[this](MaybeObjectSlot addr) {
|
||||
return CheckAndScavengeObject(heap_,
|
||||
addr);
|
||||
},
|
||||
SlotSet::KEEP_EMPTY_BUCKETS);
|
||||
RememberedSet<OLD_TO_NEW>::IterateTyped(
|
||||
page, [=](SlotType type, Address addr) {
|
||||
return UpdateTypedSlotHelper::UpdateTypedSlot(
|
||||
|
@ -703,8 +703,7 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
|
||||
nullptr);
|
||||
base::AsAtomicPointer::Release_Store(&chunk->typed_slot_set_[OLD_TO_OLD],
|
||||
nullptr);
|
||||
chunk->invalidated_slots_[OLD_TO_NEW] = nullptr;
|
||||
chunk->invalidated_slots_[OLD_TO_OLD] = nullptr;
|
||||
chunk->invalidated_slots_ = nullptr;
|
||||
chunk->progress_bar_ = 0;
|
||||
chunk->high_water_mark_ = static_cast<intptr_t>(area_start - base);
|
||||
chunk->set_concurrent_sweeping_state(kSweepingDone);
|
||||
@ -1084,7 +1083,8 @@ size_t Page::ShrinkToHighWaterMark() {
|
||||
}
|
||||
heap()->CreateFillerObjectAt(
|
||||
filler.address(),
|
||||
static_cast<int>(area_end() - filler.address() - unused));
|
||||
static_cast<int>(area_end() - filler.address() - unused),
|
||||
ClearRecordedSlots::kNo);
|
||||
heap()->memory_allocator()->PartialFreeMemory(
|
||||
this, address() + size() - unused, unused, area_end() - unused);
|
||||
if (filler.address() != area_end()) {
|
||||
@ -1379,8 +1379,7 @@ void MemoryChunk::ReleaseAllocatedMemoryNeededForWritableChunk() {
|
||||
ReleaseSlotSet<OLD_TO_OLD>();
|
||||
ReleaseTypedSlotSet<OLD_TO_NEW>();
|
||||
ReleaseTypedSlotSet<OLD_TO_OLD>();
|
||||
ReleaseInvalidatedSlots<OLD_TO_NEW>();
|
||||
ReleaseInvalidatedSlots<OLD_TO_OLD>();
|
||||
ReleaseInvalidatedSlots();
|
||||
|
||||
if (local_tracker_ != nullptr) ReleaseLocalTracker();
|
||||
if (young_generation_bitmap_ != nullptr) ReleaseYoungGenerationBitmap();
|
||||
@ -1462,89 +1461,53 @@ void MemoryChunk::ReleaseTypedSlotSet() {
|
||||
}
|
||||
}
|
||||
|
||||
template InvalidatedSlots* MemoryChunk::AllocateInvalidatedSlots<OLD_TO_NEW>();
|
||||
template InvalidatedSlots* MemoryChunk::AllocateInvalidatedSlots<OLD_TO_OLD>();
|
||||
|
||||
template <RememberedSetType type>
|
||||
InvalidatedSlots* MemoryChunk::AllocateInvalidatedSlots() {
|
||||
DCHECK_NULL(invalidated_slots_[type]);
|
||||
invalidated_slots_[type] = new InvalidatedSlots();
|
||||
return invalidated_slots_[type];
|
||||
DCHECK_NULL(invalidated_slots_);
|
||||
invalidated_slots_ = new InvalidatedSlots();
|
||||
return invalidated_slots_;
|
||||
}
|
||||
|
||||
template void MemoryChunk::ReleaseInvalidatedSlots<OLD_TO_NEW>();
|
||||
template void MemoryChunk::ReleaseInvalidatedSlots<OLD_TO_OLD>();
|
||||
|
||||
template <RememberedSetType type>
|
||||
void MemoryChunk::ReleaseInvalidatedSlots() {
|
||||
if (invalidated_slots_[type]) {
|
||||
delete invalidated_slots_[type];
|
||||
invalidated_slots_[type] = nullptr;
|
||||
if (invalidated_slots_) {
|
||||
delete invalidated_slots_;
|
||||
invalidated_slots_ = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
template V8_EXPORT_PRIVATE void
|
||||
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(HeapObject object,
|
||||
int size);
|
||||
template V8_EXPORT_PRIVATE void
|
||||
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(HeapObject object,
|
||||
int size);
|
||||
|
||||
template <RememberedSetType type>
|
||||
void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object,
|
||||
int size) {
|
||||
if (!ShouldSkipEvacuationSlotRecording()) {
|
||||
if (invalidated_slots<type>() == nullptr) {
|
||||
AllocateInvalidatedSlots<type>();
|
||||
}
|
||||
|
||||
InvalidatedSlots::iterator it = invalidated_slots<type>()->find(object);
|
||||
|
||||
if (it != invalidated_slots<type>()->end()) {
|
||||
int old_size = it->second;
|
||||
it->second = std::max(old_size, size);
|
||||
} else {
|
||||
invalidated_slots<type>()->insert(it, std::make_pair(object, size));
|
||||
if (invalidated_slots() == nullptr) {
|
||||
AllocateInvalidatedSlots();
|
||||
}
|
||||
int old_size = (*invalidated_slots())[object];
|
||||
(*invalidated_slots())[object] = std::max(old_size, size);
|
||||
}
|
||||
}
|
||||
|
||||
template bool MemoryChunk::RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(
|
||||
HeapObject object);
|
||||
template bool MemoryChunk::RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(
|
||||
HeapObject object);
|
||||
|
||||
template <RememberedSetType type>
|
||||
bool MemoryChunk::RegisteredObjectWithInvalidatedSlots(HeapObject object) {
|
||||
if (ShouldSkipEvacuationSlotRecording()) {
|
||||
// Invalidated slots do not matter if we are not recording slots.
|
||||
return true;
|
||||
}
|
||||
if (invalidated_slots<type>() == nullptr) {
|
||||
if (invalidated_slots() == nullptr) {
|
||||
return false;
|
||||
}
|
||||
return invalidated_slots<type>()->find(object) !=
|
||||
invalidated_slots<type>()->end();
|
||||
return invalidated_slots()->find(object) != invalidated_slots()->end();
|
||||
}
|
||||
|
||||
template void MemoryChunk::MoveObjectWithInvalidatedSlots<OLD_TO_NEW>(
|
||||
HeapObject old_start, HeapObject new_start);
|
||||
template void MemoryChunk::MoveObjectWithInvalidatedSlots<OLD_TO_OLD>(
|
||||
HeapObject old_start, HeapObject new_start);
|
||||
|
||||
template <RememberedSetType type>
|
||||
void MemoryChunk::MoveObjectWithInvalidatedSlots(HeapObject old_start,
|
||||
HeapObject new_start) {
|
||||
DCHECK_LT(old_start, new_start);
|
||||
DCHECK_EQ(MemoryChunk::FromHeapObject(old_start),
|
||||
MemoryChunk::FromHeapObject(new_start));
|
||||
if (!ShouldSkipEvacuationSlotRecording() && invalidated_slots<type>()) {
|
||||
auto it = invalidated_slots<type>()->find(old_start);
|
||||
if (it != invalidated_slots<type>()->end()) {
|
||||
if (!ShouldSkipEvacuationSlotRecording() && invalidated_slots()) {
|
||||
auto it = invalidated_slots()->find(old_start);
|
||||
if (it != invalidated_slots()->end()) {
|
||||
int old_size = it->second;
|
||||
int delta = static_cast<int>(new_start.address() - old_start.address());
|
||||
invalidated_slots<type>()->erase(it);
|
||||
(*invalidated_slots<type>())[new_start] = old_size - delta;
|
||||
invalidated_slots()->erase(it);
|
||||
(*invalidated_slots())[new_start] = old_size - delta;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1600,7 +1563,7 @@ void Space::AllocationStep(int bytes_since_last, Address soon_object,
|
||||
|
||||
DCHECK(!heap()->allocation_step_in_progress());
|
||||
heap()->set_allocation_step_in_progress(true);
|
||||
heap()->CreateFillerObjectAt(soon_object, size);
|
||||
heap()->CreateFillerObjectAt(soon_object, size, ClearRecordedSlots::kNo);
|
||||
for (AllocationObserver* observer : allocation_observers_) {
|
||||
observer->AllocationStep(bytes_since_last, soon_object, size);
|
||||
}
|
||||
@ -2314,7 +2277,8 @@ bool SemiSpace::EnsureCurrentCapacity() {
|
||||
current_page->SetFlags(first_page()->GetFlags(),
|
||||
static_cast<uintptr_t>(Page::kCopyAllFlags));
|
||||
heap()->CreateFillerObjectAt(current_page->area_start(),
|
||||
static_cast<int>(current_page->area_size()));
|
||||
static_cast<int>(current_page->area_size()),
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
@ -2324,7 +2288,8 @@ LinearAllocationArea LocalAllocationBuffer::Close() {
|
||||
if (IsValid()) {
|
||||
heap_->CreateFillerObjectAt(
|
||||
allocation_info_.top(),
|
||||
static_cast<int>(allocation_info_.limit() - allocation_info_.top()));
|
||||
static_cast<int>(allocation_info_.limit() - allocation_info_.top()),
|
||||
ClearRecordedSlots::kNo);
|
||||
const LinearAllocationArea old_info = allocation_info_;
|
||||
allocation_info_ = LinearAllocationArea(kNullAddress, kNullAddress);
|
||||
return old_info;
|
||||
@ -2339,7 +2304,8 @@ LocalAllocationBuffer::LocalAllocationBuffer(
|
||||
if (IsValid()) {
|
||||
heap_->CreateFillerObjectAt(
|
||||
allocation_info_.top(),
|
||||
static_cast<int>(allocation_info_.limit() - allocation_info_.top()));
|
||||
static_cast<int>(allocation_info_.limit() - allocation_info_.top()),
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2419,7 +2385,7 @@ bool NewSpace::AddFreshPage() {
|
||||
// Clear remainder of current page.
|
||||
Address limit = Page::FromAllocationAreaAddress(top)->area_end();
|
||||
int remaining_in_page = static_cast<int>(limit - top);
|
||||
heap()->CreateFillerObjectAt(top, remaining_in_page);
|
||||
heap()->CreateFillerObjectAt(top, remaining_in_page, ClearRecordedSlots::kNo);
|
||||
UpdateLinearAllocationArea();
|
||||
|
||||
return true;
|
||||
@ -3605,7 +3571,7 @@ void ReadOnlySpace::RepairFreeListsAfterDeserialization() {
|
||||
start += filler.Size();
|
||||
}
|
||||
CHECK_EQ(size, static_cast<int>(end - start));
|
||||
heap()->CreateFillerObjectAt(start, size);
|
||||
heap()->CreateFillerObjectAt(start, size, ClearRecordedSlots::kNo);
|
||||
}
|
||||
}
|
||||
|
||||
@ -3751,7 +3717,8 @@ LargePage* LargeObjectSpace::AllocateLargePage(int object_size,
|
||||
|
||||
HeapObject object = page->GetObject();
|
||||
|
||||
heap()->CreateFillerObjectAt(object.address(), object_size);
|
||||
heap()->CreateFillerObjectAt(object.address(), object_size,
|
||||
ClearRecordedSlots::kNo);
|
||||
return page;
|
||||
}
|
||||
|
||||
|
@ -606,8 +606,7 @@ class MemoryChunk : public BasicMemoryChunk {
|
||||
+ kSystemPointerSize * NUMBER_OF_REMEMBERED_SET_TYPES // SlotSet* array
|
||||
+ kSystemPointerSize *
|
||||
NUMBER_OF_REMEMBERED_SET_TYPES // TypedSlotSet* array
|
||||
+ kSystemPointerSize *
|
||||
NUMBER_OF_REMEMBERED_SET_TYPES // InvalidatedSlots* array
|
||||
+ kSystemPointerSize // InvalidatedSlots* invalidated_slots_
|
||||
+ kSystemPointerSize // std::atomic<intptr_t> high_water_mark_
|
||||
+ kSystemPointerSize // base::Mutex* mutex_
|
||||
+ kSystemPointerSize // std::atomic<ConcurrentSweepingState>
|
||||
@ -693,7 +692,7 @@ class MemoryChunk : public BasicMemoryChunk {
|
||||
template <RememberedSetType type>
|
||||
bool ContainsSlots() {
|
||||
return slot_set<type>() != nullptr || typed_slot_set<type>() != nullptr ||
|
||||
invalidated_slots<type>() != nullptr;
|
||||
invalidated_slots() != nullptr;
|
||||
}
|
||||
|
||||
template <RememberedSetType type, AccessMode access_mode = AccessMode::ATOMIC>
|
||||
@ -721,23 +720,15 @@ class MemoryChunk : public BasicMemoryChunk {
|
||||
template <RememberedSetType type>
|
||||
void ReleaseTypedSlotSet();
|
||||
|
||||
template <RememberedSetType type>
|
||||
InvalidatedSlots* AllocateInvalidatedSlots();
|
||||
template <RememberedSetType type>
|
||||
void ReleaseInvalidatedSlots();
|
||||
template <RememberedSetType type>
|
||||
V8_EXPORT_PRIVATE void RegisterObjectWithInvalidatedSlots(HeapObject object,
|
||||
int size);
|
||||
// Updates invalidated_slots after array left-trimming.
|
||||
template <RememberedSetType type>
|
||||
void MoveObjectWithInvalidatedSlots(HeapObject old_start,
|
||||
HeapObject new_start);
|
||||
template <RememberedSetType type>
|
||||
bool RegisteredObjectWithInvalidatedSlots(HeapObject object);
|
||||
template <RememberedSetType type>
|
||||
InvalidatedSlots* invalidated_slots() {
|
||||
return invalidated_slots_[type];
|
||||
}
|
||||
InvalidatedSlots* invalidated_slots() { return invalidated_slots_; }
|
||||
|
||||
void ReleaseLocalTracker();
|
||||
|
||||
@ -913,7 +904,7 @@ class MemoryChunk : public BasicMemoryChunk {
|
||||
// is ceil(size() / kPageSize).
|
||||
SlotSet* slot_set_[NUMBER_OF_REMEMBERED_SET_TYPES];
|
||||
TypedSlotSet* typed_slot_set_[NUMBER_OF_REMEMBERED_SET_TYPES];
|
||||
InvalidatedSlots* invalidated_slots_[NUMBER_OF_REMEMBERED_SET_TYPES];
|
||||
InvalidatedSlots* invalidated_slots_;
|
||||
|
||||
// Assuming the initial allocation on a page is sequential,
|
||||
// count highest number of bytes ever allocated on the page.
|
||||
@ -2217,7 +2208,8 @@ class V8_EXPORT_PRIVATE PagedSpace
|
||||
|
||||
size_t Free(Address start, size_t size_in_bytes, SpaceAccountingMode mode) {
|
||||
if (size_in_bytes == 0) return 0;
|
||||
heap()->CreateFillerObjectAt(start, static_cast<int>(size_in_bytes));
|
||||
heap()->CreateFillerObjectAt(start, static_cast<int>(size_in_bytes),
|
||||
ClearRecordedSlots::kNo);
|
||||
if (mode == SpaceAccountingMode::kSpaceAccounted) {
|
||||
return AccountedFree(start, size_in_bytes);
|
||||
} else {
|
||||
|
@ -12,6 +12,16 @@
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
void StoreBuffer::InsertDeletionIntoStoreBuffer(Address start, Address end) {
|
||||
if (top_ + sizeof(Address) * 2 > limit_[current_]) {
|
||||
StoreBufferOverflow(heap_->isolate());
|
||||
}
|
||||
*top_ = MarkDeletionAddress(start);
|
||||
top_++;
|
||||
*top_ = end;
|
||||
top_++;
|
||||
}
|
||||
|
||||
void StoreBuffer::InsertIntoStoreBuffer(Address slot) {
|
||||
if (top_ + sizeof(Address) > limit_[current_]) {
|
||||
StoreBufferOverflow(heap_->isolate());
|
||||
|
@ -28,6 +28,7 @@ StoreBuffer::StoreBuffer(Heap* heap)
|
||||
}
|
||||
task_running_ = false;
|
||||
insertion_callback = &InsertDuringRuntime;
|
||||
deletion_callback = &DeleteDuringRuntime;
|
||||
}
|
||||
|
||||
void StoreBuffer::SetUp() {
|
||||
@ -90,11 +91,22 @@ void StoreBuffer::TearDown() {
|
||||
}
|
||||
}
|
||||
|
||||
void StoreBuffer::DeleteDuringRuntime(StoreBuffer* store_buffer, Address start,
|
||||
Address end) {
|
||||
DCHECK(store_buffer->mode() == StoreBuffer::NOT_IN_GC);
|
||||
store_buffer->InsertDeletionIntoStoreBuffer(start, end);
|
||||
}
|
||||
|
||||
void StoreBuffer::InsertDuringRuntime(StoreBuffer* store_buffer, Address slot) {
|
||||
DCHECK(store_buffer->mode() == StoreBuffer::NOT_IN_GC);
|
||||
store_buffer->InsertIntoStoreBuffer(slot);
|
||||
}
|
||||
|
||||
void StoreBuffer::DeleteDuringGarbageCollection(StoreBuffer* store_buffer,
|
||||
Address start, Address end) {
|
||||
UNREACHABLE();
|
||||
}
|
||||
|
||||
void StoreBuffer::InsertDuringGarbageCollection(StoreBuffer* store_buffer,
|
||||
Address slot) {
|
||||
DCHECK(store_buffer->mode() != StoreBuffer::NOT_IN_GC);
|
||||
@ -105,8 +117,10 @@ void StoreBuffer::SetMode(StoreBufferMode mode) {
|
||||
mode_ = mode;
|
||||
if (mode == NOT_IN_GC) {
|
||||
insertion_callback = &InsertDuringRuntime;
|
||||
deletion_callback = &DeleteDuringRuntime;
|
||||
} else {
|
||||
insertion_callback = &InsertDuringGarbageCollection;
|
||||
deletion_callback = &DeleteDuringGarbageCollection;
|
||||
}
|
||||
}
|
||||
|
||||
@ -146,9 +160,24 @@ void StoreBuffer::MoveEntriesToRememberedSet(int index) {
|
||||
MemoryChunk::BaseAddress(addr) != chunk->address()) {
|
||||
chunk = MemoryChunk::FromAnyPointerAddress(addr);
|
||||
}
|
||||
if (addr != last_inserted_addr) {
|
||||
RememberedSet<OLD_TO_NEW>::Insert(chunk, addr);
|
||||
last_inserted_addr = addr;
|
||||
if (IsDeletionAddress(addr)) {
|
||||
last_inserted_addr = kNullAddress;
|
||||
current++;
|
||||
Address end = *current;
|
||||
DCHECK(!IsDeletionAddress(end));
|
||||
addr = UnmarkDeletionAddress(addr);
|
||||
if (end) {
|
||||
RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, addr, end,
|
||||
SlotSet::PREFREE_EMPTY_BUCKETS);
|
||||
} else {
|
||||
RememberedSet<OLD_TO_NEW>::Remove(chunk, addr);
|
||||
}
|
||||
} else {
|
||||
DCHECK(!IsDeletionAddress(addr));
|
||||
if (addr != last_inserted_addr) {
|
||||
RememberedSet<OLD_TO_NEW>::Insert(chunk, addr);
|
||||
last_inserted_addr = addr;
|
||||
}
|
||||
}
|
||||
}
|
||||
lazy_top_[index] = nullptr;
|
||||
|
@ -33,11 +33,17 @@ class StoreBuffer {
|
||||
Max(static_cast<int>(kMinExpectedOSPageSize / kStoreBuffers),
|
||||
1 << (11 + kSystemPointerSizeLog2));
|
||||
static const int kStoreBufferMask = kStoreBufferSize - 1;
|
||||
static const intptr_t kDeletionTag = 1;
|
||||
|
||||
V8_EXPORT_PRIVATE static int StoreBufferOverflow(Isolate* isolate);
|
||||
|
||||
static void DeleteDuringGarbageCollection(StoreBuffer* store_buffer,
|
||||
Address start, Address end);
|
||||
static void InsertDuringGarbageCollection(StoreBuffer* store_buffer,
|
||||
Address slot);
|
||||
|
||||
static void DeleteDuringRuntime(StoreBuffer* store_buffer, Address start,
|
||||
Address end);
|
||||
static void InsertDuringRuntime(StoreBuffer* store_buffer, Address slot);
|
||||
|
||||
explicit StoreBuffer(Heap* heap);
|
||||
@ -55,6 +61,19 @@ class StoreBuffer {
|
||||
// the remembered set.
|
||||
void MoveAllEntriesToRememberedSet();
|
||||
|
||||
inline bool IsDeletionAddress(Address address) const {
|
||||
return address & kDeletionTag;
|
||||
}
|
||||
|
||||
inline Address MarkDeletionAddress(Address address) {
|
||||
return address | kDeletionTag;
|
||||
}
|
||||
|
||||
inline Address UnmarkDeletionAddress(Address address) {
|
||||
return address & ~kDeletionTag;
|
||||
}
|
||||
|
||||
inline void InsertDeletionIntoStoreBuffer(Address start, Address end);
|
||||
inline void InsertIntoStoreBuffer(Address slot);
|
||||
|
||||
void InsertEntry(Address slot) {
|
||||
@ -64,6 +83,16 @@ class StoreBuffer {
|
||||
insertion_callback(this, slot);
|
||||
}
|
||||
|
||||
// If we only want to delete a single slot, end should be set to null which
|
||||
// will be written into the second field. When processing the store buffer
|
||||
// the more efficient Remove method will be called in this case.
|
||||
void DeleteEntry(Address start, Address end = kNullAddress) {
|
||||
// Deletions coming from the GC are directly deleted from the remembered
|
||||
// set. Deletions coming from the runtime are added to the store buffer
|
||||
// to allow concurrent processing.
|
||||
deletion_callback(this, start, end);
|
||||
}
|
||||
|
||||
void SetMode(StoreBufferMode mode);
|
||||
|
||||
// Used by the concurrent processing thread to transfer entries from the
|
||||
@ -145,6 +174,7 @@ class StoreBuffer {
|
||||
// Callbacks are more efficient than reading out the gc state for every
|
||||
// store buffer operation.
|
||||
void (*insertion_callback)(StoreBuffer*, Address);
|
||||
void (*deletion_callback)(StoreBuffer*, Address, Address);
|
||||
};
|
||||
|
||||
} // namespace internal
|
||||
|
@ -8,7 +8,6 @@
|
||||
#include "src/execution/vm-state-inl.h"
|
||||
#include "src/heap/array-buffer-tracker-inl.h"
|
||||
#include "src/heap/gc-tracer.h"
|
||||
#include "src/heap/invalidated-slots-inl.h"
|
||||
#include "src/heap/mark-compact-inl.h"
|
||||
#include "src/heap/remembered-set.h"
|
||||
#include "src/objects/objects-inl.h"
|
||||
@ -266,8 +265,6 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
|
||||
ArrayBufferTracker::FreeDead(p, marking_state_);
|
||||
|
||||
Address free_start = p->area_start();
|
||||
InvalidatedSlotsCleanup old_to_new_cleanup =
|
||||
InvalidatedSlotsCleanup::OldToNew(p);
|
||||
|
||||
intptr_t live_bytes = 0;
|
||||
intptr_t freed_bytes = 0;
|
||||
@ -299,7 +296,7 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
|
||||
max_freed_bytes = Max(freed_bytes, max_freed_bytes);
|
||||
} else {
|
||||
p->heap()->CreateFillerObjectAt(
|
||||
free_start, static_cast<int>(size),
|
||||
free_start, static_cast<int>(size), ClearRecordedSlots::kNo,
|
||||
ClearFreedMemoryMode::kClearFreedMemory);
|
||||
}
|
||||
if (should_reduce_memory_) p->DiscardUnusedMemory(free_start, size);
|
||||
@ -312,8 +309,6 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
|
||||
static_cast<uint32_t>(free_start - p->address()),
|
||||
static_cast<uint32_t>(free_end - p->address())));
|
||||
}
|
||||
|
||||
old_to_new_cleanup.Free(free_start, free_end);
|
||||
}
|
||||
Map map = object.synchronized_map();
|
||||
int size = object.SizeFromMap(map);
|
||||
@ -333,6 +328,7 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
|
||||
max_freed_bytes = Max(freed_bytes, max_freed_bytes);
|
||||
} else {
|
||||
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
|
||||
ClearRecordedSlots::kNo,
|
||||
ClearFreedMemoryMode::kClearFreedMemory);
|
||||
}
|
||||
if (should_reduce_memory_) p->DiscardUnusedMemory(free_start, size);
|
||||
@ -345,8 +341,6 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
|
||||
static_cast<uint32_t>(free_start - p->address()),
|
||||
static_cast<uint32_t>(p->area_end() - p->address())));
|
||||
}
|
||||
|
||||
old_to_new_cleanup.Free(free_start, p->area_end());
|
||||
}
|
||||
|
||||
// Clear invalid typed slots after collection all free ranges.
|
||||
|
@ -387,7 +387,7 @@ void MutableBigInt::Canonicalize(MutableBigInt result) {
|
||||
// We do not create a filler for objects in large object space.
|
||||
// TODO(hpayer): We should shrink the large object page if the size
|
||||
// of the object changed significantly.
|
||||
heap->CreateFillerObjectAt(new_end, size_delta);
|
||||
heap->CreateFillerObjectAt(new_end, size_delta, ClearRecordedSlots::kNo);
|
||||
}
|
||||
result.synchronized_set_length(new_length);
|
||||
|
||||
@ -2223,7 +2223,8 @@ MaybeHandle<String> MutableBigInt::ToStringGeneric(Isolate* isolate,
|
||||
int needed_size = SeqOneByteString::SizeFor(pos);
|
||||
if (needed_size < string_size) {
|
||||
Address new_end = result->address() + needed_size;
|
||||
heap->CreateFillerObjectAt(new_end, string_size - needed_size);
|
||||
heap->CreateFillerObjectAt(new_end, (string_size - needed_size),
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
}
|
||||
// Reverse the string.
|
||||
|
@ -2775,10 +2775,6 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
|
||||
|
||||
heap->NotifyObjectLayoutChange(*object, old_instance_size, no_allocation);
|
||||
|
||||
MemoryChunk::FromHeapObject(*object)
|
||||
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(*object,
|
||||
old_instance_size);
|
||||
|
||||
// Copy (real) inobject properties. If necessary, stop at number_of_fields to
|
||||
// avoid overwriting |one_pointer_filler_map|.
|
||||
int limit = Min(inobject, number_of_fields);
|
||||
@ -2792,9 +2788,14 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
|
||||
// Ensure that all bits of the double value are preserved.
|
||||
object->RawFastDoublePropertyAsBitsAtPut(
|
||||
index, MutableHeapNumber::cast(value).value_as_bits());
|
||||
if (i < old_number_of_fields && !old_map->IsUnboxedDoubleField(index)) {
|
||||
// Transition from tagged to untagged slot.
|
||||
heap->ClearRecordedSlot(*object, object->RawField(index.offset()));
|
||||
} else {
|
||||
#ifdef DEBUG
|
||||
heap->VerifyClearedSlot(*object, object->RawField(index.offset()));
|
||||
heap->VerifyClearedSlot(*object, object->RawField(index.offset()));
|
||||
#endif
|
||||
}
|
||||
} else {
|
||||
object->RawFastPropertyAtPut(index, value);
|
||||
}
|
||||
@ -2809,8 +2810,8 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
|
||||
|
||||
if (instance_size_delta > 0) {
|
||||
Address address = object->address();
|
||||
heap->CreateFillerObjectAt(address + new_instance_size,
|
||||
instance_size_delta);
|
||||
heap->CreateFillerObjectAt(address + new_instance_size, instance_size_delta,
|
||||
ClearRecordedSlots::kYes);
|
||||
}
|
||||
|
||||
// We are storing the new map using release store after creating a filler for
|
||||
@ -2888,10 +2889,6 @@ void MigrateFastToSlow(Isolate* isolate, Handle<JSObject> object,
|
||||
int old_instance_size = map->instance_size();
|
||||
heap->NotifyObjectLayoutChange(*object, old_instance_size, no_allocation);
|
||||
|
||||
MemoryChunk::FromHeapObject(*object)
|
||||
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(*object,
|
||||
old_instance_size);
|
||||
|
||||
// Resize the object in the heap if necessary.
|
||||
int new_instance_size = new_map->instance_size();
|
||||
int instance_size_delta = old_instance_size - new_instance_size;
|
||||
@ -2899,7 +2896,7 @@ void MigrateFastToSlow(Isolate* isolate, Handle<JSObject> object,
|
||||
|
||||
if (instance_size_delta > 0) {
|
||||
heap->CreateFillerObjectAt(object->address() + new_instance_size,
|
||||
instance_size_delta);
|
||||
instance_size_delta, ClearRecordedSlots::kYes);
|
||||
}
|
||||
|
||||
// We are storing the new map using release store after creating a filler for
|
||||
@ -2912,6 +2909,11 @@ void MigrateFastToSlow(Isolate* isolate, Handle<JSObject> object,
|
||||
// garbage.
|
||||
int inobject_properties = new_map->GetInObjectProperties();
|
||||
if (inobject_properties) {
|
||||
Heap* heap = isolate->heap();
|
||||
heap->ClearRecordedSlotRange(
|
||||
object->address() + map->GetInObjectPropertyOffset(0),
|
||||
object->address() + new_instance_size);
|
||||
|
||||
for (int i = 0; i < inobject_properties; i++) {
|
||||
FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
|
||||
object->RawFastPropertyAtPut(index, Smi::kZero);
|
||||
|
@ -621,7 +621,8 @@ void SharedFunctionInfo::ClearPreparseData() {
|
||||
heap->CreateFillerObjectAt(
|
||||
data.address() + UncompiledDataWithoutPreparseData::kSize,
|
||||
UncompiledDataWithPreparseData::kSize -
|
||||
UncompiledDataWithoutPreparseData::kSize);
|
||||
UncompiledDataWithoutPreparseData::kSize,
|
||||
ClearRecordedSlots::kNo);
|
||||
|
||||
// Ensure that the clear was successful.
|
||||
DCHECK(HasUncompiledDataWithoutPreparseData());
|
||||
|
@ -123,7 +123,7 @@ void String::MakeThin(Isolate* isolate, String internalized) {
|
||||
int size_delta = old_size - ThinString::kSize;
|
||||
if (size_delta != 0) {
|
||||
Heap* heap = isolate->heap();
|
||||
heap->CreateFillerObjectAt(thin_end, size_delta);
|
||||
heap->CreateFillerObjectAt(thin_end, size_delta, ClearRecordedSlots::kNo);
|
||||
}
|
||||
}
|
||||
|
||||
@ -155,10 +155,6 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
|
||||
|
||||
if (has_pointers) {
|
||||
isolate->heap()->NotifyObjectLayoutChange(*this, size, no_allocation);
|
||||
|
||||
MemoryChunk::FromAddress(this->address())
|
||||
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(
|
||||
HeapObject::FromAddress(this->address()), size);
|
||||
}
|
||||
// Morph the string to an external string by replacing the map and
|
||||
// reinitializing the fields. This won't work if the space the existing
|
||||
@ -181,8 +177,12 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
|
||||
|
||||
// Byte size of the external String object.
|
||||
int new_size = this->SizeFromMap(new_map);
|
||||
isolate->heap()->CreateFillerObjectAt(this->address() + new_size,
|
||||
size - new_size);
|
||||
isolate->heap()->CreateFillerObjectAt(
|
||||
this->address() + new_size, size - new_size, ClearRecordedSlots::kNo);
|
||||
if (has_pointers) {
|
||||
isolate->heap()->ClearRecordedSlotRange(this->address(),
|
||||
this->address() + new_size);
|
||||
}
|
||||
|
||||
// We are storing the new map using release store after creating a filler for
|
||||
// the left-over space to avoid races with the sweeper thread.
|
||||
@ -228,10 +228,6 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
|
||||
|
||||
if (has_pointers) {
|
||||
isolate->heap()->NotifyObjectLayoutChange(*this, size, no_allocation);
|
||||
|
||||
MemoryChunk::FromAddress(this->address())
|
||||
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(
|
||||
HeapObject::FromAddress(this->address()), size);
|
||||
}
|
||||
// Morph the string to an external string by replacing the map and
|
||||
// reinitializing the fields. This won't work if the space the existing
|
||||
@ -253,8 +249,12 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
|
||||
|
||||
// Byte size of the external String object.
|
||||
int new_size = this->SizeFromMap(new_map);
|
||||
isolate->heap()->CreateFillerObjectAt(this->address() + new_size,
|
||||
size - new_size);
|
||||
isolate->heap()->CreateFillerObjectAt(
|
||||
this->address() + new_size, size - new_size, ClearRecordedSlots::kNo);
|
||||
if (has_pointers) {
|
||||
isolate->heap()->ClearRecordedSlotRange(this->address(),
|
||||
this->address() + new_size);
|
||||
}
|
||||
|
||||
// We are storing the new map using release store after creating a filler for
|
||||
// the left-over space to avoid races with the sweeper thread.
|
||||
@ -1405,7 +1405,8 @@ Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
|
||||
Heap* heap = Heap::FromWritableHeapObject(*string);
|
||||
// Sizes are pointer size aligned, so that we can use filler objects
|
||||
// that are a multiple of pointer size.
|
||||
heap->CreateFillerObjectAt(start_of_string + new_size, delta);
|
||||
heap->CreateFillerObjectAt(start_of_string + new_size, delta,
|
||||
ClearRecordedSlots::kNo);
|
||||
// We are storing the new length using release store after creating a filler
|
||||
// for the left-over space to avoid races with the sweeper thread.
|
||||
string->synchronized_set_length(new_length);
|
||||
|
@ -205,7 +205,7 @@ void AllocationTracker::AllocationEvent(Address addr, int size) {
|
||||
|
||||
// Mark the new block as FreeSpace to make sure the heap is iterable
|
||||
// while we are capturing stack trace.
|
||||
heap->CreateFillerObjectAt(addr, size);
|
||||
heap->CreateFillerObjectAt(addr, size, ClearRecordedSlots::kNo);
|
||||
|
||||
Isolate* isolate = Isolate::FromHeap(heap);
|
||||
int length = 0;
|
||||
|
@ -81,7 +81,8 @@ void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
|
||||
|
||||
// Mark the new block as FreeSpace to make sure the heap is iterable while we
|
||||
// are taking the sample.
|
||||
heap_->CreateFillerObjectAt(soon_object, static_cast<int>(size));
|
||||
heap_->CreateFillerObjectAt(soon_object, static_cast<int>(size),
|
||||
ClearRecordedSlots::kNo);
|
||||
|
||||
Local<v8::Value> loc = v8::Utils::ToLocal(obj);
|
||||
|
||||
|
@ -134,17 +134,8 @@ bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver,
|
||||
// for properties stored in the descriptor array.
|
||||
if (details.location() == kField) {
|
||||
DisallowHeapAllocation no_allocation;
|
||||
int receiver_size = receiver_map->instance_size();
|
||||
isolate->heap()->NotifyObjectLayoutChange(*receiver, receiver_size,
|
||||
no_allocation);
|
||||
|
||||
// We need to invalidate object because subsequent object modifications
|
||||
// might put a raw double into the deleted property.
|
||||
// Slot clearing is the reason why this entire function cannot currently
|
||||
// be implemented in the DeleteProperty stub.
|
||||
MemoryChunk::FromHeapObject(*receiver)
|
||||
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(*receiver,
|
||||
receiver_size);
|
||||
isolate->heap()->NotifyObjectLayoutChange(
|
||||
*receiver, receiver_map->instance_size(), no_allocation);
|
||||
FieldIndex index =
|
||||
FieldIndex::ForPropertyIndex(*receiver_map, details.field_index());
|
||||
// Special case deleting the last out-of object property.
|
||||
@ -155,6 +146,14 @@ bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver,
|
||||
} else {
|
||||
Object filler = ReadOnlyRoots(isolate).one_pointer_filler_map();
|
||||
JSObject::cast(*receiver).RawFastPropertyAtPut(index, filler);
|
||||
// We must clear any recorded slot for the deleted property, because
|
||||
// subsequent object modifications might put a raw double there.
|
||||
// Slot clearing is the reason why this entire function cannot currently
|
||||
// be implemented in the DeleteProperty stub.
|
||||
if (index.is_inobject() && !receiver_map->IsUnboxedDoubleField(index)) {
|
||||
isolate->heap()->ClearRecordedSlot(*receiver,
|
||||
receiver->RawField(index.offset()));
|
||||
}
|
||||
}
|
||||
}
|
||||
// If the {receiver_map} was marked stable before, then there could be
|
||||
|
@ -779,7 +779,7 @@ V8_WARN_UNUSED_RESULT static Object StringReplaceGlobalRegExpWithEmptyString(
|
||||
// TODO(hpayer): We should shrink the large object page if the size
|
||||
// of the object changed significantly.
|
||||
if (!heap->IsLargeObject(*answer)) {
|
||||
heap->CreateFillerObjectAt(end_of_string, delta);
|
||||
heap->CreateFillerObjectAt(end_of_string, delta, ClearRecordedSlots::kNo);
|
||||
}
|
||||
return *answer;
|
||||
}
|
||||
|
@ -19,10 +19,6 @@
|
||||
V(CompactionSpaceDivideSinglePage) \
|
||||
V(InvalidatedSlotsAfterTrimming) \
|
||||
V(InvalidatedSlotsAllInvalidatedRanges) \
|
||||
V(InvalidatedSlotsCleanupEachObject) \
|
||||
V(InvalidatedSlotsCleanupFull) \
|
||||
V(InvalidatedSlotsCleanupRightTrim) \
|
||||
V(InvalidatedSlotsCleanupOverlapRight) \
|
||||
V(InvalidatedSlotsEvacuationCandidate) \
|
||||
V(InvalidatedSlotsNoInvalidatedRanges) \
|
||||
V(InvalidatedSlotsResetObjectRegression) \
|
||||
|
@ -101,7 +101,8 @@ std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
|
||||
// Not enough room to create another fixed array. Let's create a filler.
|
||||
if (free_memory > (2 * kTaggedSize)) {
|
||||
heap->CreateFillerObjectAt(
|
||||
*heap->old_space()->allocation_top_address(), free_memory);
|
||||
*heap->old_space()->allocation_top_address(), free_memory,
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
break;
|
||||
}
|
||||
@ -218,7 +219,8 @@ void ForceEvacuationCandidate(Page* page) {
|
||||
if (top < limit && Page::FromAllocationAreaAddress(top) == page) {
|
||||
// Create filler object to keep page iterable if it was iterable.
|
||||
int remaining = static_cast<int>(limit - top);
|
||||
space->heap()->CreateFillerObjectAt(top, remaining);
|
||||
space->heap()->CreateFillerObjectAt(top, remaining,
|
||||
ClearRecordedSlots::kNo);
|
||||
space->FreeLinearAllocationArea();
|
||||
}
|
||||
}
|
||||
|
@ -54,12 +54,12 @@ Handle<Object> HeapTester::TestAllocateAfterFailures() {
|
||||
heap->AllocateRaw(size, AllocationType::kYoung).ToObjectChecked();
|
||||
// In order to pass heap verification on Isolate teardown, mark the
|
||||
// allocated area as a filler.
|
||||
heap->CreateFillerObjectAt(obj.address(), size);
|
||||
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
|
||||
|
||||
// Old generation.
|
||||
heap::SimulateFullSpace(heap->old_space());
|
||||
obj = heap->AllocateRaw(size, AllocationType::kOld).ToObjectChecked();
|
||||
heap->CreateFillerObjectAt(obj.address(), size);
|
||||
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
|
||||
|
||||
// Large object space.
|
||||
static const size_t kLargeObjectSpaceFillerLength =
|
||||
@ -71,22 +71,23 @@ Handle<Object> HeapTester::TestAllocateAfterFailures() {
|
||||
while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) {
|
||||
obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, AllocationType::kOld)
|
||||
.ToObjectChecked();
|
||||
heap->CreateFillerObjectAt(obj.address(), size);
|
||||
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
|
||||
}
|
||||
obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, AllocationType::kOld)
|
||||
.ToObjectChecked();
|
||||
heap->CreateFillerObjectAt(obj.address(), size);
|
||||
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
|
||||
|
||||
// Map space.
|
||||
heap::SimulateFullSpace(heap->map_space());
|
||||
obj = heap->AllocateRaw(Map::kSize, AllocationType::kMap).ToObjectChecked();
|
||||
heap->CreateFillerObjectAt(obj.address(), Map::kSize);
|
||||
heap->CreateFillerObjectAt(obj.address(), Map::kSize,
|
||||
ClearRecordedSlots::kNo);
|
||||
|
||||
// Code space.
|
||||
heap::SimulateFullSpace(heap->code_space());
|
||||
size = CcTest::i_isolate()->builtins()->builtin(Builtins::kIllegal).Size();
|
||||
obj = heap->AllocateRaw(size, AllocationType::kCode).ToObjectChecked();
|
||||
heap->CreateFillerObjectAt(obj.address(), size);
|
||||
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
|
||||
return CcTest::i_isolate()->factory()->true_value();
|
||||
}
|
||||
|
||||
|
@ -1637,7 +1637,7 @@ static HeapObject NewSpaceAllocateAligned(int size,
|
||||
heap->new_space()->AllocateRawAligned(size, alignment);
|
||||
HeapObject obj;
|
||||
allocation.To(&obj);
|
||||
heap->CreateFillerObjectAt(obj.address(), size);
|
||||
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
|
||||
return obj;
|
||||
}
|
||||
|
||||
@ -1702,7 +1702,7 @@ static HeapObject OldSpaceAllocateAligned(int size,
|
||||
heap->old_space()->AllocateRawAligned(size, alignment);
|
||||
HeapObject obj;
|
||||
allocation.To(&obj);
|
||||
heap->CreateFillerObjectAt(obj.address(), size);
|
||||
heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
|
||||
return obj;
|
||||
}
|
||||
|
||||
@ -1731,7 +1731,8 @@ TEST(TestAlignedOverAllocation) {
|
||||
// Allocate a dummy object to properly set up the linear allocation info.
|
||||
AllocationResult dummy = heap->old_space()->AllocateRawUnaligned(kTaggedSize);
|
||||
CHECK(!dummy.IsRetry());
|
||||
heap->CreateFillerObjectAt(dummy.ToObjectChecked().address(), kTaggedSize);
|
||||
heap->CreateFillerObjectAt(dummy.ToObjectChecked().address(), kTaggedSize,
|
||||
ClearRecordedSlots::kNo);
|
||||
|
||||
// Double misalignment is 4 on 32-bit platforms or when pointer compression
|
||||
// is enabled, 0 on 64-bit ones when pointer compression is disabled.
|
||||
@ -3604,7 +3605,8 @@ TEST(Regress169928) {
|
||||
CHECK(allocation.To(&obj));
|
||||
Address addr_obj = obj.address();
|
||||
CcTest::heap()->CreateFillerObjectAt(addr_obj,
|
||||
AllocationMemento::kSize + kTaggedSize);
|
||||
AllocationMemento::kSize + kTaggedSize,
|
||||
ClearRecordedSlots::kNo);
|
||||
|
||||
// Give the array a name, making sure not to allocate strings.
|
||||
v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
|
||||
|
@ -44,7 +44,7 @@ Page* HeapTester::AllocateByteArraysOnPage(
|
||||
CHECK_EQ(page, Page::FromHeapObject(byte_array));
|
||||
}
|
||||
}
|
||||
CHECK_NULL(page->invalidated_slots<OLD_TO_OLD>());
|
||||
CHECK_NULL(page->invalidated_slots());
|
||||
return page;
|
||||
}
|
||||
|
||||
@ -53,7 +53,7 @@ HEAP_TEST(InvalidatedSlotsNoInvalidatedRanges) {
|
||||
Heap* heap = CcTest::heap();
|
||||
std::vector<ByteArray> byte_arrays;
|
||||
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
|
||||
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
|
||||
InvalidatedSlotsFilter filter(page);
|
||||
for (ByteArray byte_array : byte_arrays) {
|
||||
Address start = byte_array.address() + ByteArray::kHeaderSize;
|
||||
Address end = byte_array.address() + byte_array.Size();
|
||||
@ -70,10 +70,10 @@ HEAP_TEST(InvalidatedSlotsSomeInvalidatedRanges) {
|
||||
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
|
||||
// Register every second byte arrays as invalidated.
|
||||
for (size_t i = 0; i < byte_arrays.size(); i += 2) {
|
||||
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
|
||||
byte_arrays[i].Size());
|
||||
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
|
||||
byte_arrays[i].Size());
|
||||
}
|
||||
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
|
||||
InvalidatedSlotsFilter filter(page);
|
||||
for (size_t i = 0; i < byte_arrays.size(); i++) {
|
||||
ByteArray byte_array = byte_arrays[i];
|
||||
Address start = byte_array.address() + ByteArray::kHeaderSize;
|
||||
@ -95,10 +95,10 @@ HEAP_TEST(InvalidatedSlotsAllInvalidatedRanges) {
|
||||
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
|
||||
// Register the all byte arrays as invalidated.
|
||||
for (size_t i = 0; i < byte_arrays.size(); i++) {
|
||||
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
|
||||
byte_arrays[i].Size());
|
||||
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
|
||||
byte_arrays[i].Size());
|
||||
}
|
||||
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
|
||||
InvalidatedSlotsFilter filter(page);
|
||||
for (size_t i = 0; i < byte_arrays.size(); i++) {
|
||||
ByteArray byte_array = byte_arrays[i];
|
||||
Address start = byte_array.address() + ByteArray::kHeaderSize;
|
||||
@ -117,12 +117,12 @@ HEAP_TEST(InvalidatedSlotsAfterTrimming) {
|
||||
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
|
||||
// Register the all byte arrays as invalidated.
|
||||
for (size_t i = 0; i < byte_arrays.size(); i++) {
|
||||
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
|
||||
byte_arrays[i].Size());
|
||||
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
|
||||
byte_arrays[i].Size());
|
||||
}
|
||||
// Trim byte arrays and check that the slots outside the byte arrays are
|
||||
// considered invalid if the old space page was swept.
|
||||
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
|
||||
InvalidatedSlotsFilter filter(page);
|
||||
for (size_t i = 0; i < byte_arrays.size(); i++) {
|
||||
ByteArray byte_array = byte_arrays[i];
|
||||
Address start = byte_array.address() + ByteArray::kHeaderSize;
|
||||
@ -145,11 +145,11 @@ HEAP_TEST(InvalidatedSlotsEvacuationCandidate) {
|
||||
// This should be no-op because the page is marked as evacuation
|
||||
// candidate.
|
||||
for (size_t i = 0; i < byte_arrays.size(); i++) {
|
||||
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
|
||||
byte_arrays[i].Size());
|
||||
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
|
||||
byte_arrays[i].Size());
|
||||
}
|
||||
// All slots must still be valid.
|
||||
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
|
||||
InvalidatedSlotsFilter filter(page);
|
||||
for (size_t i = 0; i < byte_arrays.size(); i++) {
|
||||
ByteArray byte_array = byte_arrays[i];
|
||||
Address start = byte_array.address() + ByteArray::kHeaderSize;
|
||||
@ -169,11 +169,11 @@ HEAP_TEST(InvalidatedSlotsResetObjectRegression) {
|
||||
heap->RightTrimFixedArray(byte_arrays[0], byte_arrays[0].length() - 8);
|
||||
// Register the all byte arrays as invalidated.
|
||||
for (size_t i = 0; i < byte_arrays.size(); i++) {
|
||||
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
|
||||
byte_arrays[i].Size());
|
||||
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
|
||||
byte_arrays[i].Size());
|
||||
}
|
||||
// All slots must still be invalid.
|
||||
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
|
||||
InvalidatedSlotsFilter filter(page);
|
||||
for (size_t i = 0; i < byte_arrays.size(); i++) {
|
||||
ByteArray byte_array = byte_arrays[i];
|
||||
Address start = byte_array.address() + ByteArray::kHeaderSize;
|
||||
@ -351,77 +351,6 @@ HEAP_TEST(InvalidatedSlotsFastToSlow) {
|
||||
CcTest::CollectGarbage(i::OLD_SPACE);
|
||||
}
|
||||
|
||||
HEAP_TEST(InvalidatedSlotsCleanupFull) {
|
||||
ManualGCScope manual_gc_scope;
|
||||
CcTest::InitializeVM();
|
||||
Heap* heap = CcTest::heap();
|
||||
std::vector<ByteArray> byte_arrays;
|
||||
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
|
||||
// Register all byte arrays as invalidated.
|
||||
for (size_t i = 0; i < byte_arrays.size(); i++) {
|
||||
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i],
|
||||
byte_arrays[i].Size());
|
||||
}
|
||||
|
||||
// Mark full page as free
|
||||
InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);
|
||||
cleanup.Free(page->area_start(), page->area_end());
|
||||
|
||||
// After cleanup there should be no invalidated objects on page left
|
||||
CHECK(page->invalidated_slots<OLD_TO_NEW>()->empty());
|
||||
}
|
||||
|
||||
HEAP_TEST(InvalidatedSlotsCleanupEachObject) {
|
||||
ManualGCScope manual_gc_scope;
|
||||
CcTest::InitializeVM();
|
||||
Heap* heap = CcTest::heap();
|
||||
std::vector<ByteArray> byte_arrays;
|
||||
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
|
||||
// Register all byte arrays as invalidated.
|
||||
for (size_t i = 0; i < byte_arrays.size(); i++) {
|
||||
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i],
|
||||
byte_arrays[i].Size());
|
||||
}
|
||||
|
||||
// Mark each object as free on page
|
||||
InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);
|
||||
|
||||
for (size_t i = 0; i < byte_arrays.size(); i++) {
|
||||
Address free_start = byte_arrays[i].address();
|
||||
Address free_end = free_start + byte_arrays[i].Size();
|
||||
cleanup.Free(free_start, free_end);
|
||||
}
|
||||
|
||||
// After cleanup there should be no invalidated objects on page left
|
||||
CHECK(page->invalidated_slots<OLD_TO_NEW>()->empty());
|
||||
}
|
||||
|
||||
HEAP_TEST(InvalidatedSlotsCleanupRightTrim) {
|
||||
ManualGCScope manual_gc_scope;
|
||||
CcTest::InitializeVM();
|
||||
Heap* heap = CcTest::heap();
|
||||
std::vector<ByteArray> byte_arrays;
|
||||
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
|
||||
|
||||
CHECK_GT(byte_arrays.size(), 1);
|
||||
ByteArray& invalidated = byte_arrays[1];
|
||||
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(invalidated,
|
||||
invalidated.Size());
|
||||
|
||||
heap->RightTrimFixedArray(invalidated, invalidated.length() - 8);
|
||||
|
||||
// Free memory at end of invalidated object
|
||||
InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);
|
||||
Address free_start = invalidated.address() + invalidated.Size();
|
||||
cleanup.Free(free_start, page->area_end());
|
||||
|
||||
// After cleanup the invalidated object should be smaller
|
||||
InvalidatedSlots* invalidated_slots = page->invalidated_slots<OLD_TO_NEW>();
|
||||
CHECK_EQ((*invalidated_slots)[HeapObject::FromAddress(invalidated.address())],
|
||||
invalidated.Size());
|
||||
CHECK_EQ(invalidated_slots->size(), 1);
|
||||
}
|
||||
|
||||
} // namespace heap
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
@ -46,7 +46,8 @@ static bool AllocateFromLab(Heap* heap, LocalAllocationBuffer* lab,
|
||||
AllocationResult result =
|
||||
lab->AllocateRawAligned(static_cast<int>(size_in_bytes), alignment);
|
||||
if (result.To(&obj)) {
|
||||
heap->CreateFillerObjectAt(obj.address(), static_cast<int>(size_in_bytes));
|
||||
heap->CreateFillerObjectAt(obj.address(), static_cast<int>(size_in_bytes),
|
||||
ClearRecordedSlots::kNo);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
|
@ -363,7 +363,8 @@ TEST(Regress5829) {
|
||||
Address old_end = array->address() + array->Size();
|
||||
// Right trim the array without clearing the mark bits.
|
||||
array->set_length(9);
|
||||
heap->CreateFillerObjectAt(old_end - kTaggedSize, kTaggedSize);
|
||||
heap->CreateFillerObjectAt(old_end - kTaggedSize, kTaggedSize,
|
||||
ClearRecordedSlots::kNo);
|
||||
heap->old_space()->FreeLinearAllocationArea();
|
||||
Page* page = Page::FromAddress(array->address());
|
||||
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
|
||||
|
@ -391,7 +391,8 @@ static HeapObject AllocateUnaligned(NewSpace* space, int size) {
|
||||
CHECK(!allocation.IsRetry());
|
||||
HeapObject filler;
|
||||
CHECK(allocation.To(&filler));
|
||||
space->heap()->CreateFillerObjectAt(filler.address(), size);
|
||||
space->heap()->CreateFillerObjectAt(filler.address(), size,
|
||||
ClearRecordedSlots::kNo);
|
||||
return filler;
|
||||
}
|
||||
|
||||
@ -400,7 +401,8 @@ static HeapObject AllocateUnaligned(PagedSpace* space, int size) {
|
||||
CHECK(!allocation.IsRetry());
|
||||
HeapObject filler;
|
||||
CHECK(allocation.To(&filler));
|
||||
space->heap()->CreateFillerObjectAt(filler.address(), size);
|
||||
space->heap()->CreateFillerObjectAt(filler.address(), size,
|
||||
ClearRecordedSlots::kNo);
|
||||
return filler;
|
||||
}
|
||||
|
||||
@ -570,7 +572,8 @@ HEAP_TEST(Regress777177) {
|
||||
heap::SimulateFullSpace(old_space);
|
||||
AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
|
||||
HeapObject obj = result.ToObjectChecked();
|
||||
heap->CreateFillerObjectAt(obj.address(), filler_size);
|
||||
heap->CreateFillerObjectAt(obj.address(), filler_size,
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
|
||||
{
|
||||
@ -587,7 +590,8 @@ HEAP_TEST(Regress777177) {
|
||||
// This triggers assert in crbug.com/777177.
|
||||
AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
|
||||
HeapObject obj = result.ToObjectChecked();
|
||||
heap->CreateFillerObjectAt(obj.address(), filler_size);
|
||||
heap->CreateFillerObjectAt(obj.address(), filler_size,
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
old_space->RemoveAllocationObserver(&observer);
|
||||
}
|
||||
@ -618,7 +622,8 @@ HEAP_TEST(Regress791582) {
|
||||
AllocationResult result =
|
||||
new_space->AllocateRaw(until_page_end, kWordAligned);
|
||||
HeapObject obj = result.ToObjectChecked();
|
||||
heap->CreateFillerObjectAt(obj.address(), until_page_end);
|
||||
heap->CreateFillerObjectAt(obj.address(), until_page_end,
|
||||
ClearRecordedSlots::kNo);
|
||||
// Simulate allocation folding moving the top pointer back.
|
||||
*new_space->allocation_top_address() = obj.address();
|
||||
}
|
||||
@ -627,7 +632,7 @@ HEAP_TEST(Regress791582) {
|
||||
// This triggers assert in crbug.com/791582
|
||||
AllocationResult result = new_space->AllocateRaw(256, kWordAligned);
|
||||
HeapObject obj = result.ToObjectChecked();
|
||||
heap->CreateFillerObjectAt(obj.address(), 256);
|
||||
heap->CreateFillerObjectAt(obj.address(), 256, ClearRecordedSlots::kNo);
|
||||
}
|
||||
new_space->RemoveAllocationObserver(&observer);
|
||||
}
|
||||
|
@ -39,7 +39,8 @@ TEST_F(SpacesTest, CompactionSpaceMerge) {
|
||||
HeapObject object =
|
||||
compaction_space->AllocateRawUnaligned(kMaxRegularHeapObjectSize)
|
||||
.ToObjectChecked();
|
||||
heap->CreateFillerObjectAt(object.address(), kMaxRegularHeapObjectSize);
|
||||
heap->CreateFillerObjectAt(object.address(), kMaxRegularHeapObjectSize,
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
int pages_in_old_space = old_space->CountTotalPages();
|
||||
int pages_in_compaction_space = compaction_space->CountTotalPages();
|
||||
|
@ -192,237 +192,237 @@ INSTANCE_TYPES = {
|
||||
|
||||
# List of known V8 maps.
|
||||
KNOWN_MAPS = {
|
||||
("read_only_space", 0x00119): (74, "FreeSpaceMap"),
|
||||
("read_only_space", 0x00169): (68, "MetaMap"),
|
||||
("read_only_space", 0x001e9): (67, "NullMap"),
|
||||
("read_only_space", 0x00251): (155, "DescriptorArrayMap"),
|
||||
("read_only_space", 0x002b1): (150, "WeakFixedArrayMap"),
|
||||
("read_only_space", 0x00301): (77, "OnePointerFillerMap"),
|
||||
("read_only_space", 0x00351): (77, "TwoPointerFillerMap"),
|
||||
("read_only_space", 0x003d1): (67, "UninitializedMap"),
|
||||
("read_only_space", 0x00441): (8, "OneByteInternalizedStringMap"),
|
||||
("read_only_space", 0x004e1): (67, "UndefinedMap"),
|
||||
("read_only_space", 0x00541): (65, "HeapNumberMap"),
|
||||
("read_only_space", 0x005c1): (67, "TheHoleMap"),
|
||||
("read_only_space", 0x00669): (67, "BooleanMap"),
|
||||
("read_only_space", 0x00741): (72, "ByteArrayMap"),
|
||||
("read_only_space", 0x00791): (125, "FixedArrayMap"),
|
||||
("read_only_space", 0x007e1): (125, "FixedCOWArrayMap"),
|
||||
("read_only_space", 0x00831): (128, "HashTableMap"),
|
||||
("read_only_space", 0x00881): (64, "SymbolMap"),
|
||||
("read_only_space", 0x008d1): (40, "OneByteStringMap"),
|
||||
("read_only_space", 0x00921): (138, "ScopeInfoMap"),
|
||||
("read_only_space", 0x00971): (162, "SharedFunctionInfoMap"),
|
||||
("read_only_space", 0x009c1): (69, "CodeMap"),
|
||||
("read_only_space", 0x00a11): (145, "FunctionContextMap"),
|
||||
("read_only_space", 0x00a61): (153, "CellMap"),
|
||||
("read_only_space", 0x00ab1): (161, "GlobalPropertyCellMap"),
|
||||
("read_only_space", 0x00b01): (71, "ForeignMap"),
|
||||
("read_only_space", 0x00b51): (151, "TransitionArrayMap"),
|
||||
("read_only_space", 0x00ba1): (157, "FeedbackVectorMap"),
|
||||
("read_only_space", 0x00c41): (67, "ArgumentsMarkerMap"),
|
||||
("read_only_space", 0x00ce1): (67, "ExceptionMap"),
|
||||
("read_only_space", 0x00d81): (67, "TerminationExceptionMap"),
|
||||
("read_only_space", 0x00e29): (67, "OptimizedOutMap"),
|
||||
("read_only_space", 0x00ec9): (67, "StaleRegisterMap"),
|
||||
("read_only_space", 0x00f39): (147, "NativeContextMap"),
|
||||
("read_only_space", 0x00f89): (146, "ModuleContextMap"),
|
||||
("read_only_space", 0x00fd9): (144, "EvalContextMap"),
|
||||
("read_only_space", 0x01029): (148, "ScriptContextMap"),
|
||||
("read_only_space", 0x01079): (140, "AwaitContextMap"),
|
||||
("read_only_space", 0x010c9): (141, "BlockContextMap"),
|
||||
("read_only_space", 0x01119): (142, "CatchContextMap"),
|
||||
("read_only_space", 0x01169): (149, "WithContextMap"),
|
||||
("read_only_space", 0x011b9): (143, "DebugEvaluateContextMap"),
|
||||
("read_only_space", 0x01209): (139, "ScriptContextTableMap"),
|
||||
("read_only_space", 0x01259): (127, "ClosureFeedbackCellArrayMap"),
|
||||
("read_only_space", 0x012a9): (76, "FeedbackMetadataArrayMap"),
|
||||
("read_only_space", 0x012f9): (125, "ArrayListMap"),
|
||||
("read_only_space", 0x01349): (66, "BigIntMap"),
|
||||
("read_only_space", 0x01399): (126, "ObjectBoilerplateDescriptionMap"),
|
||||
("read_only_space", 0x013e9): (73, "BytecodeArrayMap"),
|
||||
("read_only_space", 0x01439): (154, "CodeDataContainerMap"),
|
||||
("read_only_space", 0x01489): (75, "FixedDoubleArrayMap"),
|
||||
("read_only_space", 0x014d9): (133, "GlobalDictionaryMap"),
|
||||
("read_only_space", 0x01529): (156, "ManyClosuresCellMap"),
|
||||
("read_only_space", 0x01579): (125, "ModuleInfoMap"),
|
||||
("read_only_space", 0x015c9): (70, "MutableHeapNumberMap"),
|
||||
("read_only_space", 0x01619): (132, "NameDictionaryMap"),
|
||||
("read_only_space", 0x01669): (156, "NoClosuresCellMap"),
|
||||
("read_only_space", 0x016b9): (134, "NumberDictionaryMap"),
|
||||
("read_only_space", 0x01709): (156, "OneClosureCellMap"),
|
||||
("read_only_space", 0x01759): (129, "OrderedHashMapMap"),
|
||||
("read_only_space", 0x017a9): (130, "OrderedHashSetMap"),
|
||||
("read_only_space", 0x017f9): (131, "OrderedNameDictionaryMap"),
|
||||
("read_only_space", 0x01849): (159, "PreparseDataMap"),
|
||||
("read_only_space", 0x01899): (160, "PropertyArrayMap"),
|
||||
("read_only_space", 0x018e9): (152, "SideEffectCallHandlerInfoMap"),
|
||||
("read_only_space", 0x01939): (152, "SideEffectFreeCallHandlerInfoMap"),
|
||||
("read_only_space", 0x01989): (152, "NextCallSideEffectFreeCallHandlerInfoMap"),
|
||||
("read_only_space", 0x019d9): (135, "SimpleNumberDictionaryMap"),
|
||||
("read_only_space", 0x01a29): (125, "SloppyArgumentsElementsMap"),
|
||||
("read_only_space", 0x01a79): (163, "SmallOrderedHashMapMap"),
|
||||
("read_only_space", 0x01ac9): (164, "SmallOrderedHashSetMap"),
|
||||
("read_only_space", 0x01b19): (165, "SmallOrderedNameDictionaryMap"),
|
||||
("read_only_space", 0x01b69): (121, "SourceTextModuleMap"),
|
||||
("read_only_space", 0x01bb9): (136, "StringTableMap"),
|
||||
("read_only_space", 0x01c09): (122, "SyntheticModuleMap"),
|
||||
("read_only_space", 0x01c59): (167, "UncompiledDataWithoutPreparseDataMap"),
|
||||
("read_only_space", 0x01ca9): (168, "UncompiledDataWithPreparseDataMap"),
|
||||
("read_only_space", 0x01cf9): (169, "WeakArrayListMap"),
|
||||
("read_only_space", 0x01d49): (137, "EphemeronHashTableMap"),
|
||||
("read_only_space", 0x01d99): (124, "EmbedderDataArrayMap"),
|
||||
("read_only_space", 0x01de9): (170, "WeakCellMap"),
|
||||
("read_only_space", 0x01e39): (58, "NativeSourceStringMap"),
|
||||
("read_only_space", 0x01e89): (32, "StringMap"),
|
||||
("read_only_space", 0x01ed9): (41, "ConsOneByteStringMap"),
|
||||
("read_only_space", 0x01f29): (33, "ConsStringMap"),
|
||||
("read_only_space", 0x01f79): (45, "ThinOneByteStringMap"),
|
||||
("read_only_space", 0x01fc9): (37, "ThinStringMap"),
|
||||
("read_only_space", 0x02019): (35, "SlicedStringMap"),
|
||||
("read_only_space", 0x02069): (43, "SlicedOneByteStringMap"),
|
||||
("read_only_space", 0x020b9): (34, "ExternalStringMap"),
|
||||
("read_only_space", 0x02109): (42, "ExternalOneByteStringMap"),
|
||||
("read_only_space", 0x02159): (50, "UncachedExternalStringMap"),
|
||||
("read_only_space", 0x021a9): (0, "InternalizedStringMap"),
|
||||
("read_only_space", 0x021f9): (2, "ExternalInternalizedStringMap"),
|
||||
("read_only_space", 0x02249): (10, "ExternalOneByteInternalizedStringMap"),
|
||||
("read_only_space", 0x02299): (18, "UncachedExternalInternalizedStringMap"),
|
||||
("read_only_space", 0x022e9): (26, "UncachedExternalOneByteInternalizedStringMap"),
|
||||
("read_only_space", 0x02339): (58, "UncachedExternalOneByteStringMap"),
|
||||
("read_only_space", 0x02389): (67, "SelfReferenceMarkerMap"),
|
||||
("read_only_space", 0x023f1): (88, "EnumCacheMap"),
|
||||
("read_only_space", 0x02491): (83, "ArrayBoilerplateDescriptionMap"),
|
||||
("read_only_space", 0x02681): (91, "InterceptorInfoMap"),
|
||||
("read_only_space", 0x04e61): (78, "AccessCheckInfoMap"),
|
||||
("read_only_space", 0x04eb1): (79, "AccessorInfoMap"),
|
||||
("read_only_space", 0x04f01): (80, "AccessorPairMap"),
|
||||
("read_only_space", 0x04f51): (81, "AliasedArgumentsEntryMap"),
|
||||
("read_only_space", 0x04fa1): (82, "AllocationMementoMap"),
|
||||
("read_only_space", 0x04ff1): (84, "AsmWasmDataMap"),
|
||||
("read_only_space", 0x05041): (85, "AsyncGeneratorRequestMap"),
|
||||
("read_only_space", 0x05091): (86, "ClassPositionsMap"),
|
||||
("read_only_space", 0x050e1): (87, "DebugInfoMap"),
|
||||
("read_only_space", 0x05131): (89, "FunctionTemplateInfoMap"),
|
||||
("read_only_space", 0x05181): (90, "FunctionTemplateRareDataMap"),
|
||||
("read_only_space", 0x051d1): (92, "InterpreterDataMap"),
|
||||
("read_only_space", 0x05221): (93, "ObjectTemplateInfoMap"),
|
||||
("read_only_space", 0x05271): (94, "PromiseCapabilityMap"),
|
||||
("read_only_space", 0x052c1): (95, "PromiseReactionMap"),
|
||||
("read_only_space", 0x05311): (96, "PrototypeInfoMap"),
|
||||
("read_only_space", 0x05361): (97, "ScriptMap"),
|
||||
("read_only_space", 0x053b1): (98, "SourcePositionTableWithFrameCacheMap"),
|
||||
("read_only_space", 0x05401): (99, "SourceTextModuleInfoEntryMap"),
|
||||
("read_only_space", 0x05451): (100, "StackFrameInfoMap"),
|
||||
("read_only_space", 0x054a1): (101, "StackTraceFrameMap"),
|
||||
("read_only_space", 0x054f1): (102, "TemplateObjectDescriptionMap"),
|
||||
("read_only_space", 0x05541): (103, "Tuple2Map"),
|
||||
("read_only_space", 0x05591): (104, "Tuple3Map"),
|
||||
("read_only_space", 0x055e1): (105, "WasmCapiFunctionDataMap"),
|
||||
("read_only_space", 0x05631): (106, "WasmDebugInfoMap"),
|
||||
("read_only_space", 0x05681): (107, "WasmExceptionTagMap"),
|
||||
("read_only_space", 0x056d1): (108, "WasmExportedFunctionDataMap"),
|
||||
("read_only_space", 0x05721): (109, "WasmIndirectFunctionTableMap"),
|
||||
("read_only_space", 0x05771): (110, "WasmJSFunctionDataMap"),
|
||||
("read_only_space", 0x057c1): (111, "CallableTaskMap"),
|
||||
("read_only_space", 0x05811): (112, "CallbackTaskMap"),
|
||||
("read_only_space", 0x05861): (113, "PromiseFulfillReactionJobTaskMap"),
|
||||
("read_only_space", 0x058b1): (114, "PromiseRejectReactionJobTaskMap"),
|
||||
("read_only_space", 0x05901): (115, "PromiseResolveThenableJobTaskMap"),
|
||||
("read_only_space", 0x05951): (116, "FinalizationGroupCleanupJobTaskMap"),
|
||||
("read_only_space", 0x059a1): (117, "InternalClassMap"),
|
||||
("read_only_space", 0x059f1): (118, "SmiPairMap"),
|
||||
("read_only_space", 0x05a41): (119, "SmiBoxMap"),
|
||||
("read_only_space", 0x05a91): (120, "SortStateMap"),
|
||||
("read_only_space", 0x05ae1): (123, "AllocationSiteWithWeakNextMap"),
|
||||
("read_only_space", 0x05b31): (123, "AllocationSiteWithoutWeakNextMap"),
|
||||
("read_only_space", 0x05b81): (158, "LoadHandler1Map"),
|
||||
("read_only_space", 0x05bd1): (158, "LoadHandler2Map"),
|
||||
("read_only_space", 0x05c21): (158, "LoadHandler3Map"),
|
||||
("read_only_space", 0x05c71): (166, "StoreHandler0Map"),
|
||||
("read_only_space", 0x05cc1): (166, "StoreHandler1Map"),
|
||||
("read_only_space", 0x05d11): (166, "StoreHandler2Map"),
|
||||
("read_only_space", 0x05d61): (166, "StoreHandler3Map"),
|
||||
("map_space", 0x00119): (1057, "ExternalMap"),
|
||||
("map_space", 0x00169): (1073, "JSMessageObjectMap"),
|
||||
("read_only_space", 0x00111): (74, "FreeSpaceMap"),
|
||||
("read_only_space", 0x00161): (68, "MetaMap"),
|
||||
("read_only_space", 0x001e1): (67, "NullMap"),
|
||||
("read_only_space", 0x00249): (155, "DescriptorArrayMap"),
|
||||
("read_only_space", 0x002a9): (150, "WeakFixedArrayMap"),
|
||||
("read_only_space", 0x002f9): (77, "OnePointerFillerMap"),
|
||||
("read_only_space", 0x00349): (77, "TwoPointerFillerMap"),
|
||||
("read_only_space", 0x003c9): (67, "UninitializedMap"),
|
||||
("read_only_space", 0x00439): (8, "OneByteInternalizedStringMap"),
|
||||
("read_only_space", 0x004d9): (67, "UndefinedMap"),
|
||||
("read_only_space", 0x00539): (65, "HeapNumberMap"),
|
||||
("read_only_space", 0x005b9): (67, "TheHoleMap"),
|
||||
("read_only_space", 0x00661): (67, "BooleanMap"),
|
||||
("read_only_space", 0x00739): (72, "ByteArrayMap"),
|
||||
("read_only_space", 0x00789): (125, "FixedArrayMap"),
|
||||
("read_only_space", 0x007d9): (125, "FixedCOWArrayMap"),
|
||||
("read_only_space", 0x00829): (128, "HashTableMap"),
|
||||
("read_only_space", 0x00879): (64, "SymbolMap"),
|
||||
("read_only_space", 0x008c9): (40, "OneByteStringMap"),
|
||||
("read_only_space", 0x00919): (138, "ScopeInfoMap"),
|
||||
("read_only_space", 0x00969): (162, "SharedFunctionInfoMap"),
|
||||
("read_only_space", 0x009b9): (69, "CodeMap"),
|
||||
("read_only_space", 0x00a09): (145, "FunctionContextMap"),
|
||||
("read_only_space", 0x00a59): (153, "CellMap"),
|
||||
("read_only_space", 0x00aa9): (161, "GlobalPropertyCellMap"),
|
||||
("read_only_space", 0x00af9): (71, "ForeignMap"),
|
||||
("read_only_space", 0x00b49): (151, "TransitionArrayMap"),
|
||||
("read_only_space", 0x00b99): (157, "FeedbackVectorMap"),
|
||||
("read_only_space", 0x00c39): (67, "ArgumentsMarkerMap"),
|
||||
("read_only_space", 0x00cd9): (67, "ExceptionMap"),
|
||||
("read_only_space", 0x00d79): (67, "TerminationExceptionMap"),
|
||||
("read_only_space", 0x00e21): (67, "OptimizedOutMap"),
|
||||
("read_only_space", 0x00ec1): (67, "StaleRegisterMap"),
|
||||
("read_only_space", 0x00f31): (147, "NativeContextMap"),
|
||||
("read_only_space", 0x00f81): (146, "ModuleContextMap"),
|
||||
("read_only_space", 0x00fd1): (144, "EvalContextMap"),
|
||||
("read_only_space", 0x01021): (148, "ScriptContextMap"),
|
||||
("read_only_space", 0x01071): (140, "AwaitContextMap"),
|
||||
("read_only_space", 0x010c1): (141, "BlockContextMap"),
|
||||
("read_only_space", 0x01111): (142, "CatchContextMap"),
|
||||
("read_only_space", 0x01161): (149, "WithContextMap"),
|
||||
("read_only_space", 0x011b1): (143, "DebugEvaluateContextMap"),
|
||||
("read_only_space", 0x01201): (139, "ScriptContextTableMap"),
|
||||
("read_only_space", 0x01251): (127, "ClosureFeedbackCellArrayMap"),
|
||||
("read_only_space", 0x012a1): (76, "FeedbackMetadataArrayMap"),
|
||||
("read_only_space", 0x012f1): (125, "ArrayListMap"),
|
||||
("read_only_space", 0x01341): (66, "BigIntMap"),
|
||||
("read_only_space", 0x01391): (126, "ObjectBoilerplateDescriptionMap"),
|
||||
("read_only_space", 0x013e1): (73, "BytecodeArrayMap"),
|
||||
("read_only_space", 0x01431): (154, "CodeDataContainerMap"),
|
||||
("read_only_space", 0x01481): (75, "FixedDoubleArrayMap"),
|
||||
("read_only_space", 0x014d1): (133, "GlobalDictionaryMap"),
|
||||
("read_only_space", 0x01521): (156, "ManyClosuresCellMap"),
|
||||
("read_only_space", 0x01571): (125, "ModuleInfoMap"),
|
||||
("read_only_space", 0x015c1): (70, "MutableHeapNumberMap"),
|
||||
("read_only_space", 0x01611): (132, "NameDictionaryMap"),
|
||||
("read_only_space", 0x01661): (156, "NoClosuresCellMap"),
|
||||
("read_only_space", 0x016b1): (134, "NumberDictionaryMap"),
|
||||
("read_only_space", 0x01701): (156, "OneClosureCellMap"),
|
||||
("read_only_space", 0x01751): (129, "OrderedHashMapMap"),
|
||||
("read_only_space", 0x017a1): (130, "OrderedHashSetMap"),
|
||||
("read_only_space", 0x017f1): (131, "OrderedNameDictionaryMap"),
|
||||
("read_only_space", 0x01841): (159, "PreparseDataMap"),
|
||||
("read_only_space", 0x01891): (160, "PropertyArrayMap"),
|
||||
("read_only_space", 0x018e1): (152, "SideEffectCallHandlerInfoMap"),
|
||||
("read_only_space", 0x01931): (152, "SideEffectFreeCallHandlerInfoMap"),
|
||||
("read_only_space", 0x01981): (152, "NextCallSideEffectFreeCallHandlerInfoMap"),
|
||||
("read_only_space", 0x019d1): (135, "SimpleNumberDictionaryMap"),
|
||||
("read_only_space", 0x01a21): (125, "SloppyArgumentsElementsMap"),
|
||||
("read_only_space", 0x01a71): (163, "SmallOrderedHashMapMap"),
|
||||
("read_only_space", 0x01ac1): (164, "SmallOrderedHashSetMap"),
|
||||
("read_only_space", 0x01b11): (165, "SmallOrderedNameDictionaryMap"),
|
||||
("read_only_space", 0x01b61): (121, "SourceTextModuleMap"),
|
||||
("read_only_space", 0x01bb1): (136, "StringTableMap"),
|
||||
("read_only_space", 0x01c01): (122, "SyntheticModuleMap"),
|
||||
("read_only_space", 0x01c51): (167, "UncompiledDataWithoutPreparseDataMap"),
|
||||
("read_only_space", 0x01ca1): (168, "UncompiledDataWithPreparseDataMap"),
|
||||
("read_only_space", 0x01cf1): (169, "WeakArrayListMap"),
|
||||
("read_only_space", 0x01d41): (137, "EphemeronHashTableMap"),
|
||||
("read_only_space", 0x01d91): (124, "EmbedderDataArrayMap"),
|
||||
("read_only_space", 0x01de1): (170, "WeakCellMap"),
|
||||
("read_only_space", 0x01e31): (58, "NativeSourceStringMap"),
|
||||
("read_only_space", 0x01e81): (32, "StringMap"),
|
||||
("read_only_space", 0x01ed1): (41, "ConsOneByteStringMap"),
|
||||
("read_only_space", 0x01f21): (33, "ConsStringMap"),
|
||||
("read_only_space", 0x01f71): (45, "ThinOneByteStringMap"),
|
||||
("read_only_space", 0x01fc1): (37, "ThinStringMap"),
|
||||
("read_only_space", 0x02011): (35, "SlicedStringMap"),
|
||||
("read_only_space", 0x02061): (43, "SlicedOneByteStringMap"),
|
||||
("read_only_space", 0x020b1): (34, "ExternalStringMap"),
|
||||
("read_only_space", 0x02101): (42, "ExternalOneByteStringMap"),
|
||||
("read_only_space", 0x02151): (50, "UncachedExternalStringMap"),
|
||||
("read_only_space", 0x021a1): (0, "InternalizedStringMap"),
|
||||
("read_only_space", 0x021f1): (2, "ExternalInternalizedStringMap"),
|
||||
("read_only_space", 0x02241): (10, "ExternalOneByteInternalizedStringMap"),
|
||||
("read_only_space", 0x02291): (18, "UncachedExternalInternalizedStringMap"),
|
||||
("read_only_space", 0x022e1): (26, "UncachedExternalOneByteInternalizedStringMap"),
|
||||
("read_only_space", 0x02331): (58, "UncachedExternalOneByteStringMap"),
|
||||
("read_only_space", 0x02381): (67, "SelfReferenceMarkerMap"),
|
||||
("read_only_space", 0x023e9): (88, "EnumCacheMap"),
|
||||
("read_only_space", 0x02489): (83, "ArrayBoilerplateDescriptionMap"),
|
||||
("read_only_space", 0x02679): (91, "InterceptorInfoMap"),
|
||||
("read_only_space", 0x04e59): (78, "AccessCheckInfoMap"),
|
||||
("read_only_space", 0x04ea9): (79, "AccessorInfoMap"),
|
||||
("read_only_space", 0x04ef9): (80, "AccessorPairMap"),
|
||||
("read_only_space", 0x04f49): (81, "AliasedArgumentsEntryMap"),
|
||||
("read_only_space", 0x04f99): (82, "AllocationMementoMap"),
|
||||
("read_only_space", 0x04fe9): (84, "AsmWasmDataMap"),
|
||||
("read_only_space", 0x05039): (85, "AsyncGeneratorRequestMap"),
|
||||
("read_only_space", 0x05089): (86, "ClassPositionsMap"),
|
||||
("read_only_space", 0x050d9): (87, "DebugInfoMap"),
|
||||
("read_only_space", 0x05129): (89, "FunctionTemplateInfoMap"),
|
||||
("read_only_space", 0x05179): (90, "FunctionTemplateRareDataMap"),
|
||||
("read_only_space", 0x051c9): (92, "InterpreterDataMap"),
|
||||
("read_only_space", 0x05219): (93, "ObjectTemplateInfoMap"),
|
||||
("read_only_space", 0x05269): (94, "PromiseCapabilityMap"),
|
||||
("read_only_space", 0x052b9): (95, "PromiseReactionMap"),
|
||||
("read_only_space", 0x05309): (96, "PrototypeInfoMap"),
|
||||
("read_only_space", 0x05359): (97, "ScriptMap"),
|
||||
("read_only_space", 0x053a9): (98, "SourcePositionTableWithFrameCacheMap"),
|
||||
("read_only_space", 0x053f9): (99, "SourceTextModuleInfoEntryMap"),
|
||||
("read_only_space", 0x05449): (100, "StackFrameInfoMap"),
|
||||
("read_only_space", 0x05499): (101, "StackTraceFrameMap"),
|
||||
("read_only_space", 0x054e9): (102, "TemplateObjectDescriptionMap"),
|
||||
("read_only_space", 0x05539): (103, "Tuple2Map"),
|
||||
("read_only_space", 0x05589): (104, "Tuple3Map"),
|
||||
("read_only_space", 0x055d9): (105, "WasmCapiFunctionDataMap"),
|
||||
("read_only_space", 0x05629): (106, "WasmDebugInfoMap"),
|
||||
("read_only_space", 0x05679): (107, "WasmExceptionTagMap"),
|
||||
("read_only_space", 0x056c9): (108, "WasmExportedFunctionDataMap"),
|
||||
("read_only_space", 0x05719): (109, "WasmIndirectFunctionTableMap"),
|
||||
("read_only_space", 0x05769): (110, "WasmJSFunctionDataMap"),
|
||||
("read_only_space", 0x057b9): (111, "CallableTaskMap"),
|
||||
("read_only_space", 0x05809): (112, "CallbackTaskMap"),
|
||||
("read_only_space", 0x05859): (113, "PromiseFulfillReactionJobTaskMap"),
|
||||
("read_only_space", 0x058a9): (114, "PromiseRejectReactionJobTaskMap"),
|
||||
("read_only_space", 0x058f9): (115, "PromiseResolveThenableJobTaskMap"),
|
||||
("read_only_space", 0x05949): (116, "FinalizationGroupCleanupJobTaskMap"),
|
||||
("read_only_space", 0x05999): (117, "InternalClassMap"),
|
||||
("read_only_space", 0x059e9): (118, "SmiPairMap"),
|
||||
("read_only_space", 0x05a39): (119, "SmiBoxMap"),
|
||||
("read_only_space", 0x05a89): (120, "SortStateMap"),
|
||||
("read_only_space", 0x05ad9): (123, "AllocationSiteWithWeakNextMap"),
|
||||
("read_only_space", 0x05b29): (123, "AllocationSiteWithoutWeakNextMap"),
|
||||
("read_only_space", 0x05b79): (158, "LoadHandler1Map"),
|
||||
("read_only_space", 0x05bc9): (158, "LoadHandler2Map"),
|
||||
("read_only_space", 0x05c19): (158, "LoadHandler3Map"),
|
||||
("read_only_space", 0x05c69): (166, "StoreHandler0Map"),
|
||||
("read_only_space", 0x05cb9): (166, "StoreHandler1Map"),
|
||||
("read_only_space", 0x05d09): (166, "StoreHandler2Map"),
|
||||
("read_only_space", 0x05d59): (166, "StoreHandler3Map"),
|
||||
("map_space", 0x00111): (1057, "ExternalMap"),
|
||||
("map_space", 0x00161): (1073, "JSMessageObjectMap"),
|
||||
}
|
||||
|
||||
# List of known V8 objects.
|
||||
KNOWN_OBJECTS = {
|
||||
("read_only_space", 0x001b9): "NullValue",
|
||||
("read_only_space", 0x00239): "EmptyDescriptorArray",
|
||||
("read_only_space", 0x002a1): "EmptyWeakFixedArray",
|
||||
("read_only_space", 0x003a1): "UninitializedValue",
|
||||
("read_only_space", 0x004b1): "UndefinedValue",
|
||||
("read_only_space", 0x00531): "NanValue",
|
||||
("read_only_space", 0x00591): "TheHoleValue",
|
||||
("read_only_space", 0x00629): "HoleNanValue",
|
||||
("read_only_space", 0x00639): "TrueValue",
|
||||
("read_only_space", 0x006e9): "FalseValue",
|
||||
("read_only_space", 0x00731): "empty_string",
|
||||
("read_only_space", 0x00bf1): "EmptyScopeInfo",
|
||||
("read_only_space", 0x00c01): "EmptyFixedArray",
|
||||
("read_only_space", 0x00c11): "ArgumentsMarker",
|
||||
("read_only_space", 0x00cb1): "Exception",
|
||||
("read_only_space", 0x00d51): "TerminationException",
|
||||
("read_only_space", 0x00df9): "OptimizedOut",
|
||||
("read_only_space", 0x00e99): "StaleRegister",
|
||||
("read_only_space", 0x023d9): "EmptyEnumCache",
|
||||
("read_only_space", 0x02441): "EmptyPropertyArray",
|
||||
("read_only_space", 0x02451): "EmptyByteArray",
|
||||
("read_only_space", 0x02461): "EmptyObjectBoilerplateDescription",
|
||||
("read_only_space", 0x02479): "EmptyArrayBoilerplateDescription",
|
||||
("read_only_space", 0x024e1): "EmptyClosureFeedbackCellArray",
|
||||
("read_only_space", 0x024f1): "EmptySloppyArgumentsElements",
|
||||
("read_only_space", 0x02511): "EmptySlowElementDictionary",
|
||||
("read_only_space", 0x02559): "EmptyOrderedHashMap",
|
||||
("read_only_space", 0x02581): "EmptyOrderedHashSet",
|
||||
("read_only_space", 0x025a9): "EmptyFeedbackMetadata",
|
||||
("read_only_space", 0x025b9): "EmptyPropertyCell",
|
||||
("read_only_space", 0x025e1): "EmptyPropertyDictionary",
|
||||
("read_only_space", 0x02631): "NoOpInterceptorInfo",
|
||||
("read_only_space", 0x026d1): "EmptyWeakArrayList",
|
||||
("read_only_space", 0x026e9): "InfinityValue",
|
||||
("read_only_space", 0x026f9): "MinusZeroValue",
|
||||
("read_only_space", 0x02709): "MinusInfinityValue",
|
||||
("read_only_space", 0x02719): "SelfReferenceMarker",
|
||||
("read_only_space", 0x02771): "OffHeapTrampolineRelocationInfo",
|
||||
("read_only_space", 0x02789): "TrampolineTrivialCodeDataContainer",
|
||||
("read_only_space", 0x027a1): "TrampolinePromiseRejectionCodeDataContainer",
|
||||
("read_only_space", 0x027b9): "HashSeed",
|
||||
("old_space", 0x00119): "ArgumentsIteratorAccessor",
|
||||
("old_space", 0x00189): "ArrayLengthAccessor",
|
||||
("old_space", 0x001f9): "BoundFunctionLengthAccessor",
|
||||
("old_space", 0x00269): "BoundFunctionNameAccessor",
|
||||
("old_space", 0x002d9): "ErrorStackAccessor",
|
||||
("old_space", 0x00349): "FunctionArgumentsAccessor",
|
||||
("old_space", 0x003b9): "FunctionCallerAccessor",
|
||||
("old_space", 0x00429): "FunctionNameAccessor",
|
||||
("old_space", 0x00499): "FunctionLengthAccessor",
|
||||
("old_space", 0x00509): "FunctionPrototypeAccessor",
|
||||
("old_space", 0x00579): "StringLengthAccessor",
|
||||
("old_space", 0x005e9): "InvalidPrototypeValidityCell",
|
||||
("old_space", 0x005f9): "EmptyScript",
|
||||
("old_space", 0x00679): "ManyClosuresCell",
|
||||
("old_space", 0x00691): "ArrayConstructorProtector",
|
||||
("old_space", 0x006a1): "NoElementsProtector",
|
||||
("old_space", 0x006c9): "IsConcatSpreadableProtector",
|
||||
("old_space", 0x006d9): "ArraySpeciesProtector",
|
||||
("old_space", 0x00701): "TypedArraySpeciesProtector",
|
||||
("old_space", 0x00729): "PromiseSpeciesProtector",
|
||||
("old_space", 0x00751): "StringLengthProtector",
|
||||
("old_space", 0x00761): "ArrayIteratorProtector",
|
||||
("old_space", 0x00789): "ArrayBufferDetachingProtector",
|
||||
("old_space", 0x007b1): "PromiseHookProtector",
|
||||
("old_space", 0x007d9): "PromiseResolveProtector",
|
||||
("old_space", 0x007e9): "MapIteratorProtector",
|
||||
("old_space", 0x00811): "PromiseThenProtector",
|
||||
("old_space", 0x00839): "SetIteratorProtector",
|
||||
("old_space", 0x00861): "StringIteratorProtector",
|
||||
("old_space", 0x00889): "SingleCharacterStringCache",
|
||||
("old_space", 0x01099): "StringSplitCache",
|
||||
("old_space", 0x018a9): "RegExpMultipleCache",
|
||||
("old_space", 0x020b9): "BuiltinsConstantsTable",
|
||||
("read_only_space", 0x001b1): "NullValue",
|
||||
("read_only_space", 0x00231): "EmptyDescriptorArray",
|
||||
("read_only_space", 0x00299): "EmptyWeakFixedArray",
|
||||
("read_only_space", 0x00399): "UninitializedValue",
|
||||
("read_only_space", 0x004a9): "UndefinedValue",
|
||||
("read_only_space", 0x00529): "NanValue",
|
||||
("read_only_space", 0x00589): "TheHoleValue",
|
||||
("read_only_space", 0x00621): "HoleNanValue",
|
||||
("read_only_space", 0x00631): "TrueValue",
|
||||
("read_only_space", 0x006e1): "FalseValue",
|
||||
("read_only_space", 0x00729): "empty_string",
|
||||
("read_only_space", 0x00be9): "EmptyScopeInfo",
|
||||
("read_only_space", 0x00bf9): "EmptyFixedArray",
|
||||
("read_only_space", 0x00c09): "ArgumentsMarker",
|
||||
("read_only_space", 0x00ca9): "Exception",
|
||||
("read_only_space", 0x00d49): "TerminationException",
|
||||
("read_only_space", 0x00df1): "OptimizedOut",
|
||||
("read_only_space", 0x00e91): "StaleRegister",
|
||||
("read_only_space", 0x023d1): "EmptyEnumCache",
|
||||
("read_only_space", 0x02439): "EmptyPropertyArray",
|
||||
("read_only_space", 0x02449): "EmptyByteArray",
|
||||
("read_only_space", 0x02459): "EmptyObjectBoilerplateDescription",
|
||||
("read_only_space", 0x02471): "EmptyArrayBoilerplateDescription",
|
||||
("read_only_space", 0x024d9): "EmptyClosureFeedbackCellArray",
|
||||
("read_only_space", 0x024e9): "EmptySloppyArgumentsElements",
|
||||
("read_only_space", 0x02509): "EmptySlowElementDictionary",
|
||||
("read_only_space", 0x02551): "EmptyOrderedHashMap",
|
||||
("read_only_space", 0x02579): "EmptyOrderedHashSet",
|
||||
("read_only_space", 0x025a1): "EmptyFeedbackMetadata",
|
||||
("read_only_space", 0x025b1): "EmptyPropertyCell",
|
||||
("read_only_space", 0x025d9): "EmptyPropertyDictionary",
|
||||
("read_only_space", 0x02629): "NoOpInterceptorInfo",
|
||||
("read_only_space", 0x026c9): "EmptyWeakArrayList",
|
||||
("read_only_space", 0x026e1): "InfinityValue",
|
||||
("read_only_space", 0x026f1): "MinusZeroValue",
|
||||
("read_only_space", 0x02701): "MinusInfinityValue",
|
||||
("read_only_space", 0x02711): "SelfReferenceMarker",
|
||||
("read_only_space", 0x02769): "OffHeapTrampolineRelocationInfo",
|
||||
("read_only_space", 0x02781): "TrampolineTrivialCodeDataContainer",
|
||||
("read_only_space", 0x02799): "TrampolinePromiseRejectionCodeDataContainer",
|
||||
("read_only_space", 0x027b1): "HashSeed",
|
||||
("old_space", 0x00111): "ArgumentsIteratorAccessor",
|
||||
("old_space", 0x00181): "ArrayLengthAccessor",
|
||||
("old_space", 0x001f1): "BoundFunctionLengthAccessor",
|
||||
("old_space", 0x00261): "BoundFunctionNameAccessor",
|
||||
("old_space", 0x002d1): "ErrorStackAccessor",
|
||||
("old_space", 0x00341): "FunctionArgumentsAccessor",
|
||||
("old_space", 0x003b1): "FunctionCallerAccessor",
|
||||
("old_space", 0x00421): "FunctionNameAccessor",
|
||||
("old_space", 0x00491): "FunctionLengthAccessor",
|
||||
("old_space", 0x00501): "FunctionPrototypeAccessor",
|
||||
("old_space", 0x00571): "StringLengthAccessor",
|
||||
("old_space", 0x005e1): "InvalidPrototypeValidityCell",
|
||||
("old_space", 0x005f1): "EmptyScript",
|
||||
("old_space", 0x00671): "ManyClosuresCell",
|
||||
("old_space", 0x00689): "ArrayConstructorProtector",
|
||||
("old_space", 0x00699): "NoElementsProtector",
|
||||
("old_space", 0x006c1): "IsConcatSpreadableProtector",
|
||||
("old_space", 0x006d1): "ArraySpeciesProtector",
|
||||
("old_space", 0x006f9): "TypedArraySpeciesProtector",
|
||||
("old_space", 0x00721): "PromiseSpeciesProtector",
|
||||
("old_space", 0x00749): "StringLengthProtector",
|
||||
("old_space", 0x00759): "ArrayIteratorProtector",
|
||||
("old_space", 0x00781): "ArrayBufferDetachingProtector",
|
||||
("old_space", 0x007a9): "PromiseHookProtector",
|
||||
("old_space", 0x007d1): "PromiseResolveProtector",
|
||||
("old_space", 0x007e1): "MapIteratorProtector",
|
||||
("old_space", 0x00809): "PromiseThenProtector",
|
||||
("old_space", 0x00831): "SetIteratorProtector",
|
||||
("old_space", 0x00859): "StringIteratorProtector",
|
||||
("old_space", 0x00881): "SingleCharacterStringCache",
|
||||
("old_space", 0x01091): "StringSplitCache",
|
||||
("old_space", 0x018a1): "RegExpMultipleCache",
|
||||
("old_space", 0x020b1): "BuiltinsConstantsTable",
|
||||
}
|
||||
|
||||
# List of known V8 Frame Markers.
|
||||
|
Loading…
Reference in New Issue
Block a user