[heap] Store size with invalidated object

When updating pointers during a full GC, a page might not have been
swept yet. In that case, free memory may still contain invalid objects
and recorded slots. Updating tagged slots in free memory is safe, even
though it is superfluous work.

However, the GC also needs to calculate the size of a potentially dead
invalid object in order to check whether a slot lies within that
object. Since the object is dead, its map might be dead as well, making
size calculation on such objects impossible. This CL therefore caches
the size of an invalid object at the time it is invalidated. A
follow-up CL will also check the marking bit of invalid objects.

Bug: v8:12578, chromium:1316289
Change-Id: Ie773d0862a565982957e0dc409630d76552d1a32
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3599482
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Jakob Linke <jgruber@chromium.org>
Reviewed-by: Patrick Thier <pthier@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80169}
Dominik Inführ 2022-04-25 10:20:23 +02:00 committed by V8 LUCI CQ
parent 9080b8acf7
commit 5d235def26
12 changed files with 119 additions and 59 deletions
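The gist of the change, as a minimal standalone sketch (simplified;
Address, InvalidatedObjects and SlotOutsideInvalidatedObject below are
illustrative names, not the real V8 API): the set of invalidated
objects becomes a map keyed by object start address, with each entry
carrying the size cached at invalidation time, so a slot can be
bounds-checked without dereferencing the possibly dead map of a dead
object.

  #include <cstdint>
  #include <map>

  using Address = uintptr_t;

  // Invalidated objects keyed by start address; each entry also stores
  // the object's size as cached when the object was invalidated.
  using InvalidatedObjects = std::map<Address, int>;

  // Returns true if |slot| lies outside every invalidated object. Only
  // the cached size is consulted, so the (possibly dead) map of a dead
  // invalidated object is never touched.
  bool SlotOutsideInvalidatedObject(const InvalidatedObjects& objects,
                                    Address slot) {
    // First entry whose start address is strictly greater than |slot|.
    auto it = objects.upper_bound(slot);
    if (it == objects.begin()) return true;  // No object starts before slot.
    --it;  // Last object starting at or before |slot|.
    return slot >= it->first + static_cast<Address>(it->second);
  }

Note that in the actual patch below, InvalidatedSlotsFilter::IsValid()
still calls IsValidSlot() through the object's map for slots that fall
inside an invalidated object; the cached size only replaces the Size()
computation, which is the part that becomes impossible once the map is
dead.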

View File

@@ -13,6 +13,7 @@
#include "src/diagnostics/disasm.h"
#include "src/execution/frames.h"
#include "src/execution/isolate.h"
#include "src/heap/heap.h"
#include "src/numbers/conversions.h"
#include "src/objects/arguments.h"
#include "src/objects/heap-number-inl.h"
@@ -1850,7 +1851,9 @@ void TranslatedState::InitializeJSObjectAt(
CHECK_GE(children_count, 2);
// Notify the concurrent marker about the layout change.
isolate()->heap()->NotifyObjectLayoutChange(*object_storage, no_gc);
isolate()->heap()->NotifyObjectLayoutChange(
*object_storage, no_gc, InvalidateRecordedSlots::kYes,
slot->GetChildrenCount() * kTaggedSize);
// Fill the property array field.
{
@@ -1902,7 +1905,9 @@ void TranslatedState::InitializeObjectWithTaggedFieldsAt(
}
// Notify the concurrent marker about the layout change.
isolate()->heap()->NotifyObjectLayoutChange(*object_storage, no_gc);
isolate()->heap()->NotifyObjectLayoutChange(
*object_storage, no_gc, InvalidateRecordedSlots::kYes,
slot->GetChildrenCount() * kTaggedSize);
// Write the fields to the object.
for (int i = 1; i < children_count; i++) {

View File

@@ -3954,22 +3954,24 @@ void Heap::FinalizeIncrementalMarkingIncrementally(
void Heap::NotifyObjectLayoutChange(
HeapObject object, const DisallowGarbageCollection&,
InvalidateRecordedSlots invalidate_recorded_slots) {
InvalidateRecordedSlots invalidate_recorded_slots, int new_size) {
DCHECK_IMPLIES(invalidate_recorded_slots == InvalidateRecordedSlots::kYes,
new_size > 0);
if (incremental_marking()->IsMarking()) {
incremental_marking()->MarkBlackAndVisitObjectDueToLayoutChange(object);
if (incremental_marking()->IsCompacting() &&
invalidate_recorded_slots == InvalidateRecordedSlots::kYes &&
MayContainRecordedSlots(object)) {
MemoryChunk::FromHeapObject(object)
->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object);
->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object, new_size);
}
}
if (invalidate_recorded_slots == InvalidateRecordedSlots::kYes &&
MayContainRecordedSlots(object)) {
MemoryChunk::FromHeapObject(object)
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(object);
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(object, new_size);
MemoryChunk::FromHeapObject(object)
->RegisterObjectWithInvalidatedSlots<OLD_TO_SHARED>(object);
->RegisterObjectWithInvalidatedSlots<OLD_TO_SHARED>(object, new_size);
}
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
@@ -4646,11 +4648,34 @@ void Heap::Verify() {
if (new_lo_space_) new_lo_space_->Verify(isolate());
isolate()->string_table()->VerifyIfOwnedBy(isolate());
VerifyInvalidatedObjectSize();
#if DEBUG
VerifyCommittedPhysicalMemory();
#endif // DEBUG
}
namespace {
void VerifyInvalidatedSlots(InvalidatedSlots* invalidated_slots) {
if (!invalidated_slots) return;
for (std::pair<HeapObject, int> object_and_size : *invalidated_slots) {
HeapObject object = object_and_size.first;
int size = object_and_size.second;
CHECK_EQ(object.Size(), size);
}
}
} // namespace
void Heap::VerifyInvalidatedObjectSize() {
OldGenerationMemoryChunkIterator chunk_iterator(this);
MemoryChunk* chunk;
while ((chunk = chunk_iterator.next()) != nullptr) {
VerifyInvalidatedSlots(chunk->invalidated_slots<OLD_TO_NEW>());
VerifyInvalidatedSlots(chunk->invalidated_slots<OLD_TO_SHARED>());
}
}
void Heap::VerifyReadOnlyHeap() {
CHECK(!read_only_space_->writable());
read_only_space_->Verify(isolate());

View File

@@ -1114,8 +1114,7 @@ class Heap {
// manually.
void NotifyObjectLayoutChange(
HeapObject object, const DisallowGarbageCollection&,
InvalidateRecordedSlots invalidate_recorded_slots =
InvalidateRecordedSlots::kYes);
InvalidateRecordedSlots invalidate_recorded_slots, int new_size = 0);
#ifdef VERIFY_HEAP
// This function checks that either
@@ -1584,6 +1583,9 @@ class Heap {
// created.
void VerifyReadOnlyHeap();
void VerifyRememberedSetFor(HeapObject object);
// Verify that cached size of invalidated object is up-to-date.
void VerifyInvalidatedObjectSize();
#endif
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT

View File

@@ -28,22 +28,18 @@ bool InvalidatedSlotsFilter::IsValid(Address slot) {
NextInvalidatedObject();
}
HeapObject invalidated_object = HeapObject::FromAddress(invalidated_start_);
if (invalidated_size_ == 0) {
DCHECK(MarkCompactCollector::IsMapOrForwarded(invalidated_object.map()));
invalidated_size_ = invalidated_object.Size();
}
int offset = static_cast<int>(slot - invalidated_start_);
// OLD_TO_OLD can have slots in the map word, unlike other remembered sets.
DCHECK_GE(offset, 0);
DCHECK_IMPLIES(remembered_set_type_ != OLD_TO_OLD, offset > 0);
if (offset < invalidated_size_)
return offset == 0 ||
invalidated_object.IsValidSlot(invalidated_object.map(), offset);
if (offset < invalidated_size_) {
if (offset == 0) return true;
HeapObject invalidated_object = HeapObject::FromAddress(invalidated_start_);
DCHECK(MarkCompactCollector::IsMapOrForwarded(invalidated_object.map()));
return invalidated_object.IsValidSlot(invalidated_object.map(), offset);
}
NextInvalidatedObject();
return true;
@@ -51,12 +47,14 @@ bool InvalidatedSlotsFilter::IsValid(Address slot) {
void InvalidatedSlotsFilter::NextInvalidatedObject() {
invalidated_start_ = next_invalidated_start_;
invalidated_size_ = 0;
invalidated_size_ = next_invalidated_size_;
if (iterator_ == iterator_end_) {
next_invalidated_start_ = sentinel_;
next_invalidated_size_ = 0;
} else {
next_invalidated_start_ = iterator_->address();
next_invalidated_start_ = iterator_->first.address();
next_invalidated_size_ = iterator_->second;
iterator_++;
}
}
@@ -87,7 +85,7 @@ void InvalidatedSlotsCleanup::Free(Address free_start, Address free_end) {
void InvalidatedSlotsCleanup::NextInvalidatedObject() {
if (iterator_ != iterator_end_) {
invalidated_start_ = iterator_->address();
invalidated_start_ = iterator_->first.address();
} else {
invalidated_start_ = sentinel_;
}

View File

@@ -21,7 +21,7 @@ namespace internal {
// that potentially invalidates slots recorded concurrently. The second part
// of each element is the size of the corresponding object before the layout
// change.
using InvalidatedSlots = std::set<HeapObject, Object::Comparer>;
using InvalidatedSlots = std::map<HeapObject, int, Object::Comparer>;
// This class provides IsValid predicate that takes into account the set
// of invalidated objects in the given memory chunk.
@@ -45,9 +45,10 @@ class V8_EXPORT_PRIVATE InvalidatedSlotsFilter {
InvalidatedSlots::const_iterator iterator_;
InvalidatedSlots::const_iterator iterator_end_;
Address sentinel_;
Address invalidated_start_;
Address next_invalidated_start_;
int invalidated_size_;
Address invalidated_start_{kNullAddress};
Address next_invalidated_start_{kNullAddress};
int invalidated_size_{0};
int next_invalidated_size_{0};
InvalidatedSlots empty_;
#ifdef DEBUG
Address last_slot_;

View File

@@ -364,14 +364,17 @@ void MemoryChunk::ReleaseInvalidatedSlots() {
}
template V8_EXPORT_PRIVATE void
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(HeapObject object);
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(HeapObject object,
int new_size);
template V8_EXPORT_PRIVATE void
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(HeapObject object);
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(HeapObject object,
int new_size);
template V8_EXPORT_PRIVATE void MemoryChunk::RegisterObjectWithInvalidatedSlots<
OLD_TO_SHARED>(HeapObject object);
OLD_TO_SHARED>(HeapObject object, int new_size);
template <RememberedSetType type>
void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object) {
void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object,
int new_size) {
bool skip_slot_recording;
switch (type) {
@@ -399,23 +402,27 @@ void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object) {
AllocateInvalidatedSlots<type>();
}
invalidated_slots<type>()->insert(object);
DCHECK_GT(new_size, 0);
InvalidatedSlots& invalidated_slots = *this->invalidated_slots<type>();
DCHECK_IMPLIES(invalidated_slots[object] > 0,
new_size <= invalidated_slots[object]);
invalidated_slots.insert_or_assign(object, new_size);
}
void MemoryChunk::InvalidateRecordedSlots(HeapObject object) {
void MemoryChunk::InvalidateRecordedSlots(HeapObject object, int new_size) {
if (V8_DISABLE_WRITE_BARRIERS_BOOL) return;
if (heap()->incremental_marking()->IsCompacting()) {
// We cannot check slot_set_[OLD_TO_OLD] here, since the
// concurrent markers might insert slots concurrently.
RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object);
RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object, new_size);
}
if (slot_set_[OLD_TO_NEW] != nullptr) {
RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(object);
RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(object, new_size);
}
if (slot_set_[OLD_TO_SHARED] != nullptr) {
RegisterObjectWithInvalidatedSlots<OLD_TO_SHARED>(object);
RegisterObjectWithInvalidatedSlots<OLD_TO_SHARED>(object, new_size);
}
}

View File

@@ -143,8 +143,9 @@ class MemoryChunk : public BasicMemoryChunk {
template <RememberedSetType type>
void ReleaseInvalidatedSlots();
template <RememberedSetType type>
V8_EXPORT_PRIVATE void RegisterObjectWithInvalidatedSlots(HeapObject object);
void InvalidateRecordedSlots(HeapObject object);
V8_EXPORT_PRIVATE void RegisterObjectWithInvalidatedSlots(HeapObject object,
int new_size);
void InvalidateRecordedSlots(HeapObject object, int new_size);
template <RememberedSetType type>
bool RegisteredObjectWithInvalidatedSlots(HeapObject object);
template <RememberedSetType type>

View File

@@ -11,6 +11,7 @@
#include "src/common/globals.h"
#include "src/handles/handles-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/heap/heap.h"
#include "src/objects/debug-objects-inl.h"
#include "src/objects/feedback-vector-inl.h"
#include "src/objects/scope-info-inl.h"
@@ -826,7 +827,8 @@ void SharedFunctionInfo::ClearPreparseData() {
Heap* heap = GetHeapFromWritableObject(data);
// Swap the map.
heap->NotifyObjectLayoutChange(data, no_gc);
heap->NotifyObjectLayoutChange(data, no_gc, InvalidateRecordedSlots::kYes,
UncompiledDataWithoutPreparseData::kSize);
STATIC_ASSERT(UncompiledDataWithoutPreparseData::kSize <
UncompiledDataWithPreparseData::kSize);
STATIC_ASSERT(UncompiledDataWithoutPreparseData::kSize ==

View File

@@ -391,11 +391,6 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
bool is_internalized = this->IsInternalizedString();
bool has_pointers = StringShape(*this).IsIndirect();
if (has_pointers) {
isolate->heap()->NotifyObjectLayoutChange(*this, no_gc,
InvalidateRecordedSlots::kYes);
}
base::SharedMutexGuard<base::kExclusive> shared_mutex_guard(
isolate->internalized_string_access());
// Morph the string to an external string by replacing the map and
@@ -419,6 +414,12 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
// Byte size of the external String object.
int new_size = this->SizeFromMap(new_map);
if (has_pointers) {
isolate->heap()->NotifyObjectLayoutChange(
*this, no_gc, InvalidateRecordedSlots::kYes, new_size);
}
if (!isolate->heap()->IsLargeObject(*this)) {
isolate->heap()->CreateFillerObjectAt(
this->address() + new_size, size - new_size,
@@ -475,11 +476,6 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
bool is_internalized = this->IsInternalizedString();
bool has_pointers = StringShape(*this).IsIndirect();
if (has_pointers) {
isolate->heap()->NotifyObjectLayoutChange(*this, no_gc,
InvalidateRecordedSlots::kYes);
}
base::SharedMutexGuard<base::kExclusive> shared_mutex_guard(
isolate->internalized_string_access());
// Morph the string to an external string by replacing the map and
@@ -504,6 +500,11 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
// Byte size of the external String object.
int new_size = this->SizeFromMap(new_map);
if (has_pointers) {
isolate->heap()->NotifyObjectLayoutChange(
*this, no_gc, InvalidateRecordedSlots::kYes, new_size);
}
isolate->heap()->CreateFillerObjectAt(
this->address() + new_size, size - new_size,
has_pointers ? ClearRecordedSlots::kYes : ClearRecordedSlots::kNo);

View File

@@ -219,7 +219,8 @@ bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver,
receiver->RawField(index.offset()));
if (!FLAG_enable_third_party_heap) {
MemoryChunk* chunk = MemoryChunk::FromHeapObject(*receiver);
chunk->InvalidateRecordedSlots(*receiver);
int new_size = parent_map->instance_size();
chunk->InvalidateRecordedSlots(*receiver, new_size);
}
}
}

View File

@@ -234,13 +234,15 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithInvalidatedSlots) {
}
}
// First object is going to be evacuated.
HeapObject front_object = *compaction_page_handles.front();
to_be_aborted_page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(
*compaction_page_handles.front());
front_object, front_object.Size());
// Last object is NOT going to be evacuated.
// This happens since not all objects fit on the only other page in the
// old space, and the GC isn't allowed to allocate another page.
HeapObject back_object = *compaction_page_handles.back();
to_be_aborted_page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(
*compaction_page_handles.back());
back_object, back_object.Size());
to_be_aborted_page->SetFlag(
MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);

View File

@@ -71,7 +71,9 @@ HEAP_TEST(InvalidatedSlotsSomeInvalidatedRanges) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register every second byte array as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i += 2) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
ByteArray byte_array = byte_arrays[i];
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_array,
byte_array.Size());
}
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
@@ -96,7 +98,9 @@ HEAP_TEST(InvalidatedSlotsAllInvalidatedRanges) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
ByteArray byte_array = byte_arrays[i];
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_array,
byte_array.Size());
}
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
@@ -117,16 +121,18 @@ HEAP_TEST(InvalidatedSlotsAfterTrimming) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(
byte_arrays[i], ByteArray::kHeaderSize);
}
// Trim byte arrays and check that the slots outside the byte arrays are
// considered invalid if the old space page was swept.
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
ByteArray byte_array = byte_arrays[i];
Address start = byte_array.address() + ByteArray::kHeaderSize;
Address end = byte_array.address() + byte_array.Size();
heap->RightTrimFixedArray(byte_array, byte_array.length());
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
for (Address addr = start; addr < end; addr += kTaggedSize) {
CHECK_EQ(filter.IsValid(addr), page->SweepingDone());
}
@@ -144,7 +150,9 @@ HEAP_TEST(InvalidatedSlotsEvacuationCandidate) {
// This should be a no-op because the page is marked as an evacuation
// candidate.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
ByteArray byte_array = byte_arrays[i];
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_array,
byte_array.Size());
}
// All slots must still be valid.
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
@@ -168,7 +176,9 @@ HEAP_TEST(InvalidatedSlotsResetObjectRegression) {
heap->RightTrimFixedArray(byte_arrays[0], byte_arrays[0].length() - 8);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
ByteArray byte_array = byte_arrays[i];
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_array,
byte_array.Size());
}
// All slots must still be invalid.
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
@@ -361,7 +371,9 @@ HEAP_TEST(InvalidatedSlotsCleanupFull) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i]);
ByteArray byte_array = byte_arrays[i];
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_array,
byte_array.Size());
}
// Mark full page as free
@@ -380,7 +392,9 @@ HEAP_TEST(InvalidatedSlotsCleanupEachObject) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i]);
ByteArray byte_array = byte_arrays[i];
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_array,
byte_array.Size());
}
// Mark each object as free on page
@@ -407,7 +421,8 @@ HEAP_TEST(InvalidatedSlotsCleanupRightTrim) {
ByteArray& invalidated = byte_arrays[1];
heap->RightTrimFixedArray(invalidated, invalidated.length() - 8);
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(invalidated);
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(invalidated,
invalidated.Size());
// Free memory at end of invalidated object
InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);