Reland^3: [heap] Store size with invalidated object
This is a reland of commit 2b79eefed3.

A DCHECK was using map[key] and inadvertently inserted into the map that
way.

Original change's description:
> Reland^2: [heap] Store size with invalidated object
>
> This is a reland of commit 23b2d571a7
>
> When updating pointers during a full GC, a page might not be swept
> already. In such cases there might be invalid objects and slots
> recorded in free memory. Updating tagged slots in free memory is fine
> even though not strictly necessary.
>
> However, the GC also needs to calculate the size of potentially dead
> invalid objects in order to be able to check whether a slot is within
> that object. But since that object is dead, its map might be dead as
> well, which makes size calculation impossible on such objects. The CL
> changes this to cache the size of invalid objects. A follow-up CL will
> also check the marking bit of invalid objects.
>
> Reason for reverts:
>
> Revert #2: In-object slack tracking on JSObjects doesn't update the
> cached size of invalidated objects. The fix here was to stop
> invalidating recorded slots on JSObjects at all and avoid that problem
> completely (see https://crrev.com/c/3620274).
>
> Revert #1: Not all size changes go through NotifyObjectLayoutChange, so
> https://crrev.com/c/3607992 introduced NotifyObjectSizeChange as a
> bottleneck for object size changes/right-trimming. This method is
> now used to update the size of invalidated objects.
>
> Bug: v8:12578, chromium:1316289
> Change-Id: I0478d04601c0270ddb39419ca6cf98719951eb4d
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3623542
> Reviewed-by: Jakob Linke <jgruber@chromium.org>
> Reviewed-by: Patrick Thier <pthier@chromium.org>
> Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Cr-Commit-Position: refs/heads/main@{#80344}

Bug: v8:12578, chromium:1316289
Change-Id: Ibcc04c209213c584860a7c473082526cb4e53c59
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3627635
Reviewed-by: Patrick Thier <pthier@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Nico Hartmann <nicohartmann@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80542}
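The DCHECK bug that forced this reland is easy to reproduce in isolation: std::map::operator[] default-constructs and inserts a value for any missing key, so even a read performed only inside a DCHECK can mutate the map. A minimal standalone sketch of the pitfall and of an insertion-free lookup (plain int keys as a stand-in for HeapObject; not the V8 code):

#include <cassert>
#include <map>

int main() {
  std::map<int, int> sizes;  // stand-in for InvalidatedSlots

  // Buggy pattern: operator[] on a missing key default-inserts {42, 0},
  // so a "read-only" check silently grows the map.
  int read = sizes[42];
  assert(read == 0);
  assert(sizes.size() == 1);

  sizes.clear();

  // Insertion-free pattern: find() never modifies the map. The relanded
  // CL avoids the pitfall by evaluating operator[] only when count()
  // shows the key is already present.
  assert(sizes.find(42) == sizes.end());
  assert(sizes.empty());
}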
commit 34da5f5b5b (parent 36565f6b5c)
src/heap/heap.cc

@@ -3480,8 +3480,9 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
 #ifdef DEBUG
   if (MayContainRecordedSlots(object)) {
     MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
-    DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(object));
     DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(object));
+    DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(object));
+    DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_SHARED>(object));
   }
 #endif
 
@@ -3898,7 +3899,7 @@ void Heap::FinalizeIncrementalMarkingIncrementally(
 
 void Heap::NotifyObjectLayoutChange(
     HeapObject object, const DisallowGarbageCollection&,
-    InvalidateRecordedSlots invalidate_recorded_slots) {
+    InvalidateRecordedSlots invalidate_recorded_slots, int new_size) {
   if (invalidate_recorded_slots == InvalidateRecordedSlots::kYes) {
     const bool may_contain_recorded_slots = MayContainRecordedSlots(object);
 
@@ -3906,15 +3907,15 @@ void Heap::NotifyObjectLayoutChange(
       incremental_marking()->MarkBlackAndVisitObjectDueToLayoutChange(object);
       if (may_contain_recorded_slots && incremental_marking()->IsCompacting()) {
         MemoryChunk::FromHeapObject(object)
-            ->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object);
+            ->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object, new_size);
       }
     }
 
     if (may_contain_recorded_slots) {
       MemoryChunk::FromHeapObject(object)
-          ->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(object);
+          ->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(object, new_size);
       MemoryChunk::FromHeapObject(object)
-          ->RegisterObjectWithInvalidatedSlots<OLD_TO_SHARED>(object);
+          ->RegisterObjectWithInvalidatedSlots<OLD_TO_SHARED>(object, new_size);
     }
   }
 #ifdef VERIFY_HEAP
@@ -3930,6 +3931,8 @@ void Heap::NotifyObjectSizeChange(HeapObject object, int old_size, int new_size,
   DCHECK_LE(new_size, old_size);
   if (new_size == old_size) return;
 
+  UpdateInvalidatedObjectSize(object, new_size);
+
   const bool is_background = LocalHeap::Current() != nullptr;
   DCHECK_IMPLIES(is_background,
                  clear_recorded_slots == ClearRecordedSlots::kNo);
@@ -3946,6 +3949,20 @@ void Heap::NotifyObjectSizeChange(HeapObject object, int old_size, int new_size,
                         clear_recorded_slots, verify_no_slots_recorded);
 }
 
+void Heap::UpdateInvalidatedObjectSize(HeapObject object, int new_size) {
+  if (!MayContainRecordedSlots(object)) return;
+
+  if (incremental_marking()->IsCompacting()) {
+    MemoryChunk::FromHeapObject(object)
+        ->UpdateInvalidatedObjectSize<OLD_TO_OLD>(object, new_size);
+  }
+
+  MemoryChunk::FromHeapObject(object)->UpdateInvalidatedObjectSize<OLD_TO_NEW>(
+      object, new_size);
+  MemoryChunk::FromHeapObject(object)
+      ->UpdateInvalidatedObjectSize<OLD_TO_SHARED>(object, new_size);
+}
+
 #ifdef VERIFY_HEAP
 // Helper class for collecting slot addresses.
 class SlotCollectingVisitor final : public ObjectVisitor {
@@ -4615,11 +4632,35 @@ void Heap::Verify() {
   if (new_lo_space_) new_lo_space_->Verify(isolate());
   isolate()->string_table()->VerifyIfOwnedBy(isolate());
 
+  VerifyInvalidatedObjectSize();
+
 #if DEBUG
   VerifyCommittedPhysicalMemory();
 #endif  // DEBUG
 }
 
+namespace {
+void VerifyInvalidatedSlots(InvalidatedSlots* invalidated_slots) {
+  if (!invalidated_slots) return;
+  for (std::pair<HeapObject, int> object_and_size : *invalidated_slots) {
+    HeapObject object = object_and_size.first;
+    int size = object_and_size.second;
+    CHECK_EQ(object.Size(), size);
+  }
+}
+}  // namespace
+
+void Heap::VerifyInvalidatedObjectSize() {
+  OldGenerationMemoryChunkIterator chunk_iterator(this);
+  MemoryChunk* chunk;
+
+  while ((chunk = chunk_iterator.next()) != nullptr) {
+    VerifyInvalidatedSlots(chunk->invalidated_slots<OLD_TO_NEW>());
+    VerifyInvalidatedSlots(chunk->invalidated_slots<OLD_TO_OLD>());
+    VerifyInvalidatedSlots(chunk->invalidated_slots<OLD_TO_SHARED>());
+  }
+}
+
 void Heap::VerifyReadOnlyHeap() {
   CHECK(!read_only_space_->writable());
   read_only_space_->Verify(isolate());
src/heap/heap.h

@@ -1132,8 +1132,7 @@ class Heap {
   // manually.
   void NotifyObjectLayoutChange(
       HeapObject object, const DisallowGarbageCollection&,
-      InvalidateRecordedSlots invalidate_recorded_slots =
-          InvalidateRecordedSlots::kYes);
+      InvalidateRecordedSlots invalidate_recorded_slots, int new_size = 0);
 
   // The runtime uses this function to inform the GC of object size changes. The
   // GC will fill this area with a filler object and might clear recorded slots
@@ -1608,6 +1607,9 @@ class Heap {
   // created.
   void VerifyReadOnlyHeap();
   void VerifyRememberedSetFor(HeapObject object);
+
+  // Verify that cached size of invalidated object is up-to-date.
+  void VerifyInvalidatedObjectSize();
 #endif
 
 #ifdef V8_ENABLE_ALLOCATION_TIMEOUT
@@ -1824,6 +1826,9 @@ class Heap {
   V8_EXPORT_PRIVATE void ZapCodeObject(Address start_address,
                                        int size_in_bytes);
 
+  // Updates invalidated object size in all remembered sets.
+  void UpdateInvalidatedObjectSize(HeapObject object, int new_size);
+
   enum class VerifyNoSlotsRecorded { kYes, kNo };
 
   // This method is used by the sweeper on free memory ranges to make the page
src/heap/invalidated-slots-inl.h

@@ -28,22 +28,18 @@ bool InvalidatedSlotsFilter::IsValid(Address slot) {
     NextInvalidatedObject();
   }
 
-  HeapObject invalidated_object = HeapObject::FromAddress(invalidated_start_);
-
-  if (invalidated_size_ == 0) {
-    DCHECK(MarkCompactCollector::IsMapOrForwarded(invalidated_object.map()));
-    invalidated_size_ = invalidated_object.Size();
-  }
-
   int offset = static_cast<int>(slot - invalidated_start_);
 
   // OLD_TO_OLD can have slots in map word unlike other remembered sets.
   DCHECK_GE(offset, 0);
   DCHECK_IMPLIES(remembered_set_type_ != OLD_TO_OLD, offset > 0);
 
-  if (offset < invalidated_size_)
-    return offset == 0 ||
-           invalidated_object.IsValidSlot(invalidated_object.map(), offset);
+  if (offset < invalidated_size_) {
+    if (offset == 0) return true;
+    HeapObject invalidated_object = HeapObject::FromAddress(invalidated_start_);
+    DCHECK(MarkCompactCollector::IsMapOrForwarded(invalidated_object.map()));
+    return invalidated_object.IsValidSlot(invalidated_object.map(), offset);
+  }
 
   NextInvalidatedObject();
   return true;
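Read in isolation, the filter above answers one question: does a slot address fall inside a possibly dead invalidated object? A self-contained sketch of that check, assuming invalidated objects are kept as a sorted map from start address to cached size (simplified types and a hypothetical SlotInInvalidatedObject helper; not the V8 implementation):

#include <cstdint>
#include <iostream>
#include <map>

using Address = std::uintptr_t;

// Invalidated objects as {start address -> cached size}, kept sorted.
bool SlotInInvalidatedObject(const std::map<Address, int>& invalidated,
                             Address slot) {
  // Find the last invalidated object starting at or before the slot.
  auto it = invalidated.upper_bound(slot);
  if (it == invalidated.begin()) return false;
  --it;
  // The cached size makes this test possible even if the object is dead
  // and its map is no longer readable.
  return slot < it->first + static_cast<Address>(it->second);
}

int main() {
  std::map<Address, int> invalidated{{0x1000, 32}, {0x2000, 16}};
  std::cout << SlotInInvalidatedObject(invalidated, 0x1010) << "\n";  // 1
  std::cout << SlotInInvalidatedObject(invalidated, 0x1020) << "\n";  // 0
  std::cout << SlotInInvalidatedObject(invalidated, 0x2008) << "\n";  // 1
}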
@@ -51,12 +47,14 @@ bool InvalidatedSlotsFilter::IsValid(Address slot) {
 
 void InvalidatedSlotsFilter::NextInvalidatedObject() {
   invalidated_start_ = next_invalidated_start_;
-  invalidated_size_ = 0;
+  invalidated_size_ = next_invalidated_size_;
 
   if (iterator_ == iterator_end_) {
     next_invalidated_start_ = sentinel_;
+    next_invalidated_size_ = 0;
   } else {
-    next_invalidated_start_ = iterator_->address();
+    next_invalidated_start_ = iterator_->first.address();
+    next_invalidated_size_ = iterator_->second;
     iterator_++;
   }
 }
@@ -87,7 +85,7 @@ void InvalidatedSlotsCleanup::Free(Address free_start, Address free_end) {
 
 void InvalidatedSlotsCleanup::NextInvalidatedObject() {
   if (iterator_ != iterator_end_) {
-    invalidated_start_ = iterator_->address();
+    invalidated_start_ = iterator_->first.address();
   } else {
     invalidated_start_ = sentinel_;
   }
src/heap/invalidated-slots.h

@@ -21,7 +21,7 @@ namespace internal {
 // that potentially invalidates slots recorded concurrently. The second part
 // of each element is the size of the corresponding object before the layout
 // change.
-using InvalidatedSlots = std::set<HeapObject, Object::Comparer>;
+using InvalidatedSlots = std::map<HeapObject, int, Object::Comparer>;
 
 // This class provides IsValid predicate that takes into account the set
 // of invalidated objects in the given memory chunk.
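This typedef change is the heart of the CL: each invalidated object now carries its pre-layout-change size in the container itself, so readers never need the object's possibly dead map. A hypothetical sketch of the write path implied by the new typedef, with the invariant that a cached size may only stay the same or shrink (Address keys instead of HeapObject, and made-up Register/UpdateSize helpers; not the V8 code):

#include <cassert>
#include <cstdint>
#include <map>

using Address = std::uintptr_t;
using InvalidatedSlotsSketch = std::map<Address, int>;

// Mirrors the role of RegisterObjectWithInvalidatedSlots: (re-)registering
// caches the current size; the size is never allowed to grow.
void Register(InvalidatedSlotsSketch& slots, Address object, int new_size) {
  auto it = slots.find(object);
  assert(it == slots.end() || new_size <= it->second);
  slots.insert_or_assign(object, new_size);
}

// Mirrors the role of UpdateInvalidatedObjectSize: only touches objects
// that are already registered, e.g. after right-trimming.
void UpdateSize(InvalidatedSlotsSketch& slots, Address object, int new_size) {
  auto it = slots.find(object);
  if (it == slots.end()) return;
  assert(new_size <= it->second);
  it->second = new_size;
}

int main() {
  InvalidatedSlotsSketch slots;
  Register(slots, 0x1000, 64);
  UpdateSize(slots, 0x1000, 32);  // object was right-trimmed to 32 bytes
  assert(slots.at(0x1000) == 32);
}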
@@ -45,9 +45,10 @@ class V8_EXPORT_PRIVATE InvalidatedSlotsFilter {
   InvalidatedSlots::const_iterator iterator_;
   InvalidatedSlots::const_iterator iterator_end_;
   Address sentinel_;
-  Address invalidated_start_;
-  Address next_invalidated_start_;
-  int invalidated_size_;
+  Address invalidated_start_{kNullAddress};
+  Address next_invalidated_start_{kNullAddress};
+  int invalidated_size_{0};
+  int next_invalidated_size_{0};
   InvalidatedSlots empty_;
 #ifdef DEBUG
   Address last_slot_;
src/heap/memory-chunk.cc

@@ -370,14 +370,17 @@ void MemoryChunk::ReleaseInvalidatedSlots() {
 }
 
 template V8_EXPORT_PRIVATE void
-MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(HeapObject object);
+MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(HeapObject object,
+                                                            int new_size);
 template V8_EXPORT_PRIVATE void
-MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(HeapObject object);
+MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(HeapObject object,
+                                                            int new_size);
 template V8_EXPORT_PRIVATE void MemoryChunk::RegisterObjectWithInvalidatedSlots<
-    OLD_TO_SHARED>(HeapObject object);
+    OLD_TO_SHARED>(HeapObject object, int new_size);
 
 template <RememberedSetType type>
-void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object) {
+void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object,
+                                                     int new_size) {
   // ByteArray and FixedArray are still invalidated in tests.
   DCHECK(object.IsString() || object.IsByteArray() || object.IsFixedArray());
   bool skip_slot_recording;
@@ -407,13 +410,43 @@ void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object) {
     AllocateInvalidatedSlots<type>();
   }
 
-  invalidated_slots<type>()->insert(object);
+  DCHECK_GT(new_size, 0);
+  InvalidatedSlots& invalidated_slots = *this->invalidated_slots<type>();
+  DCHECK_IMPLIES(invalidated_slots.count(object) > 0,
+                 new_size <= invalidated_slots[object]);
+  invalidated_slots.insert_or_assign(object, new_size);
 }
 
+template V8_EXPORT_PRIVATE void
+MemoryChunk::UpdateInvalidatedObjectSize<OLD_TO_NEW>(HeapObject object,
+                                                     int new_size);
+template V8_EXPORT_PRIVATE void
+MemoryChunk::UpdateInvalidatedObjectSize<OLD_TO_OLD>(HeapObject object,
+                                                     int new_size);
+template V8_EXPORT_PRIVATE void
+MemoryChunk::UpdateInvalidatedObjectSize<OLD_TO_SHARED>(HeapObject object,
+                                                        int new_size);
+
+template <RememberedSetType type>
+void MemoryChunk::UpdateInvalidatedObjectSize(HeapObject object, int new_size) {
+  DCHECK_GT(new_size, 0);
+
+  if (invalidated_slots<type>() == nullptr) return;
+
+  InvalidatedSlots& invalidated_slots = *this->invalidated_slots<type>();
+  if (invalidated_slots.count(object) > 0) {
+    DCHECK_LE(new_size, invalidated_slots[object]);
+    DCHECK_NE(0, invalidated_slots[object]);
+    invalidated_slots.insert_or_assign(object, new_size);
+  }
+}
+
 template bool MemoryChunk::RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(
     HeapObject object);
 template bool MemoryChunk::RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(
     HeapObject object);
 template bool MemoryChunk::RegisteredObjectWithInvalidatedSlots<OLD_TO_SHARED>(
     HeapObject object);
 
 template <RememberedSetType type>
 bool MemoryChunk::RegisteredObjectWithInvalidatedSlots(HeapObject object) {
src/heap/memory-chunk.h

@@ -143,7 +143,11 @@ class MemoryChunk : public BasicMemoryChunk {
   template <RememberedSetType type>
   void ReleaseInvalidatedSlots();
   template <RememberedSetType type>
-  V8_EXPORT_PRIVATE void RegisterObjectWithInvalidatedSlots(HeapObject object);
+  V8_EXPORT_PRIVATE void RegisterObjectWithInvalidatedSlots(HeapObject object,
+                                                            int new_size);
+  template <RememberedSetType type>
+  V8_EXPORT_PRIVATE void UpdateInvalidatedObjectSize(HeapObject object,
+                                                     int new_size);
   template <RememberedSetType type>
   bool RegisteredObjectWithInvalidatedSlots(HeapObject object);
   template <RememberedSetType type>
src/objects/string.cc

@@ -249,11 +249,6 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
   bool is_internalized = this->IsInternalizedString();
   bool has_pointers = StringShape(*this).IsIndirect();
 
-  if (has_pointers) {
-    isolate->heap()->NotifyObjectLayoutChange(*this, no_gc,
-                                              InvalidateRecordedSlots::kYes);
-  }
-
   base::SharedMutexGuard<base::kExclusive> shared_mutex_guard(
       isolate->internalized_string_access());
   // Morph the string to an external string by replacing the map and
@@ -277,6 +272,12 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
 
   // Byte size of the external String object.
   int new_size = this->SizeFromMap(new_map);
+
+  if (has_pointers) {
+    isolate->heap()->NotifyObjectLayoutChange(
+        *this, no_gc, InvalidateRecordedSlots::kYes, new_size);
+  }
+
   if (!isolate->heap()->IsLargeObject(*this)) {
     isolate->heap()->NotifyObjectSizeChange(
         *this, size, new_size,
@@ -333,11 +334,6 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
   bool is_internalized = this->IsInternalizedString();
   bool has_pointers = StringShape(*this).IsIndirect();
 
-  if (has_pointers) {
-    isolate->heap()->NotifyObjectLayoutChange(*this, no_gc,
-                                              InvalidateRecordedSlots::kYes);
-  }
-
   base::SharedMutexGuard<base::kExclusive> shared_mutex_guard(
       isolate->internalized_string_access());
   // Morph the string to an external string by replacing the map and
@@ -362,6 +358,11 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
   // Byte size of the external String object.
   int new_size = this->SizeFromMap(new_map);
 
+  if (has_pointers) {
+    isolate->heap()->NotifyObjectLayoutChange(
+        *this, no_gc, InvalidateRecordedSlots::kYes, new_size);
+  }
+
   isolate->heap()->NotifyObjectSizeChange(
       *this, size, new_size,
       has_pointers ? ClearRecordedSlots::kYes : ClearRecordedSlots::kNo);
test/cctest/heap/test-compaction.cc

@@ -234,13 +234,15 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithInvalidatedSlots) {
       }
     }
     // First object is going to be evacuated.
+    HeapObject front_object = *compaction_page_handles.front();
     to_be_aborted_page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(
-        *compaction_page_handles.front());
+        front_object, front_object.Size());
     // Last object is NOT going to be evacuated.
    // This happens since not all objects fit on the only other page in the
    // old space, the GC isn't allowed to allocate another page.
+    HeapObject back_object = *compaction_page_handles.back();
     to_be_aborted_page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(
-        *compaction_page_handles.back());
+        back_object, back_object.Size());
     to_be_aborted_page->SetFlag(
         MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
 
test/cctest/heap/test-invalidated-slots.cc

@@ -71,7 +71,9 @@ HEAP_TEST(InvalidatedSlotsSomeInvalidatedRanges) {
   Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
   // Register every second byte arrays as invalidated.
   for (size_t i = 0; i < byte_arrays.size(); i += 2) {
-    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
+    ByteArray byte_array = byte_arrays[i];
+    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_array,
+                                                         byte_array.Size());
   }
   InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
   for (size_t i = 0; i < byte_arrays.size(); i++) {
@@ -96,7 +98,9 @@ HEAP_TEST(InvalidatedSlotsAllInvalidatedRanges) {
   Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
   // Register the all byte arrays as invalidated.
   for (size_t i = 0; i < byte_arrays.size(); i++) {
-    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
+    ByteArray byte_array = byte_arrays[i];
+    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_array,
+                                                         byte_array.Size());
   }
   InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
   for (size_t i = 0; i < byte_arrays.size(); i++) {
@@ -117,16 +121,18 @@ HEAP_TEST(InvalidatedSlotsAfterTrimming) {
   Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
   // Register the all byte arrays as invalidated.
   for (size_t i = 0; i < byte_arrays.size(); i++) {
-    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
+    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(
+        byte_arrays[i], ByteArray::kHeaderSize);
   }
   // Trim byte arrays and check that the slots outside the byte arrays are
   // considered invalid if the old space page was swept.
-  InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
   for (size_t i = 0; i < byte_arrays.size(); i++) {
     ByteArray byte_array = byte_arrays[i];
     Address start = byte_array.address() + ByteArray::kHeaderSize;
     Address end = byte_array.address() + byte_array.Size();
     heap->RightTrimFixedArray(byte_array, byte_array.length());
+
+    InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
     for (Address addr = start; addr < end; addr += kTaggedSize) {
       CHECK_EQ(filter.IsValid(addr), page->SweepingDone());
     }
@@ -144,7 +150,9 @@ HEAP_TEST(InvalidatedSlotsEvacuationCandidate) {
   // This should be no-op because the page is marked as evacuation
   // candidate.
   for (size_t i = 0; i < byte_arrays.size(); i++) {
-    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
+    ByteArray byte_array = byte_arrays[i];
+    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_array,
+                                                         byte_array.Size());
   }
   // All slots must still be valid.
   InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
@@ -168,7 +176,9 @@ HEAP_TEST(InvalidatedSlotsResetObjectRegression) {
   heap->RightTrimFixedArray(byte_arrays[0], byte_arrays[0].length() - 8);
   // Register the all byte arrays as invalidated.
   for (size_t i = 0; i < byte_arrays.size(); i++) {
-    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
+    ByteArray byte_array = byte_arrays[i];
+    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_array,
+                                                         byte_array.Size());
   }
   // All slots must still be invalid.
   InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
@@ -361,7 +371,9 @@ HEAP_TEST(InvalidatedSlotsCleanupFull) {
   Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
   // Register all byte arrays as invalidated.
   for (size_t i = 0; i < byte_arrays.size(); i++) {
-    page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i]);
+    ByteArray byte_array = byte_arrays[i];
+    page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_array,
+                                                         byte_array.Size());
   }
 
   // Mark full page as free
@@ -380,7 +392,9 @@ HEAP_TEST(InvalidatedSlotsCleanupEachObject) {
   Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
   // Register all byte arrays as invalidated.
   for (size_t i = 0; i < byte_arrays.size(); i++) {
-    page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i]);
+    ByteArray byte_array = byte_arrays[i];
+    page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_array,
+                                                         byte_array.Size());
   }
 
   // Mark each object as free on page
@@ -407,7 +421,8 @@ HEAP_TEST(InvalidatedSlotsCleanupRightTrim) {
   ByteArray& invalidated = byte_arrays[1];
 
   heap->RightTrimFixedArray(invalidated, invalidated.length() - 8);
-  page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(invalidated);
+  page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(invalidated,
+                                                       invalidated.Size());
 
   // Free memory at end of invalidated object
   InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);