[heap] Simplify adjusting of live bytes.

We never increment live bytes concurrently with the sweeper.

BUG=

Review-Url: https://codereview.chromium.org/2504193002
Cr-Commit-Position: refs/heads/master@{#41097}
Author: ulan, 2016-11-18 03:11:30 -08:00 (committed by Commit bot)
Commit: d3f2213b61, parent: 30db83a3b5
20 changed files with 84 additions and 91 deletions
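
In short, the patch drops the Heap::InvocationMode distinction: Heap::AdjustLiveBytes and Heap::RightTrimFixedArray lose their mode parameter/template argument, MemoryChunk keeps a single IncrementLiveBytes entry point, and the IncrementLiveBytesFromMutator path, which had to re-account bytes on pages the sweeper had not yet processed, is deleted. A minimal standalone sketch of the invariant this relies on, using hypothetical stand-in types rather than V8's real classes:

  // Hypothetical stand-ins, not V8's real API: the point is only that a
  // single adjustment path suffices once live bytes are never touched
  // while a page is still being swept.
  #include <cassert>

  class Chunk {
   public:
    bool SweepingDone() const { return sweeping_done_; }
    void IncrementLiveBytes(int by) {
      live_byte_count_ += by;
      assert(live_byte_count_ >= 0);
    }

   private:
    bool sweeping_done_ = true;  // invariant: sweeping finished before adjustment
    long live_byte_count_ = 0;
  };

  // Mirrors the DCHECK added in Heap::AdjustLiveBytes: adjustment only
  // happens after sweeping on the chunk is done, so the old mutator-vs-GC
  // split (and its extra accounting) is unnecessary.
  void AdjustLiveBytes(Chunk* chunk, int by) {
    assert(chunk->SweepingDone());
    chunk->IncrementLiveBytes(by);
  }

  int main() {
    Chunk chunk;
    AdjustLiveBytes(&chunk, 128);  // e.g. black allocation marks new bytes live
    AdjustLiveBytes(&chunk, -32);  // e.g. right-trimming an array frees tail bytes
  }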

View File

@@ -758,8 +758,7 @@ class ElementsAccessorBase : public ElementsAccessor {
     }
     if (2 * length <= capacity) {
       // If more than half the elements won't be used, trim the array.
-      isolate->heap()->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(
-          *backing_store, capacity - length);
+      isolate->heap()->RightTrimFixedArray(*backing_store, capacity - length);
     } else {
       // Otherwise, fill the unused tail with holes.
       for (uint32_t i = length; i < old_length; i++) {
@@ -1799,8 +1798,7 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
       return;
     }
-    isolate->heap()->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(
-        *backing_store, length - entry);
+    isolate->heap()->RightTrimFixedArray(*backing_store, length - entry);
   }
 
   static void DeleteCommon(Handle<JSObject> obj, uint32_t entry,

View File

@@ -3095,8 +3095,7 @@ bool Heap::CanMoveObjectStart(HeapObject* object) {
   return Page::FromAddress(address)->SweepingDone();
 }
 
-void Heap::AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode) {
+void Heap::AdjustLiveBytes(HeapObject* object, int by) {
   // As long as the inspected object is black and we are currently not iterating
   // the heap using HeapIterator, we can update the live byte count. We cannot
   // update while using HeapIterator because the iterator is temporarily
@@ -3106,11 +3105,8 @@ void Heap::AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode) {
   } else if (!in_heap_iterator() &&
              !mark_compact_collector()->sweeping_in_progress() &&
              Marking::IsBlack(ObjectMarking::MarkBitFrom(object->address()))) {
-    if (mode == SEQUENTIAL_TO_SWEEPER) {
-      MemoryChunk::IncrementLiveBytesFromGC(object, by);
-    } else {
-      MemoryChunk::IncrementLiveBytesFromMutator(object, by);
-    }
+    DCHECK(MemoryChunk::FromAddress(object->address())->SweepingDone());
+    MemoryChunk::IncrementLiveBytes(object, by);
   }
 }
@@ -3165,7 +3161,7 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
       FixedArrayBase::cast(HeapObject::FromAddress(new_start));
 
   // Maintain consistency of live bytes during incremental marking
-  AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER);
+  AdjustLiveBytes(new_object, -bytes_to_trim);
 
   // Remove recorded slots for the new map and length offset.
   ClearRecordedSlot(new_object, HeapObject::RawField(new_object, 0));
@@ -3177,15 +3173,6 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
   return new_object;
 }
 
-// Force instantiation of templatized method.
-template void Heap::RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
-    FixedArrayBase*, int);
-template void Heap::RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(
-    FixedArrayBase*, int);
-
-template<Heap::InvocationMode mode>
 void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) {
   const int len = object->length();
   DCHECK_LE(elements_to_trim, len);
@@ -3238,7 +3225,7 @@ void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) {
   object->synchronized_set_length(len - elements_to_trim);
 
   // Maintain consistency of live bytes during incremental marking
-  AdjustLiveBytes(object, -bytes_to_trim, mode);
+  AdjustLiveBytes(object, -bytes_to_trim);
 
   // Notify the heap profiler of change in object layout. The array may not be
   // moved during GC, and size has to be adjusted nevertheless.

View File

@@ -556,12 +556,6 @@ class Heap {
   enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
 
-  // Indicates whether live bytes adjustment is triggered
-  // - from within the GC code before sweeping started (SEQUENTIAL_TO_SWEEPER),
-  // - or from within GC (CONCURRENT_TO_SWEEPER),
-  // - or mutator code (CONCURRENT_TO_SWEEPER).
-  enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER };
-
   enum UpdateAllocationSiteMode { kGlobal, kCached };
 
   // Taking this lock prevents the GC from entering a phase that relocates
@@ -751,14 +745,13 @@ class Heap {
   bool CanMoveObjectStart(HeapObject* object);
 
   // Maintain consistency of live bytes during incremental marking.
-  void AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode);
+  void AdjustLiveBytes(HeapObject* object, int by);
 
   // Trim the given array from the left. Note that this relocates the object
   // start and hence is only valid if there is only a single reference to it.
   FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
 
   // Trim the given array from the right.
-  template<Heap::InvocationMode mode>
   void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
 
   // Converts the given boolean condition to JavaScript boolean value.

View File

@@ -129,7 +129,7 @@ static void MarkObjectGreyDoNotEnqueue(Object* obj) {
     HeapObject* heap_obj = HeapObject::cast(obj);
     MarkBit mark_bit = ObjectMarking::MarkBitFrom(HeapObject::cast(obj));
     if (Marking::IsBlack(mark_bit)) {
-      MemoryChunk::IncrementLiveBytesFromGC(heap_obj, -heap_obj->Size());
+      MemoryChunk::IncrementLiveBytes(heap_obj, -heap_obj->Size());
     }
     Marking::AnyToGrey(mark_bit);
   }
@@ -268,7 +268,7 @@ class IncrementalMarkingMarkingVisitor
     MarkBit mark_bit = ObjectMarking::MarkBitFrom(heap_object);
     if (Marking::IsWhite(mark_bit)) {
       Marking::MarkBlack(mark_bit);
-      MemoryChunk::IncrementLiveBytesFromGC(heap_object, heap_object->Size());
+      MemoryChunk::IncrementLiveBytes(heap_object, heap_object->Size());
       return true;
     }
     return false;
@@ -864,7 +864,7 @@ void IncrementalMarking::MarkBlack(HeapObject* obj, int size) {
   MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
   if (Marking::IsBlack(mark_bit)) return;
   Marking::GreyToBlack(mark_bit);
-  MemoryChunk::IncrementLiveBytesFromGC(obj, size);
+  MemoryChunk::IncrementLiveBytes(obj, size);
 }
 
 intptr_t IncrementalMarking::ProcessMarkingDeque(
@@ -933,7 +933,7 @@ void IncrementalMarking::Hurry() {
       MarkBit mark_bit = ObjectMarking::MarkBitFrom(cache);
       if (Marking::IsGrey(mark_bit)) {
         Marking::GreyToBlack(mark_bit);
-        MemoryChunk::IncrementLiveBytesFromGC(cache, cache->Size());
+        MemoryChunk::IncrementLiveBytes(cache, cache->Size());
       }
     }
     context = Context::cast(context)->next_context_link();

View File

@@ -15,7 +15,7 @@ namespace internal {
 void MarkCompactCollector::PushBlack(HeapObject* obj) {
   DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(obj)));
   if (marking_deque()->Push(obj)) {
-    MemoryChunk::IncrementLiveBytesFromGC(obj, obj->Size());
+    MemoryChunk::IncrementLiveBytes(obj, obj->Size());
   } else {
     MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
     Marking::BlackToGrey(mark_bit);
@@ -26,7 +26,7 @@ void MarkCompactCollector::PushBlack(HeapObject* obj) {
 void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
   DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(obj)));
   if (!marking_deque()->Unshift(obj)) {
-    MemoryChunk::IncrementLiveBytesFromGC(obj, -obj->Size());
+    MemoryChunk::IncrementLiveBytes(obj, -obj->Size());
     MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
     Marking::BlackToGrey(mark_bit);
   }
@@ -47,7 +47,7 @@ void MarkCompactCollector::SetMark(HeapObject* obj, MarkBit mark_bit) {
   DCHECK(Marking::IsWhite(mark_bit));
   DCHECK(ObjectMarking::MarkBitFrom(obj) == mark_bit);
   Marking::WhiteToBlack(mark_bit);
-  MemoryChunk::IncrementLiveBytesFromGC(obj, obj->Size());
+  MemoryChunk::IncrementLiveBytes(obj, obj->Size());
 }

View File

@@ -2578,8 +2578,8 @@ bool MarkCompactCollector::CompactTransitionArray(
   // array disappeared during GC.
   int trim = TransitionArray::Capacity(transitions) - transition_index;
   if (trim > 0) {
-    heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
-        transitions, trim * TransitionArray::kTransitionSize);
+    heap_->RightTrimFixedArray(transitions,
+                               trim * TransitionArray::kTransitionSize);
     transitions->SetNumberOfTransitions(transition_index);
   }
   return descriptors_owner_died;
@@ -2597,8 +2597,8 @@ void MarkCompactCollector::TrimDescriptorArray(Map* map,
   int number_of_descriptors = descriptors->number_of_descriptors_storage();
   int to_trim = number_of_descriptors - number_of_own_descriptors;
   if (to_trim > 0) {
-    heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
-        descriptors, to_trim * DescriptorArray::kDescriptorSize);
+    heap_->RightTrimFixedArray(descriptors,
+                               to_trim * DescriptorArray::kDescriptorSize);
     descriptors->SetNumberOfDescriptors(number_of_own_descriptors);
 
     if (descriptors->HasEnumCache()) TrimEnumCache(map, descriptors);
@@ -2629,13 +2629,11 @@ void MarkCompactCollector::TrimEnumCache(Map* map,
   int to_trim = enum_cache->length() - live_enum;
   if (to_trim <= 0) return;
-  heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
-      descriptors->GetEnumCache(), to_trim);
+  heap_->RightTrimFixedArray(descriptors->GetEnumCache(), to_trim);
 
   if (!descriptors->HasEnumIndicesCache()) return;
   FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache();
-  heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(enum_indices_cache,
-                                                          to_trim);
+  heap_->RightTrimFixedArray(enum_indices_cache, to_trim);
 }

View File

@@ -139,7 +139,7 @@ class ScavengingVisitor : public StaticVisitorBase {
 
     if (marks_handling == TRANSFER_MARKS) {
      if (IncrementalMarking::TransferColor(source, target, size)) {
-       MemoryChunk::IncrementLiveBytesFromGC(target, size);
+       MemoryChunk::IncrementLiveBytes(target, size);
      }
     }
   }

View File

@@ -221,7 +221,7 @@ void Page::InitializeFreeListCategories() {
   }
 }
 
-void MemoryChunk::IncrementLiveBytesFromGC(HeapObject* object, int by) {
+void MemoryChunk::IncrementLiveBytes(HeapObject* object, int by) {
   MemoryChunk::FromAddress(object->address())->IncrementLiveBytes(by);
 }
@@ -244,14 +244,6 @@ void MemoryChunk::IncrementLiveBytes(int by) {
   DCHECK_LE(static_cast<size_t>(live_byte_count_), size_);
 }
 
-void MemoryChunk::IncrementLiveBytesFromMutator(HeapObject* object, int by) {
-  MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
-  if (!chunk->InNewSpace() && !static_cast<Page*>(chunk)->SweepingDone()) {
-    static_cast<PagedSpace*>(chunk->owner())->Allocate(by);
-  }
-  chunk->IncrementLiveBytes(by);
-}
-
 bool PagedSpace::Contains(Address addr) {
   Page* p = Page::FromAddress(addr);
   if (!Page::IsValid(p)) return false;
@@ -439,7 +431,7 @@ AllocationResult PagedSpace::AllocateRawUnaligned(
     if (object != NULL) {
       if (heap()->incremental_marking()->black_allocation()) {
         Marking::MarkBlack(ObjectMarking::MarkBitFrom(object));
-        MemoryChunk::IncrementLiveBytesFromGC(object, size_in_bytes);
+        MemoryChunk::IncrementLiveBytes(object, size_in_bytes);
       }
     }
   }

View File

@@ -2616,7 +2616,7 @@ HeapObject* FreeList::Allocate(size_t size_in_bytes) {
 
   // Memory in the linear allocation area is counted as allocated. We may free
   // a little of this again immediately - see below.
-  owner_->Allocate(static_cast<int>(new_node_size));
+  owner_->AccountAllocatedBytes(new_node_size);
 
   if (owner_->heap()->inline_allocation_disabled()) {
     // Keep the linear allocation area empty if requested to do so, just
@@ -3009,7 +3009,7 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
 
   if (heap()->incremental_marking()->black_allocation()) {
     Marking::MarkBlack(ObjectMarking::MarkBitFrom(object));
-    MemoryChunk::IncrementLiveBytesFromGC(object, object_size);
+    MemoryChunk::IncrementLiveBytes(object, object_size);
   }
   return object;
 }

View File

@@ -367,8 +367,7 @@ class MemoryChunk {
   static const int kAllocatableMemory = kPageSize - kObjectStartOffset;
 
-  static inline void IncrementLiveBytesFromMutator(HeapObject* object, int by);
-  static inline void IncrementLiveBytesFromGC(HeapObject* object, int by);
+  static inline void IncrementLiveBytes(HeapObject* object, int by);
 
   // Only works if the pointer is in the first kPageSize of the MemoryChunk.
   static MemoryChunk* FromAddress(Address a) {
@@ -2034,7 +2033,9 @@ class PagedSpace : public Space {
   void MarkAllocationInfoBlack();
 
-  void Allocate(int bytes) { accounting_stats_.AllocateBytes(bytes); }
+  void AccountAllocatedBytes(size_t bytes) {
+    accounting_stats_.AllocateBytes(bytes);
+  }
 
   void IncreaseCapacity(size_t bytes);

View File

@@ -164,8 +164,7 @@ void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) {
   // Right trim the relocation info to free up remaining space.
   const int delta = reloc_info->length() - new_reloc_length;
   if (delta > 0) {
-    isolate->heap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
-        reloc_info, delta);
+    isolate->heap()->RightTrimFixedArray(reloc_info, delta);
   }
 }

View File

@@ -3436,8 +3436,7 @@ void Isolate::CheckDetachedContextsAfterGC() {
   if (new_length == 0) {
     heap()->set_detached_contexts(heap()->empty_fixed_array());
   } else if (new_length < length) {
-    heap()->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(
-        *detached_contexts, length - new_length);
+    heap()->RightTrimFixedArray(*detached_contexts, length - new_length);
   }
 }

View File

@@ -245,7 +245,7 @@ LayoutDescriptor* LayoutDescriptor::Trim(Heap* heap, Map* map,
   if (current_length != array_length) {
     DCHECK_LT(array_length, current_length);
     int delta = current_length - array_length;
-    heap->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(this, delta);
+    heap->RightTrimFixedArray(this, delta);
   }
   memset(DataPtr(), 0, DataSize());
   LayoutDescriptor* layout_descriptor =

View File

@@ -2311,7 +2311,7 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
   self->set_resource(resource);
   if (is_internalized) self->Hash();  // Force regeneration of the hash value.
 
-  heap->AdjustLiveBytes(this, new_size - size, Heap::CONCURRENT_TO_SWEEPER);
+  heap->AdjustLiveBytes(this, new_size - size);
   return true;
 }
@@ -2377,7 +2377,7 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
   self->set_resource(resource);
   if (is_internalized) self->Hash();  // Force regeneration of the hash value.
 
-  heap->AdjustLiveBytes(this, new_size - size, Heap::CONCURRENT_TO_SWEEPER);
+  heap->AdjustLiveBytes(this, new_size - size);
   return true;
 }
@@ -3427,7 +3427,7 @@ void MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
     // If there are properties in the new backing store, trim it to the correct
     // size and install the backing store into the object.
     if (external > 0) {
-      heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, inobject);
+      heap->RightTrimFixedArray(*array, inobject);
       object->set_properties(*array);
     }
@@ -3440,8 +3440,7 @@ void MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
     Address address = object->address();
     heap->CreateFillerObjectAt(address + new_instance_size, instance_size_delta,
                                ClearRecordedSlots::kYes);
-    heap->AdjustLiveBytes(*object, -instance_size_delta,
-                          Heap::CONCURRENT_TO_SWEEPER);
+    heap->AdjustLiveBytes(*object, -instance_size_delta);
   }
 
   // We are storing the new map using release store after creating a filler for
@@ -3536,8 +3535,7 @@ void MigrateFastToSlow(Handle<JSObject> object, Handle<Map> new_map,
     Heap* heap = isolate->heap();
     heap->CreateFillerObjectAt(object->address() + new_instance_size,
                                instance_size_delta, ClearRecordedSlots::kYes);
-    heap->AdjustLiveBytes(*object, -instance_size_delta,
-                          Heap::CONCURRENT_TO_SWEEPER);
+    heap->AdjustLiveBytes(*object, -instance_size_delta);
   }
 
   // We are storing the new map using release store after creating a filler for
@@ -10141,8 +10139,7 @@ Handle<FixedArray> FixedArray::SetAndGrow(Handle<FixedArray> array, int index,
 void FixedArray::Shrink(int new_length) {
   DCHECK(0 <= new_length && new_length <= length());
   if (new_length < length()) {
-    GetHeap()->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(
-        this, length() - new_length);
+    GetHeap()->RightTrimFixedArray(this, length() - new_length);
   }
 }
@@ -12050,7 +12047,7 @@ Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
   // that are a multiple of pointer size.
   heap->CreateFillerObjectAt(start_of_string + new_size, delta,
                              ClearRecordedSlots::kNo);
-  heap->AdjustLiveBytes(*string, -delta, Heap::CONCURRENT_TO_SWEEPER);
+  heap->AdjustLiveBytes(*string, -delta);
 
   // We are storing the new length using release store after creating a filler
   // for the left-over space to avoid races with the sweeper thread.
@@ -12470,8 +12467,7 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
   }
   if (dst != length) {
     // Always trim even when array is cleared because of heap verifier.
-    heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(code_map,
-                                                           length - dst);
+    heap->RightTrimFixedArray(code_map, length - dst);
     if (code_map->length() == kEntriesStart) {
       ClearOptimizedCodeMap();
     }
@@ -12484,8 +12480,7 @@ void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
   DCHECK(shrink_by % kEntryLength == 0);
   DCHECK(shrink_by <= code_map->length() - kEntriesStart);
   // Always trim even when array is cleared because of heap verifier.
-  GetHeap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(code_map,
-                                                              shrink_by);
+  GetHeap()->RightTrimFixedArray(code_map, shrink_by);
   if (code_map->length() == kEntriesStart) {
     ClearOptimizedCodeMap();
   }

View File

@@ -249,8 +249,7 @@ RUNTIME_FUNCTION(Runtime_GetArrayKeys) {
   }
 
   if (j != keys->length()) {
-    isolate->heap()->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(
-        *keys, keys->length() - j);
+    isolate->heap()->RightTrimFixedArray(*keys, keys->length() - j);
   }
 
   return *isolate->factory()->NewJSArrayWithElements(keys);

View File

@@ -652,7 +652,7 @@ MUST_USE_RESULT static Object* StringReplaceGlobalRegExpWithEmptyString(
   if (!heap->lo_space()->Contains(*answer)) {
     heap->CreateFillerObjectAt(end_of_string, delta, ClearRecordedSlots::kNo);
   }
-  heap->AdjustLiveBytes(*answer, -delta, Heap::CONCURRENT_TO_SWEEPER);
+  heap->AdjustLiveBytes(*answer, -delta);
   return *answer;
 }

View File

@@ -164,8 +164,7 @@ void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) {
   // Right trim the relocation info to free up remaining space.
   const int delta = reloc_info->length() - new_reloc_length;
   if (delta > 0) {
-    isolate->heap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
-        reloc_info, delta);
+    isolate->heap()->RightTrimFixedArray(reloc_info, delta);
   }
 }

View File

@@ -11,6 +11,7 @@
 // Tests that should have access to private methods of {v8::internal::Heap}.
 // Those tests need to be defined using HEAP_TEST(Name) { ... }.
 #define HEAP_TEST_METHODS(V) \
+  V(AdjustBytes) \
   V(CompactionFullAbortedPage) \
   V(CompactionPartiallyAbortedPage) \
   V(CompactionPartiallyAbortedPageIntraAbortedPointers) \

View File

@@ -5960,8 +5960,7 @@ static void TestRightTrimFixedTypedArray(i::ExternalArrayType type,
   Handle<FixedTypedArrayBase> array =
       factory->NewFixedTypedArray(initial_length, type, true);
   int old_size = array->size();
-  heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array,
-                                                         elements_to_trim);
+  heap->RightTrimFixedArray(*array, elements_to_trim);
 
   // Check that free space filler is at the right place and did not smash the
   // array header.
@@ -6446,7 +6445,7 @@ HEAP_TEST(Regress587004) {
   }
   CcTest::CollectGarbage(OLD_SPACE);
   heap::SimulateFullSpace(heap->old_space());
-  heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, N - 1);
+  heap->RightTrimFixedArray(*array, N - 1);
   heap->mark_compact_collector()->EnsureSweepingCompleted();
   ByteArray* byte_array;
   const int M = 256;
@@ -6530,7 +6529,7 @@ HEAP_TEST(Regress589413) {
     }
     heap::SimulateIncrementalMarking(heap);
     for (size_t j = 0; j < arrays.size(); j++) {
-      heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(arrays[j], N - 1);
+      heap->RightTrimFixedArray(arrays[j], N - 1);
     }
   }
   // Force allocation from the free list.
@@ -6895,7 +6894,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
 
   // Trim it once by one word to make checking for white marking color uniform.
   Address previous = end_address - kPointerSize;
-  heap->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(*array, 1);
+  heap->RightTrimFixedArray(*array, 1);
   HeapObject* filler = HeapObject::FromAddress(previous);
   CHECK(filler->IsFiller());
   CHECK(Marking::IsImpossible(ObjectMarking::MarkBitFrom(previous)));
@@ -6904,7 +6903,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
   for (int i = 1; i <= 3; i++) {
     for (int j = 0; j < 10; j++) {
       previous -= kPointerSize * i;
-      heap->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(*array, i);
+      heap->RightTrimFixedArray(*array, i);
       HeapObject* filler = HeapObject::FromAddress(previous);
       CHECK(filler->IsFiller());
       CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(previous)));

View File

@@ -797,5 +797,38 @@ TEST(ShrinkPageToHighWaterMarkTwoWordFiller) {
   CHECK_EQ(0u, shrinked);
 }
 
+HEAP_TEST(AdjustBytes) {
+  CcTest::InitializeVM();
+  Isolate* isolate = CcTest::i_isolate();
+  HandleScope scope(isolate);
+  Heap* heap = isolate->heap();
+  for (int i = 0; i < 3; i++) {
+    heap->CollectGarbage(OLD_SPACE, i::GarbageCollectionReason::kTesting);
+  }
+  heap->mark_compact_collector()->EnsureSweepingCompleted();
+  heap::SealCurrentObjects(CcTest::heap());
+  Handle<FixedArray> array = isolate->factory()->NewFixedArray(5000, TENURED);
+  heap->CollectGarbage(OLD_SPACE, i::GarbageCollectionReason::kTesting);
+  heap->delay_sweeper_tasks_for_testing_ = false;
+  size_t size_before = heap->SizeOfObjects();
+  heap->RightTrimFixedArray(*array, 1000);
+  heap->mark_compact_collector()->sweeper().StartSweeperTasks();
+  heap->mark_compact_collector()->EnsureSweepingCompleted();
+  size_t size_after = heap->SizeOfObjects();
+  // Right trimming during sweeping does not affect size counters.
+  CHECK_EQ(size_before, size_after);
+  heap->RightTrimFixedArray(*array, 1000);
+  size_after = heap->SizeOfObjects();
+  // Right trimming before incremental marking runs does not affect size
+  // counters.
+  CHECK_EQ(size_before, size_after);
+  heap->CollectGarbage(OLD_SPACE, i::GarbageCollectionReason::kTesting);
+  size_after = heap->SizeOfObjects();
+  CHECK_EQ(size_before, size_after + 2000 * kPointerSize);
+}
+
 }  // namespace internal
 }  // namespace v8
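
A note on the arithmetic in the final CHECK of HEAP_TEST(AdjustBytes): the array is right-trimmed twice by 1000 elements, and each FixedArray element is one pointer wide, so once a full GC re-counts live bytes the reported size drops by 2000 * kPointerSize. The two earlier trims, one racing the concurrent sweeper and one before incremental marking runs, deliberately leave the size counters unchanged.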