// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "test/cctest/heap/heap-utils.h"

#include "src/execution/isolate.h"
#include "src/heap/factory.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
#include "test/cctest/cctest.h"

namespace v8 {
namespace internal {
namespace heap {

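// Triggers a scavenge (young-generation garbage collection).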
void InvokeScavenge() { CcTest::CollectGarbage(i::NEW_SPACE); }

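// Triggers a full mark-sweep garbage collection.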
void InvokeMarkSweep() { CcTest::CollectAllGarbage(); }

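// "Seals" the current set of old-space objects: runs two full GCs to compact
// the heap, finishes sweeping, releases the linear allocation area, and marks
// every old-space page as never-allocate so new allocations go to fresh pages.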
void SealCurrentObjects(Heap* heap) {
  CcTest::CollectAllGarbage();
  CcTest::CollectAllGarbage();
  heap->mark_compact_collector()->EnsureSweepingCompleted();
  heap->old_space()->FreeLinearAllocationArea();
  for (Page* page : *heap->old_space()) {
    page->MarkNeverAllocateForTesting();
  }
}

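// Computes the FixedArray length whose total object size is `size` bytes,
// capped at the maximum regular (non-large-object) length.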
int FixedArrayLenFromSize(int size) {
  return Min((size - FixedArray::kHeaderSize) / kTaggedSize,
             FixedArray::kMaxRegularLength);
}

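// Fills one old-space page with 128-byte FixedArrays, shrinking the final
// array so that `remainder` bytes of the page are left unused. Returns
// handles to the allocated arrays.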
std::vector<Handle<FixedArray>> FillOldSpacePageWithFixedArrays(Heap* heap,
                                                                int remainder) {
  PauseAllocationObserversScope pause_observers(heap);
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  const int kArraySize = 128;
  const int kArrayLen = heap::FixedArrayLenFromSize(kArraySize);
  Handle<FixedArray> array;
  int allocated = 0;
  do {
    if (allocated + kArraySize * 2 >
        static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage())) {
      int size =
          kArraySize * 2 -
          ((allocated + kArraySize * 2) -
           static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage())) -
          remainder;
      int last_array_len = heap::FixedArrayLenFromSize(size);
      array = isolate->factory()->NewFixedArray(last_array_len,
                                                AllocationType::kOld);
      CHECK_EQ(size, array->Size());
      allocated += array->Size() + remainder;
    } else {
      array =
          isolate->factory()->NewFixedArray(kArrayLen, AllocationType::kOld);
      allocated += array->Size();
      CHECK_EQ(kArraySize, array->Size());
    }
    if (handles.empty()) {
      // Check that allocations started on a new page.
      CHECK_EQ(array->address(), Page::FromHeapObject(*array)->area_start());
    }
    handles.push_back(array);
  } while (allocated <
           static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()));
  return handles;
}

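// Consumes `padding_size` bytes of the given space with FixedArrays of at
// most `object_size` bytes each. If the last chunk is too small for a
// FixedArray, it is covered with a filler object instead.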
std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
                                              AllocationType allocation,
                                              int object_size) {
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  int allocate_memory;
  int length;
  int free_memory = padding_size;
  if (allocation == i::AllocationType::kOld) {
    heap->old_space()->FreeLinearAllocationArea();
    int overall_free_memory = static_cast<int>(heap->old_space()->Available());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  } else {
    int overall_free_memory =
        static_cast<int>(*heap->new_space()->allocation_limit_address() -
                         *heap->new_space()->allocation_top_address());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  }
  while (free_memory > 0) {
    if (free_memory > object_size) {
      allocate_memory = object_size;
      length = FixedArrayLenFromSize(allocate_memory);
    } else {
      allocate_memory = free_memory;
      length = FixedArrayLenFromSize(allocate_memory);
      if (length <= 0) {
        // Not enough room to create another fixed array. Let's create a filler.
        if (free_memory > (2 * kTaggedSize)) {
          heap->CreateFillerObjectAt(
              *heap->old_space()->allocation_top_address(), free_memory,
              ClearRecordedSlots::kNo);
        }
        break;
      }
    }
    handles.push_back(isolate->factory()->NewFixedArray(length, allocation));
    CHECK((allocation == AllocationType::kYoung &&
           heap->new_space()->Contains(*handles.back())) ||
          (allocation == AllocationType::kOld &&
           heap->InOldSpace(*handles.back())));
    free_memory -= handles.back()->Size();
  }
  return handles;
}

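// Allocates new-space memory until only `extra_bytes` remain in the current
// linear allocation area, appending handles for the padding arrays to
// `out_handles` if provided.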
void AllocateAllButNBytes(v8::internal::NewSpace* space, int extra_bytes,
                          std::vector<Handle<FixedArray>>* out_handles) {
  PauseAllocationObserversScope pause_observers(space->heap());
  int space_remaining = static_cast<int>(*space->allocation_limit_address() -
                                         *space->allocation_top_address());
  CHECK(space_remaining >= extra_bytes);
  int new_linear_size = space_remaining - extra_bytes;
  if (new_linear_size == 0) return;
  std::vector<Handle<FixedArray>> handles = heap::CreatePadding(
      space->heap(), new_linear_size, i::AllocationType::kYoung);
  if (out_handles != nullptr)
    out_handles->insert(out_handles->end(), handles.begin(), handles.end());
}

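// Fills up the remainder of the current new-space page.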
void FillCurrentPage(v8::internal::NewSpace* space,
                     std::vector<Handle<FixedArray>>* out_handles) {
  heap::AllocateAllButNBytes(space, 0, out_handles);
}

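// Fills whatever remains of the current new-space page. Returns false if the
// page was already full.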
bool FillUpOnePage(v8::internal::NewSpace* space,
                   std::vector<Handle<FixedArray>>* out_handles) {
  PauseAllocationObserversScope pause_observers(space->heap());
  int space_remaining = static_cast<int>(*space->allocation_limit_address() -
                                         *space->allocation_top_address());
  if (space_remaining == 0) return false;
  std::vector<Handle<FixedArray>> handles = heap::CreatePadding(
      space->heap(), space_remaining, i::AllocationType::kYoung);
  if (out_handles != nullptr)
    out_handles->insert(out_handles->end(), handles.begin(), handles.end());
  return true;
}

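// Fills the current page and then keeps allocating fresh pages until new
// space cannot grow any further, so the space behaves as if it were full.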
void SimulateFullSpace(v8::internal::NewSpace* space,
                       std::vector<Handle<FixedArray>>* out_handles) {
  heap::FillCurrentPage(space, out_handles);
  while (heap::FillUpOnePage(space, out_handles) || space->AddFreshPage()) {
  }
}

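// Steps incremental marking forward in 100 ms chunks, starting it first if
// necessary. With `force_completion`, stepping and incremental finalization
// are repeated until marking is complete. A typical (hypothetical) use in a
// cctest:
//   heap::SimulateIncrementalMarking(CcTest::heap(), true);
//   CcTest::CollectAllGarbage();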
void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
  const double kStepSizeInMs = 100;
  CHECK(FLAG_incremental_marking);
  i::IncrementalMarking* marking = heap->incremental_marking();
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  if (marking->IsSweeping()) {
    marking->FinalizeSweeping();
  }
  CHECK(marking->IsMarking() || marking->IsStopped() || marking->IsComplete());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking() || marking->IsComplete());
  if (!force_completion) return;

  while (!marking->IsComplete()) {
    marking->V8Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                    i::StepOrigin::kV8);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());
}

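// Makes a paged space look full by discarding the linear allocation area and
// resetting the free list, without allocating any objects.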
void SimulateFullSpace(v8::internal::PagedSpace* space) {
  CodeSpaceMemoryModificationScope modification_scope(space->heap());
  i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  space->FreeLinearAllocationArea();
  space->ResetFreeList();
}

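// Releases the linear allocation area and marks every page in the space as
// never-allocate, so its currently free memory is never handed out again.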
void AbandonCurrentlyFreeMemory(PagedSpace* space) {
  space->FreeLinearAllocationArea();
  for (Page* page : *space) {
    page->MarkNeverAllocateForTesting();
  }
}

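// Collects garbage in the given space and waits for any concurrent sweeping
// to finish.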
void GcAndSweep(Heap* heap, AllocationSpace space) {
  heap->CollectGarbage(space, GarbageCollectionReason::kTesting);
  if (heap->mark_compact_collector()->sweeping_in_progress()) {
    heap->mark_compact_collector()->EnsureSweepingCompleted();
  }
}

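// Forces `page` to be chosen as an evacuation candidate (requires
// --manual-evacuation-candidates-selection). If the current allocation area
// lies on this page, it is sealed with a filler and released first.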
void ForceEvacuationCandidate(Page* page) {
  CHECK(FLAG_manual_evacuation_candidates_selection);
  page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  PagedSpace* space = static_cast<PagedSpace*>(page->owner());
  DCHECK_NOT_NULL(space);
  Address top = space->top();
  Address limit = space->limit();
  if (top < limit && Page::FromAllocationAreaAddress(top) == page) {
    // Create filler object to keep page iterable if it was iterable.
    int remaining = static_cast<int>(limit - top);
    space->heap()->CreateFillerObjectAt(top, remaining,
                                        ClearRecordedSlots::kNo);
    space->FreeLinearAllocationArea();
  }
}

}  // namespace heap
}  // namespace internal
}  // namespace v8