[heap] Harden heap-related cctests

- Move usable functions into proper heap-utils.h/.cc files and remove the
  utils-inl.h file.
- Fix assumptions across the board that rely on behavior which is not
  invariant.

This is a requirement for modifying page size.

BUG=chromium:581412
LOG=N
R=ulan@chromium.org

Review-Url: https://codereview.chromium.org/1999753002
Cr-Commit-Position: refs/heads/master@{#36410}
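For orientation before the diff: the cctest heap helpers now live in a `heap` namespace in test/cctest/heap/heap-utils.h/.cc. A minimal sketch of how a test calls them after this change — the test name is hypothetical and not part of this commit; the helper signatures are the ones declared in heap-utils.h below:

  #include "test/cctest/cctest.h"
  #include "test/cctest/heap/heap-utils.h"

  using namespace v8::internal;

  // Hypothetical test: exercise the relocated helpers through their
  // new heap:: namespace instead of the old utils-inl.h free functions.
  TEST(ExampleHeapUtilsUsage) {
    CcTest::InitializeVM();
    Heap* heap = CcTest::heap();
    // Prevent further allocation on the pages that already exist.
    heap::SealCurrentObjects(heap);
    // Exhaust the old-space linear allocation area and free lists.
    heap::SimulateFullSpace(heap->old_space());
    // Step incremental marking until it completes, then collect.
    heap::SimulateIncrementalMarking(heap);
    heap->CollectAllGarbage();
  }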
parent a19404f04a
commit fdd9f6b92d
@@ -135,6 +135,26 @@ GCTracer::GCTracer(Heap* heap)
   previous_ = previous_incremental_mark_compactor_event_ = current_;
 }
 
+void GCTracer::ResetForTesting() {
+  cumulative_incremental_marking_steps_ = 0.0;
+  cumulative_incremental_marking_bytes_ = 0.0;
+  cumulative_incremental_marking_duration_ = 0.0;
+  cumulative_pure_incremental_marking_duration_ = 0.0;
+  longest_incremental_marking_step_ = 0.0;
+  cumulative_incremental_marking_finalization_steps_ = 0.0;
+  cumulative_incremental_marking_finalization_duration_ = 0.0;
+  longest_incremental_marking_finalization_step_ = 0.0;
+  cumulative_marking_duration_ = 0.0;
+  cumulative_sweeping_duration_ = 0.0;
+  allocation_time_ms_ = 0.0;
+  new_space_allocation_counter_bytes_ = 0.0;
+  old_generation_allocation_counter_bytes_ = 0.0;
+  allocation_duration_since_gc_ = 0.0;
+  new_space_allocation_in_bytes_since_gc_ = 0.0;
+  old_generation_allocation_in_bytes_since_gc_ = 0.0;
+  combined_mark_compact_speed_cache_ = 0.0;
+  start_counter_ = 0;
+}
+
 void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
                      const char* collector_reason) {

@@ -372,6 +372,8 @@ class GCTracer {
   static double AverageSpeed(const RingBuffer<BytesAndDuration>& buffer,
                              const BytesAndDuration& initial, double time_ms);
 
+  void ResetForTesting();
+
  private:
   // Print one detailed trace line in name=value format.
   // TODO(ernstm): Move to Heap.

@@ -382,6 +382,7 @@ Page* Page::FromAnyPointerAddress(Heap* heap, Address addr) {
 }
 
 void Page::MarkNeverAllocateForTesting() {
+  DCHECK(this->owner()->identity() != NEW_SPACE);
   DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
   SetFlag(NEVER_ALLOCATE_ON_PAGE);
   reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);

@@ -99,6 +99,8 @@
         'gay-precision.cc',
         'gay-shortest.cc',
         'heap/heap-tester.h',
+        'heap/heap-utils.cc',
+        'heap/heap-utils.h',
         'heap/test-alloc.cc',
         'heap/test-compaction.cc',
         'heap/test-heap.cc',

@@ -106,7 +108,6 @@
         'heap/test-lab.cc',
         'heap/test-mark-compact.cc',
         'heap/test-spaces.cc',
-        'heap/utils-inl.h',
         'print-extension.cc',
         'profiler-extension.cc',
         'test-accessors.cc',
@@ -1,9 +1,8 @@
-// Copyright 2015 the V8 project authors. All rights reserved.
+// Copyright 2016 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#ifndef HEAP_UTILS_H_
-#define HEAP_UTILS_H_
+#include "test/cctest/heap/heap-utils.h"
 
 #include "src/factory.h"
 #include "src/heap/heap-inl.h"

@@ -11,18 +10,29 @@
 #include "src/heap/mark-compact.h"
 #include "src/isolate.h"
 
-
 namespace v8 {
 namespace internal {
+namespace heap {
 
-static int LenFromSize(int size) {
+void SealCurrentObjects(Heap* heap) {
+  heap->CollectAllGarbage();
+  heap->CollectAllGarbage();
+  heap->mark_compact_collector()->EnsureSweepingCompleted();
+  PageIterator it(heap->old_space());
+  heap->old_space()->EmptyAllocationInfo();
+  while (it.has_next()) {
+    Page* page = it.next();
+    page->MarkNeverAllocateForTesting();
+  }
+}
+
+int FixedArrayLenFromSize(int size) {
   return (size - FixedArray::kHeaderSize) / kPointerSize;
 }
 
-
-static inline std::vector<Handle<FixedArray>> CreatePadding(
-    Heap* heap, int padding_size, PretenureFlag tenure,
-    int object_size = Page::kMaxRegularHeapObjectSize) {
+std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
+                                              PretenureFlag tenure,
+                                              int object_size) {
   std::vector<Handle<FixedArray>> handles;
   Isolate* isolate = heap->isolate();
   int allocate_memory;

@@ -42,14 +52,17 @@ static inline std::vector<Handle<FixedArray>> CreatePadding(
   while (free_memory > 0) {
     if (free_memory > object_size) {
       allocate_memory = object_size;
-      length = LenFromSize(allocate_memory);
+      length = FixedArrayLenFromSize(allocate_memory);
     } else {
       allocate_memory = free_memory;
-      length = LenFromSize(allocate_memory);
+      length = FixedArrayLenFromSize(allocate_memory);
       if (length <= 0) {
         // Not enough room to create another fixed array. Let's create a filler.
-        heap->CreateFillerObjectAt(*heap->old_space()->allocation_top_address(),
-                                   free_memory, ClearRecordedSlots::kNo);
+        if (free_memory > (2 * kPointerSize)) {
+          heap->CreateFillerObjectAt(
+              *heap->old_space()->allocation_top_address(), free_memory,
+              ClearRecordedSlots::kNo);
+        }
         break;
       }
     }

@@ -61,27 +74,8 @@ static inline std::vector<Handle<FixedArray>> CreatePadding(
   return handles;
 }
 
-
-// Helper function that simulates a full new-space in the heap.
-static inline bool FillUpOnePage(
-    v8::internal::NewSpace* space,
-    std::vector<Handle<FixedArray>>* out_handles = nullptr) {
-  space->DisableInlineAllocationSteps();
-  int space_remaining = static_cast<int>(*space->allocation_limit_address() -
-                                         *space->allocation_top_address());
-  if (space_remaining == 0) return false;
-  std::vector<Handle<FixedArray>> handles =
-      CreatePadding(space->heap(), space_remaining, i::NOT_TENURED);
-  if (out_handles != nullptr)
-    out_handles->insert(out_handles->end(), handles.begin(), handles.end());
-  return true;
-}
-
-
-// Helper function that simulates a fill new-space in the heap.
-static inline void AllocateAllButNBytes(
-    v8::internal::NewSpace* space, int extra_bytes,
-    std::vector<Handle<FixedArray>>* out_handles = nullptr) {
+void AllocateAllButNBytes(v8::internal::NewSpace* space, int extra_bytes,
+                          std::vector<Handle<FixedArray>>* out_handles) {
   space->DisableInlineAllocationSteps();
   int space_remaining = static_cast<int>(*space->allocation_limit_address() -
                                          *space->allocation_top_address());

@@ -89,38 +83,37 @@ static inline void AllocateAllButNBytes(
   int new_linear_size = space_remaining - extra_bytes;
   if (new_linear_size == 0) return;
   std::vector<Handle<FixedArray>> handles =
-      CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED);
+      heap::CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED);
   if (out_handles != nullptr)
    out_handles->insert(out_handles->end(), handles.begin(), handles.end());
 }
 
-static inline void FillCurrentPage(
-    v8::internal::NewSpace* space,
-    std::vector<Handle<FixedArray>>* out_handles = nullptr) {
-  AllocateAllButNBytes(space, 0, out_handles);
+void FillCurrentPage(v8::internal::NewSpace* space,
+                     std::vector<Handle<FixedArray>>* out_handles) {
+  heap::AllocateAllButNBytes(space, 0, out_handles);
 }
 
-static inline void SimulateFullSpace(
-    v8::internal::NewSpace* space,
-    std::vector<Handle<FixedArray>>* out_handles = nullptr) {
-  FillCurrentPage(space, out_handles);
-  while (FillUpOnePage(space, out_handles) || space->AddFreshPage()) {
+bool FillUpOnePage(v8::internal::NewSpace* space,
+                   std::vector<Handle<FixedArray>>* out_handles) {
+  space->DisableInlineAllocationSteps();
+  int space_remaining = static_cast<int>(*space->allocation_limit_address() -
+                                         *space->allocation_top_address());
+  if (space_remaining == 0) return false;
+  std::vector<Handle<FixedArray>> handles =
+      heap::CreatePadding(space->heap(), space_remaining, i::NOT_TENURED);
+  if (out_handles != nullptr)
+    out_handles->insert(out_handles->end(), handles.begin(), handles.end());
+  return true;
+}
+
+void SimulateFullSpace(v8::internal::NewSpace* space,
+                       std::vector<Handle<FixedArray>>* out_handles) {
+  heap::FillCurrentPage(space, out_handles);
+  while (heap::FillUpOnePage(space, out_handles) || space->AddFreshPage()) {
   }
 }
 
-
-// Helper function that simulates a full old-space in the heap.
-static inline void SimulateFullSpace(v8::internal::PagedSpace* space) {
-  space->EmptyAllocationInfo();
-  space->ResetFreeList();
-  space->ClearStats();
-}
-
-
-// Helper function that simulates many incremental marking steps until
-// marking is completed.
-static inline void SimulateIncrementalMarking(i::Heap* heap,
-                                              bool force_completion = true) {
+void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
   i::MarkCompactCollector* collector = heap->mark_compact_collector();
   i::IncrementalMarking* marking = heap->incremental_marking();
   if (collector->sweeping_in_progress()) {

@@ -142,7 +135,12 @@ static inline void SimulateIncrementalMarking(i::Heap* heap,
   CHECK(marking->IsComplete());
 }
 
+void SimulateFullSpace(v8::internal::PagedSpace* space) {
+  space->EmptyAllocationInfo();
+  space->ResetFreeList();
+  space->ClearStats();
+}
+
+}  // namespace heap
 }  // namespace internal
 }  // namespace v8
-
-#endif  // HEAP_UTILS_H_
test/cctest/heap/heap-utils.h (new file, 47 lines)
@@ -0,0 +1,47 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef HEAP_HEAP_UTILS_H_
+#define HEAP_HEAP_UTILS_H_
+
+#include "src/heap/heap.h"
+
+namespace v8 {
+namespace internal {
+namespace heap {
+
+void SealCurrentObjects(Heap* heap);
+
+int FixedArrayLenFromSize(int size);
+
+std::vector<Handle<FixedArray>> CreatePadding(
+    Heap* heap, int padding_size, PretenureFlag tenure,
+    int object_size = Page::kMaxRegularHeapObjectSize);
+
+void AllocateAllButNBytes(
+    v8::internal::NewSpace* space, int extra_bytes,
+    std::vector<Handle<FixedArray>>* out_handles = nullptr);
+
+void FillCurrentPage(v8::internal::NewSpace* space,
+                     std::vector<Handle<FixedArray>>* out_handles = nullptr);
+
+// Helper function that simulates a full new-space in the heap.
+bool FillUpOnePage(v8::internal::NewSpace* space,
+                   std::vector<Handle<FixedArray>>* out_handles = nullptr);
+
+void SimulateFullSpace(v8::internal::NewSpace* space,
+                       std::vector<Handle<FixedArray>>* out_handles = nullptr);
+
+// Helper function that simulates many incremental marking steps until
+// marking is completed.
+void SimulateIncrementalMarking(i::Heap* heap, bool force_completion = true);
+
+// Helper function that simulates a full old-space in the heap.
+void SimulateFullSpace(v8::internal::PagedSpace* space);
+
+}  // namespace heap
+}  // namespace internal
+}  // namespace v8
+
+#endif  // HEAP_HEAP_UTILS_H_
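To illustrate the out_handles parameter declared above, a short usage sketch — the byte count and surrounding test context are hypothetical; keeping the returned handles alive is what lets the padding survive a GC triggered later in a test:

  // Leave exactly 48 bytes (a made-up number) of room in new space so
  // the next allocation lands at the end of the current page.
  std::vector<Handle<FixedArray>> handles;
  heap::AllocateAllButNBytes(heap->new_space(), 48, &handles);

  // Or fill new space entirely, page by page; the handles vector keeps
  // the padding objects reachable for the rest of the test.
  heap::SimulateFullSpace(heap->new_space(), &handles);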
@@ -31,7 +31,7 @@
 #include "src/accessors.h"
 #include "src/api.h"
 #include "test/cctest/heap/heap-tester.h"
-#include "test/cctest/heap/utils-inl.h"
+#include "test/cctest/heap/heap-utils.h"
 
 using namespace v8::internal;
 

@@ -52,11 +52,11 @@ AllocationResult v8::internal::HeapTester::AllocateAfterFailures() {
   heap->CopyJSObject(JSObject::cast(object)).ToObjectChecked();
 
   // Old data space.
-  SimulateFullSpace(heap->old_space());
+  heap::SimulateFullSpace(heap->old_space());
   heap->AllocateByteArray(100, TENURED).ToObjectChecked();
 
   // Old pointer space.
-  SimulateFullSpace(heap->old_space());
+  heap::SimulateFullSpace(heap->old_space());
   heap->AllocateFixedArray(10000, TENURED).ToObjectChecked();
 
   // Large object space.

@@ -72,12 +72,12 @@ AllocationResult v8::internal::HeapTester::AllocateAfterFailures() {
       kLargeObjectSpaceFillerLength, TENURED).ToObjectChecked();
 
   // Map space.
-  SimulateFullSpace(heap->map_space());
+  heap::SimulateFullSpace(heap->map_space());
   int instance_size = JSObject::kHeaderSize;
   heap->AllocateMap(JS_OBJECT_TYPE, instance_size).ToObjectChecked();
 
   // Test that we can allocate in old pointer space and code space.
-  SimulateFullSpace(heap->code_space());
+  heap::SimulateFullSpace(heap->code_space());
   heap->AllocateFixedArray(100, TENURED).ToObjectChecked();
   heap->CopyCode(CcTest::i_isolate()->builtins()->builtin(
       Builtins::kIllegal)).ToObjectChecked();
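The compaction tests in the next file all share one setup pattern built from these helpers. A condensed sketch of that pattern, abbreviated from the hunks below and not a complete test body: seal what is already allocated, grow old space by one page, fill that page, then force its compaction to abort with OOM.

  heap::SealCurrentObjects(heap);
  CHECK(heap->old_space()->Expand());
  // Fill the fresh page with padding objects and remember where they live.
  auto handles = heap::CreatePadding(heap, Page::kAllocatableMemory, TENURED);
  Page* page = Page::FromAddress(handles.front()->address());
  page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  heap->set_force_oom(true);
  heap->CollectAllGarbage();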
@@ -4,12 +4,14 @@
 
 #include "test/cctest/cctest.h"
 #include "test/cctest/heap/heap-tester.h"
-#include "test/cctest/heap/utils-inl.h"
+#include "test/cctest/heap/heap-utils.h"
 
 namespace v8 {
 namespace internal {
 
-static void CheckInvariantsOfAbortedPage(Page* page) {
+namespace {
+
+void CheckInvariantsOfAbortedPage(Page* page) {
   // Check invariants:
   // 1) Markbits are cleared
   // 2) The page is not marked as evacuation candidate anymore

@@ -19,6 +21,14 @@ static void CheckInvariantsOfAbortedPage(Page* page) {
   CHECK(!page->IsFlagSet(Page::COMPACTION_WAS_ABORTED));
 }
 
+void CheckAllObjectsOnPage(std::vector<Handle<FixedArray>>& handles,
+                           Page* page) {
+  for (auto& fixed_array : handles) {
+    CHECK(Page::FromAddress(fixed_array->address()) == page);
+  }
+}
+
+}  // namespace
+
 HEAP_TEST(CompactionFullAbortedPage) {
   // Test the scenario where we reach OOM during compaction and the whole page

@@ -33,20 +43,19 @@ HEAP_TEST(CompactionFullAbortedPage) {
   Heap* heap = isolate->heap();
   {
     HandleScope scope1(isolate);
-    PageIterator it(heap->old_space());
-    while (it.has_next()) {
-      it.next()->MarkNeverAllocateForTesting();
-    }
+
+    heap::SealCurrentObjects(heap);
 
     {
       HandleScope scope2(isolate);
       CHECK(heap->old_space()->Expand());
       auto compaction_page_handles =
-          CreatePadding(heap, Page::kAllocatableMemory, TENURED);
+          heap::CreatePadding(heap, Page::kAllocatableMemory, TENURED);
       Page* to_be_aborted_page =
           Page::FromAddress(compaction_page_handles.front()->address());
       to_be_aborted_page->SetFlag(
           MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+      CheckAllObjectsOnPage(compaction_page_handles, to_be_aborted_page);
 
       heap->set_force_oom(true);
       heap->CollectAllGarbage();

@@ -72,29 +81,29 @@ HEAP_TEST(CompactionPartiallyAbortedPage) {
   FLAG_concurrent_sweeping = false;
   FLAG_manual_evacuation_candidates_selection = true;
 
-  const int object_size = 128 * KB;
+  const int objects_per_page = 10;
+  const int object_size = Page::kAllocatableMemory / objects_per_page;
 
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
   {
     HandleScope scope1(isolate);
-    PageIterator it(heap->old_space());
-    while (it.has_next()) {
-      it.next()->MarkNeverAllocateForTesting();
-    }
+
+    heap::SealCurrentObjects(heap);
 
     {
       HandleScope scope2(isolate);
       // Fill another page with objects of size {object_size} (last one is
       // properly adjusted).
       CHECK(heap->old_space()->Expand());
-      auto compaction_page_handles =
-          CreatePadding(heap, Page::kAllocatableMemory, TENURED, object_size);
+      auto compaction_page_handles = heap::CreatePadding(
+          heap, Page::kAllocatableMemory, TENURED, object_size);
       Page* to_be_aborted_page =
           Page::FromAddress(compaction_page_handles.front()->address());
       to_be_aborted_page->SetFlag(
           MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+      CheckAllObjectsOnPage(compaction_page_handles, to_be_aborted_page);
 
       {
         // Add another page that is filled with {num_objects} objects of size

@@ -102,8 +111,9 @@ HEAP_TEST(CompactionPartiallyAbortedPage) {
         HandleScope scope3(isolate);
         CHECK(heap->old_space()->Expand());
         const int num_objects = 3;
-        std::vector<Handle<FixedArray>> page_to_fill_handles = CreatePadding(
-            heap, object_size * num_objects, TENURED, object_size);
+        std::vector<Handle<FixedArray>> page_to_fill_handles =
+            heap::CreatePadding(heap, object_size * num_objects, TENURED,
+                                object_size);
         Page* page_to_fill =
             Page::FromAddress(page_to_fill_handles.front()->address());
 

@@ -145,7 +155,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
   FLAG_concurrent_sweeping = false;
   FLAG_manual_evacuation_candidates_selection = true;
 
-  const int object_size = 128 * KB;
+  const int objects_per_page = 10;
+  const int object_size = Page::kAllocatableMemory / objects_per_page;
 
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();

@@ -155,10 +166,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
     Handle<FixedArray> root_array =
         isolate->factory()->NewFixedArray(10, TENURED);
 
-    PageIterator it(heap->old_space());
-    while (it.has_next()) {
-      it.next()->MarkNeverAllocateForTesting();
-    }
+    heap::SealCurrentObjects(heap);
 
     Page* to_be_aborted_page = nullptr;
     {

@@ -167,7 +175,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
       // properly adjusted).
       CHECK(heap->old_space()->Expand());
       std::vector<Handle<FixedArray>> compaction_page_handles =
-          CreatePadding(heap, Page::kAllocatableMemory, TENURED, object_size);
+          heap::CreatePadding(heap, Page::kAllocatableMemory, TENURED,
+                              object_size);
       to_be_aborted_page =
           Page::FromAddress(compaction_page_handles.front()->address());
       to_be_aborted_page->SetFlag(

@@ -176,8 +185,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
         compaction_page_handles[i]->set(0, *compaction_page_handles[i - 1]);
       }
       root_array->set(0, *compaction_page_handles.back());
+      CheckAllObjectsOnPage(compaction_page_handles, to_be_aborted_page);
     }
-
     {
       // Add another page that is filled with {num_objects} objects of size
       // {object_size}.

@@ -186,7 +195,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
       const int num_objects = 2;
       int used_memory = object_size * num_objects;
       std::vector<Handle<FixedArray>> page_to_fill_handles =
-          CreatePadding(heap, used_memory, TENURED, object_size);
+          heap::CreatePadding(heap, used_memory, TENURED, object_size);
       Page* page_to_fill =
          Page::FromAddress(page_to_fill_handles.front()->address());
 

@@ -233,7 +242,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
   FLAG_concurrent_sweeping = false;
   FLAG_manual_evacuation_candidates_selection = true;
 
-  const int object_size = 128 * KB;
+  const int objects_per_page = 10;
+  const int object_size = Page::kAllocatableMemory / objects_per_page;
 
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();

@@ -242,10 +252,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
     HandleScope scope1(isolate);
     Handle<FixedArray> root_array =
         isolate->factory()->NewFixedArray(10, TENURED);
-    PageIterator it(heap->old_space());
-    while (it.has_next()) {
-      it.next()->MarkNeverAllocateForTesting();
-    }
+    heap::SealCurrentObjects(heap);
 
     Page* to_be_aborted_page = nullptr;
     {

@@ -253,8 +260,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
       // Fill another page with objects of size {object_size} (last one is
      // properly adjusted).
       CHECK(heap->old_space()->Expand());
-      auto compaction_page_handles =
-          CreatePadding(heap, Page::kAllocatableMemory, TENURED, object_size);
+      auto compaction_page_handles = heap::CreatePadding(
+          heap, Page::kAllocatableMemory, TENURED, object_size);
       // Sanity check that we have enough space for linking up arrays.
       CHECK_GE(compaction_page_handles.front()->length(), 2);
       to_be_aborted_page =

@@ -270,6 +277,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
           isolate->factory()->NewFixedArray(1, NOT_TENURED);
       CHECK(heap->InNewSpace(*new_space_array));
       compaction_page_handles.front()->set(1, *new_space_array);
+      CheckAllObjectsOnPage(compaction_page_handles, to_be_aborted_page);
     }
 
     {

@@ -280,7 +288,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
       const int num_objects = 2;
       int used_memory = object_size * num_objects;
       std::vector<Handle<FixedArray>> page_to_fill_handles =
-          CreatePadding(heap, used_memory, TENURED, object_size);
+          heap::CreatePadding(heap, used_memory, TENURED, object_size);
       Page* page_to_fill =
           Page::FromAddress(page_to_fill_handles.front()->address());
 
@@ -44,7 +44,7 @@
 #include "src/snapshot/snapshot.h"
 #include "test/cctest/cctest.h"
 #include "test/cctest/heap/heap-tester.h"
-#include "test/cctest/heap/utils-inl.h"
+#include "test/cctest/heap/heap-utils.h"
 #include "test/cctest/test-feedback-vector.h"
 
 

@@ -725,7 +725,7 @@ TEST(BytecodeArray) {
   Factory* factory = isolate->factory();
   HandleScope scope(isolate);
 
-  SimulateFullSpace(heap->old_space());
+  heap::SimulateFullSpace(heap->old_space());
   Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
   for (int i = 0; i < 5; i++) {
     Handle<Object> number = factory->NewHeapNumber(i);

@@ -1350,7 +1350,7 @@ TEST(TestCodeFlushingIncremental) {
   // Simulate several GCs that use incremental marking.
   const int kAgingThreshold = 6;
   for (int i = 0; i < kAgingThreshold; i++) {
-    SimulateIncrementalMarking(CcTest::heap());
+    heap::SimulateIncrementalMarking(CcTest::heap());
     CcTest::heap()->CollectAllGarbage();
   }
   CHECK(!function->shared()->is_compiled() || function->IsOptimized());

@@ -1364,7 +1364,7 @@ TEST(TestCodeFlushingIncremental) {
   // Simulate several GCs that use incremental marking but make sure
   // the loop breaks once the function is enqueued as a candidate.
   for (int i = 0; i < kAgingThreshold; i++) {
-    SimulateIncrementalMarking(CcTest::heap());
+    heap::SimulateIncrementalMarking(CcTest::heap());
     if (!function->next_function_link()->IsUndefined()) break;
     CcTest::heap()->CollectAllGarbage();
   }

@@ -1440,7 +1440,7 @@ TEST(TestCodeFlushingIncrementalScavenge) {
   // Simulate incremental marking so that the functions are enqueued as
   // code flushing candidates. Then kill one of the functions. Finally
   // perform a scavenge while incremental marking is still running.
-  SimulateIncrementalMarking(CcTest::heap());
+  heap::SimulateIncrementalMarking(CcTest::heap());
   *function2.location() = NULL;
   CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
 

@@ -1494,7 +1494,7 @@ TEST(TestCodeFlushingIncrementalAbort) {
 
   // Simulate incremental marking so that the function is enqueued as
   // code flushing candidate.
-  SimulateIncrementalMarking(heap);
+  heap::SimulateIncrementalMarking(heap);
 
   // Enable the debugger and add a breakpoint while incremental marking
   // is running so that incremental marking aborts and code flushing is

@@ -1549,7 +1549,7 @@ TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
   Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value);
   CHECK(!g_function->is_compiled());
 
-  SimulateIncrementalMarking(heap);
+  heap::SimulateIncrementalMarking(heap);
   CompileRun("%OptimizeFunctionOnNextCall(f); f();");
 
   // g should now have available an optimized function, unmarked by gc. The

@@ -2658,7 +2658,7 @@ TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
   CcTest::heap()->StartIncrementalMarking();
   // The following calls will increment CcTest::heap()->global_ic_age().
   CcTest::isolate()->ContextDisposedNotification();
-  SimulateIncrementalMarking(CcTest::heap());
+  heap::SimulateIncrementalMarking(CcTest::heap());
   CcTest::heap()->CollectAllGarbage();
 
   CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());

@@ -2743,12 +2743,13 @@ HEAP_TEST(GCFlags) {
 TEST(IdleNotificationFinishMarking) {
   i::FLAG_allow_natives_syntax = true;
   CcTest::InitializeVM();
-  SimulateFullSpace(CcTest::heap()->old_space());
+  const int initial_gc_count = CcTest::heap()->gc_count();
+  heap::SimulateFullSpace(CcTest::heap()->old_space());
   IncrementalMarking* marking = CcTest::heap()->incremental_marking();
   marking->Stop();
   CcTest::heap()->StartIncrementalMarking();
 
-  CHECK_EQ(CcTest::heap()->gc_count(), 0);
+  CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);
 
   // TODO(hpayer): We cannot write proper unit test right now for heap.
   // The ideal test would call kMaxIdleMarkingDelayCounter to test the

@@ -2783,7 +2784,7 @@ TEST(IdleNotificationFinishMarking) {
       (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
        static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
       kLongIdleTime);
-  CHECK_EQ(CcTest::heap()->gc_count(), 1);
+  CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count + 1);
 }
 
 

@@ -2795,7 +2796,7 @@ TEST(OptimizedAllocationAlwaysInNewSpace) {
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   v8::HandleScope scope(CcTest::isolate());
   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
-  SimulateFullSpace(CcTest::heap()->new_space());
+  heap::SimulateFullSpace(CcTest::heap()->new_space());
   AlwaysAllocateScope always_allocate(CcTest::i_isolate());
   v8::Local<v8::Value> res = CompileRun(
       "function c(x) {"

@@ -3270,7 +3271,7 @@ TEST(Regress1465) {
   CompileRun("%DebugPrint(root);");
   CHECK_EQ(transitions_count, transitions_before);
 
-  SimulateIncrementalMarking(CcTest::heap());
+  heap::SimulateIncrementalMarking(CcTest::heap());
   CcTest::heap()->CollectAllGarbage();
 
   // Count number of live transitions after marking. Note that one transition

@@ -3440,7 +3441,7 @@ TEST(Regress2143a) {
       "root.foo = 0;"
      "root = new Object;");
 
-  SimulateIncrementalMarking(CcTest::heap());
+  heap::SimulateIncrementalMarking(CcTest::heap());
 
   // Compile a StoreIC that performs the prepared map transition. This
   // will restart incremental marking and should make sure the root is

@@ -3480,7 +3481,7 @@ TEST(Regress2143b) {
       "root.foo = 0;"
      "root = new Object;");
 
-  SimulateIncrementalMarking(CcTest::heap());
+  heap::SimulateIncrementalMarking(CcTest::heap());
 
   // Compile an optimized LStoreNamedField that performs the prepared
   // map transition. This will restart incremental marking and should

@@ -3535,28 +3536,29 @@ TEST(ReleaseOverReservedPages) {
 
   // Prepare many pages with low live-bytes count.
   PagedSpace* old_space = heap->old_space();
-  CHECK_EQ(1, old_space->CountTotalPages());
+  const int initial_page_count = old_space->CountTotalPages();
+  const int overall_page_count = number_of_test_pages + initial_page_count;
   for (int i = 0; i < number_of_test_pages; i++) {
     AlwaysAllocateScope always_allocate(isolate);
-    SimulateFullSpace(old_space);
+    heap::SimulateFullSpace(old_space);
     factory->NewFixedArray(1, TENURED);
   }
-  CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());
+  CHECK_EQ(overall_page_count, old_space->CountTotalPages());
 
   // Triggering one GC will cause a lot of garbage to be discovered but
   // even spread across all allocated pages.
   heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                           "triggered for preparation");
-  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
+  CHECK_GE(overall_page_count, old_space->CountTotalPages());
 
   // Triggering subsequent GCs should cause at least half of the pages
   // to be released to the OS after at most two cycles.
   heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                           "triggered by test 1");
-  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
+  CHECK_GE(overall_page_count, old_space->CountTotalPages());
   heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                           "triggered by test 2");
-  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
+  CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2);
 
   // Triggering a last-resort GC should cause all pages to be released to the
   // OS so that other processes can seize the memory. If we get a failure here

@@ -3566,7 +3568,7 @@ TEST(ReleaseOverReservedPages) {
   // boots, but if the 20 small arrays don't fit on the first page then that's
   // an indication that it is too small.
   heap->CollectAllAvailableGarbage("triggered really hard");
-  CHECK_EQ(1, old_space->CountTotalPages());
+  CHECK_EQ(initial_page_count, old_space->CountTotalPages());
 }
 
 static int forced_gc_counter = 0;

@@ -3649,7 +3651,7 @@ TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
   CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell());
   CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell());
 
-  SimulateIncrementalMarking(CcTest::heap());
+  heap::SimulateIncrementalMarking(CcTest::heap());
   CcTest::heap()->CollectAllGarbage();
 
   CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1)))

@@ -3716,7 +3718,7 @@ TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
   Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
   CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
 
-  SimulateIncrementalMarking(CcTest::heap());
+  heap::SimulateIncrementalMarking(CcTest::heap());
   CcTest::heap()->CollectAllGarbage();
 
   CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());

@@ -3753,7 +3755,7 @@ TEST(IncrementalMarkingClearsMonomorphicConstructor) {
 
   // Fire context dispose notification.
   CcTest::isolate()->ContextDisposedNotification();
-  SimulateIncrementalMarking(CcTest::heap());
+  heap::SimulateIncrementalMarking(CcTest::heap());
   CcTest::heap()->CollectAllGarbage();
 
   CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),

@@ -3776,7 +3778,7 @@ TEST(IncrementalMarkingPreservesMonomorphicIC) {
 
   CheckVectorIC(f, 0, MONOMORPHIC);
 
-  SimulateIncrementalMarking(CcTest::heap());
+  heap::SimulateIncrementalMarking(CcTest::heap());
   CcTest::heap()->CollectAllGarbage();
 
   CheckVectorIC(f, 0, MONOMORPHIC);

@@ -3808,7 +3810,7 @@ TEST(IncrementalMarkingClearsMonomorphicIC) {
 
   // Fire context dispose notification.
   CcTest::isolate()->ContextDisposedNotification();
-  SimulateIncrementalMarking(CcTest::heap());
+  heap::SimulateIncrementalMarking(CcTest::heap());
   CcTest::heap()->CollectAllGarbage();
 
   CheckVectorICCleared(f, 0);

@@ -3846,7 +3848,7 @@ TEST(IncrementalMarkingPreservesPolymorphicIC) {
   CheckVectorIC(f, 0, POLYMORPHIC);
 
   // Fire context dispose notification.
-  SimulateIncrementalMarking(CcTest::heap());
+  heap::SimulateIncrementalMarking(CcTest::heap());
   CcTest::heap()->CollectAllGarbage();
 
   CheckVectorIC(f, 0, POLYMORPHIC);

@@ -3885,7 +3887,7 @@ TEST(IncrementalMarkingClearsPolymorphicIC) {
 
   // Fire context dispose notification.
   CcTest::isolate()->ContextDisposedNotification();
-  SimulateIncrementalMarking(CcTest::heap());
+  heap::SimulateIncrementalMarking(CcTest::heap());
   CcTest::heap()->CollectAllGarbage();
 
   CheckVectorICCleared(f, 0);

@@ -4057,7 +4059,7 @@ TEST(Regress159140) {
   // Simulate incremental marking so that the functions are enqueued as
   // code flushing candidates. Then optimize one function. Finally
   // finish the GC to complete code flushing.
-  SimulateIncrementalMarking(heap);
+  heap::SimulateIncrementalMarking(heap);
   CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
   heap->CollectAllGarbage();
 

@@ -4103,7 +4105,7 @@ TEST(Regress165495) {
 
   // Simulate incremental marking so that unoptimized code is flushed
   // even though it still is cached in the optimized code map.
-  SimulateIncrementalMarking(heap);
+  heap::SimulateIncrementalMarking(heap);
   heap->CollectAllGarbage();
 
   // Make a new closure that will get code installed from the code map.

@@ -4171,7 +4173,7 @@ TEST(Regress169209) {
   }
 
   // Simulate incremental marking and collect code flushing candidates.
-  SimulateIncrementalMarking(heap);
+  heap::SimulateIncrementalMarking(heap);
   CHECK(shared1->code()->gc_metadata() != NULL);
 
   // Optimize function and make sure the unoptimized code is replaced.

@@ -4227,9 +4229,9 @@ TEST(Regress169928) {
   array_data->set(0, Smi::FromInt(1));
   array_data->set(1, Smi::FromInt(2));
 
-  AllocateAllButNBytes(CcTest::heap()->new_space(),
-                       JSArray::kSize + AllocationMemento::kSize +
-                       kPointerSize);
+  heap::AllocateAllButNBytes(
+      CcTest::heap()->new_space(),
+      JSArray::kSize + AllocationMemento::kSize + kPointerSize);
 
   Handle<JSArray> array =
       factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);

@@ -4378,7 +4380,7 @@ TEST(Regress514122) {
   HandleScope inner_scope(isolate);
   AlwaysAllocateScope always_allocate(isolate);
   // Make sure literal is placed on an old-space evacuation candidate.
-  SimulateFullSpace(heap->old_space());
+  heap::SimulateFullSpace(heap->old_space());
 
   // Make sure there the number of literals is > 0.
   Handle<LiteralsArray> lit =

@@ -4393,7 +4395,7 @@ TEST(Regress514122) {
   // simulate incremental marking to enqueue optimized code map.
   FLAG_manual_evacuation_candidates_selection = true;
   evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
-  SimulateIncrementalMarking(heap);
+  heap::SimulateIncrementalMarking(heap);
 
   // No matter whether reachable or not, {boomer} is doomed.
   Handle<Object> boomer(shared->optimized_code_map(), isolate);

@@ -4590,7 +4592,7 @@ TEST(LargeObjectSlotRecording) {
   HandleScope scope(isolate);
 
   // Create an object on an evacuation candidate.
-  SimulateFullSpace(heap->old_space());
+  heap::SimulateFullSpace(heap->old_space());
   Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
   Page* evac_page = Page::FromAddress(lit->address());
   evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);

@@ -4603,7 +4605,7 @@ TEST(LargeObjectSlotRecording) {
   CHECK(heap->lo_space()->Contains(*lo));
 
   // Start incremental marking to active write barrier.
-  SimulateIncrementalMarking(heap, false);
+  heap::SimulateIncrementalMarking(heap, false);
   heap->incremental_marking()->AdvanceIncrementalMarking(
       10000000, IncrementalMarking::IdleStepActions());
 

@@ -4880,7 +4882,7 @@ TEST(NoWeakHashTableLeakWithIncrementalMarking) {
   if (!isolate->use_crankshaft()) return;
   HandleScope outer_scope(heap->isolate());
   for (int i = 0; i < 3; i++) {
-    SimulateIncrementalMarking(heap);
+    heap::SimulateIncrementalMarking(heap);
     {
       LocalContext context;
       HandleScope scope(heap->isolate());

@@ -5529,7 +5531,7 @@ UNINITIALIZED_TEST(Regress538257) {
       Page::FromAddress(objects[i]->address())
           ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
     }
-    SimulateFullSpace(old_space);
+    heap::SimulateFullSpace(old_space);
     heap->CollectGarbage(OLD_SPACE);
     // If we get this far, we've successfully aborted compaction. Any further
     // allocations might trigger OOM.

@@ -5642,7 +5644,7 @@ UNINITIALIZED_TEST(PromotionQueue) {
     CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
 
     // Fill-up the first semi-space page.
-    FillUpOnePage(new_space);
+    heap::FillUpOnePage(new_space);
 
     // Create a small object to initialize the bump pointer on the second
     // semi-space page.

@@ -5651,7 +5653,7 @@ UNINITIALIZED_TEST(PromotionQueue) {
     CHECK(heap->InNewSpace(*small));
 
     // Fill-up the second semi-space page.
-    FillUpOnePage(new_space);
+    heap::FillUpOnePage(new_space);
 
     // This scavenge will corrupt memory if the promotion queue is not
     // evacuated.

@@ -5681,9 +5683,9 @@ TEST(Regress388880) {
 
   // Allocate padding objects in old pointer space so, that object allocated
   // afterwards would end at the end of the page.
-  SimulateFullSpace(heap->old_space());
+  heap::SimulateFullSpace(heap->old_space());
   int padding_size = desired_offset - Page::kObjectStartOffset;
-  CreatePadding(heap, padding_size, TENURED);
+  heap::CreatePadding(heap, padding_size, TENURED);
 
   Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
   o->set_properties(*factory->empty_fixed_array());

@@ -5830,11 +5832,11 @@ void CheckMapRetainingFor(int n) {
   Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
   CHECK(!weak_cell->cleared());
   for (int i = 0; i < n; i++) {
-    SimulateIncrementalMarking(heap);
+    heap::SimulateIncrementalMarking(heap);
     heap->CollectGarbage(OLD_SPACE);
   }
   CHECK(!weak_cell->cleared());
-  SimulateIncrementalMarking(heap);
+  heap::SimulateIncrementalMarking(heap);
   heap->CollectGarbage(OLD_SPACE);
   CHECK(weak_cell->cleared());
 }

@@ -5863,7 +5865,7 @@ TEST(RegressArrayListGC) {
   heap->CollectGarbage(OLD_SPACE);
   // Force GC in old space on next addition of retained map.
   Map::WeakCellForMap(map);
-  SimulateFullSpace(CcTest::heap()->new_space());
+  heap::SimulateFullSpace(CcTest::heap()->new_space());
   for (int i = 0; i < 10; i++) {
     heap->AddRetainedMap(map);
   }

@@ -6146,6 +6148,7 @@ TEST(NewSpaceAllocationThroughput) {
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
   GCTracer* tracer = heap->tracer();
+  tracer->ResetForTesting();
   int time1 = 100;
   size_t counter1 = 1000;
   tracer->SampleAllocation(time1, counter1, 0);

@@ -6169,6 +6172,7 @@ TEST(NewSpaceAllocationThroughput2) {
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
   GCTracer* tracer = heap->tracer();
+  tracer->ResetForTesting();
   int time1 = 100;
   size_t counter1 = 1000;
   tracer->SampleAllocation(time1, counter1, 0);

@@ -6304,6 +6308,7 @@ TEST(OldGenerationAllocationThroughput) {
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
   GCTracer* tracer = heap->tracer();
+  tracer->ResetForTesting();
   int time1 = 100;
   size_t counter1 = 1000;
   tracer->SampleAllocation(time1, 0, counter1);

@@ -6328,6 +6333,7 @@ TEST(AllocationThroughput) {
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
   GCTracer* tracer = heap->tracer();
+  tracer->ResetForTesting();
   int time1 = 100;
   size_t counter1 = 1000;
   tracer->SampleAllocation(time1, counter1, counter1);

@@ -6445,7 +6451,7 @@ TEST(Regress519319) {
   parent.Reset(isolate, v8::Object::New(isolate));
   child.Reset(isolate, v8::Object::New(isolate));
 
-  SimulateFullSpace(heap->old_space());
+  heap::SimulateFullSpace(heap->old_space());
   heap->CollectGarbage(OLD_SPACE);
   {
     UniqueId id = MakeUniqueId(parent);

@@ -6504,7 +6510,7 @@ HEAP_TEST(Regress587004) {
     array->set(i, *number);
   }
   heap->CollectGarbage(OLD_SPACE);
-  SimulateFullSpace(heap->old_space());
+  heap::SimulateFullSpace(heap->old_space());
   heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, N - 1);
   heap->mark_compact_collector()->EnsureSweepingCompleted();
   ByteArray* byte_array;

@@ -6587,7 +6593,7 @@ HEAP_TEST(Regress589413) {
       }
     }
   }
-  SimulateIncrementalMarking(heap);
+  heap::SimulateIncrementalMarking(heap);
   for (size_t j = 0; j < arrays.size(); j++) {
     heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(arrays[j], N - 1);
   }

@@ -6616,8 +6622,13 @@ UNINITIALIZED_TEST(PagePromotion) {
     v8::HandleScope handle_scope(isolate);
     v8::Context::New(isolate)->Enter();
     Heap* heap = i_isolate->heap();
+
+    // Clean up any left over objects from cctest initialization.
+    heap->CollectAllGarbage();
+    heap->CollectAllGarbage();
+
     std::vector<Handle<FixedArray>> handles;
-    SimulateFullSpace(heap->new_space(), &handles);
+    heap::SimulateFullSpace(heap->new_space(), &handles);
    heap->CollectGarbage(NEW_SPACE);
     CHECK_GT(handles.size(), 0u);
     // First object in handle should be on the first page.

@@ -6626,7 +6637,7 @@ UNINITIALIZED_TEST(PagePromotion) {
     // The age mark should not be on the first page.
     CHECK(!first_page->ContainsLimit(heap->new_space()->age_mark()));
     // To perform a sanity check on live bytes we need to mark the heap.
-    SimulateIncrementalMarking(heap, true);
+    heap::SimulateIncrementalMarking(heap, true);
     // Sanity check that the page meets the requirements for promotion.
     const int threshold_bytes =
         FLAG_page_promotion_threshold * Page::kAllocatableMemory / 100;
@ -19,8 +19,7 @@
|
||||
#include "src/full-codegen/full-codegen.h"
|
||||
#include "src/global-handles.h"
|
||||
#include "test/cctest/cctest.h"
|
||||
#include "test/cctest/heap/utils-inl.h"
|
||||
|
||||
#include "test/cctest/heap/heap-utils.h"
|
||||
|
||||
using v8::IdleTask;
|
||||
using v8::Task;
|
||||
@ -120,7 +119,7 @@ TEST(IncrementalMarkingUsingIdleTasks) {
|
||||
v8::Platform* old_platform = i::V8::GetCurrentPlatform();
|
||||
MockPlatform platform(old_platform);
|
||||
i::V8::SetPlatformForTesting(&platform);
|
||||
SimulateFullSpace(CcTest::heap()->old_space());
|
||||
i::heap::SimulateFullSpace(CcTest::heap()->old_space());
|
||||
i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
|
||||
marking->Stop();
|
||||
marking->Start();
|
||||
@ -145,7 +144,7 @@ TEST(IncrementalMarkingUsingIdleTasksAfterGC) {
|
||||
v8::Platform* old_platform = i::V8::GetCurrentPlatform();
|
||||
MockPlatform platform(old_platform);
|
||||
i::V8::SetPlatformForTesting(&platform);
|
||||
SimulateFullSpace(CcTest::heap()->old_space());
|
||||
i::heap::SimulateFullSpace(CcTest::heap()->old_space());
|
||||
CcTest::heap()->CollectAllGarbage();
|
||||
i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
|
||||
marking->Stop();
|
||||
@ -171,7 +170,7 @@ TEST(IncrementalMarkingUsingDelayedTasks) {
|
||||
v8::Platform* old_platform = i::V8::GetCurrentPlatform();
|
||||
MockPlatform platform(old_platform);
|
||||
i::V8::SetPlatformForTesting(&platform);
|
||||
SimulateFullSpace(CcTest::heap()->old_space());
|
||||
i::heap::SimulateFullSpace(CcTest::heap()->old_space());
|
||||
i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
|
||||
marking->Stop();
|
||||
marking->Start();
|
||||
|
@ -43,8 +43,7 @@
|
||||
#include "src/global-handles.h"
|
||||
#include "test/cctest/cctest.h"
|
||||
#include "test/cctest/heap/heap-tester.h"
|
||||
#include "test/cctest/heap/utils-inl.h"
|
||||
|
||||
#include "test/cctest/heap/heap-utils.h"
|
||||
|
||||
using namespace v8::internal;
|
||||
using v8::Just;
|
||||
@ -76,58 +75,49 @@ TEST(MarkingDeque) {
|
||||
DeleteArray(mem);
|
||||
}
|
||||
|
||||
|
||||
HEAP_TEST(Promotion) {
|
||||
TEST(Promotion) {
|
||||
CcTest::InitializeVM();
|
||||
Heap* heap = CcTest::heap();
|
||||
heap->ConfigureHeap(1, 1, 1, 0);
|
||||
Isolate* isolate = CcTest::i_isolate();
|
||||
{
|
||||
v8::HandleScope sc(CcTest::isolate());
|
||||
Heap* heap = isolate->heap();
|
||||
|
||||
v8::HandleScope sc(CcTest::isolate());
|
||||
heap::SealCurrentObjects(heap);
|
||||
|
||||
// Allocate a fixed array in the new space.
|
||||
int array_length =
|
||||
(Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) /
|
||||
(4 * kPointerSize);
|
||||
Object* obj = heap->AllocateFixedArray(array_length).ToObjectChecked();
|
||||
Handle<FixedArray> array(FixedArray::cast(obj));
|
||||
int array_length =
|
||||
heap::FixedArrayLenFromSize(Page::kMaxRegularHeapObjectSize);
|
||||
Handle<FixedArray> array = isolate->factory()->NewFixedArray(array_length);
|
||||
|
||||
// Array should be in the new space.
|
||||
CHECK(heap->InSpace(*array, NEW_SPACE));
|
||||
|
||||
// Call mark compact GC, so array becomes an old object.
|
||||
heap->CollectAllGarbage();
|
||||
heap->CollectAllGarbage();
|
||||
|
||||
// Array now sits in the old space
|
||||
CHECK(heap->InSpace(*array, OLD_SPACE));
|
||||
// Array should be in the new space.
|
||||
CHECK(heap->InSpace(*array, NEW_SPACE));
|
||||
heap->CollectAllGarbage();
|
||||
heap->CollectAllGarbage();
|
||||
CHECK(heap->InSpace(*array, OLD_SPACE));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
HEAP_TEST(NoPromotion) {
|
||||
CcTest::InitializeVM();
|
||||
Heap* heap = CcTest::heap();
|
||||
heap->ConfigureHeap(1, 1, 1, 0);
|
||||
Isolate* isolate = CcTest::i_isolate();
|
||||
{
|
||||
v8::HandleScope sc(CcTest::isolate());
|
||||
Heap* heap = isolate->heap();
|
||||
|
||||
v8::HandleScope sc(CcTest::isolate());
|
||||
heap::SealCurrentObjects(heap);
|
||||
|
||||
// Allocate a big fixed array in the new space.
|
||||
int array_length =
|
||||
(Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) /
|
||||
(2 * kPointerSize);
|
||||
Object* obj = heap->AllocateFixedArray(array_length).ToObjectChecked();
|
||||
Handle<FixedArray> array(FixedArray::cast(obj));
|
||||
int array_length =
|
||||
heap::FixedArrayLenFromSize(Page::kMaxRegularHeapObjectSize);
|
||||
Handle<FixedArray> array = isolate->factory()->NewFixedArray(array_length);
|
||||
|
||||
// Array should be in the new space.
|
||||
CHECK(heap->InSpace(*array, NEW_SPACE));
|
||||
|
||||
// Simulate a full old space to make promotion fail.
|
||||
SimulateFullSpace(heap->old_space());
|
||||
|
||||
// Call mark compact GC, and it should pass.
|
||||
heap->CollectGarbage(OLD_SPACE);
|
||||
heap->set_force_oom(true);
|
||||
// Array should be in the new space.
|
||||
CHECK(heap->InSpace(*array, NEW_SPACE));
|
||||
heap->CollectAllGarbage();
|
||||
heap->CollectAllGarbage();
|
||||
CHECK(heap->InSpace(*array, NEW_SPACE));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
HEAP_TEST(MarkCompactCollector) {
|
||||
FLAG_incremental_marking = false;
|
||||
FLAG_retain_maps_for_n_gc = 0;
|
||||
|
@ -32,7 +32,6 @@
|
||||
#include "src/v8.h"
|
||||
#include "test/cctest/cctest.h"
|
||||
#include "test/cctest/heap/heap-tester.h"
|
||||
#include "test/cctest/heap/utils-inl.h"
|
||||
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
@ -208,14 +207,15 @@ TEST(Regress3540) {
|
||||
0));
|
||||
TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator);
|
||||
CodeRange* code_range = new CodeRange(isolate);
|
||||
const size_t code_range_size = 4 * Page::kPageSize;
|
||||
size_t code_range_size =
|
||||
kMinimumCodeRangeSize > 0 ? kMinimumCodeRangeSize : 3 * MB;
|
||||
if (!code_range->SetUp(code_range_size)) {
|
||||
return;
|
||||
}
|
||||
|
||||
Address address;
|
||||
size_t size;
|
||||
size_t request_size = code_range_size - 2 * Page::kPageSize;
|
||||
size_t request_size = code_range_size - Page::kPageSize;
|
||||
address = code_range->AllocateRawMemory(
|
||||
request_size, request_size - (2 * MemoryAllocator::CodePageGuardSize()),
|
||||
&size);
|
||||
|
@ -51,7 +51,7 @@
|
||||
#include "src/utils.h"
|
||||
#include "src/vm-state.h"
|
||||
#include "test/cctest/heap/heap-tester.h"
|
||||
#include "test/cctest/heap/utils-inl.h"
|
||||
#include "test/cctest/heap/heap-utils.h"
|
||||
|
||||
static const bool kLogThreading = false;
|
||||
|
||||
@ -634,7 +634,7 @@ TEST(MakingExternalUnalignedOneByteString) {
|
||||
"slice('abcdefghijklmnopqrstuvwxyz');"));
|
||||
|
||||
// Trigger GCs so that the newly allocated string moves to old gen.
|
||||
SimulateFullSpace(CcTest::heap()->old_space());
|
||||
i::heap::SimulateFullSpace(CcTest::heap()->old_space());
|
||||
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
|
||||
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
|
||||
|
||||
@ -14798,8 +14798,8 @@ UNINITIALIZED_TEST(SetJitCodeEventHandler) {
|
||||
for (int i = 0; i < kIterations; ++i) {
|
||||
LocalContext env(isolate);
|
||||
i::AlwaysAllocateScope always_allocate(i_isolate);
|
||||
SimulateFullSpace(i::FLAG_ignition ? heap->old_space()
|
||||
: heap->code_space());
|
||||
i::heap::SimulateFullSpace(i::FLAG_ignition ? heap->old_space()
|
||||
: heap->code_space());
|
||||
CompileRun(script);
|
||||
|
||||
// Keep a strong reference to the code object in the handle scope.
|
||||
@ -19001,7 +19001,7 @@ void PrologueCallbackAlloc(v8::Isolate* isolate,
|
||||
++prologue_call_count_alloc;
|
||||
|
||||
// Simulate full heap to see if we will reenter this callback
|
||||
SimulateFullSpace(CcTest::heap()->new_space());
|
||||
i::heap::SimulateFullSpace(CcTest::heap()->new_space());
|
||||
|
||||
Local<Object> obj = Object::New(isolate);
|
||||
CHECK(!obj.IsEmpty());
|
||||
@ -19021,7 +19021,7 @@ void EpilogueCallbackAlloc(v8::Isolate* isolate,
|
||||
++epilogue_call_count_alloc;
|
||||
|
||||
// Simulate full heap to see if we will reenter this callback
|
||||
SimulateFullSpace(CcTest::heap()->new_space());
|
||||
i::heap::SimulateFullSpace(CcTest::heap()->new_space());
|
||||
|
||||
Local<Object> obj = Object::New(isolate);
|
||||
CHECK(!obj.IsEmpty());
|
||||
|
@ -35,7 +35,7 @@
|
||||
#include "src/global-handles.h"
|
||||
#include "src/macro-assembler.h"
|
||||
#include "src/objects.h"
|
||||
#include "test/cctest/heap/utils-inl.h"
|
||||
#include "test/cctest/heap/heap-utils.h"
|
||||
|
||||
using namespace v8::internal;
|
||||
|
||||
@ -175,8 +175,8 @@ static void TestHashSetCausesGC(Handle<HashSet> table) {
|
||||
|
||||
// Simulate a full heap so that generating an identity hash code
|
||||
// in subsequent calls will request GC.
|
||||
SimulateFullSpace(CcTest::heap()->new_space());
|
||||
SimulateFullSpace(CcTest::heap()->old_space());
|
||||
heap::SimulateFullSpace(CcTest::heap()->new_space());
|
||||
heap::SimulateFullSpace(CcTest::heap()->old_space());
|
||||
|
||||
// Calling Contains() should not cause GC ever.
|
||||
int gc_count = isolate->heap()->gc_count();
|
||||
@ -206,8 +206,8 @@ static void TestHashMapCausesGC(Handle<HashMap> table) {
|
||||
|
||||
// Simulate a full heap so that generating an identity hash code
|
||||
// in subsequent calls will request GC.
|
||||
SimulateFullSpace(CcTest::heap()->new_space());
|
||||
SimulateFullSpace(CcTest::heap()->old_space());
|
||||
heap::SimulateFullSpace(CcTest::heap()->new_space());
|
||||
heap::SimulateFullSpace(CcTest::heap()->old_space());
|
||||
|
||||
// Calling Lookup() should not cause GC ever.
|
||||
CHECK(table->Lookup(key)->IsTheHole());
|
||||
|
@ -47,7 +47,7 @@
|
||||
#include "src/snapshot/snapshot.h"
|
||||
#include "src/snapshot/startup-serializer.h"
|
||||
#include "test/cctest/cctest.h"
|
||||
#include "test/cctest/heap/utils-inl.h"
|
||||
#include "test/cctest/heap/heap-utils.h"
|
||||
|
||||
using namespace v8::internal;
|
||||
|
||||
@ -1828,7 +1828,7 @@ TEST(Regress503552) {
|
||||
false);
|
||||
delete script_data;
|
||||
|
||||
SimulateIncrementalMarking(isolate->heap());
|
||||
heap::SimulateIncrementalMarking(isolate->heap());
|
||||
|
||||
script_data = CodeSerializer::Serialize(isolate, shared, source);
|
||||
delete script_data;
|
||||
|
@ -4,7 +4,7 @@
|
||||
|
||||
#include "src/heap/slots-buffer.h"
|
||||
#include "test/cctest/cctest.h"
|
||||
#include "test/cctest/heap/utils-inl.h"
|
||||
#include "test/cctest/heap/heap-utils.h"
|
||||
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
@ -101,7 +101,7 @@ TEST(FilterInvalidSlotsBufferEntries) {
|
||||
|
||||
// Write an old space reference into field 4 which points to an object on an
|
||||
// evacuation candidate.
|
||||
SimulateFullSpace(heap->old_space());
|
||||
heap::SimulateFullSpace(heap->old_space());
|
||||
Handle<FixedArray> valid_object =
|
||||
isolate->factory()->NewFixedArray(23, TENURED);
|
||||
Page* page = Page::FromAddress(valid_object->address());
|
||||
|
@ -15,7 +15,7 @@
|
||||
#include "src/ic/ic.h"
|
||||
#include "src/macro-assembler.h"
|
||||
#include "test/cctest/cctest.h"
|
||||
#include "test/cctest/heap/utils-inl.h"
|
||||
#include "test/cctest/heap/heap-utils.h"
|
||||
|
||||
using namespace v8::base;
|
||||
using namespace v8::internal;
|
||||
@ -1112,7 +1112,7 @@ TEST(DoScavengeWithIncrementalWriteBarrier) {
|
||||
{
|
||||
AlwaysAllocateScope always_allocate(isolate);
|
||||
// Make sure |obj_value| is placed on an old-space evacuation candidate.
|
||||
SimulateFullSpace(old_space);
|
||||
heap::SimulateFullSpace(old_space);
|
||||
obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
|
||||
ec_page = Page::FromAddress(obj_value->address());
|
||||
}
|
||||
@ -1142,7 +1142,7 @@ TEST(DoScavengeWithIncrementalWriteBarrier) {
|
||||
FLAG_stress_compaction = true;
|
||||
FLAG_manual_evacuation_candidates_selection = true;
|
||||
ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
|
||||
SimulateIncrementalMarking(heap);
|
||||
heap::SimulateIncrementalMarking(heap);
|
||||
// Disable stress compaction mode in order to let GC do scavenge.
|
||||
FLAG_stress_compaction = false;
|
||||
|
||||
@ -1451,7 +1451,7 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
|
||||
CHECK(old_space->Contains(*obj));
|
||||
|
||||
// Make sure |obj_value| is placed on an old-space evacuation candidate.
|
||||
SimulateFullSpace(old_space);
|
||||
heap::SimulateFullSpace(old_space);
|
||||
obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
|
||||
ec_page = Page::FromAddress(obj_value->address());
|
||||
CHECK_NE(ec_page, Page::FromAddress(obj->address()));
|
||||
@ -1460,7 +1460,7 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
|
||||
// Heap is ready, force |ec_page| to become an evacuation candidate and
|
||||
// simulate incremental marking.
|
||||
ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
|
||||
SimulateIncrementalMarking(heap);
|
||||
heap::SimulateIncrementalMarking(heap);
|
||||
|
||||
// Check that everything is ready for triggering incremental write barrier
|
||||
// (i.e. that both |obj| and |obj_value| are black and the marking phase is
|
||||
|
@ -31,7 +31,7 @@
|
||||
|
||||
#include "src/global-handles.h"
|
||||
#include "test/cctest/cctest.h"
|
||||
#include "test/cctest/heap/utils-inl.h"
|
||||
#include "test/cctest/heap/heap-utils.h"
|
||||
|
||||
using namespace v8::internal;
|
||||
|
||||
@ -177,7 +177,7 @@ TEST(Regress2060a) {
|
||||
|
||||
// Start second old-space page so that values land on evacuation candidate.
|
||||
Page* first_page = heap->old_space()->anchor()->next_page();
|
||||
SimulateFullSpace(heap->old_space());
|
||||
heap::SimulateFullSpace(heap->old_space());
|
||||
|
||||
// Fill up weak map with values on an evacuation candidate.
|
||||
{
|
||||
@ -216,7 +216,7 @@ TEST(Regress2060b) {
|
||||
|
||||
// Start second old-space page so that keys land on evacuation candidate.
|
||||
Page* first_page = heap->old_space()->anchor()->next_page();
|
||||
SimulateFullSpace(heap->old_space());
|
||||
heap::SimulateFullSpace(heap->old_space());
|
||||
|
||||
// Fill up weak map with keys on an evacuation candidate.
|
||||
Handle<JSObject> keys[32];
|
||||
@ -249,7 +249,7 @@ TEST(Regress399527) {
|
||||
{
|
||||
HandleScope scope(isolate);
|
||||
AllocateJSWeakMap(isolate);
|
||||
SimulateIncrementalMarking(heap);
|
||||
heap::SimulateIncrementalMarking(heap);
|
||||
}
|
||||
// The weak map is marked black here but leaving the handle scope will make
|
||||
// the object unreachable. Aborting incremental marking will clear all the
|
||||
|
@ -31,7 +31,7 @@
|
||||
|
||||
#include "src/global-handles.h"
|
||||
#include "test/cctest/cctest.h"
|
||||
#include "test/cctest/heap/utils-inl.h"
|
||||
#include "test/cctest/heap/heap-utils.h"
|
||||
|
||||
using namespace v8::internal;
|
||||
|
||||
@ -176,7 +176,7 @@ TEST(WeakSet_Regress2060a) {
|
||||
|
||||
// Start second old-space page so that values land on evacuation candidate.
|
||||
Page* first_page = heap->old_space()->anchor()->next_page();
|
||||
SimulateFullSpace(heap->old_space());
|
||||
heap::SimulateFullSpace(heap->old_space());
|
||||
|
||||
// Fill up weak set with values on an evacuation candidate.
|
||||
{
|
||||
@ -215,7 +215,7 @@ TEST(WeakSet_Regress2060b) {
|
||||
|
||||
// Start second old-space page so that keys land on evacuation candidate.
|
||||
Page* first_page = heap->old_space()->anchor()->next_page();
|
||||
SimulateFullSpace(heap->old_space());
|
||||
heap::SimulateFullSpace(heap->old_space());
|
||||
|
||||
// Fill up weak set with keys on an evacuation candidate.
|
||||
Handle<JSObject> keys[32];
|
||||
|