// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
2019-05-22 07:55:37 +00:00
|
|
|
#include "src/execution/isolate.h"
|
2016-06-20 13:19:25 +00:00
|
|
|
#include "src/heap/array-buffer-tracker.h"
|
2018-04-09 19:11:22 +00:00
|
|
|
#include "src/heap/factory.h"
|
2016-09-01 12:01:33 +00:00
|
|
|
#include "src/heap/spaces-inl.h"
|
2019-05-23 08:51:46 +00:00
|
|
|
#include "src/objects/objects-inl.h"
|
2016-06-20 13:19:25 +00:00
|
|
|
#include "test/cctest/cctest.h"
|
2016-10-25 15:33:56 +00:00
|
|
|
#include "test/cctest/heap/heap-tester.h"
|
2016-06-20 13:19:25 +00:00
|
|
|
#include "test/cctest/heap/heap-utils.h"
|
|
|
|
|
2017-07-14 12:36:04 +00:00
|
|
|
namespace v8 {
|
|
|
|
namespace internal {
|
2017-08-11 10:04:47 +00:00
|
|
|
namespace heap {
|
2017-07-14 12:36:04 +00:00
|
|
|
|
2018-10-15 13:10:51 +00:00
|
|
|
// Tests don't work when --optimize-for-size is set.
|
|
|
|
#ifndef V8_LITE_MODE
|
|
|
|
|
2016-06-20 13:19:25 +00:00
|
|
|
namespace {
|
|
|
|
|
2016-10-25 15:33:56 +00:00
|
|
|
// Creates a fresh isolate with flags tuned so that page promotion is
// deterministic: promotion is force-enabled (threshold 0) and all parallel
// GC work that could fragment the semi spaces is turned off.
v8::Isolate* NewIsolateForPagePromotion(int min_semi_space_size = 8,
                                        int max_semi_space_size = 8) {
  // Parallel evacuation messes with fragmentation in a way that objects that
  // should be copied in semi space are promoted to old space because of
  // fragmentation.
  FLAG_parallel_compaction = false;
  FLAG_page_promotion = true;
  FLAG_page_promotion_threshold = 0;
  // Parallel scavenge introduces too much fragmentation.
  FLAG_parallel_scavenge = false;
  FLAG_min_semi_space_size = min_semi_space_size;
  // We cannot optimize for size as we require a new space with more than one
  // page.
  FLAG_optimize_for_size = false;
  // Set max_semi_space_size because it could've been initialized by an
  // implication of optimize_for_size.
  FLAG_max_semi_space_size = max_semi_space_size;
  v8::Isolate::CreateParams params;
  params.array_buffer_allocator = CcTest::array_buffer_allocator();
  return v8::Isolate::New(params);
}
|
|
|
|
|
2019-09-10 01:19:59 +00:00
|
|
|
// Walks |handles| back-to-front and returns the first page still in new
// space, i.e. the page of the most recently allocated object; nullptr if no
// handle resides in new space.
Page* FindLastPageInNewSpace(const std::vector<Handle<FixedArray>>& handles) {
  for (size_t i = handles.size(); i > 0; --i) {
    // One deref gets the Handle, the second deref gets the FixedArray.
    Page* page = Page::FromHeapObject(*handles[i - 1]);
    if (page->InNewSpace()) return page;
  }
  return nullptr;
}
|
2016-06-20 13:19:25 +00:00
|
|
|
|
2017-07-14 12:36:04 +00:00
|
|
|
} // namespace
|
2016-06-20 13:19:25 +00:00
|
|
|
|
|
|
|
// Checks that a fully live new-space page (not holding the age mark) is
// promoted wholesale to old space during a full GC.
UNINITIALIZED_TEST(PagePromotion_NewToOld) {
  if (!i::FLAG_incremental_marking) return;
  if (!i::FLAG_page_promotion) return;
  // Keep unrelated GCs from interfering with the hand-crafted heap state.
  ManualGCScope manual_gc_scope;

  v8::Isolate* isolate = NewIsolateForPagePromotion();
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    Heap* heap = i_isolate->heap();

    // Ensure that the new space is empty so that the page to be promoted
    // does not contain the age mark.
    heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
    heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);

    // Fill new space with live arrays, then scavenge once so the survivors
    // end up on pages eligible for promotion.
    std::vector<Handle<FixedArray>> handles;
    heap::SimulateFullSpace(heap->new_space(), &handles);
    heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
    CHECK_GT(handles.size(), 0u);
    Page* const to_be_promoted_page = FindLastPageInNewSpace(handles);
    CHECK_NOT_NULL(to_be_promoted_page);
    CHECK(!to_be_promoted_page->Contains(heap->new_space()->age_mark()));
    // To perform a sanity check on live bytes we need to mark the heap.
    heap::SimulateIncrementalMarking(heap, true);
    // Sanity check that the page meets the requirements for promotion.
    // Threshold is a percentage of the allocatable bytes in a data page.
    const int threshold_bytes = static_cast<int>(
        FLAG_page_promotion_threshold *
        MemoryChunkLayout::AllocatableMemoryInDataPage() / 100);
    CHECK_GE(heap->incremental_marking()->marking_state()->live_bytes(
                 to_be_promoted_page),
             threshold_bytes);

    // Actual checks: The page is in new space first, but is moved to old space
    // during a full GC.
    CHECK(heap->new_space()->ContainsSlow(to_be_promoted_page->address()));
    CHECK(!heap->old_space()->ContainsSlow(to_be_promoted_page->address()));
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(!heap->new_space()->ContainsSlow(to_be_promoted_page->address()));
    CHECK(heap->old_space()->ContainsSlow(to_be_promoted_page->address()));
  }
  isolate->Dispose();
}
|
|
|
|
|
|
|
|
// Checks that a fully live new-space page can be moved within new space
// (semi-space flip) without evacuating its objects individually.
UNINITIALIZED_TEST(PagePromotion_NewToNew) {
  if (!i::FLAG_page_promotion || FLAG_always_promote_young_mc) return;

  v8::Isolate* isolate = NewIsolateForPagePromotion();
  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    Heap* heap = i_isolate->heap();

    std::vector<Handle<FixedArray>> handles;
    heap::SimulateFullSpace(heap->new_space(), &handles);
    CHECK_GT(handles.size(), 0u);
    // The most recently allocated object definitely sits on a page without
    // the age mark, which makes its page a candidate for moving.
    Handle<FixedArray> youngest = handles.back();
    Page* candidate_page = Page::FromHeapObject(*youngest);
    CHECK(!candidate_page->Contains(heap->new_space()->age_mark()));
    CHECK(candidate_page->Contains(youngest->address()));
    CHECK(heap->new_space()->ToSpaceContainsSlow(youngest->address()));
    heap::GcAndSweep(heap, OLD_SPACE);
    // After the full GC the object is still in to-space and still on the
    // same page, i.e. the whole page was moved rather than its contents.
    CHECK(heap->new_space()->ToSpaceContainsSlow(youngest->address()));
    CHECK(candidate_page->Contains(youngest->address()));
  }
  isolate->Dispose();
}
|
|
|
|
|
|
|
|
UNINITIALIZED_TEST(PagePromotion_NewToNewJSArrayBuffer) {
  if (!i::FLAG_page_promotion || FLAG_always_promote_young_mc) return;

  // Test makes sure JSArrayBuffer backing stores are still tracked after
  // new-to-new promotion.
  v8::Isolate* isolate = NewIsolateForPagePromotion();
  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    Heap* heap = i_isolate->heap();

    // Fill the current page which potentially contains the age mark.
    heap::FillCurrentPage(heap->new_space());
    // Allocate a buffer we would like to check against.
    Handle<JSArrayBuffer> buffer =
        i_isolate->factory()
            ->NewJSArrayBufferAndBackingStore(100,
                                              InitializedFlag::kZeroInitialized)
            .ToHandleChecked();
    std::vector<Handle<FixedArray>> handles;
    // Simulate a full space, filling the interesting page with live objects.
    heap::SimulateFullSpace(heap->new_space(), &handles);
    CHECK_GT(handles.size(), 0u);
    // First object in handles should be on the same page as the allocated
    // JSArrayBuffer.
    Handle<FixedArray> first_object = handles.front();
    Page* to_be_promoted_page = Page::FromHeapObject(*first_object);
    CHECK(!to_be_promoted_page->Contains(heap->new_space()->age_mark()));
    CHECK(to_be_promoted_page->Contains(first_object->address()));
    CHECK(to_be_promoted_page->Contains(buffer->address()));
    CHECK(heap->new_space()->ToSpaceContainsSlow(first_object->address()));
    CHECK(heap->new_space()->ToSpaceContainsSlow(buffer->address()));
    heap::GcAndSweep(heap, OLD_SPACE);
    // After the GC both objects are still in to-space and on the same page:
    // the page was moved new-to-new as a whole.
    CHECK(heap->new_space()->ToSpaceContainsSlow(first_object->address()));
    CHECK(heap->new_space()->ToSpaceContainsSlow(buffer->address()));
    CHECK(to_be_promoted_page->Contains(first_object->address()));
    CHECK(to_be_promoted_page->Contains(buffer->address()));
    // NOTE(review): tracking only applies in the non-extension configuration;
    // with array buffer extensions the tracker is presumably bypassed —
    // confirm against ArrayBufferTracker/extension code.
    if (!V8_ARRAY_BUFFER_EXTENSION_BOOL)
      CHECK(ArrayBufferTracker::IsTracked(*buffer));
  }
  isolate->Dispose();
}
|
|
|
|
|
|
|
|
UNINITIALIZED_TEST(PagePromotion_NewToOldJSArrayBuffer) {
  if (!i::FLAG_page_promotion) return;

  // Test makes sure JSArrayBuffer backing stores are still tracked after
  // new-to-old promotion.
  v8::Isolate* isolate = NewIsolateForPagePromotion();
  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    Heap* heap = i_isolate->heap();

    // Fill the current page which potentially contains the age mark.
    heap::FillCurrentPage(heap->new_space());
    // Allocate a buffer we would like to check against.
    Handle<JSArrayBuffer> buffer =
        i_isolate->factory()
            ->NewJSArrayBufferAndBackingStore(100,
                                              InitializedFlag::kZeroInitialized)
            .ToHandleChecked();
    std::vector<Handle<FixedArray>> handles;
    // Simulate a full space, filling the interesting page with live objects.
    heap::SimulateFullSpace(heap->new_space(), &handles);
    CHECK_GT(handles.size(), 0u);
    // First object in handles should be on the same page as the allocated
    // JSArrayBuffer.
    Handle<FixedArray> first_object = handles.front();
    Page* to_be_promoted_page = Page::FromHeapObject(*first_object);
    CHECK(!to_be_promoted_page->Contains(heap->new_space()->age_mark()));
    CHECK(to_be_promoted_page->Contains(first_object->address()));
    CHECK(to_be_promoted_page->Contains(buffer->address()));
    CHECK(heap->new_space()->ToSpaceContainsSlow(first_object->address()));
    CHECK(heap->new_space()->ToSpaceContainsSlow(buffer->address()));
    // NOTE(review): two full GCs appear to be required before the page lands
    // in old space — confirm against the page-promotion heuristics.
    heap::GcAndSweep(heap, OLD_SPACE);
    heap::GcAndSweep(heap, OLD_SPACE);
    // Both objects are now in old space, still on their original page.
    CHECK(heap->old_space()->ContainsSlow(first_object->address()));
    CHECK(heap->old_space()->ContainsSlow(buffer->address()));
    CHECK(to_be_promoted_page->Contains(first_object->address()));
    CHECK(to_be_promoted_page->Contains(buffer->address()));
    // NOTE(review): tracking only applies in the non-extension configuration;
    // with array buffer extensions the tracker is presumably bypassed —
    // confirm against ArrayBufferTracker/extension code.
    if (!V8_ARRAY_BUFFER_EXTENSION_BOOL)
      CHECK(ArrayBufferTracker::IsTracked(*buffer));
  }
  isolate->Dispose();
}
|
|
|
|
|
2016-10-25 15:33:56 +00:00
|
|
|
// Regression test for chromium:658718 — exercises shrinking the new space
// while sweeper tasks are artificially delayed, presumably the condition that
// triggered the original bug (TODO: confirm against the bug report).
UNINITIALIZED_HEAP_TEST(Regress658718) {
  if (!i::FLAG_page_promotion || FLAG_always_promote_young_mc) return;

  // Smaller min semi space (4) than max (8) so Grow()/Shrink() below actually
  // change the space size.
  v8::Isolate* isolate = NewIsolateForPagePromotion(4, 8);
  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    Heap* heap = i_isolate->heap();
    // Keep sweeper tasks from running until explicitly restarted below.
    heap->delay_sweeper_tasks_for_testing_ = true;
    heap->new_space()->Grow();
    {
      v8::HandleScope inner_handle_scope(isolate);
      std::vector<Handle<FixedArray>> handles;
      heap::SimulateFullSpace(heap->new_space(), &handles);
      CHECK_GT(handles.size(), 0u);
      // Last object in handles should definitely be on a page that does not
      // contain the age mark, thus qualifying for moving.
      Handle<FixedArray> last_object = handles.back();
      Page* to_be_promoted_page = Page::FromHeapObject(*last_object);
      CHECK(!to_be_promoted_page->Contains(heap->new_space()->age_mark()));
      CHECK(to_be_promoted_page->Contains(last_object->address()));
      CHECK(heap->new_space()->ToSpaceContainsSlow(last_object->address()));
      heap->CollectGarbage(OLD_SPACE, i::GarbageCollectionReason::kTesting);
      // Page was moved within new space; object stayed put on it.
      CHECK(heap->new_space()->ToSpaceContainsSlow(last_object->address()));
      CHECK(to_be_promoted_page->Contains(last_object->address()));
    }
    // Drop the handles (inner scope closed) and scavenge so the space can
    // shrink.
    heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
    heap->new_space()->Shrink();
    heap->memory_allocator()->unmapper()->EnsureUnmappingCompleted();
    // Re-enable and finish the delayed sweeping.
    heap->delay_sweeper_tasks_for_testing_ = false;
    heap->mark_compact_collector()->sweeper()->StartSweeperTasks();
    heap->mark_compact_collector()->EnsureSweepingCompleted();
  }
  isolate->Dispose();
}
|
|
|
|
|
2018-10-15 13:10:51 +00:00
|
|
|
#endif // V8_LITE_MODE
|
|
|
|
|
2017-08-11 10:04:47 +00:00
|
|
|
} // namespace heap
|
2016-06-20 13:19:25 +00:00
|
|
|
} // namespace internal
|
|
|
|
} // namespace v8
|