// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <stdlib.h>

#include "src/init/v8.h"

#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/invalidated-slots-inl.h"
#include "src/heap/invalidated-slots.h"
#include "src/heap/store-buffer.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-tester.h"
#include "test/cctest/heap/heap-utils.h"

namespace v8 {
namespace internal {
namespace heap {
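// Test helper: fills a fresh old-space page with 256-byte ByteArrays and
// returns the page, giving the tests below a page with a fully known layout.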
Page* HeapTester::AllocateByteArraysOnPage(
    Heap* heap, std::vector<ByteArray>* byte_arrays) {
  PauseAllocationObserversScope pause_observers(heap);
  const int kLength = 256 - ByteArray::kHeaderSize;
  const int kSize = ByteArray::SizeFor(kLength);
  CHECK_EQ(kSize, 256);
  Isolate* isolate = heap->isolate();
  PagedSpace* old_space = heap->old_space();
  Page* page;
  // Fill a page with byte arrays.
  {
    AlwaysAllocateScope always_allocate(isolate);
    heap::SimulateFullSpace(old_space);
    ByteArray byte_array;
    CHECK(AllocateByteArrayForTest(heap, kLength, AllocationType::kOld)
              .To(&byte_array));
    byte_arrays->push_back(byte_array);
    page = Page::FromHeapObject(byte_array);
    size_t n = page->area_size() / kSize;
    for (size_t i = 1; i < n; i++) {
      CHECK(AllocateByteArrayForTest(heap, kLength, AllocationType::kOld)
                .To(&byte_array));
      byte_arrays->push_back(byte_array);
      CHECK_EQ(page, Page::FromHeapObject(byte_array));
    }
  }
  CHECK_NULL(page->invalidated_slots<OLD_TO_OLD>());
  return page;
}

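// Counts the distinct slot addresses that the remembered set of the given
// direction records for the memory chunk containing |obj|.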
template <RememberedSetType direction>
static size_t GetRememberedSetSize(HeapObject obj) {
  std::set<Address> slots;
  RememberedSet<direction>::Iterate(
      MemoryChunk::FromHeapObject(obj),
      [&slots](MaybeObjectSlot slot) {
        slots.insert(slot.address());
        return KEEP_SLOT;
      },
      SlotSet::KEEP_EMPTY_BUCKETS);
  return slots.size();
}

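// Checks that stores into an old-space array are recorded in the store
// buffer only when the stored value is in the young generation, and that a
// scavenge moves those entries into the OLD_TO_NEW remembered set.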
HEAP_TEST(StoreBuffer_CreateFromOldToYoung) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  heap::SealCurrentObjects(heap);
  CHECK(heap->store_buffer()->Empty());

  HandleScope scope(isolate);
  const int n = 10;
  Handle<FixedArray> old = factory->NewFixedArray(n, AllocationType::kOld);

  // Fill the array with refs to both old and new targets.
  {
    const auto prev_top = *(heap->store_buffer_top_address());
    HandleScope scope_inner(isolate);
    intptr_t expected_slots_count = 0;

    // Add refs from old to new.
    for (int i = 0; i < n / 2; i++) {
      Handle<Object> number = factory->NewHeapNumber(i);
      old->set(i, *number);
      expected_slots_count++;
    }
    // Add refs from old to old.
    for (int i = n / 2; i < n; i++) {
      Handle<Object> number = factory->NewHeapNumber<AllocationType::kOld>(i);
      old->set(i, *number);
    }
    // All old-to-new refs, and only those, should have been captured.
    const auto new_top = *(heap->store_buffer_top_address());
    const intptr_t added_slots_count =
        (new_top - prev_top) / kSystemPointerSize;
    CHECK_EQ(expected_slots_count, added_slots_count);
  }

  // GC should flush the store buffer into remembered sets and retain the
  // target young objects.
  CHECK_EQ(0, GetRememberedSetSize<OLD_TO_NEW>(*old));
  CcTest::CollectGarbage(i::NEW_SPACE);

  CHECK(heap->store_buffer()->Empty());
  CHECK_EQ(n / 2, GetRememberedSetSize<OLD_TO_NEW>(*old));
  CHECK(Heap::InYoungGeneration(old->get(0)));
}

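// Writes enough old-to-new references to overflow the store buffer and
// exercise its buffer-flipping path.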
HEAP_TEST(StoreBuffer_Overflow) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  // Add enough refs from old to new to cause overflow of both buffer chunks.
  const int n = 2 * StoreBuffer::kStoreBufferSize / kSystemPointerSize + 1;
  HandleScope scope(isolate);
  Handle<FixedArray> old = factory->NewFixedArray(n, AllocationType::kOld);
  for (int i = 0; i < n; i++) {
    Handle<Object> number = factory->NewHeapNumber(i);
    old->set(i, *number);
  }

  // No explicit validation: the buffer-flipping code triggered by the
  // overflow self-validates with asserts.
}

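// Checks that references between young-generation objects are not tracked,
// even after the source object has aged, and that promotion to old space
// fills the OLD_TO_NEW remembered set directly during GC.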
HEAP_TEST(StoreBuffer_NotUsedOnAgingObjectWithRefsToYounger) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  heap::SealCurrentObjects(heap);
  CHECK(heap->store_buffer()->Empty());

  const int n = 10;
  HandleScope scope(isolate);
  Handle<FixedArray> arr = factory->NewFixedArray(n);

  // Transition the array into the older new tier.
  CcTest::CollectGarbage(i::NEW_SPACE);
  CHECK(Heap::InYoungGeneration(*arr));

  // Fill the array with younger objects.
  {
    HandleScope scope_inner(isolate);
    for (int i = 0; i < n; i++) {
      Handle<Object> number = factory->NewHeapNumber(i);
      arr->set(i, *number);
    }

    // The references aren't crossing generations yet, so none should be
    // tracked.
    CHECK(heap->store_buffer()->Empty());
  }

  // Promote the array into old space. Its elements are still in new space,
  // so the old-to-new refs are inserted directly into the remembered set
  // during GC.
  CcTest::CollectGarbage(i::NEW_SPACE);

  CHECK(heap->InOldSpace(*arr));
  CHECK(Heap::InYoungGeneration(arr->get(n / 2)));
  CHECK(heap->store_buffer()->Empty());
  CHECK_EQ(n, GetRememberedSetSize<OLD_TO_NEW>(*arr));
}

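// Checks that OLD_TO_NEW slots created in a large object end up in the
// remembered set of its large page after GC.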
HEAP_TEST(RememberedSet_LargePage) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  heap::SealCurrentObjects(heap);
  CHECK(heap->store_buffer()->Empty());
  v8::HandleScope scope(CcTest::isolate());

  // Allocate an object in large object space.
  const int count = Max(FixedArray::kMaxRegularLength + 1, 128 * KB);
  Handle<FixedArray> arr = factory->NewFixedArray(count, AllocationType::kOld);
  CHECK(heap->lo_space()->Contains(*arr));

  // Create OLD_TO_NEW references from the large object.
  {
    v8::HandleScope short_lived(CcTest::isolate());
    Handle<Object> number = factory->NewHeapNumber(42);
    arr->set(0, *number);
    arr->set(count - 1, *number);
    CHECK(!heap->store_buffer()->Empty());
  }

  // GC should flush the store buffer into the remembered set of the large
  // page and also keep the young targets alive.
  CcTest::CollectAllGarbage();

  CHECK(heap->store_buffer()->Empty());
  CHECK(Heap::InYoungGeneration(arr->get(0)));
  CHECK(Heap::InYoungGeneration(arr->get(count - 1)));
  CHECK_EQ(2, GetRememberedSetSize<OLD_TO_NEW>(*arr));
}

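// With no invalidated objects registered, every slot on the page should
// pass the filter.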
HEAP_TEST(InvalidatedSlotsNoInvalidatedRanges) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> byte_arrays;
  Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
  InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
  for (ByteArray byte_array : byte_arrays) {
    Address start = byte_array.address() + ByteArray::kHeaderSize;
    Address end = byte_array.address() + byte_array.Size();
    for (Address addr = start; addr < end; addr += kTaggedSize) {
      CHECK(filter.IsValid(addr));
    }
  }
}

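// Slots inside registered (invalidated) objects should be filtered out,
// while slots in unregistered objects remain valid.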
HEAP_TEST(InvalidatedSlotsSomeInvalidatedRanges) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> byte_arrays;
  Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
  // Register every second byte array as invalidated.
  for (size_t i = 0; i < byte_arrays.size(); i += 2) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
  }
  InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    ByteArray byte_array = byte_arrays[i];
    Address start = byte_array.address() + ByteArray::kHeaderSize;
    Address end = byte_array.address() + byte_array.Size();
    for (Address addr = start; addr < end; addr += kTaggedSize) {
      if (i % 2 == 0) {
        CHECK(!filter.IsValid(addr));
      } else {
        CHECK(filter.IsValid(addr));
      }
    }
  }
}

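// With every object on the page registered as invalidated, no slot should
// pass the filter.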
HEAP_TEST(InvalidatedSlotsAllInvalidatedRanges) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> byte_arrays;
  Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
  // Register all the byte arrays as invalidated.
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
  }
  InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    ByteArray byte_array = byte_arrays[i];
    Address start = byte_array.address() + ByteArray::kHeaderSize;
    Address end = byte_array.address() + byte_array.Size();
    for (Address addr = start; addr < end; addr += kTaggedSize) {
      CHECK(!filter.IsValid(addr));
    }
  }
}

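// Exercises the filter on invalidated objects that are right-trimmed to
// zero length; whether the freed tail counts as valid depends on the
// sweeping state of the page.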
HEAP_TEST(InvalidatedSlotsAfterTrimming) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> byte_arrays;
  Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
  // Register all the byte arrays as invalidated.
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
  }
  // Trim byte arrays and check that the slots outside the byte arrays are
  // considered invalid if the old space page was swept.
  InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    ByteArray byte_array = byte_arrays[i];
    Address start = byte_array.address() + ByteArray::kHeaderSize;
    Address end = byte_array.address() + byte_array.Size();
    heap->RightTrimFixedArray(byte_array, byte_array.length());
    for (Address addr = start; addr < end; addr += kTaggedSize) {
      CHECK_EQ(filter.IsValid(addr), page->SweepingDone());
    }
  }
}

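// On an evacuation candidate no invalidated slots are tracked, so
// registering objects should be a no-op and all slots stay valid.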
HEAP_TEST(InvalidatedSlotsEvacuationCandidate) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> byte_arrays;
  Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
  page->MarkEvacuationCandidate();
  // Register all the byte arrays as invalidated. This should be a no-op
  // because the page is marked as an evacuation candidate.
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
  }
  // All slots must still be valid.
  InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    ByteArray byte_array = byte_arrays[i];
    Address start = byte_array.address() + ByteArray::kHeaderSize;
    Address end = byte_array.address() + byte_array.Size();
    for (Address addr = start; addr < end; addr += kTaggedSize) {
      CHECK(filter.IsValid(addr));
    }
  }
}

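// Regression test for the filter when the first invalidated object on the
// page is shorter than the objects that follow it.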
HEAP_TEST(InvalidatedSlotsResetObjectRegression) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> byte_arrays;
  Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
  // Ensure that the first array has a smaller size than the rest.
  heap->RightTrimFixedArray(byte_arrays[0], byte_arrays[0].length() - 8);
  // Register all the byte arrays as invalidated.
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
  }
  // All slots must still be invalid.
  InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    ByteArray byte_array = byte_arrays[i];
    Address start = byte_array.address() + ByteArray::kHeaderSize;
    Address end = byte_array.address() + byte_array.Size();
    for (Address addr = start; addr < end; addr += kTaggedSize) {
      CHECK(!filter.IsValid(addr));
    }
  }
}

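// Helpers for the trimming tests below: allocate a fixed array on a fresh
// old-space page, optionally turning that page into an evacuation candidate.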
Handle<FixedArray> AllocateArrayOnFreshPage(Isolate* isolate,
                                            PagedSpace* old_space, int length) {
  AlwaysAllocateScope always_allocate(isolate);
  heap::SimulateFullSpace(old_space);
  return isolate->factory()->NewFixedArray(length, AllocationType::kOld);
}

Handle<FixedArray> AllocateArrayOnEvacuationCandidate(Isolate* isolate,
                                                      PagedSpace* old_space,
                                                      int length) {
  Handle<FixedArray> object =
      AllocateArrayOnFreshPage(isolate, old_space, length);
  heap::ForceEvacuationCandidate(Page::FromHeapObject(*object));
  return object;
}

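// Right-trims an array whose slots were recorded during incremental marking
// and relies on the subsequent GCs to cope with the invalidated tail.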
HEAP_TEST(InvalidatedSlotsRightTrimFixedArray) {
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  HandleScope scope(isolate);
  PagedSpace* old_space = heap->old_space();
  // Allocate a dummy page to be swept by the sweeper during evacuation.
  AllocateArrayOnFreshPage(isolate, old_space, 1);
  Handle<FixedArray> evacuated =
      AllocateArrayOnEvacuationCandidate(isolate, old_space, 1);
  Handle<FixedArray> trimmed = AllocateArrayOnFreshPage(isolate, old_space, 10);
  heap::SimulateIncrementalMarking(heap);
  for (int i = 1; i < trimmed->length(); i++) {
    trimmed->set(i, *evacuated);
  }
  {
    HandleScope scope(isolate);
    Handle<HeapObject> dead = factory->NewFixedArray(1);
    for (int i = 1; i < trimmed->length(); i++) {
      trimmed->set(i, *dead);
    }
    heap->RightTrimFixedArray(*trimmed, trimmed->length() - 1);
  }
  CcTest::CollectGarbage(i::NEW_SPACE);
  CcTest::CollectGarbage(i::OLD_SPACE);
}

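// Same as above, but the trimmed array is large enough to live in large
// object space.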
HEAP_TEST(InvalidatedSlotsRightTrimLargeFixedArray) {
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  HandleScope scope(isolate);
  PagedSpace* old_space = heap->old_space();
  // Allocate a dummy page to be swept by the sweeper during evacuation.
  AllocateArrayOnFreshPage(isolate, old_space, 1);
  Handle<FixedArray> evacuated =
      AllocateArrayOnEvacuationCandidate(isolate, old_space, 1);
  Handle<FixedArray> trimmed;
  {
    AlwaysAllocateScope always_allocate(isolate);
    trimmed = factory->NewFixedArray(
        kMaxRegularHeapObjectSize / kTaggedSize + 100, AllocationType::kOld);
    DCHECK(MemoryChunk::FromHeapObject(*trimmed)->InLargeObjectSpace());
  }
  heap::SimulateIncrementalMarking(heap);
  for (int i = 1; i < trimmed->length(); i++) {
    trimmed->set(i, *evacuated);
  }
  {
    HandleScope scope(isolate);
    Handle<HeapObject> dead = factory->NewFixedArray(1);
    for (int i = 1; i < trimmed->length(); i++) {
      trimmed->set(i, *dead);
    }
    heap->RightTrimFixedArray(*trimmed, trimmed->length() - 1);
  }
  CcTest::CollectGarbage(i::NEW_SPACE);
  CcTest::CollectGarbage(i::OLD_SPACE);
}

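// Same setup as the right-trim test, but the array is trimmed from the
// left, which moves the object start.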
HEAP_TEST(InvalidatedSlotsLeftTrimFixedArray) {
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  HandleScope scope(isolate);
  PagedSpace* old_space = heap->old_space();
  // Allocate a dummy page to be swept by the sweeper during evacuation.
  AllocateArrayOnFreshPage(isolate, old_space, 1);
  Handle<FixedArray> evacuated =
      AllocateArrayOnEvacuationCandidate(isolate, old_space, 1);
  Handle<FixedArray> trimmed = AllocateArrayOnFreshPage(isolate, old_space, 10);
  heap::SimulateIncrementalMarking(heap);
  for (int i = 0; i + 1 < trimmed->length(); i++) {
    trimmed->set(i, *evacuated);
  }
  {
    HandleScope scope(isolate);
    Handle<HeapObject> dead = factory->NewFixedArray(1);
    for (int i = 1; i < trimmed->length(); i++) {
      trimmed->set(i, *dead);
    }
    heap->LeftTrimFixedArray(*trimmed, trimmed->length() - 1);
  }
  CcTest::CollectGarbage(i::NEW_SPACE);
  CcTest::CollectGarbage(i::OLD_SPACE);
}

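// Normalizes a JSObject (fast-to-slow properties migration) during
// incremental marking, invalidating the recorded slots of its old layout,
// and relies on the subsequent GCs to cope with them.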
HEAP_TEST(InvalidatedSlotsFastToSlow) {
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  PagedSpace* old_space = heap->old_space();

  HandleScope scope(isolate);

  Handle<String> name = factory->InternalizeUtf8String("TestObject");
  Handle<String> prop_name1 = factory->InternalizeUtf8String("prop1");
  Handle<String> prop_name2 = factory->InternalizeUtf8String("prop2");
  Handle<String> prop_name3 = factory->InternalizeUtf8String("prop3");
  // Allocate a dummy page to be swept by the sweeper during evacuation.
  AllocateArrayOnFreshPage(isolate, old_space, 1);
  Handle<FixedArray> evacuated =
      AllocateArrayOnEvacuationCandidate(isolate, old_space, 1);
  // Allocate a dummy page to ensure that the JSObject is allocated on
  // a fresh page.
  AllocateArrayOnFreshPage(isolate, old_space, 1);
  Handle<JSObject> obj;
  {
    AlwaysAllocateScope always_allocate(isolate);
    Handle<JSFunction> function = factory->NewFunctionForTest(name);
    function->shared().set_expected_nof_properties(3);
    obj = factory->NewJSObject(function, AllocationType::kOld);
  }
  // Start incremental marking.
  heap::SimulateIncrementalMarking(heap);
  // Set properties to point to the evacuation candidate.
  Object::SetProperty(isolate, obj, prop_name1, evacuated).Check();
  Object::SetProperty(isolate, obj, prop_name2, evacuated).Check();
  Object::SetProperty(isolate, obj, prop_name3, evacuated).Check();

  {
    HandleScope scope(isolate);
    Handle<HeapObject> dead = factory->NewFixedArray(1);
    Object::SetProperty(isolate, obj, prop_name1, dead).Check();
    Object::SetProperty(isolate, obj, prop_name2, dead).Check();
    Object::SetProperty(isolate, obj, prop_name3, dead).Check();
    Handle<Map> map(obj->map(), isolate);
    Handle<Map> normalized_map =
        Map::Normalize(isolate, map, CLEAR_INOBJECT_PROPERTIES, "testing");
    JSObject::MigrateToMap(isolate, obj, normalized_map);
  }
  CcTest::CollectGarbage(i::NEW_SPACE);
  CcTest::CollectGarbage(i::OLD_SPACE);
}

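// Freeing the whole page area should drop every object registered in the
// OLD_TO_NEW invalidated set.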
HEAP_TEST(InvalidatedSlotsCleanupFull) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> byte_arrays;
  Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
  // Register all byte arrays as invalidated.
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i]);
  }

  // Mark the full page as free.
  InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);
  cleanup.Free(page->area_start(), page->area_end());

  // After cleanup there should be no invalidated objects left on the page.
  CHECK(page->invalidated_slots<OLD_TO_NEW>()->empty());
}

|
|
|
|
|
|
|
|
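// Same as above, but the objects' ranges are freed one by one.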
HEAP_TEST(InvalidatedSlotsCleanupEachObject) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> byte_arrays;
  Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
  // Register all byte arrays as invalidated.
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i]);
  }

  // Mark each object on the page as free.
  InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);

  for (size_t i = 0; i < byte_arrays.size(); i++) {
    Address free_start = byte_arrays[i].address();
    Address free_end = free_start + byte_arrays[i].Size();
    cleanup.Free(free_start, free_end);
  }

  // After cleanup there should be no invalidated objects left on the page.
  CHECK(page->invalidated_slots<OLD_TO_NEW>()->empty());
}

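// Freeing only the memory behind a right-trimmed invalidated object should
// keep that object registered.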
HEAP_TEST(InvalidatedSlotsCleanupRightTrim) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> byte_arrays;
  Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);

  CHECK_GT(byte_arrays.size(), 1);
  ByteArray& invalidated = byte_arrays[1];

  heap->RightTrimFixedArray(invalidated, invalidated.length() - 8);
  page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(invalidated);

  // Free the memory at the end of the invalidated object.
  InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);
  Address free_start = invalidated.address() + invalidated.Size();
  cleanup.Free(free_start, page->area_end());

  // After cleanup the invalidated object should be smaller.
  InvalidatedSlots* invalidated_slots = page->invalidated_slots<OLD_TO_NEW>();
  CHECK_EQ(invalidated_slots->size(), 1);
}

}  // namespace heap
}  // namespace internal
}  // namespace v8