// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stdlib.h>
|
|
|
|
|
|
|
|
#include "src/heap/heap-inl.h"
|
|
|
|
#include "src/heap/heap.h"
|
|
|
|
#include "src/heap/invalidated-slots-inl.h"
|
|
|
|
#include "src/heap/invalidated-slots.h"
|
2020-05-04 12:24:44 +00:00
|
|
|
#include "src/heap/memory-chunk.h"
|
|
|
|
#include "src/init/v8.h"
|
2017-08-02 17:27:11 +00:00
|
|
|
#include "test/cctest/cctest.h"
|
|
|
|
#include "test/cctest/heap/heap-tester.h"
|
|
|
|
#include "test/cctest/heap/heap-utils.h"
|
|
|
|
|
|
|
|
namespace v8 {
|
|
|
|
namespace internal {
|
2017-08-11 10:04:47 +00:00
|
|
|
namespace heap {
|
2017-08-02 17:27:11 +00:00
|
|
|
|
2017-08-11 10:04:47 +00:00
|
|
|
// Fills one old-space page completely with 256-byte ByteArrays and returns
// that page. Every allocated array is appended to |byte_arrays| so callers
// can later register them as invalidated objects on the page.
Page* HeapTester::AllocateByteArraysOnPage(
    Heap* heap, std::vector<ByteArray>* byte_arrays) {
  // Keep allocation observers paused so that nothing (e.g. a sampled GC)
  // interferes while the page is being filled.
  PauseAllocationObserversScope pause_observers(heap);
  // Pick the payload length so that each array occupies exactly 256 bytes,
  // which lets the loop below tile the page's usable area evenly.
  const int kLength = 256 - ByteArray::kHeaderSize;
  const int kSize = ByteArray::SizeFor(kLength);
  CHECK_EQ(kSize, 256);
  PagedSpace* old_space = heap->old_space();
  Page* page;
  // Fill a page with byte arrays.
  {
    AlwaysAllocateScopeForTesting always_allocate(heap);
    // Make the space look full so the next allocation lands on a new page.
    heap::SimulateFullSpace(old_space);
    ByteArray byte_array;
    CHECK(AllocateByteArrayForTest(heap, kLength, AllocationType::kOld)
              .To(&byte_array));
    byte_arrays->push_back(byte_array);
    // The first array determines which page the rest must land on.
    page = Page::FromHeapObject(byte_array);
    size_t n = page->area_size() / kSize;
    for (size_t i = 1; i < n; i++) {
      CHECK(AllocateByteArrayForTest(heap, kLength, AllocationType::kOld)
                .To(&byte_array));
      byte_arrays->push_back(byte_array);
      // All arrays must end up on the same page.
      CHECK_EQ(page, Page::FromHeapObject(byte_array));
    }
  }
  // A freshly filled page must not have an invalidated-slots set yet.
  CHECK_NULL(page->invalidated_slots<OLD_TO_OLD>());
  return page;
}
|
|
|
|
|
|
|
|
// With no invalidated objects registered on the page, the filter must
// report every slot inside every byte array as valid.
HEAP_TEST(InvalidatedSlotsNoInvalidatedRanges) {
  FLAG_stress_concurrent_allocation = false;  // For AllocateByteArraysOnPage.
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> arrays;
  Page* page = AllocateByteArraysOnPage(heap, &arrays);
  InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
  for (size_t i = 0; i < arrays.size(); i++) {
    ByteArray array = arrays[i];
    Address first_slot = array.address() + ByteArray::kHeaderSize;
    Address limit = array.address() + array.Size();
    for (Address slot = first_slot; slot < limit; slot += kTaggedSize) {
      CHECK(filter.IsValid(slot));
    }
  }
}
|
|
|
|
|
|
|
|
// Registers every other byte array as invalidated and verifies that the
// filter rejects slots inside registered arrays while accepting the rest.
HEAP_TEST(InvalidatedSlotsSomeInvalidatedRanges) {
  FLAG_stress_concurrent_allocation = false;  // For AllocateByteArraysOnPage.
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> arrays;
  Page* page = AllocateByteArraysOnPage(heap, &arrays);
  // Mark the arrays at even indices as invalidated.
  for (size_t idx = 0; idx < arrays.size(); idx += 2) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(arrays[idx]);
  }
  InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
  for (size_t idx = 0; idx < arrays.size(); idx++) {
    ByteArray array = arrays[idx];
    const bool registered = (idx % 2 == 0);
    Address slot = array.address() + ByteArray::kHeaderSize;
    const Address limit = array.address() + array.Size();
    for (; slot < limit; slot += kTaggedSize) {
      if (registered) {
        CHECK(!filter.IsValid(slot));
      } else {
        CHECK(filter.IsValid(slot));
      }
    }
  }
}
|
|
|
|
|
|
|
|
// Registers every byte array on the page as invalidated; the filter must
// then reject every slot in every array.
HEAP_TEST(InvalidatedSlotsAllInvalidatedRanges) {
  FLAG_stress_concurrent_allocation = false;  // For AllocateByteArraysOnPage.
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> arrays;
  Page* page = AllocateByteArraysOnPage(heap, &arrays);
  // Mark every array as invalidated.
  for (ByteArray array : arrays) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(array);
  }
  InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
  for (ByteArray array : arrays) {
    Address slot = array.address() + ByteArray::kHeaderSize;
    const Address limit = array.address() + array.Size();
    while (slot < limit) {
      CHECK(!filter.IsValid(slot));
      slot += kTaggedSize;
    }
  }
}
|
|
|
|
|
|
|
|
// Trims every registered byte array down to zero length and checks how the
// filter treats the slots in the freed tails, depending on sweeping state.
HEAP_TEST(InvalidatedSlotsAfterTrimming) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> byte_arrays;
  Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
  // Register all the byte arrays as invalidated.
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
  }
  // Trim byte arrays and check that the slots outside the byte arrays are
  // considered invalid if the old space page was swept.
  InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    ByteArray byte_array = byte_arrays[i];
    Address start = byte_array.address() + ByteArray::kHeaderSize;
    Address end = byte_array.address() + byte_array.Size();
    // Trim away the whole payload: [start, end) is now outside the object.
    heap->RightTrimFixedArray(byte_array, byte_array.length());
    for (Address addr = start; addr < end; addr += kTaggedSize) {
      CHECK_EQ(filter.IsValid(addr), page->SweepingDone());
    }
  }
}
|
|
|
|
|
|
|
|
// On an evacuation candidate, registering invalidated objects is a no-op,
// so the filter must keep reporting every slot as valid.
HEAP_TEST(InvalidatedSlotsEvacuationCandidate) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> arrays;
  Page* page = AllocateByteArraysOnPage(heap, &arrays);
  page->MarkEvacuationCandidate();
  // Registering the arrays should have no effect because the page is
  // already marked as an evacuation candidate.
  for (ByteArray array : arrays) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(array);
  }
  // All slots must still be valid.
  InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
  for (ByteArray array : arrays) {
    const Address limit = array.address() + array.Size();
    for (Address slot = array.address() + ByteArray::kHeaderSize; slot < limit;
         slot += kTaggedSize) {
      CHECK(filter.IsValid(slot));
    }
  }
}
|
|
|
|
|
2017-08-03 17:58:03 +00:00
|
|
|
// Regression test: the filter must handle a registered object that is
// smaller than the objects that follow it on the page.
HEAP_TEST(InvalidatedSlotsResetObjectRegression) {
  FLAG_stress_concurrent_allocation = false;  // For AllocateByteArraysOnPage.
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> byte_arrays;
  Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
  // Ensure that the first array has a smaller size than the rest.
  heap->RightTrimFixedArray(byte_arrays[0], byte_arrays[0].length() - 8);
  // Register all the byte arrays as invalidated.
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
  }
  // All slots must still be invalid.
  InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
  for (size_t i = 0; i < byte_arrays.size(); i++) {
    ByteArray byte_array = byte_arrays[i];
    Address start = byte_array.address() + ByteArray::kHeaderSize;
    Address end = byte_array.address() + byte_array.Size();
    for (Address addr = start; addr < end; addr += kTaggedSize) {
      CHECK(!filter.IsValid(addr));
    }
  }
}
|
|
|
|
|
2018-08-10 13:33:29 +00:00
|
|
|
// Makes |old_space| look full and then allocates a FixedArray of |length|
// in old space, so the new array lands on a fresh page.
Handle<FixedArray> AllocateArrayOnFreshPage(Isolate* isolate,
                                            PagedSpace* old_space, int length) {
  AlwaysAllocateScopeForTesting always_allocate(isolate->heap());
  heap::SimulateFullSpace(old_space);
  Handle<FixedArray> fresh_array =
      isolate->factory()->NewFixedArray(length, AllocationType::kOld);
  return fresh_array;
}
|
|
|
|
|
|
|
|
// Allocates a FixedArray of |length| on a fresh old-space page and marks
// that page as an evacuation candidate.
Handle<FixedArray> AllocateArrayOnEvacuationCandidate(Isolate* isolate,
                                                      PagedSpace* old_space,
                                                      int length) {
  Handle<FixedArray> array =
      AllocateArrayOnFreshPage(isolate, old_space, length);
  heap::ForceEvacuationCandidate(Page::FromHeapObject(*array));
  return array;
}
|
|
|
|
|
|
|
|
// Right-trims a fixed array during incremental marking after its elements
// pointed at an evacuation candidate, then runs young and full GCs; the
// GCs must tolerate the recorded slots in the trimmed-away tail.
HEAP_TEST(InvalidatedSlotsRightTrimFixedArray) {
  if (!FLAG_incremental_marking) return;
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  HandleScope scope(isolate);
  PagedSpace* old_space = heap->old_space();
  // Allocate a dummy page to be swept by the sweeper during evacuation.
  AllocateArrayOnFreshPage(isolate, old_space, 1);
  Handle<FixedArray> evacuated =
      AllocateArrayOnEvacuationCandidate(isolate, old_space, 1);
  Handle<FixedArray> trimmed = AllocateArrayOnFreshPage(isolate, old_space, 10);
  heap::SimulateIncrementalMarking(heap);
  // Store references to the evacuation candidate in all but element 0.
  for (int i = 1; i < trimmed->length(); i++) {
    trimmed->set(i, *evacuated);
  }
  {
    HandleScope new_scope(isolate);
    // Overwrite those elements with a short-lived object, then trim the
    // array down to a single element.
    Handle<HeapObject> dead = factory->NewFixedArray(1);
    for (int i = 1; i < trimmed->length(); i++) {
      trimmed->set(i, *dead);
    }
    heap->RightTrimFixedArray(*trimmed, trimmed->length() - 1);
  }
  CcTest::CollectGarbage(i::NEW_SPACE);
  CcTest::CollectGarbage(i::OLD_SPACE);
}
|
|
|
|
|
|
|
|
// Same as InvalidatedSlotsRightTrimFixedArray, but the trimmed array is
// large enough to be allocated in large-object space.
HEAP_TEST(InvalidatedSlotsRightTrimLargeFixedArray) {
  if (!FLAG_incremental_marking) return;
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  HandleScope scope(isolate);
  PagedSpace* old_space = heap->old_space();
  // Allocate a dummy page to be swept by the sweeper during evacuation.
  AllocateArrayOnFreshPage(isolate, old_space, 1);
  Handle<FixedArray> evacuated =
      AllocateArrayOnEvacuationCandidate(isolate, old_space, 1);
  Handle<FixedArray> trimmed;
  {
    AlwaysAllocateScopeForTesting always_allocate(heap);
    // Request more elements than fit in a regular heap object; the DCHECK
    // below confirms the array ended up in large-object space.
    trimmed = factory->NewFixedArray(
        kMaxRegularHeapObjectSize / kTaggedSize + 100, AllocationType::kOld);
    DCHECK(MemoryChunk::FromHeapObject(*trimmed)->InLargeObjectSpace());
  }
  heap::SimulateIncrementalMarking(heap);
  // Store references to the evacuation candidate in all but element 0.
  for (int i = 1; i < trimmed->length(); i++) {
    trimmed->set(i, *evacuated);
  }
  {
    HandleScope new_scope(isolate);
    // Overwrite those elements with a short-lived object, then trim the
    // array down to a single element.
    Handle<HeapObject> dead = factory->NewFixedArray(1);
    for (int i = 1; i < trimmed->length(); i++) {
      trimmed->set(i, *dead);
    }
    heap->RightTrimFixedArray(*trimmed, trimmed->length() - 1);
  }
  CcTest::CollectGarbage(i::NEW_SPACE);
  CcTest::CollectGarbage(i::OLD_SPACE);
}
|
|
|
|
|
|
|
|
// Like InvalidatedSlotsRightTrimFixedArray, but trims the array from the
// front (left trim) instead of the back.
HEAP_TEST(InvalidatedSlotsLeftTrimFixedArray) {
  if (!FLAG_incremental_marking) return;
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  HandleScope scope(isolate);
  PagedSpace* old_space = heap->old_space();
  // Allocate a dummy page to be swept by the sweeper during evacuation.
  AllocateArrayOnFreshPage(isolate, old_space, 1);
  Handle<FixedArray> evacuated =
      AllocateArrayOnEvacuationCandidate(isolate, old_space, 1);
  Handle<FixedArray> trimmed = AllocateArrayOnFreshPage(isolate, old_space, 10);
  heap::SimulateIncrementalMarking(heap);
  // Store references to the evacuation candidate in all but the last
  // element.
  for (int i = 0; i + 1 < trimmed->length(); i++) {
    trimmed->set(i, *evacuated);
  }
  {
    HandleScope new_scope(isolate);
    // Overwrite elements with a short-lived object, then cut off all but
    // one element from the front of the array.
    Handle<HeapObject> dead = factory->NewFixedArray(1);
    for (int i = 1; i < trimmed->length(); i++) {
      trimmed->set(i, *dead);
    }
    heap->LeftTrimFixedArray(*trimmed, trimmed->length() - 1);
  }
  CcTest::CollectGarbage(i::NEW_SPACE);
  CcTest::CollectGarbage(i::OLD_SPACE);
}
|
|
|
|
|
|
|
|
// Normalizes a JSObject (fast -> dictionary properties) during incremental
// marking after its property slots pointed at an evacuation candidate, then
// runs young and full GCs, which must tolerate the stale recorded slots.
HEAP_TEST(InvalidatedSlotsFastToSlow) {
  if (!FLAG_incremental_marking) return;
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  PagedSpace* old_space = heap->old_space();

  HandleScope scope(isolate);

  Handle<String> name = factory->InternalizeUtf8String("TestObject");
  Handle<String> prop_name1 = factory->InternalizeUtf8String("prop1");
  Handle<String> prop_name2 = factory->InternalizeUtf8String("prop2");
  Handle<String> prop_name3 = factory->InternalizeUtf8String("prop3");
  // Allocate a dummy page to be swept by the sweeper during evacuation.
  AllocateArrayOnFreshPage(isolate, old_space, 1);
  Handle<FixedArray> evacuated =
      AllocateArrayOnEvacuationCandidate(isolate, old_space, 1);
  // Allocate a dummy page to ensure that the JSObject is allocated on
  // a fresh page.
  AllocateArrayOnFreshPage(isolate, old_space, 1);
  Handle<JSObject> obj;
  {
    AlwaysAllocateScopeForTesting always_allocate(heap);
    Handle<JSFunction> function = factory->NewFunctionForTesting(name);
    // Three properties are added to the object below.
    function->shared().set_expected_nof_properties(3);
    obj = factory->NewJSObject(function, AllocationType::kOld);
  }
  // Start incremental marking.
  heap::SimulateIncrementalMarking(heap);
  // Set properties to point to the evacuation candidate.
  Object::SetProperty(isolate, obj, prop_name1, evacuated).Check();
  Object::SetProperty(isolate, obj, prop_name2, evacuated).Check();
  Object::SetProperty(isolate, obj, prop_name3, evacuated).Check();

  {
    HandleScope new_scope(isolate);
    // Point the properties at a short-lived object instead, then migrate
    // the object to a normalized (dictionary-mode) map.
    Handle<HeapObject> dead = factory->NewFixedArray(1);
    Object::SetProperty(isolate, obj, prop_name1, dead).Check();
    Object::SetProperty(isolate, obj, prop_name2, dead).Check();
    Object::SetProperty(isolate, obj, prop_name3, dead).Check();
    Handle<Map> map(obj->map(), isolate);
    Handle<Map> normalized_map =
        Map::Normalize(isolate, map, CLEAR_INOBJECT_PROPERTIES, "testing");
    JSObject::MigrateToMap(isolate, obj, normalized_map);
  }
  CcTest::CollectGarbage(i::NEW_SPACE);
  CcTest::CollectGarbage(i::OLD_SPACE);
}
|
|
|
|
|
2019-08-26 13:12:22 +00:00
|
|
|
// Freeing the whole page area must remove every registered invalidated
// object from the page's OLD_TO_NEW set.
HEAP_TEST(InvalidatedSlotsCleanupFull) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> arrays;
  Page* page = AllocateByteArraysOnPage(heap, &arrays);
  // Register every byte array as invalidated.
  for (ByteArray array : arrays) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(array);
  }

  // Mark the full page area as free.
  InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);
  cleanup.Free(page->area_start(), page->area_end());

  // No invalidated objects may remain on the page after cleanup.
  CHECK(page->invalidated_slots<OLD_TO_NEW>()->empty());
}
|
|
|
|
|
|
|
|
// Freeing each object's range one at a time must also empty the page's
// OLD_TO_NEW invalidated-object set.
HEAP_TEST(InvalidatedSlotsCleanupEachObject) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> arrays;
  Page* page = AllocateByteArraysOnPage(heap, &arrays);
  // Register every byte array as invalidated.
  for (ByteArray array : arrays) {
    page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(array);
  }

  // Release the arrays individually, in allocation order.
  InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);

  for (ByteArray array : arrays) {
    const Address begin = array.address();
    cleanup.Free(begin, begin + array.Size());
  }

  // No invalidated objects may remain on the page after cleanup.
  CHECK(page->invalidated_slots<OLD_TO_NEW>()->empty());
}
|
|
|
|
|
|
|
|
// Right-trims a registered object and frees only the memory behind it;
// the object's invalidated-slots entry must survive the cleanup.
HEAP_TEST(InvalidatedSlotsCleanupRightTrim) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  std::vector<ByteArray> arrays;
  Page* page = AllocateByteArraysOnPage(heap, &arrays);

  CHECK_GT(arrays.size(), 1);
  ByteArray& trimmed = arrays[1];

  heap->RightTrimFixedArray(trimmed, trimmed.length() - 8);
  page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(trimmed);

  // Free the memory that follows the (now shorter) invalidated object.
  InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);
  const Address tail_start = trimmed.address() + trimmed.Size();
  cleanup.Free(tail_start, page->area_end());

  // The single registered entry must still be present after cleanup.
  InvalidatedSlots* remaining = page->invalidated_slots<OLD_TO_NEW>();
  CHECK_EQ(remaining->size(), 1);
}
|
|
|
|
|
2017-08-11 10:04:47 +00:00
|
|
|
} // namespace heap
|
2017-08-02 17:27:11 +00:00
|
|
|
} // namespace internal
|
|
|
|
} // namespace v8
|