// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/api/api-inl.h"
#include "src/execution/isolate.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/heap-inl.h"
#include "src/heap/spaces.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/objects-inl.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"

namespace {

using LocalTracker = i::LocalArrayBufferTracker;

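// Two tracking mechanisms are exercised below. With ArrayBufferExtensions
// enabled (V8_ARRAY_BUFFER_EXTENSION_BOOL), each JSArrayBuffer owns an
// ArrayBufferExtension that the ArrayBufferSweeper keeps in either a young or
// an old list. Without extensions, buffers are registered in the per-page
// LocalArrayBufferTracker. The helpers below query whichever mechanism is in
// use so that tests can assert on tracking state uniformly.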
bool IsTracked(i::JSArrayBuffer buf) {
  return i::ArrayBufferTracker::IsTracked(buf);
}

bool IsTrackedYoung(i::Heap* heap, i::ArrayBufferExtension* extension) {
  bool in_young = heap->array_buffer_sweeper()->young().Contains(extension);
  bool in_old = heap->array_buffer_sweeper()->old().Contains(extension);
  CHECK(!(in_young && in_old));
  return in_young;
}

bool IsTrackedOld(i::Heap* heap, i::ArrayBufferExtension* extension) {
  bool in_young = heap->array_buffer_sweeper()->young().Contains(extension);
  bool in_old = heap->array_buffer_sweeper()->old().Contains(extension);
  CHECK(!(in_young && in_old));
  return in_old;
}

bool IsTracked(i::Heap* heap, i::ArrayBufferExtension* extension) {
  bool in_young = heap->array_buffer_sweeper()->young().Contains(extension);
  bool in_old = heap->array_buffer_sweeper()->old().Contains(extension);
  CHECK(!(in_young && in_old));
  return in_young || in_old;
}

bool IsTracked(i::Heap* heap, i::JSArrayBuffer buffer) {
  return V8_ARRAY_BUFFER_EXTENSION_BOOL ? IsTracked(heap, buffer.extension())
                                        : IsTracked(buffer);
}

}  // namespace

namespace v8 {
namespace internal {
namespace heap {

// The following tests make sure that JSArrayBuffer tracking works as expected
// when moving the objects through various spaces during GC phases.

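// A buffer reachable through a handle must stay tracked across full GCs;
// once the last handle dies, subsequent full GCs must drop it from tracking.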
TEST(ArrayBuffer_OnlyMC) {
  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  JSArrayBuffer raw_ab;
  {
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
    CHECK(IsTracked(*buf));
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTracked(*buf));
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTracked(*buf));
    raw_ab = *buf;
    // Prohibit page from being released.
    Page::FromHeapObject(*buf)->MarkNeverEvacuate();
  }
  // 2 GCs are needed because we promote to old space as live, meaning that
  // we will survive one GC.
  heap::GcAndSweep(heap, OLD_SPACE);
  heap::GcAndSweep(heap, OLD_SPACE);
  CHECK(!IsTracked(raw_ab));
}

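// Same scenario as above, but asserting on the ArrayBufferExtension: after a
// full GC the extension must sit in the sweeper's old list, and it must be
// gone once the buffer dies.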
TEST(ArrayBuffer_OnlyMC_Extension) {
  if (!V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  FLAG_concurrent_array_buffer_sweeping = false;

  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  ArrayBufferExtension* extension;
  {
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
    extension = buf->extension();
    CHECK(FLAG_single_generation ? IsTrackedOld(heap, extension)
                                 : IsTrackedYoung(heap, extension));
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTrackedOld(heap, extension));
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTrackedOld(heap, extension));
  }
  heap::GcAndSweep(heap, OLD_SPACE);
  CHECK(!IsTracked(heap, extension));
}

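// A live buffer must remain tracked across repeated scavenges; once the last
// handle dies, full GCs must remove it from tracking.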
TEST(ArrayBuffer_OnlyScavenge) {
  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  JSArrayBuffer raw_ab;
  {
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
    CHECK(IsTracked(*buf));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(*buf));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(*buf));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(*buf));
    raw_ab = *buf;
    // Prohibit page from being released.
    Page::FromHeapObject(*buf)->MarkNeverEvacuate();
  }
  // 2 GCs are needed because we promote to old space as live, meaning that
  // we will survive one GC.
  heap::GcAndSweep(heap, OLD_SPACE);
  heap::GcAndSweep(heap, OLD_SPACE);
  CHECK(!IsTracked(raw_ab));
}

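// With extensions, the buffer starts out in the young list, moves to the old
// list once scavenges promote it, and is dropped after the buffer dies.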
TEST(ArrayBuffer_OnlyScavenge_Extension) {
  if (!V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  if (FLAG_single_generation) return;
  FLAG_concurrent_array_buffer_sweeping = false;

  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  ArrayBufferExtension* extension;
  {
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
    extension = buf->extension();
    CHECK(IsTrackedYoung(heap, extension));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTrackedYoung(heap, extension));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTrackedOld(heap, extension));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTrackedOld(heap, extension));
  }
  heap::GcAndSweep(heap, OLD_SPACE);
  CHECK(!IsTracked(heap, extension));
}

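// Interleaves scavenges and full GCs: the buffer must stay tracked the whole
// time and only be dropped after the last handle dies.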
TEST(ArrayBuffer_ScavengeAndMC) {
  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  JSArrayBuffer raw_ab;
  {
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
    CHECK(IsTracked(*buf));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(*buf));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(*buf));
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTracked(*buf));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(*buf));
    raw_ab = *buf;
    // Prohibit page from being released.
    Page::FromHeapObject(*buf)->MarkNeverEvacuate();
  }
  // 2 GCs are needed because we promote to old space as live, meaning that
  // we will survive one GC.
  heap::GcAndSweep(heap, OLD_SPACE);
  heap::GcAndSweep(heap, OLD_SPACE);
  CHECK(!IsTracked(raw_ab));
}

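// Extension variant of the above: the extension moves from the young to the
// old list when the buffer is promoted and stays in the old list across
// further scavenges and full GCs until the buffer dies.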
TEST(ArrayBuffer_ScavengeAndMC_Extension) {
  if (!V8_ARRAY_BUFFER_EXTENSION_BOOL || FLAG_single_generation) return;
  FLAG_concurrent_array_buffer_sweeping = false;

  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  ArrayBufferExtension* extension;
  {
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
    extension = buf->extension();
    CHECK(IsTrackedYoung(heap, extension));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTrackedYoung(heap, extension));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTrackedOld(heap, extension));
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTrackedOld(heap, extension));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTrackedOld(heap, extension));
  }
  heap::GcAndSweep(heap, OLD_SPACE);
  CHECK(!IsTracked(heap, extension));
}

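// A buffer on an evacuation candidate page must remain tracked after the page
// is compacted, i.e. tracking has to follow the object to its new page.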
TEST(ArrayBuffer_Compaction) {
  if (FLAG_never_compact || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
  heap::AbandonCurrentlyFreeMemory(heap->old_space());

  v8::HandleScope handle_scope(isolate);
  Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
  Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
  CHECK(IsTracked(*buf1));
  heap::GcAndSweep(heap, NEW_SPACE);
  heap::GcAndSweep(heap, NEW_SPACE);

  Page* page_before_gc = Page::FromHeapObject(*buf1);
  heap::ForceEvacuationCandidate(page_before_gc);
  CHECK(IsTracked(*buf1));

  CcTest::CollectAllGarbage();

  Page* page_after_gc = Page::FromHeapObject(*buf1);
  CHECK(IsTracked(*buf1));

  CHECK_NE(page_before_gc, page_after_gc);
}

TEST(ArrayBuffer_UnregisterDuringSweep) {
  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  // Regular pages in old space (without compaction) are processed concurrently
  // in the sweeper. If we happen to unregister a buffer (either explicitly, or
  // implicitly through e.g. |Detach|) we need to sync with the sweeper
  // task.
  //
  // Note: This test will only fail on TSAN configurations.

  // Disable verify-heap since it forces sweeping to be completed in the
  // epilogue of the GC.
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = false;
#endif  // VERIFY_HEAP
  ManualGCScope manual_gc_scope;

  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
  {
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);

    {
      v8::HandleScope handle_scope(isolate);
      // Allocate another buffer on the same page to force processing a
      // non-empty set of buffers in the last GC.
      Local<v8::ArrayBuffer> ab2 = v8::ArrayBuffer::New(isolate, 100);
      Handle<JSArrayBuffer> buf2 = v8::Utils::OpenHandle(*ab2);
      CHECK(IsTracked(*buf));
      CHECK(IsTracked(*buf2));
      heap::GcAndSweep(heap, NEW_SPACE);
      CHECK(IsTracked(*buf));
      CHECK(IsTracked(*buf2));
      heap::GcAndSweep(heap, NEW_SPACE);
      CHECK(IsTracked(*buf));
      CHECK(IsTracked(*buf2));
    }

    CcTest::CollectGarbage(OLD_SPACE);
    // |Detach| will cause the buffer to be |Unregister|ed. Without
    // barriers and proper synchronization this will trigger a data race on
    // TSAN.
    ab->Detach();
  }
}

TEST(ArrayBuffer_NonLivePromotion) {
  if (!FLAG_incremental_marking || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  ManualGCScope manual_gc_scope;
  // The test verifies that the marking state is preserved when promoting
  // a buffer to old space.
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  JSArrayBuffer raw_ab;
  {
    v8::HandleScope handle_scope(isolate);
    Handle<FixedArray> root =
        heap->isolate()->factory()->NewFixedArray(1, AllocationType::kOld);
    {
      v8::HandleScope handle_scope(isolate);
      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
      root->set(0, *buf);  // Buffer that should not be promoted as live.
    }
    heap::SimulateIncrementalMarking(heap, false);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    raw_ab = JSArrayBuffer::cast(root->get(0));
    root->set(0, ReadOnlyRoots(heap).undefined_value());
    heap::SimulateIncrementalMarking(heap, true);
    // Prohibit page from being released.
    Page::FromHeapObject(raw_ab)->MarkNeverEvacuate();
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(!IsTracked(raw_ab));
  }
}

TEST(ArrayBuffer_LivePromotion) {
  if (!FLAG_incremental_marking || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  ManualGCScope manual_gc_scope;
  // The test verifies that the marking state is preserved when promoting
  // a buffer to old space.
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  JSArrayBuffer raw_ab;
  {
    v8::HandleScope handle_scope(isolate);
    Handle<FixedArray> root =
        heap->isolate()->factory()->NewFixedArray(1, AllocationType::kOld);
    {
      v8::HandleScope handle_scope(isolate);
      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
      root->set(0, *buf);  // Buffer that should be promoted as live.
    }
    heap::SimulateIncrementalMarking(heap, true);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    raw_ab = JSArrayBuffer::cast(root->get(0));
    root->set(0, ReadOnlyRoots(heap).undefined_value());
    // Prohibit page from being released.
    Page::FromHeapObject(raw_ab)->MarkNeverEvacuate();
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTracked(raw_ab));
  }
}

TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) {
  if (!i::FLAG_incremental_marking || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  ManualGCScope manual_gc_scope;
  // The test verifies that the marking state is preserved across semispace
  // copy.
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  heap::SealCurrentObjects(heap);
  {
    v8::HandleScope handle_scope(isolate);
    Handle<FixedArray> root =
        heap->isolate()->factory()->NewFixedArray(1, AllocationType::kOld);
    {
      v8::HandleScope handle_scope(isolate);
      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
      root->set(0, *buf);  // Buffer that should be promoted as live.
      Page::FromHeapObject(*buf)->MarkNeverEvacuate();
    }
    std::vector<Handle<FixedArray>> handles;
    // Make the whole page transition from new->old, getting the buffers
    // processed in the sweeper (relying on marking information) instead of
    // processing during newspace evacuation.
    heap::FillCurrentPage(heap->new_space(), &handles);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    heap::SimulateIncrementalMarking(heap, true);
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
  }
}

TEST(ArrayBuffer_PagePromotion_Extension) {
  if (!i::FLAG_incremental_marking || !V8_ARRAY_BUFFER_EXTENSION_BOOL ||
      i::FLAG_single_generation)
    return;
  i::FLAG_always_promote_young_mc = true;
  i::FLAG_concurrent_array_buffer_sweeping = false;

  ManualGCScope manual_gc_scope;
  // The test verifies that the marking state is preserved across semispace
  // copy.
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  heap::SealCurrentObjects(heap);
  {
    v8::HandleScope handle_scope(isolate);
    Handle<FixedArray> root =
        heap->isolate()->factory()->NewFixedArray(1, AllocationType::kOld);
    ArrayBufferExtension* extension;
    {
      v8::HandleScope handle_scope(isolate);
      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
      extension = buf->extension();
      root->set(0, *buf);  // Buffer that should be promoted as live.
    }
    std::vector<Handle<FixedArray>> handles;
    // Create live objects on the page such that the whole page gets promoted.
    heap::FillCurrentPage(heap->new_space(), &handles);
    CHECK(IsTrackedYoung(heap, extension));
    heap::SimulateIncrementalMarking(heap, true);
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTrackedOld(heap, extension));
  }
}

UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
  if (FLAG_optimize_for_size || FLAG_single_generation) return;
  ManualGCScope manual_gc_scope;
  // The test allocates JSArrayBuffers on different pages before triggering a
  // full GC that performs the semispace copy. If parallelized, this test
  // ensures proper synchronization in TSAN configurations.
  FLAG_min_semi_space_size = Max(2 * Page::kPageSize / MB, 1);
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    Heap* heap = i_isolate->heap();

    // Ensure heap is in a clean state.
    CcTest::CollectAllGarbage(i_isolate);
    CcTest::CollectAllGarbage(i_isolate);

    Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
    heap::FillCurrentPage(heap->new_space());
    Local<v8::ArrayBuffer> ab2 = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf2 = v8::Utils::OpenHandle(*ab2);
    CHECK_NE(Page::FromHeapObject(*buf1), Page::FromHeapObject(*buf2));
    heap::GcAndSweep(heap, OLD_SPACE);
  }
  isolate->Dispose();
}

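// Allocating an ArrayBuffer must increase the new space external backing
// store counter by exactly the requested byte size.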
TEST(ArrayBuffer_ExternalBackingStoreSizeIncreases) {
  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
  ExternalBackingStoreType type = ExternalBackingStoreType::kArrayBuffer;

  const size_t backing_store_before =
      heap->new_space()->ExternalBackingStoreBytes(type);
  {
    const size_t kArraybufferSize = 117;
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, kArraybufferSize);
    USE(ab);
    const size_t backing_store_after =
        heap->new_space()->ExternalBackingStoreBytes(type);
    CHECK_EQ(kArraybufferSize, backing_store_after - backing_store_before);
  }
}

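// Once the buffer dies and has been swept by a full GC, the new space
// external backing store counter must return to its previous value.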
TEST(ArrayBuffer_ExternalBackingStoreSizeDecreases) {
  FLAG_concurrent_array_buffer_sweeping = false;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
  ExternalBackingStoreType type = ExternalBackingStoreType::kArrayBuffer;

  const size_t backing_store_before =
      heap->new_space()->ExternalBackingStoreBytes(type);
  {
    const size_t kArraybufferSize = 117;
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, kArraybufferSize);
    USE(ab);
  }
  heap::GcAndSweep(heap, OLD_SPACE);
  const size_t backing_store_after =
      heap->new_space()->ExternalBackingStoreBytes(type);
  CHECK_EQ(0, backing_store_after - backing_store_before);
}

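// When mark-compact evacuates the buffer's page, its backing store bytes must
// be accounted to old space; after the buffer dies they must be released.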
TEST(ArrayBuffer_ExternalBackingStoreSizeIncreasesMarkCompact) {
  if (FLAG_never_compact) return;
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_concurrent_array_buffer_sweeping = false;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
  heap::AbandonCurrentlyFreeMemory(heap->old_space());
  ExternalBackingStoreType type = ExternalBackingStoreType::kArrayBuffer;

  const size_t backing_store_before =
      heap->old_space()->ExternalBackingStoreBytes(type);

  const size_t kArraybufferSize = 117;
  {
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab1 =
        v8::ArrayBuffer::New(isolate, kArraybufferSize);
    Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
    CHECK(IsTracked(heap, *buf1));
    heap::GcAndSweep(heap, NEW_SPACE);
    heap::GcAndSweep(heap, NEW_SPACE);

    Page* page_before_gc = Page::FromHeapObject(*buf1);
    heap::ForceEvacuationCandidate(page_before_gc);
    CHECK(IsTracked(heap, *buf1));

    CcTest::CollectAllGarbage();

    const size_t backing_store_after =
        heap->old_space()->ExternalBackingStoreBytes(type);
    CHECK_EQ(kArraybufferSize, backing_store_after - backing_store_before);
  }

  heap::GcAndSweep(heap, OLD_SPACE);
  const size_t backing_store_after =
      heap->old_space()->ExternalBackingStoreBytes(type);
  CHECK_EQ(0, backing_store_after - backing_store_before);
}

}  // namespace heap
}  // namespace internal
}  // namespace v8