2012-02-07 08:00:36 +00:00
|
|
|
// Copyright 2012 the V8 project authors. All rights reserved.
|
2008-08-22 13:33:59 +00:00
|
|
|
// Redistribution and use in source and binary forms, with or without
|
|
|
|
// modification, are permitted provided that the following conditions are
|
|
|
|
// met:
|
|
|
|
//
|
|
|
|
// * Redistributions of source code must retain the above copyright
|
|
|
|
// notice, this list of conditions and the following disclaimer.
|
|
|
|
// * Redistributions in binary form must reproduce the above
|
|
|
|
// copyright notice, this list of conditions and the following
|
|
|
|
// disclaimer in the documentation and/or other materials provided
|
|
|
|
// with the distribution.
|
|
|
|
// * Neither the name of Google Inc. nor the names of its
|
|
|
|
// contributors may be used to endorse or promote products derived
|
|
|
|
// from this software without specific prior written permission.
|
|
|
|
//
|
|
|
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
|
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
|
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
|
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
|
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
|
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
|
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
|
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
|
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
|
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
|
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
|
|
|
|
|
|
#include <stdlib.h>
|
|
|
|
|
2011-12-01 12:32:38 +00:00
|
|
|
#ifdef __linux__
|
2014-06-20 08:40:11 +00:00
|
|
|
#include <errno.h>
|
2011-12-01 12:32:38 +00:00
|
|
|
#include <fcntl.h>
|
2014-06-20 08:40:11 +00:00
|
|
|
#include <sys/stat.h>
|
|
|
|
#include <sys/types.h>
|
2011-12-01 12:32:38 +00:00
|
|
|
#include <unistd.h>
|
|
|
|
#endif
|
|
|
|
|
2013-12-18 08:09:37 +00:00
|
|
|
#include <utility>
|
2013-05-02 20:18:42 +00:00
|
|
|
|
2019-05-24 13:51:59 +00:00
|
|
|
#include "src/init/v8.h"
|
2008-08-22 13:33:59 +00:00
|
|
|
|
2019-05-22 12:44:24 +00:00
|
|
|
#include "src/handles/global-handles.h"
|
2017-01-16 17:21:20 +00:00
|
|
|
#include "src/heap/mark-compact-inl.h"
|
|
|
|
#include "src/heap/mark-compact.h"
|
2019-05-23 08:51:46 +00:00
|
|
|
#include "src/objects/objects-inl.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "test/cctest/cctest.h"
|
2015-12-09 11:25:26 +00:00
|
|
|
#include "test/cctest/heap/heap-tester.h"
|
2016-05-20 13:30:22 +00:00
|
|
|
#include "test/cctest/heap/heap-utils.h"
|
2008-08-22 13:33:59 +00:00
|
|
|
|
2017-08-11 10:04:47 +00:00
|
|
|
namespace v8 {
|
|
|
|
namespace internal {
|
|
|
|
namespace heap {
|
|
|
|
|
2016-05-20 13:30:22 +00:00
|
|
|
// Tests that an object that survives two full GCs is promoted from the new
// space to the old space.
TEST(Promotion) {
  // Promotion requires a young generation.
  if (FLAG_single_generation) return;
  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  {
    v8::HandleScope sc(CcTest::isolate());
    Heap* heap = isolate->heap();

    // Make the heap state deterministic before allocating the test object.
    heap::SealCurrentObjects(heap);

    // Allocate the largest regular (non-large-object) FixedArray.
    int array_length = heap::FixedArrayLenFromSize(kMaxRegularHeapObjectSize);
    Handle<FixedArray> array = isolate->factory()->NewFixedArray(array_length);

    // Array should be in the new space.
    CHECK(heap->InSpace(*array, NEW_SPACE));
    // Two full GCs: the array is still referenced via the handle, so it
    // survives both and must have been promoted.
    CcTest::CollectAllGarbage();
    CcTest::CollectAllGarbage();
    CHECK(heap->InSpace(*array, OLD_SPACE));
  }
}
|
|
|
|
|
2015-08-21 12:40:22 +00:00
|
|
|
// Tests that under a forced-OOM condition a full GC does NOT promote a
// surviving new-space object: it stays in the new space.
HEAP_TEST(NoPromotion) {
  // With always_promote_young_mc the collector promotes unconditionally,
  // which contradicts what this test asserts.
  if (FLAG_always_promote_young_mc) return;
  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
  // Page promotion allows pages to be moved to old space even in the case of
  // OOM scenarios.
  FLAG_page_promotion = false;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  {
    v8::HandleScope sc(CcTest::isolate());
    Heap* heap = isolate->heap();

    // Make the heap state deterministic before allocating the test object.
    heap::SealCurrentObjects(heap);

    int array_length = heap::FixedArrayLenFromSize(kMaxRegularHeapObjectSize);
    Handle<FixedArray> array = isolate->factory()->NewFixedArray(array_length);

    // Simulate an out-of-memory old space so promotion cannot allocate there.
    heap->set_force_oom(true);
    // Array should be in the new space.
    CHECK(heap->InSpace(*array, NEW_SPACE));
    CcTest::CollectAllGarbage();
    CcTest::CollectAllGarbage();
    // Despite surviving two full GCs, the array must not have been promoted.
    CHECK(heap->InSpace(*array, NEW_SPACE));
  }
}
|
|
|
|
|
2018-04-09 19:11:22 +00:00
|
|
|
// This is the same as Factory::NewMap, except it doesn't retry on
// allocation failure (it returns the failed AllocationResult to the caller
// so tests can drive the heap to exhaustion).
AllocationResult HeapTester::AllocateMapForTest(Isolate* isolate) {
  Heap* heap = isolate->heap();
  HeapObject obj;
  // Raw allocation in the map space; on failure this is a retry result.
  AllocationResult alloc = heap->AllocateRaw(Map::kSize, AllocationType::kMap);
  if (!alloc.To(&obj)) return alloc;
  // Install the meta map first so the object is a valid Map. The write
  // barrier is skipped because meta_map is a read-only root.
  obj.set_map_after_allocation(ReadOnlyRoots(heap).meta_map(),
                               SKIP_WRITE_BARRIER);
  return isolate->factory()->InitializeMap(Map::cast(obj), JS_OBJECT_TYPE,
                                           JSObject::kHeaderSize,
                                           TERMINAL_FAST_ELEMENTS_KIND, 0);
}
|
|
|
|
|
|
|
|
// This is the same as Factory::NewFixedArray, except it doesn't retry
// on allocation failure (the failed AllocationResult is returned so tests
// can allocate until the target space is full).
AllocationResult HeapTester::AllocateFixedArrayForTest(
    Heap* heap, int length, AllocationType allocation) {
  DCHECK(length >= 0 && length <= FixedArray::kMaxLength);
  int size = FixedArray::SizeFor(length);
  HeapObject obj;
  {
    AllocationResult result = heap->AllocateRaw(size, allocation);
    if (!result.To(&obj)) return result;
  }
  // fixed_array_map is a read-only root, so the write barrier can be skipped.
  obj.set_map_after_allocation(ReadOnlyRoots(heap).fixed_array_map(),
                               SKIP_WRITE_BARRIER);
  FixedArray array = FixedArray::cast(obj);
  array.set_length(length);
  // Initialize every slot with undefined so the GC never observes
  // uninitialized tagged fields.
  MemsetTagged(array.data_start(), ReadOnlyRoots(heap).undefined_value(),
               length);
  return array;
}
|
|
|
|
|
2015-08-21 12:40:22 +00:00
|
|
|
// Exercises the mark-compact collector across several heap states: an empty
// heap, a full new space, a full map space, and live JS objects reachable
// from the global object. After each collection the reachable objects and
// their properties must still be intact.
HEAP_TEST(MarkCompactCollector) {
  FLAG_incremental_marking = false;
  // Do not artificially keep dead maps alive; the test fills the map space.
  FLAG_retain_maps_for_n_gc = 0;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<JSGlobalObject> global(isolate->context().global_object(), isolate);

  // call mark-compact when heap is empty
  CcTest::CollectGarbage(OLD_SPACE);

  AllocationResult allocation;
  if (!FLAG_single_generation) {
    // keep allocating garbage in new space until it fails
    const int arraysize = 100;
    do {
      allocation =
          AllocateFixedArrayForTest(heap, arraysize, AllocationType::kYoung);
    } while (!allocation.IsRetry());
    // A scavenge must free the new space again so the next allocation works.
    CcTest::CollectGarbage(NEW_SPACE);
    AllocateFixedArrayForTest(heap, arraysize, AllocationType::kYoung)
        .ToObjectChecked();
  }

  // keep allocating maps until it fails
  do {
    allocation = AllocateMapForTest(isolate);
  } while (!allocation.IsRetry());
  // A map-space GC must make room for one more map.
  CcTest::CollectGarbage(MAP_SPACE);
  AllocateMapForTest(isolate).ToObjectChecked();

  { HandleScope scope(isolate);
    // allocate a garbage
    Handle<String> func_name = factory->InternalizeUtf8String("theFunction");
    Handle<JSFunction> function = factory->NewFunctionForTesting(func_name);
    Object::SetProperty(isolate, global, func_name, function).Check();

    // This object becomes unreachable when the scope closes.
    factory->NewJSObject(function);
  }

  CcTest::CollectGarbage(OLD_SPACE);

  { HandleScope scope(isolate);
    // The function stored on the global object must have survived the GC.
    Handle<String> func_name = factory->InternalizeUtf8String("theFunction");
    CHECK(Just(true) == JSReceiver::HasOwnProperty(global, func_name));
    Handle<Object> func_value =
        Object::GetProperty(isolate, global, func_name).ToHandleChecked();
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    Handle<JSObject> obj = factory->NewJSObject(function);

    // Anchor a new object on the global and give it a Smi property.
    Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
    Object::SetProperty(isolate, global, obj_name, obj).Check();
    Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
    Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
    Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
  }

  CcTest::CollectGarbage(OLD_SPACE);

  { HandleScope scope(isolate);
    // The object and its property value must have survived the GC unchanged.
    Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
    CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
    Handle<Object> object =
        Object::GetProperty(isolate, global, obj_name).ToHandleChecked();
    CHECK(object->IsJSObject());
    Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
    CHECK_EQ(*Object::GetProperty(isolate, object, prop_name).ToHandleChecked(),
             Smi::FromInt(23));
  }
}
|
|
|
|
|
2020-07-13 09:17:48 +00:00
|
|
|
// Tests that the PINNED page flag prevents a page from being evacuated by
// compaction, and that clearing the flag makes the page movable again.
HEAP_TEST(DoNotEvacuatePinnedPages) {
  // Requires compaction to be possible; guard mirrors the flag setup below.
  if (FLAG_never_compact || !FLAG_single_generation) return;

  // Force compaction so the test page is always an evacuation candidate.
  FLAG_always_compact = true;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();

  v8::HandleScope sc(CcTest::isolate());
  Heap* heap = isolate->heap();

  heap::SealCurrentObjects(heap);

  // Fill exactly one old-space page with fixed arrays.
  auto handles = heap::CreatePadding(
      heap, static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()),
      AllocationType::kOld);

  Page* page = Page::FromHeapObject(*handles.front());

  CHECK(heap->InSpace(*handles.front(), OLD_SPACE));
  page->SetFlag(MemoryChunk::PINNED);

  CcTest::CollectAllGarbage();
  heap->mark_compact_collector()->EnsureSweepingCompleted();

  // The pinned flag should prevent the page from moving.
  for (Handle<FixedArray> object : handles) {
    CHECK_EQ(page, Page::FromHeapObject(*object));
  }

  page->ClearFlag(MemoryChunk::PINNED);

  CcTest::CollectAllGarbage();
  heap->mark_compact_collector()->EnsureSweepingCompleted();

  // always_compact ensures that this page is an evacuation candidate, so with
  // the pin flag cleared compaction should now move it.
  for (Handle<FixedArray> object : handles) {
    CHECK_NE(page, Page::FromHeapObject(*object));
  }
}
|
2008-08-22 13:33:59 +00:00
|
|
|
|
2020-08-28 20:48:41 +00:00
|
|
|
// Tests the per-page object-start bitmap used by conservative stack
// scanning: object starts are recorded, inner pointers resolve to the
// object's base address, and the bits survive a full GC.
HEAP_TEST(ObjectStartBitmap) {
  if (!FLAG_single_generation || !FLAG_conservative_stack_scanning) return;

#if V8_ENABLE_CONSERVATIVE_STACK_SCANNING

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope sc(CcTest::isolate());

  Heap* heap = isolate->heap();
  heap::SealCurrentObjects(heap);

  auto* factory = isolate->factory();
  HeapObject obj = *factory->NewStringFromStaticChars("hello");
  HeapObject obj2 = *factory->NewStringFromStaticChars("world");
  Page* page = Page::FromAddress(obj.ptr());

  // Both freshly allocated strings must be registered in the bitmap.
  CHECK(page->object_start_bitmap()->CheckBit(obj.address()));
  CHECK(page->object_start_bitmap()->CheckBit(obj2.address()));

  // An inner pointer must resolve back to the object's start address.
  Address obj_inner_ptr = obj.ptr() + 2;
  CHECK(page->object_start_bitmap()->FindBasePtr(obj_inner_ptr) ==
        obj.address());

  Address obj2_inner_ptr = obj2.ptr() + 2;
  CHECK(page->object_start_bitmap()->FindBasePtr(obj2_inner_ptr) ==
        obj2.address());

  CcTest::CollectAllGarbage();

  // The objects survive (conservative stack scanning keeps them alive) and
  // their start bits must still be set after the GC.
  CHECK((obj).IsString());
  CHECK((obj2).IsString());
  CHECK(page->object_start_bitmap()->CheckBit(obj.address()));
  CHECK(page->object_start_bitmap()->CheckBit(obj2.address()));

#endif
}
|
|
|
|
|
2011-09-19 18:36:47 +00:00
|
|
|
// TODO(1600): compaction of map space is temporary removed from GC.
// NOTE(review): this whole section is compiled out and has bit-rotted —
// e.g. CreateMap() is called without its Isolate* argument below. Confirm
// whether it should be revived or deleted.
#if 0
static Handle<Map> CreateMap(Isolate* isolate) {
  return isolate->factory()->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
}


TEST(MapCompact) {
  FLAG_max_map_space_pages = 16;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  {
    v8::HandleScope sc;
    // keep allocating maps while pointers are still encodable and thus
    // mark compact is permitted.
    Handle<JSObject> root = factory->NewJSObjectFromMap(CreateMap());
    do {
      Handle<Map> map = CreateMap();
      map->set_prototype(*root);
      root = factory->NewJSObjectFromMap(map);
    } while (CcTest::heap()->map_space()->MapPointersEncodable());
  }
  // Now, as we don't have any handles to just allocated maps, we should
  // be able to trigger map compaction.
  // To give an additional chance to fail, try to force compaction which
  // should be impossible right now.
  CcTest::CollectAllGarbage(Heap::kForceCompactionMask);
  // And now map pointers should be encodable again.
  CHECK(CcTest::heap()->map_space()->MapPointersEncodable());
}
#endif
|
2010-01-19 16:34:37 +00:00
|
|
|
|
2013-05-15 08:59:28 +00:00
|
|
|
#if defined(__has_feature)
|
|
|
|
#if __has_feature(address_sanitizer)
|
|
|
|
#define V8_WITH_ASAN 1
|
|
|
|
#endif
|
|
|
|
#endif
|
|
|
|
|
2011-12-06 12:09:11 +00:00
|
|
|
// Here is a memory use test that uses /proc, and is therefore Linux-only. We
|
|
|
|
// do not care how much memory the simulator uses, since it is only there for
|
2013-05-15 08:59:28 +00:00
|
|
|
// debugging purposes. Testing with ASAN doesn't make sense, either.
|
|
|
|
#if defined(__linux__) && !defined(USE_SIMULATOR) && !defined(V8_WITH_ASAN)
|
2011-12-01 12:32:38 +00:00
|
|
|
|
|
|
|
|
|
|
|
// Parses an unsigned integer in the given base starting at *position within
// buffer, advances *position past the parsed digits, and returns the value.
// CHECK-fails if nothing was parsed or the value overflowed.
static uintptr_t ReadLong(char* buffer, intptr_t* position, int base) {
  char* const parse_start = buffer + *position;
  char* parse_end = parse_start;
  const uintptr_t value = strtoul(parse_start, &parse_end, base);
  // ULONG_MAX together with ERANGE signals overflow in strtoul.
  CHECK(value != ULONG_MAX || errno != ERANGE);
  // At least one digit must have been consumed.
  CHECK(parse_end > parse_start);
  *position = parse_end - buffer;
  return value;
}
|
|
|
|
|
|
|
|
|
2013-01-02 15:00:12 +00:00
|
|
|
// The memory use computed this way is not entirely accurate and depends on
// the way malloc allocates memory.  That's why the memory use may seem to
// increase even though the sum of the allocated object sizes decreases.  It
// also means that the memory use depends on the kernel and stdlib.
//
// Returns the total size (in bytes) of this process's anonymous private
// mappings, computed by parsing /proc/self/maps; returns -1 if the file
// cannot be opened.
static intptr_t MemoryInUse() {
  intptr_t memory_use = 0;

  int fd = open("/proc/self/maps", O_RDONLY);
  if (fd < 0) return -1;

  const int kBufSize = 20000;
  char buffer[kBufSize];
  // NOTE(review): the buffer is not NUL-terminated after read(); ReadLong's
  // strtoul relies on a '\n'-terminated last line to stop — confirm.
  ssize_t length = read(fd, buffer, kBufSize);
  intptr_t line_start = 0;
  CHECK_LT(length, kBufSize);  // Make the buffer bigger.
  CHECK_GT(length, 0);  // We have to find some data in the file.
  while (line_start < length) {
    if (buffer[line_start] == '\n') {
      line_start++;
      continue;
    }
    intptr_t position = line_start;
    // Each line has the form (see proc(5)):
    //   start-end perms offset major:minor inode [path]
    uintptr_t start = ReadLong(buffer, &position, 16);
    CHECK_EQ(buffer[position++], '-');
    uintptr_t end = ReadLong(buffer, &position, 16);
    CHECK_EQ(buffer[position++], ' ');
    CHECK(buffer[position] == '-' || buffer[position] == 'r');
    bool read_permission = (buffer[position++] == 'r');
    CHECK(buffer[position] == '-' || buffer[position] == 'w');
    bool write_permission = (buffer[position++] == 'w');
    CHECK(buffer[position] == '-' || buffer[position] == 'x');
    bool execute_permission = (buffer[position++] == 'x');
    CHECK(buffer[position] == 's' || buffer[position] == 'p');
    bool private_mapping = (buffer[position++] == 'p');
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t offset = ReadLong(buffer, &position, 16);
    USE(offset);
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t major = ReadLong(buffer, &position, 16);
    USE(major);
    CHECK_EQ(buffer[position++], ':');
    uintptr_t minor = ReadLong(buffer, &position, 16);
    USE(minor);
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t inode = ReadLong(buffer, &position, 10);
    // Skip the rest of the line (optional pathname).
    while (position < length && buffer[position] != '\n') position++;
    // Count only accessible private mappings with inode 0, i.e. anonymous
    // memory not backed by a file.
    if ((read_permission || write_permission || execute_permission) &&
        private_mapping && inode == 0) {
      memory_use += (end - start);
    }

    line_start = position;
  }
  close(fd);
  return memory_use;
}
|
|
|
|
|
|
|
|
|
2013-07-02 09:04:45 +00:00
|
|
|
// Creates a fresh isolate, enters and immediately disposes it, and returns
// the process memory use afterwards. Used to detect memory that an isolate
// fails to release on teardown.
intptr_t ShortLivingIsolate() {
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  { v8::Isolate::Scope isolate_scope(isolate);
    v8::Locker lock(isolate);
    v8::HandleScope handle_scope(isolate);
    // Creating a context exercises the full isolate bootstrap.
    v8::Local<v8::Context> context = v8::Context::New(isolate);
    CHECK(!context.IsEmpty());
  }
  isolate->Dispose();
  return MemoryInUse();
}
|
|
|
|
|
2020-08-13 12:55:14 +00:00
|
|
|
// Regression test: repeatedly creating and disposing isolates must not leak
// memory (e.g. via background threads that were never joined on deinit).
// The memory use after each isolate must stay below twice the baseline.
UNINITIALIZED_TEST(RegressJoinThreadsOnIsolateDeinit) {
  // Memory is measured, do not allocate in background thread.
  FLAG_stress_concurrent_allocation = false;
  // Baseline: memory use after one isolate lifecycle, with 2x headroom.
  intptr_t size_limit = ShortLivingIsolate() * 2;
  for (int i = 0; i < 10; i++) {
    CHECK_GT(size_limit, ShortLivingIsolate());
  }
}
|
|
|
|
|
2017-01-16 17:21:20 +00:00
|
|
|
// Regression test for issue 5829: right-trimming a black-allocated array
// during incremental marking creates a filler at the old end; iterating the
// page's grey objects must never yield that filler.
TEST(Regress5829) {
  if (!FLAG_incremental_marking) return;
  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope sc(CcTest::isolate());
  Heap* heap = isolate->heap();
  heap::SealCurrentObjects(heap);
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  i::IncrementalMarking* marking = heap->incremental_marking();
  // Finish any leftover sweeping so the test starts from a clean state.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking());
  // Subsequent allocations are immediately marked black.
  marking->StartBlackAllocationForTesting();
  Handle<FixedArray> array =
      isolate->factory()->NewFixedArray(10, AllocationType::kOld);
  Address old_end = array->address() + array->Size();
  // Right trim the array without clearing the mark bits.
  array->set_length(9);
  heap->CreateFillerObjectAt(old_end - kTaggedSize, kTaggedSize,
                             ClearRecordedSlots::kNo);
  heap->old_space()->FreeLinearAllocationArea();
  Page* page = Page::FromAddress(array->address());
  IncrementalMarking::MarkingState* marking_state = marking->marking_state();
  // No grey object on the page may be a filler/free-space object; the
  // trimmed-off tail must not be visible to the marker.
  for (auto object_and_size :
       LiveObjectRange<kGreyObjects>(page, marking_state->bitmap(page))) {
    CHECK(!object_and_size.first.IsFreeSpaceOrFiller());
  }
}
|
|
|
|
|
2011-12-06 12:09:11 +00:00
|
|
|
#endif // __linux__ and !USE_SIMULATOR
|
2017-08-11 10:04:47 +00:00
|
|
|
|
|
|
|
} // namespace heap
|
|
|
|
} // namespace internal
|
|
|
|
} // namespace v8
|