// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/heap.h"

#include <cmath>
#include <iostream>
#include <limits>
#include <set>

#include "src/handles/handles-inl.h"
#include "src/heap/memory-chunk.h"
#include "src/heap/safepoint.h"
#include "src/heap/spaces-inl.h"
#include "src/objects/objects-inl.h"
#include "test/unittests/test-utils.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace v8 {
namespace internal {

using HeapTest = TestWithContext;

TEST(Heap, YoungGenerationSizeFromOldGenerationSize) {
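  // For each old generation size below, the expected young generation size is
  // 3 * semi_space_size.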
  const size_t pm = i::Heap::kPointerMultiplier;
  const size_t hlm = i::Heap::kHeapLimitMultiplier;
  ASSERT_EQ(3 * 512u * pm * KB,
            i::Heap::YoungGenerationSizeFromOldGenerationSize(128u * hlm * MB));
  ASSERT_EQ(3 * 2048u * pm * KB,
            i::Heap::YoungGenerationSizeFromOldGenerationSize(256u * hlm * MB));
  ASSERT_EQ(3 * 4096u * pm * KB,
            i::Heap::YoungGenerationSizeFromOldGenerationSize(512u * hlm * MB));
  ASSERT_EQ(
      3 * 8192u * pm * KB,
      i::Heap::YoungGenerationSizeFromOldGenerationSize(1024u * hlm * MB));
}

TEST(Heap, GenerationSizesFromHeapSize) {
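  // A total heap size is split into an old and a young generation part; a
  // tiny 1 KB heap yields zero for both.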
  const size_t pm = i::Heap::kPointerMultiplier;
  const size_t hlm = i::Heap::kHeapLimitMultiplier;
  size_t old, young;

  i::Heap::GenerationSizesFromHeapSize(1 * KB, &young, &old);
  ASSERT_EQ(0u, old);
  ASSERT_EQ(0u, young);

  i::Heap::GenerationSizesFromHeapSize(1 * KB + 3 * 512u * pm * KB, &young,
                                       &old);
  ASSERT_EQ(1u * KB, old);
  ASSERT_EQ(3 * 512u * pm * KB, young);

  i::Heap::GenerationSizesFromHeapSize(128 * hlm * MB + 3 * 512 * pm * KB,
                                       &young, &old);
  ASSERT_EQ(128u * hlm * MB, old);
  ASSERT_EQ(3 * 512u * pm * KB, young);

  i::Heap::GenerationSizesFromHeapSize(256u * hlm * MB + 3 * 2048 * pm * KB,
                                       &young, &old);
  ASSERT_EQ(256u * hlm * MB, old);
  ASSERT_EQ(3 * 2048u * pm * KB, young);

  i::Heap::GenerationSizesFromHeapSize(512u * hlm * MB + 3 * 4096 * pm * KB,
                                       &young, &old);
  ASSERT_EQ(512u * hlm * MB, old);
  ASSERT_EQ(3 * 4096u * pm * KB, young);

  i::Heap::GenerationSizesFromHeapSize(1024u * hlm * MB + 3 * 8192 * pm * KB,
                                       &young, &old);
  ASSERT_EQ(1024u * hlm * MB, old);
  ASSERT_EQ(3 * 8192u * pm * KB, young);
}

TEST(Heap, HeapSizeFromPhysicalMemory) {
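  // The derived heap size saturates at both ends: 0 and 512 MB of physical
  // memory map to the same smallest configuration, 4 GB and 8 GB to the same
  // largest one.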
  const size_t pm = i::Heap::kPointerMultiplier;
  const size_t hlm = i::Heap::kHeapLimitMultiplier;

  // The expected value is old_generation_size + 3 * semi_space_size.
  ASSERT_EQ(128 * hlm * MB + 3 * 512 * pm * KB,
            i::Heap::HeapSizeFromPhysicalMemory(0u));
  ASSERT_EQ(128 * hlm * MB + 3 * 512 * pm * KB,
            i::Heap::HeapSizeFromPhysicalMemory(512u * MB));
  ASSERT_EQ(256 * hlm * MB + 3 * 2048 * pm * KB,
            i::Heap::HeapSizeFromPhysicalMemory(1024u * MB));
  ASSERT_EQ(512 * hlm * MB + 3 * 4096 * pm * KB,
            i::Heap::HeapSizeFromPhysicalMemory(2048u * MB));
  ASSERT_EQ(
      1024 * hlm * MB + 3 * 8192 * pm * KB,
      i::Heap::HeapSizeFromPhysicalMemory(static_cast<uint64_t>(4096u) * MB));
  ASSERT_EQ(
      1024 * hlm * MB + 3 * 8192 * pm * KB,
      i::Heap::HeapSizeFromPhysicalMemory(static_cast<uint64_t>(8192u) * MB));
}

TEST_F(HeapTest, ASLR) {
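  // On x64 macOS, mmap address hints are expected to be either all nullptr
  // (randomization unavailable) or randomized within a single 4 GB region.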
#if V8_TARGET_ARCH_X64
#if V8_OS_MACOSX
  Heap* heap = i_isolate()->heap();
  std::set<void*> hints;
  for (int i = 0; i < 1000; i++) {
    hints.insert(heap->GetRandomMmapAddr());
  }
  if (hints.size() == 1) {
    EXPECT_TRUE((*hints.begin()) == nullptr);
    EXPECT_TRUE(i::GetRandomMmapAddr() == nullptr);
  } else {
    // It is unlikely that 1000 random samples collapse to fewer than 500
    // distinct values.
    EXPECT_GT(hints.size(), 500u);
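    // All hints must fall within the same 4 GB region, so any two of them can
    // only differ in the low 32 bits.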
    const uintptr_t kRegionMask = 0xFFFFFFFFu;
    void* first = *hints.begin();
    for (void* hint : hints) {
      uintptr_t diff = reinterpret_cast<uintptr_t>(first) ^
                       reinterpret_cast<uintptr_t>(hint);
      EXPECT_LE(diff, kRegionMask);
    }
  }
#endif  // V8_OS_MACOSX
#endif  // V8_TARGET_ARCH_X64
}

TEST_F(HeapTest, ExternalLimitDefault) {
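  // A freshly created heap starts with the external memory limit at the
  // default soft limit.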
  Heap* heap = i_isolate()->heap();
  EXPECT_EQ(kExternalAllocationSoftLimit, heap->external_memory_limit());
}

TEST_F(HeapTest, ExternalLimitStaysAboveDefaultForExplicitHandling) {
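  // Accounting external memory and then releasing it again must not push the
  // limit below the default soft limit.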
  v8_isolate()->AdjustAmountOfExternalAllocatedMemory(+10 * MB);
  v8_isolate()->AdjustAmountOfExternalAllocatedMemory(-10 * MB);
  Heap* heap = i_isolate()->heap();
  EXPECT_GE(heap->external_memory_limit(), kExternalAllocationSoftLimit);
}

#ifdef V8_COMPRESS_POINTERS
TEST_F(HeapTest, HeapLayout) {
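  // With pointer compression, all old generation memory chunks must lie
  // within the single 4 GB cage starting at cage_base.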
  // Produce some garbage.
  RunJS(
      "let ar = [];"
      "for (let i = 0; i < 100; i++) {"
      "  ar.push(Array(i));"
      "}"
      "ar.push(Array(32 * 1024 * 1024));");

  Address cage_base = i_isolate()->cage_base();
  EXPECT_TRUE(IsAligned(cage_base, size_t{4} * GB));

#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
  Address isolate_root = i_isolate()->isolate_root();
  EXPECT_EQ(cage_base, isolate_root);
#endif

  // Check that all memory chunks belong to this region.
  base::AddressRegion heap_reservation(cage_base, size_t{4} * GB);

  SafepointScope scope(i_isolate()->heap());
  OldGenerationMemoryChunkIterator iter(i_isolate()->heap());
  for (;;) {
    MemoryChunk* chunk = iter.next();
    if (chunk == nullptr) break;

    Address address = chunk->address();
    size_t size = chunk->area_end() - address;
    EXPECT_TRUE(heap_reservation.contains(address, size));
  }
}
#endif  // V8_COMPRESS_POINTERS

}  // namespace internal
}  // namespace v8