[v8] Use |AllocateAtLeast| for resizing v8 zones.

This is part of an ongoing effort to reduce fragmentation in Chrome. PartitionAlloc data show that v8 zones are a large consumer of memory in renderer processes, and that these allocations are a source of fragmentation. This CL reduces that fragmentation by allowing v8 to use all of the allocated memory for its zones.
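
The idea, as a minimal standalone sketch (not code from this CL: `ZoneChunk` and `AllocateChunkAtLeast` are invented names, and glibc's malloc_usable_size() stands in for the v8::base helpers shown in the diffs below): request at least N bytes, then record the size of the block the allocator actually returned, so the slack past N is usable rather than stranded.

#include <malloc.h>   // malloc_usable_size (glibc); illustration only
#include <cstdlib>

struct ZoneChunk {   // invented stand-in for a zone segment
  void* start;
  size_t capacity;   // size the zone is allowed to hand out
};

// Allocate at least `n` bytes and size the chunk to the whole block the
// allocator actually returned, not just to the `n` that was requested.
ZoneChunk AllocateChunkAtLeast(size_t n) {
  void* p = std::malloc(n);
  if (p == nullptr) return {nullptr, 0};
  return {p, malloc_usable_size(p)};  // capacity >= n
}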

Bug: v8:13193, chromium:1238858
Change-Id: Ibeac8bdba9d0e7ff66b14a3dde10e7c87d3cf953
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3889361
Reviewed-by: Adam Klein <adamk@chromium.org>
Commit-Queue: Thiabaud Engelbrecht <thiabaud@google.com>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83235}
Author: Thiabaud Engelbrecht
Date: 2022-09-15 16:14:22 +00:00
Committer: V8 LUCI CQ
Parent: 116e217b5f
Commit: 4444874cdf
5 changed files with 26 additions and 4 deletions

@@ -111,6 +111,8 @@ inline void AlignedFree(void* ptr) {
 // `AllocateAtLeast()` for a safe version.
 inline size_t MallocUsableSize(void* ptr) {
 #if V8_OS_WIN
+  // |_msize| cannot handle a null pointer.
+  if (!ptr) return 0;
   return _msize(ptr);
 #elif V8_OS_DARWIN
   return malloc_size(ptr);
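
An aside on the guard added above (my reading, not text from the CL): glibc documents malloc_usable_size(NULL) as returning 0, so only the Windows `_msize` branch needs an explicit null check. A minimal check on a glibc system:

#include <malloc.h>  // malloc_usable_size (glibc extension)
#include <cstdio>

int main() {
  // Prints 0 on glibc; _msize(nullptr) has no such guarantee, hence the
  // `if (!ptr) return 0;` added to the V8_OS_WIN branch above.
  std::printf("usable(nullptr) = %zu\n", malloc_usable_size(nullptr));
  return 0;
}
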
@@ -130,7 +132,7 @@ struct AllocationResult {
 // Allocates at least `n * sizeof(T)` uninitialized storage but may allocate
 // more which is indicated by the return value. Mimics C++23
-// `allocate_ate_least()`.
+// `allocate_at_least()`.
 template <typename T>
 V8_NODISCARD AllocationResult<T*> AllocateAtLeast(size_t n) {
   const size_t min_wanted_size = n * sizeof(T);
@@ -140,6 +142,8 @@ V8_NODISCARD AllocationResult<T*> AllocateAtLeast(size_t n) {
 #else  // V8_HAS_MALLOC_USABLE_SIZE
   const size_t usable_size = MallocUsableSize(memory);
 #if V8_USE_UNDEFINED_BEHAVIOR_SANITIZER
+  if (memory == nullptr)
+    return {nullptr, 0};
   // UBSan (specifically, -fsanitize=bounds) assumes that any access outside
   // of the requested size for malloc is UB and will trap in ud2 instructions.
   // This can be worked around by using `Realloc()` on the specific memory
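
The comment above is cut off at the hunk boundary; as a hedged, standalone sketch of the workaround it describes, using plain malloc_usable_size/realloc rather than the v8::base wrappers:

#include <malloc.h>
#include <cstdlib>

// Sketch: after querying the usable size, realloc the block to exactly that
// size. A realloc to a size the existing block already covers typically
// returns the same pointer, but the call updates the allocation size that
// -fsanitize=bounds associates with it, so touching the slack bytes past the
// originally requested size no longer traps.
void* ClaimUsableSize(void* ptr, size_t* usable_size) {
  if (ptr == nullptr) {
    *usable_size = 0;
    return nullptr;
  }
  *usable_size = malloc_usable_size(ptr);
  return std::realloc(ptr, *usable_size);
}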

@@ -129,6 +129,16 @@ void* AllocWithRetry(size_t size, MallocFn malloc_fn) {
   return result;
 }
 
+base::AllocationResult<void*> AllocAtLeastWithRetry(size_t size) {
+  base::AllocationResult<char*> result = {nullptr, 0u};
+  for (int i = 0; i < kAllocationTries; ++i) {
+    result = base::AllocateAtLeast<char>(size);
+    if (V8_LIKELY(result.ptr != nullptr)) break;
+    OnCriticalMemoryPressure();
+  }
+  return {result.ptr, result.count};
+}
+
 void* AlignedAllocWithRetry(size_t size, size_t alignment) {
   void* result = nullptr;
   for (int i = 0; i < kAllocationTries; ++i) {

@@ -94,6 +94,10 @@ using MallocFn = void* (*)(size_t);
 // Call free to release memory allocated with this function.
 void* AllocWithRetry(size_t size, MallocFn = base::Malloc);
+
+// Performs a malloc, with retry logic on failure. Returns nullptr on failure.
+// Call free to release memory allocated with this function.
+base::AllocationResult<void*> AllocAtLeastWithRetry(size_t size);
 
 V8_EXPORT_PRIVATE void* AlignedAllocWithRetry(size_t size, size_t alignment);
 V8_EXPORT_PRIVATE void AlignedFree(void* ptr);
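
A hypothetical call site for the new helper (names and the size are invented; assumes the declaration above is in scope and, per its comment, that the memory is released with free()):

// Illustration only, not code from this CL.
void UseAtLeast4KiB() {
  v8::base::AllocationResult<void*> r = AllocAtLeastWithRetry(4096);
  if (r.ptr == nullptr) return;   // every retry failed; caller handles OOM
  size_t usable = r.count;        // >= 4096; size any bookkeeping to this
  // ... use the full `usable` bytes, not just the 4096 requested ...
  free(r.ptr);                    // per the header comment above
}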

@@ -91,7 +91,9 @@ Segment* AccountingAllocator::AllocateSegment(size_t bytes,
                            kZonePageSize, PageAllocator::kReadWrite);
   } else {
-    memory = AllocWithRetry(bytes, zone_backing_malloc_);
+    auto result = AllocAtLeastWithRetry(bytes);
+    memory = result.ptr;
+    bytes = result.count;
   }
   if (memory == nullptr) return nullptr;
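
To make the accounting effect of `bytes = result.count` concrete, a toy example with invented numbers (nothing here is measured from the CL):

#include <cstddef>
#include <cstdio>

int main() {
  // Invented numbers: a 4000-byte segment request that the underlying
  // allocator satisfies with a 4096-byte block.
  const size_t requested = 4000;
  const size_t usable = 4096;  // what AllocAtLeastWithRetry's `count` reports

  // Before this CL the segment and the zone accounting were sized to
  // `requested`, stranding the slack; `bytes = result.count` sizes them to
  // `usable`, so the slack becomes allocatable zone memory instead.
  std::printf("slack reclaimed per segment: %zu bytes\n", usable - requested);
  return 0;
}
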

@@ -96,6 +96,8 @@ TEST_WITH_PLATFORM(AccountingAllocatorOOM, AllocationPlatform) {
   CHECK_EQ(result == nullptr, platform.oom_callback_called);
 }
 
+// We use |AllocateAtLeast| in the accounting allocator, so we check only that
+// we have _at least_ the expected amount of memory allocated.
 TEST_WITH_PLATFORM(AccountingAllocatorCurrentAndMax, AllocationPlatform) {
   v8::internal::AccountingAllocator allocator;
   static constexpr size_t kAllocationSizes[] = {51, 231, 27};
@@ -108,8 +110,8 @@ TEST_WITH_PLATFORM(AccountingAllocatorCurrentAndMax, AllocationPlatform) {
   for (size_t size : kAllocationSizes) {
     segments.push_back(allocator.AllocateSegment(size, support_compression));
     CHECK_NOT_NULL(segments.back());
-    CHECK_EQ(size, segments.back()->total_size());
-    expected_current += size;
+    CHECK_LE(size, segments.back()->total_size());
+    expected_current += segments.back()->total_size();
     if (expected_current > expected_max) expected_max = expected_current;
     CHECK_EQ(expected_current, allocator.GetCurrentMemoryUsage());
     CHECK_EQ(expected_max, allocator.GetMaxMemoryUsage());