cppgc: Implement simple support for 4GB heap reservation

This CL is an initial attempt to reuse BoundedPageAllocator for cppgc.
The caged 4GB heap is needed for:
- a fast implementation of the generational barrier;
- a potential pointer compression project for Oilpan (see the address-masking sketch below).
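A hedged illustration, not part of this CL, of why the cage is reserved with an alignment equal to its 4GB size (kCagedHeapReservationAlignment == kCagedHeapReservationSize below): the cage base then falls out of any interior pointer with a single mask, which is the property a cheap generational-barrier check and offset-based pointer compression would build on. All names in the sketch are illustrative, and it assumes a 64-bit target.

#include <cstdint>

namespace {

// Illustrative constants; the real ones live in cppgc's globals.h.
constexpr uintptr_t kCageSize = uintptr_t{4} * 1024 * 1024 * 1024;  // 4GB
constexpr uintptr_t kCageBaseMask = ~(kCageSize - 1);

// Recovers the 4GB-aligned cage base from any address inside the cage.
inline uintptr_t CageBaseFromAddress(const void* address) {
  return reinterpret_cast<uintptr_t>(address) & kCageBaseMask;
}

// Barrier-style predicate (hypothetical helper, not cppgc API): two objects
// live in the same cage iff their masked bases agree.
inline bool InSameCage(const void* a, const void* b) {
  return CageBaseFromAddress(a) == CageBaseFromAddress(b);
}

// Pointer-compression idea (hypothetical): any in-cage pointer is expressible
// as a 32-bit offset from the cage base.
inline uint32_t CompressInCagePointer(const void* address,
                                      uintptr_t cage_base) {
  return static_cast<uint32_t>(reinterpret_cast<uintptr_t>(address) -
                               cage_base);
}

}  // namespace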

Bug: chromium:1029379
Change-Id: Idfb0ab92c988e2045d4a0e9746bedf841d66e282
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2215818
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68074}
Author: Anton Bikineev, 2020-05-29 12:34:52 +02:00 (committed by Commit Bot)
Parent: 3e72573ca8
Commit: 61f5e380de
6 changed files with 193 additions and 1 deletion

File: BUILD.gn

@@ -227,6 +227,9 @@ declare_args() {
# Enable object names in cppgc for debug purposes.
cppgc_enable_object_names = false
# Enable heap reservation of size 4GB. Only possible for 64bit archs.
cppgc_enable_caged_heap = v8_current_cpu == "x64" || v8_current_cpu == "arm64"
# Enable V8 heap sandbox experimental feature.
# Sets -DV8_HEAP_SANDBOX.
v8_enable_heap_sandbox = ""
@@ -305,6 +308,10 @@ assert(
assert(!v8_enable_heap_sandbox || v8_enable_pointer_compression,
"V8 Heap Sandbox requires pointer compression")
assert(!cppgc_enable_caged_heap || v8_current_cpu == "x64" ||
v8_current_cpu == "arm64",
"CppGC caged heap requires 64bit platforms")
v8_random_seed = "314159265"
v8_toolset_for_shell = "host"
@@ -375,6 +382,9 @@ config("cppgc_base_config") {
if (cppgc_enable_object_names) {
defines += [ "CPPGC_SUPPORTS_OBJECT_NAMES" ]
}
if (cppgc_enable_caged_heap) {
defines += [ "CPPGC_CAGED_HEAP" ]
}
}
# This config should be applied to code using the libsampler.
@@ -4123,6 +4133,8 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/stack.h",
"src/heap/cppgc/sweeper.cc",
"src/heap/cppgc/sweeper.h",
"src/heap/cppgc/virtual-memory.cc",
"src/heap/cppgc/virtual-memory.h",
"src/heap/cppgc/visitor.cc",
"src/heap/cppgc/worklist.h",
]

File: src/heap/cppgc/globals.h

@@ -16,6 +16,10 @@ namespace internal {
using Address = uint8_t*;
using ConstAddress = const uint8_t*;
constexpr size_t kKB = 1024;
constexpr size_t kMB = kKB * 1024;
constexpr size_t kGB = kMB * 1024;
// See 6.7.6 (http://eel.is/c++draft/basic.align) for alignment restrictions. We
// do not fully support all alignment restrictions (following
alignof(std::max_align_t)) but limit to alignof(double).
@@ -42,6 +46,11 @@ constexpr size_t kLargeObjectSizeThreshold = kPageSize / 2;
constexpr GCInfoIndex kFreeListGCInfoIndex = 0;
constexpr size_t kFreeListEntrySize = 2 * sizeof(uintptr_t);
#if defined(CPPGC_CAGED_HEAP)
constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(4) * kGB;
constexpr size_t kCagedHeapReservationAlignment = kCagedHeapReservationSize;
#endif
} // namespace internal
} // namespace cppgc

File: src/heap/cppgc/heap.cc

@@ -6,13 +6,17 @@
#include <memory>
#include "src/base/bounded-page-allocator.h"
#include "src/base/page-allocator.h"
#include "src/base/platform/platform.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/stack.h"
#include "src/heap/cppgc/sweeper.h"
#include "src/heap/cppgc/virtual-memory.h"
namespace cppgc {
@@ -77,6 +81,38 @@ class ObjectSizeCounter : private HeapVisitor<ObjectSizeCounter> {
size_t accumulated_size_ = 0;
};
#if defined(CPPGC_CAGED_HEAP)
VirtualMemory ReserveCagedHeap(v8::PageAllocator* platform_allocator) {
DCHECK_EQ(0u,
kCagedHeapReservationSize % platform_allocator->AllocatePageSize());
static constexpr size_t kAllocationTries = 4;
for (size_t i = 0; i < kAllocationTries; ++i) {
void* hint = reinterpret_cast<void*>(RoundDown(
reinterpret_cast<uintptr_t>(platform_allocator->GetRandomMmapAddr()),
kCagedHeapReservationAlignment));
VirtualMemory memory(platform_allocator, kCagedHeapReservationSize,
kCagedHeapReservationAlignment, hint);
if (memory.IsReserved()) return memory;
}
FATAL("Fatal process out of memory: Failed to reserve memory for caged heap");
UNREACHABLE();
}
std::unique_ptr<v8::base::BoundedPageAllocator> CreateBoundedAllocator(
v8::PageAllocator* platform_allocator, void* caged_heap_start) {
DCHECK(caged_heap_start);
auto start = reinterpret_cast<v8::base::BoundedPageAllocator::Address>(
caged_heap_start);
return std::make_unique<v8::base::BoundedPageAllocator>(
platform_allocator, start, kCagedHeapReservationSize, kPageSize);
}
#endif
} // namespace
// static
@@ -87,12 +123,20 @@ cppgc::LivenessBroker LivenessBrokerFactory::Create() {
Heap::Heap(std::shared_ptr<cppgc::Platform> platform, size_t custom_spaces)
: raw_heap_(this, custom_spaces),
platform_(std::move(platform)),
#if defined(CPPGC_CAGED_HEAP)
reserved_area_(ReserveCagedHeap(platform_->GetPageAllocator())),
bounded_allocator_(CreateBoundedAllocator(platform_->GetPageAllocator(),
reserved_area_.address())),
page_backend_(std::make_unique<PageBackend>(bounded_allocator_.get())),
#else
page_backend_(
std::make_unique<PageBackend>(platform_->GetPageAllocator())),
#endif
object_allocator_(&raw_heap_),
sweeper_(&raw_heap_, platform_.get()),
stack_(std::make_unique<Stack>(v8::base::Stack::GetStackStart())),
prefinalizer_handler_(std::make_unique<PreFinalizerHandler>()) {}
prefinalizer_handler_(std::make_unique<PreFinalizerHandler>()) {
}
Heap::~Heap() {
NoGCScope no_gc(this);

File: src/heap/cppgc/heap.h

@@ -21,6 +21,11 @@
#include "src/heap/cppgc/prefinalizer-handler.h"
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/sweeper.h"
#include "src/heap/cppgc/virtual-memory.h"
#if defined(CPPGC_CAGED_HEAP)
#include "src/base/bounded-page-allocator.h"
#endif
namespace cppgc {
namespace internal {
@@ -128,6 +133,12 @@ class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap {
RawHeap raw_heap_;
std::shared_ptr<cppgc::Platform> platform_;
#if defined(CPPGC_CAGED_HEAP)
// The order is important: page_backend_ must be destroyed before
// reserved_area_ is freed.
VirtualMemory reserved_area_;
std::unique_ptr<v8::base::BoundedPageAllocator> bounded_allocator_;
#endif
std::unique_ptr<PageBackend> page_backend_;
ObjectAllocator object_allocator_;
Sweeper sweeper_;

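A small self-contained illustration (not cppgc code) of the destruction-order rule that the heap.h comment above relies on: non-static data members are destroyed in reverse order of declaration, so declaring reserved_area_ before page_backend_ guarantees the backend and the bounded allocator go away before the reservation is released. The types below are stand-ins.

#include <cstdio>

struct Reservation {
  ~Reservation() { std::puts("reservation released last"); }
};
struct Backend {
  ~Backend() { std::puts("backend destroyed first"); }
};

struct CagedHeapLayout {
  Reservation reservation;  // declared first => destroyed last
  Backend backend;          // declared last => destroyed first
};

int main() {
  CagedHeapLayout heap;
  // On scope exit: "backend destroyed first", then "reservation released last".
}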
File: src/heap/cppgc/virtual-memory.cc (new)

@@ -0,0 +1,56 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/virtual-memory.h"
#include "include/cppgc/platform.h"
#include "src/base/macros.h"
namespace cppgc {
namespace internal {
VirtualMemory::VirtualMemory(PageAllocator* page_allocator, size_t size,
size_t alignment, void* hint)
: page_allocator_(page_allocator) {
DCHECK_NOT_NULL(page_allocator);
DCHECK(IsAligned(size, page_allocator->CommitPageSize()));
const size_t page_size = page_allocator_->AllocatePageSize();
start_ = page_allocator->AllocatePages(hint, RoundUp(size, page_size),
RoundUp(alignment, page_size),
PageAllocator::kNoAccess);
if (start_) {
size_ = RoundUp(size, page_size);
}
}
VirtualMemory::~VirtualMemory() V8_NOEXCEPT {
if (IsReserved()) {
page_allocator_->FreePages(start_, size_);
}
}
VirtualMemory::VirtualMemory(VirtualMemory&& other) V8_NOEXCEPT
: page_allocator_(std::move(other.page_allocator_)),
start_(std::move(other.start_)),
size_(std::move(other.size_)) {
other.Reset();
}
VirtualMemory& VirtualMemory::operator=(VirtualMemory&& other) V8_NOEXCEPT {
DCHECK(!IsReserved());
page_allocator_ = std::move(other.page_allocator_);
start_ = std::move(other.start_);
size_ = std::move(other.size_);
other.Reset();
return *this;
}
void VirtualMemory::Reset() {
start_ = nullptr;
size_ = 0;
}
} // namespace internal
} // namespace cppgc

File: src/heap/cppgc/virtual-memory.h (new)

@@ -0,0 +1,60 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_VIRTUAL_MEMORY_H_
#define V8_HEAP_CPPGC_VIRTUAL_MEMORY_H_
#include <cstdint>
#include "include/cppgc/platform.h"
#include "src/base/macros.h"
namespace cppgc {
namespace internal {
// Represents and controls an area of reserved memory.
class V8_EXPORT_PRIVATE VirtualMemory {
public:
// Empty VirtualMemory object, controlling no reserved memory.
VirtualMemory() = default;
// Reserves virtual memory containing an area of the given size that is
// aligned per |alignment| rounded up to the |page_allocator|'s allocate page
// size. The |size| is aligned with |page_allocator|'s commit page size.
VirtualMemory(PageAllocator*, size_t size, size_t alignment,
void* hint = nullptr);
// Releases the reserved memory, if any, controlled by this VirtualMemory
// object.
~VirtualMemory() V8_NOEXCEPT;
VirtualMemory(VirtualMemory&&) V8_NOEXCEPT;
VirtualMemory& operator=(VirtualMemory&&) V8_NOEXCEPT;
// Returns whether the memory has been reserved.
bool IsReserved() const { return start_ != nullptr; }
void* address() const {
DCHECK(IsReserved());
return start_;
}
size_t size() const {
DCHECK(IsReserved());
return size_;
}
private:
// Resets to the default state.
void Reset();
PageAllocator* page_allocator_ = nullptr;
void* start_ = nullptr;
size_t size_ = 0;
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_VIRTUAL_MEMORY_H_
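A hedged usage sketch of the VirtualMemory API above (not part of the CL), assuming a build with cppgc_enable_caged_heap so the constants from globals.h are available, and a PageAllocator obtained from the embedder's Platform, as heap.cc does with platform_->GetPageAllocator(). TryReserveCage is a hypothetical helper for illustration only.

#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/virtual-memory.h"

namespace cppgc {
namespace internal {

// Tries to reserve the 4GB cage; returns an empty (unreserved) VirtualMemory
// on failure instead of crashing, unlike ReserveCagedHeap in heap.cc.
VirtualMemory TryReserveCage(PageAllocator* platform_allocator) {
  VirtualMemory memory(platform_allocator, kCagedHeapReservationSize,
                       kCagedHeapReservationAlignment);
  if (!memory.IsReserved()) return {};
  // memory.address() honors the requested alignment (see the constructor
  // comment above) and memory.size() covers the whole reservation; both stay
  // valid until the object, or whatever it is moved into, is destroyed.
  return memory;  // ownership moves to the caller
}

}  // namespace internal
}  // namespace cppgc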