cppgc-js: Allow custom OOM handling and redirect to V8 handler

Sets up custom OOM handling in cppgc and installs a handler that
redirects to V8's handler when running with unified heap.

Bug: chromium:1242180
Change-Id: I68b7038a3736cc0aa92207db2c3d129a9ff68091
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3116253
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#76467}
This commit is contained in:
Michael Lippautz 2021-08-24 21:18:08 +02:00 committed by V8 LUCI CQ
parent 56090f1b59
commit 79a07d9187
12 changed files with 243 additions and 59 deletions

View File

@ -2676,6 +2676,7 @@ filegroup(
"src/heap/cppgc/page-memory.h",
"src/heap/cppgc/persistent-node.cc",
"src/heap/cppgc/platform.cc",
"src/heap/cppgc/platform.h",
"src/heap/cppgc/pointer-policies.cc",
"src/heap/cppgc/prefinalizer-handler.cc",
"src/heap/cppgc/prefinalizer-handler.h",

View File

@ -5291,6 +5291,7 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/page-memory.h",
"src/heap/cppgc/persistent-node.cc",
"src/heap/cppgc/platform.cc",
"src/heap/cppgc/platform.h",
"src/heap/cppgc/pointer-policies.cc",
"src/heap/cppgc/prefinalizer-handler.cc",
"src/heap/cppgc/prefinalizer-handler.h",

View File

@ -217,6 +217,14 @@ void UnifiedHeapMarker::AddObject(void* object) {
cppgc::internal::HeapObjectHeader::FromObject(object));
}
void FatalOutOfMemoryHandlerImpl(const std::string& reason,
const SourceLocation&, HeapBase* heap) {
FatalProcessOutOfMemory(
reinterpret_cast<v8::internal::Isolate*>(
static_cast<v8::internal::CppHeap*>(heap)->isolate()),
reason.c_str());
}
} // namespace
void CppHeap::MetricRecorderAdapter::AddMainThreadEvent(
@ -355,6 +363,7 @@ void CppHeap::AttachIsolate(Isolate* isolate) {
wrapper_descriptor_);
SetMetricRecorder(std::make_unique<MetricRecorderAdapter>(*this));
SetStackStart(base::Stack::GetStackStart());
oom_handler().SetCustomHandler(&FatalOutOfMemoryHandlerImpl);
no_gc_scope_--;
}
@ -376,6 +385,7 @@ void CppHeap::DetachIsolate() {
isolate_ = nullptr;
// Any future garbage collections will ignore the V8->C++ references.
isolate()->SetEmbedderHeapTracer(nullptr);
oom_handler().SetCustomHandler(nullptr);
// Enter no GC scope.
no_gc_scope_++;
}

View File

@ -17,6 +17,7 @@
#include "src/heap/cppgc/marking-verifier.h"
#include "src/heap/cppgc/object-view.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/platform.h"
#include "src/heap/cppgc/prefinalizer-handler.h"
#include "src/heap/cppgc/stats-collector.h"
@ -56,15 +57,18 @@ HeapBase::HeapBase(
StackSupport stack_support)
: raw_heap_(this, custom_spaces),
platform_(std::move(platform)),
oom_handler_(std::make_unique<FatalOutOfMemoryHandler>(this)),
#if defined(LEAK_SANITIZER)
lsan_page_allocator_(std::make_unique<v8::base::LsanPageAllocator>(
platform_->GetPageAllocator())),
#endif // LEAK_SANITIZER
#if defined(CPPGC_CAGED_HEAP)
caged_heap_(this, page_allocator()),
page_backend_(std::make_unique<PageBackend>(caged_heap_.allocator())),
page_backend_(std::make_unique<PageBackend>(caged_heap_.allocator(),
*oom_handler_.get())),
#else // !CPPGC_CAGED_HEAP
page_backend_(std::make_unique<PageBackend>(*page_allocator())),
page_backend_(std::make_unique<PageBackend>(*page_allocator(),
*oom_handler_.get())),
#endif // !CPPGC_CAGED_HEAP
stats_collector_(std::make_unique<StatsCollector>(platform_.get())),
stack_(std::make_unique<heap::base::Stack>(

View File

@ -18,6 +18,7 @@
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/metric-recorder.h"
#include "src/heap/cppgc/object-allocator.h"
#include "src/heap/cppgc/platform.h"
#include "src/heap/cppgc/process-heap-statistics.h"
#include "src/heap/cppgc/process-heap.h"
#include "src/heap/cppgc/raw-heap.h"
@ -65,6 +66,7 @@ namespace testing {
class TestWithHeap;
} // namespace testing
class FatalOutOfMemoryHandler;
class PageBackend;
class PreFinalizerHandler;
class StatsCollector;
@ -95,6 +97,11 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
cppgc::Platform* platform() { return platform_.get(); }
const cppgc::Platform* platform() const { return platform_.get(); }
FatalOutOfMemoryHandler& oom_handler() { return *oom_handler_.get(); }
const FatalOutOfMemoryHandler& oom_handler() const {
return *oom_handler_.get();
}
PageBackend* page_backend() { return page_backend_.get(); }
const PageBackend* page_backend() const { return page_backend_.get(); }
@ -214,6 +221,7 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
RawHeap raw_heap_;
std::shared_ptr<cppgc::Platform> platform_;
std::unique_ptr<FatalOutOfMemoryHandler> oom_handler_;
#if defined(LEAK_SANITIZER)
std::unique_ptr<v8::base::LsanPageAllocator> lsan_page_allocator_;

View File

@ -6,17 +6,21 @@
#include "src/base/macros.h"
#include "src/base/sanitizer/asan.h"
#include "src/heap/cppgc/platform.h"
namespace cppgc {
namespace internal {
namespace {
void Unprotect(PageAllocator& allocator, const PageMemory& page_memory) {
void Unprotect(PageAllocator& allocator, FatalOutOfMemoryHandler& oom_handler,
const PageMemory& page_memory) {
if (SupportsCommittingGuardPages(allocator)) {
CHECK(allocator.SetPermissions(page_memory.writeable_region().base(),
if (!allocator.SetPermissions(page_memory.writeable_region().base(),
page_memory.writeable_region().size(),
PageAllocator::Permission::kReadWrite));
PageAllocator::Permission::kReadWrite)) {
oom_handler("Oilpan: Unprotecting memory.");
}
} else {
// No protection in case the allocator cannot commit at the required
// granularity. Only protect if the allocator supports committing at that
@ -25,34 +29,45 @@ void Unprotect(PageAllocator& allocator, const PageMemory& page_memory) {
// The allocator needs to support committing the overall range.
CHECK_EQ(0u,
page_memory.overall_region().size() % allocator.CommitPageSize());
CHECK(allocator.SetPermissions(page_memory.overall_region().base(),
if (!allocator.SetPermissions(page_memory.overall_region().base(),
page_memory.overall_region().size(),
PageAllocator::Permission::kReadWrite));
PageAllocator::Permission::kReadWrite)) {
oom_handler("Oilpan: Unprotecting memory.");
}
}
}
void Protect(PageAllocator& allocator, const PageMemory& page_memory) {
void Protect(PageAllocator& allocator, FatalOutOfMemoryHandler& oom_handler,
const PageMemory& page_memory) {
if (SupportsCommittingGuardPages(allocator)) {
// Swap the same region, providing the OS with a chance for fast lookup and
// change.
CHECK(allocator.SetPermissions(page_memory.writeable_region().base(),
if (!allocator.SetPermissions(page_memory.writeable_region().base(),
page_memory.writeable_region().size(),
PageAllocator::Permission::kNoAccess));
PageAllocator::Permission::kNoAccess)) {
oom_handler("Oilpan: Protecting memory.");
}
} else {
// See Unprotect().
CHECK_EQ(0u,
page_memory.overall_region().size() % allocator.CommitPageSize());
CHECK(allocator.SetPermissions(page_memory.overall_region().base(),
if (!allocator.SetPermissions(page_memory.overall_region().base(),
page_memory.overall_region().size(),
PageAllocator::Permission::kNoAccess));
PageAllocator::Permission::kNoAccess)) {
oom_handler("Oilpan: Protecting memory.");
}
}
}
MemoryRegion ReserveMemoryRegion(PageAllocator& allocator,
FatalOutOfMemoryHandler& oom_handler,
size_t allocation_size) {
void* region_memory =
allocator.AllocatePages(nullptr, allocation_size, kPageSize,
PageAllocator::Permission::kNoAccess);
if (!region_memory) {
oom_handler("Oilpan: Reserving memory.");
}
const MemoryRegion reserved_region(static_cast<Address>(region_memory),
allocation_size);
DCHECK_EQ(reserved_region.base() + allocation_size, reserved_region.end());
@ -69,8 +84,10 @@ void FreeMemoryRegion(PageAllocator& allocator,
} // namespace
PageMemoryRegion::PageMemoryRegion(PageAllocator& allocator,
FatalOutOfMemoryHandler& oom_handler,
MemoryRegion reserved_region, bool is_large)
: allocator_(allocator),
oom_handler_(oom_handler),
reserved_region_(reserved_region),
is_large_(is_large) {}
@ -81,10 +98,12 @@ PageMemoryRegion::~PageMemoryRegion() {
// static
constexpr size_t NormalPageMemoryRegion::kNumPageRegions;
NormalPageMemoryRegion::NormalPageMemoryRegion(PageAllocator& allocator)
NormalPageMemoryRegion::NormalPageMemoryRegion(
PageAllocator& allocator, FatalOutOfMemoryHandler& oom_handler)
: PageMemoryRegion(
allocator,
ReserveMemoryRegion(allocator, RoundUp(kPageSize * kNumPageRegions,
allocator, oom_handler,
ReserveMemoryRegion(allocator, oom_handler,
RoundUp(kPageSize * kNumPageRegions,
allocator.AllocatePageSize())),
false) {
#ifdef DEBUG
@ -99,33 +118,35 @@ NormalPageMemoryRegion::~NormalPageMemoryRegion() = default;
void NormalPageMemoryRegion::Allocate(Address writeable_base) {
const size_t index = GetIndex(writeable_base);
ChangeUsed(index, true);
Unprotect(allocator_, GetPageMemory(index));
Unprotect(allocator_, oom_handler_, GetPageMemory(index));
}
void NormalPageMemoryRegion::Free(Address writeable_base) {
const size_t index = GetIndex(writeable_base);
ChangeUsed(index, false);
Protect(allocator_, GetPageMemory(index));
Protect(allocator_, oom_handler_, GetPageMemory(index));
}
void NormalPageMemoryRegion::UnprotectForTesting() {
for (size_t i = 0; i < kNumPageRegions; ++i) {
Unprotect(allocator_, GetPageMemory(i));
Unprotect(allocator_, oom_handler_, GetPageMemory(i));
}
}
LargePageMemoryRegion::LargePageMemoryRegion(PageAllocator& allocator,
LargePageMemoryRegion::LargePageMemoryRegion(
PageAllocator& allocator, FatalOutOfMemoryHandler& oom_handler,
size_t length)
: PageMemoryRegion(
allocator,
ReserveMemoryRegion(allocator, RoundUp(length + 2 * kGuardPageSize,
allocator, oom_handler,
ReserveMemoryRegion(allocator, oom_handler,
RoundUp(length + 2 * kGuardPageSize,
allocator.AllocatePageSize())),
true) {}
LargePageMemoryRegion::~LargePageMemoryRegion() = default;
void LargePageMemoryRegion::UnprotectForTesting() {
Unprotect(allocator_, GetPageMemory());
Unprotect(allocator_, oom_handler_, GetPageMemory());
}
PageMemoryRegionTree::PageMemoryRegionTree() = default;
@ -165,14 +186,17 @@ std::pair<NormalPageMemoryRegion*, Address> NormalPageMemoryPool::Take(
return pair;
}
PageBackend::PageBackend(PageAllocator& allocator) : allocator_(allocator) {}
PageBackend::PageBackend(PageAllocator& allocator,
FatalOutOfMemoryHandler& oom_handler)
: allocator_(allocator), oom_handler_(oom_handler) {}
PageBackend::~PageBackend() = default;
Address PageBackend::AllocateNormalPageMemory(size_t bucket) {
std::pair<NormalPageMemoryRegion*, Address> result = page_pool_.Take(bucket);
if (!result.first) {
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator_);
auto pmr =
std::make_unique<NormalPageMemoryRegion>(allocator_, oom_handler_);
for (size_t i = 0; i < NormalPageMemoryRegion::kNumPageRegions; ++i) {
page_pool_.Add(bucket, pmr.get(),
pmr->GetPageMemory(i).writeable_region().base());
@ -193,9 +217,10 @@ void PageBackend::FreeNormalPageMemory(size_t bucket, Address writeable_base) {
}
Address PageBackend::AllocateLargePageMemory(size_t size) {
auto pmr = std::make_unique<LargePageMemoryRegion>(allocator_, size);
auto pmr =
std::make_unique<LargePageMemoryRegion>(allocator_, oom_handler_, size);
const PageMemory pm = pmr->GetPageMemory();
Unprotect(allocator_, pm);
Unprotect(allocator_, oom_handler_, pm);
page_memory_region_tree_.Add(pmr.get());
large_page_memory_regions_.insert(std::make_pair(pmr.get(), std::move(pmr)));
return pm.writeable_region().base();

View File

@ -18,6 +18,8 @@
namespace cppgc {
namespace internal {
class FatalOutOfMemoryHandler;
class V8_EXPORT_PRIVATE MemoryRegion final {
public:
MemoryRegion() = default;
@ -79,9 +81,11 @@ class V8_EXPORT_PRIVATE PageMemoryRegion {
virtual void UnprotectForTesting() = 0;
protected:
PageMemoryRegion(PageAllocator&, MemoryRegion, bool);
PageMemoryRegion(PageAllocator&, FatalOutOfMemoryHandler&, MemoryRegion,
bool);
PageAllocator& allocator_;
FatalOutOfMemoryHandler& oom_handler_;
const MemoryRegion reserved_region_;
const bool is_large_;
};
@ -91,7 +95,7 @@ class V8_EXPORT_PRIVATE NormalPageMemoryRegion final : public PageMemoryRegion {
public:
static constexpr size_t kNumPageRegions = 10;
explicit NormalPageMemoryRegion(PageAllocator&);
NormalPageMemoryRegion(PageAllocator&, FatalOutOfMemoryHandler&);
~NormalPageMemoryRegion() override;
const PageMemory GetPageMemory(size_t index) const {
@ -133,7 +137,7 @@ class V8_EXPORT_PRIVATE NormalPageMemoryRegion final : public PageMemoryRegion {
// LargePageMemoryRegion serves a single large PageMemory object.
class V8_EXPORT_PRIVATE LargePageMemoryRegion final : public PageMemoryRegion {
public:
LargePageMemoryRegion(PageAllocator&, size_t);
LargePageMemoryRegion(PageAllocator&, FatalOutOfMemoryHandler&, size_t);
~LargePageMemoryRegion() override;
const PageMemory GetPageMemory() const {
@ -193,7 +197,7 @@ class V8_EXPORT_PRIVATE NormalPageMemoryPool final {
// regions alive.
class V8_EXPORT_PRIVATE PageBackend final {
public:
explicit PageBackend(PageAllocator&);
PageBackend(PageAllocator&, FatalOutOfMemoryHandler&);
~PageBackend();
// Allocates a normal page from the backend.
@ -224,6 +228,7 @@ class V8_EXPORT_PRIVATE PageBackend final {
private:
PageAllocator& allocator_;
FatalOutOfMemoryHandler& oom_handler_;
NormalPageMemoryPool page_pool_;
PageMemoryRegionTree page_memory_region_tree_;
std::vector<std::unique_ptr<PageMemoryRegion>> normal_page_memory_regions_;

View File

@ -5,10 +5,35 @@
#include "include/cppgc/platform.h"
#include "src/base/lazy-instance.h"
#include "src/base/logging.h"
#include "src/base/platform/platform.h"
#include "src/heap/cppgc/gc-info-table.h"
#include "src/heap/cppgc/platform.h"
namespace cppgc {
namespace internal {
void Abort() { v8::base::OS::Abort(); }
// Reports a fatal out-of-memory condition. If a custom handler has been
// installed it is invoked first with the reason, the reporting source
// location, and the associated heap; custom handlers must not return.
// Otherwise the process is terminated directly.
void FatalOutOfMemoryHandler::operator()(const std::string& reason,
                                         const SourceLocation& loc) const {
  if (custom_handler_) {
    (*custom_handler_)(reason, loc, heap_);
    // A returning custom handler is a contract violation; crash hard.
    FATAL("Custom out of memory handler should not have returned");
  }
#ifdef DEBUG
  // Debug builds include the reason and the call site for easier diagnosis.
  V8_Fatal(loc.FileName(), static_cast<int>(loc.Line()),
           "Oilpan: Out of memory (%s)", reason.c_str());
#else   // !DEBUG
  // Release builds keep the message generic (no file/line, no reason string).
  V8_Fatal("Oilpan: Out of memory");
#endif  // !DEBUG
}
// Installs a callback that overrides the default fatal OOM behavior, or
// removes the current one when called with nullptr.
void FatalOutOfMemoryHandler::SetCustomHandler(Callback* callback) {
  custom_handler_ = callback;
}
} // namespace internal
namespace {
PageAllocator* g_page_allocator = nullptr;
@ -27,9 +52,4 @@ void InitializeProcess(PageAllocator* page_allocator) {
void ShutdownProcess() { g_page_allocator = nullptr; }
namespace internal {
void Abort() { v8::base::OS::Abort(); }
} // namespace internal
} // namespace cppgc

43
src/heap/cppgc/platform.h Normal file
View File

@ -0,0 +1,43 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_PLATFORM_H_
#define V8_HEAP_CPPGC_PLATFORM_H_
#include <string>
#include "include/cppgc/source-location.h"
#include "src/base/macros.h"
namespace cppgc {
namespace internal {
class HeapBase;
// Handler for fatal out-of-memory failures in cppgc. By default, invoking the
// handler terminates the process; embedders may install a custom callback via
// SetCustomHandler() that receives the failure reason, the reporting source
// location, and the heap the handler was constructed with.
class V8_EXPORT_PRIVATE FatalOutOfMemoryHandler final {
 public:
  // Custom handler signature: reason, source location, and originating heap
  // (nullptr for handlers constructed without a heap). Handlers must not
  // return.
  using Callback = void(const std::string&, const SourceLocation&, HeapBase*);

  // Heap-less handler; the custom callback (if any) receives nullptr.
  FatalOutOfMemoryHandler() = default;
  explicit FatalOutOfMemoryHandler(HeapBase* heap) : heap_(heap) {}

  [[noreturn]] void operator()(
      const std::string& reason = std::string(),
      const SourceLocation& = SourceLocation::Current()) const;

  // Passing nullptr removes any previously installed custom handler.
  void SetCustomHandler(Callback*);

  // Disallow copy/move.
  FatalOutOfMemoryHandler(const FatalOutOfMemoryHandler&) = delete;
  FatalOutOfMemoryHandler& operator=(const FatalOutOfMemoryHandler&) = delete;

 private:
  HeapBase* heap_ = nullptr;
  Callback* custom_handler_ = nullptr;
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_PLATFORM_H_

View File

@ -119,6 +119,7 @@ v8_source_set("cppgc_unittests_sources") {
"heap/cppgc/object-start-bitmap-unittest.cc",
"heap/cppgc/page-memory-unittest.cc",
"heap/cppgc/persistent-family-unittest.cc",
"heap/cppgc/platform-unittest.cc",
"heap/cppgc/prefinalizer-unittest.cc",
"heap/cppgc/sanitizer-unittest.cc",
"heap/cppgc/source-location-unittest.cc",

View File

@ -5,6 +5,7 @@
#include "src/heap/cppgc/page-memory.h"
#include "src/base/page-allocator.h"
#include "src/heap/cppgc/platform.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace cppgc {
@ -77,7 +78,8 @@ TEST(PageMemoryDeathTest, ConstructNonContainedRegions) {
TEST(PageMemoryRegionTest, NormalPageMemoryRegion) {
v8::base::PageAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator);
FatalOutOfMemoryHandler oom_handler;
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
pmr->UnprotectForTesting();
MemoryRegion prev_overall;
for (size_t i = 0; i < NormalPageMemoryRegion::kNumPageRegions; ++i) {
@ -103,7 +105,9 @@ TEST(PageMemoryRegionTest, NormalPageMemoryRegion) {
TEST(PageMemoryRegionTest, LargePageMemoryRegion) {
v8::base::PageAllocator allocator;
auto pmr = std::make_unique<LargePageMemoryRegion>(allocator, 1024);
FatalOutOfMemoryHandler oom_handler;
auto pmr =
std::make_unique<LargePageMemoryRegion>(allocator, oom_handler, 1024);
pmr->UnprotectForTesting();
const PageMemory pm = pmr->GetPageMemory();
EXPECT_LE(1024u, pm.writeable_region().size());
@ -140,8 +144,10 @@ TEST(PageMemoryRegionDeathTest, ReservationIsFreed) {
// may expand to statements that re-purpose the previously freed memory
// and thus not crash.
EXPECT_DEATH_IF_SUPPORTED(
v8::base::PageAllocator allocator; Address base; {
auto pmr = std::make_unique<LargePageMemoryRegion>(allocator, 1024);
v8::base::PageAllocator allocator; FatalOutOfMemoryHandler oom_handler;
Address base; {
auto pmr = std::make_unique<LargePageMemoryRegion>(allocator,
oom_handler, 1024);
base = pmr->reserved_region().base();
} access(base[0]);
, "");
@ -149,7 +155,8 @@ TEST(PageMemoryRegionDeathTest, ReservationIsFreed) {
TEST(PageMemoryRegionDeathTest, FrontGuardPageAccessCrashes) {
v8::base::PageAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator);
FatalOutOfMemoryHandler oom_handler;
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
if (SupportsCommittingGuardPages(allocator)) {
EXPECT_DEATH_IF_SUPPORTED(
access(pmr->GetPageMemory(0).overall_region().base()[0]), "");
@ -158,7 +165,8 @@ TEST(PageMemoryRegionDeathTest, FrontGuardPageAccessCrashes) {
TEST(PageMemoryRegionDeathTest, BackGuardPageAccessCrashes) {
v8::base::PageAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator);
FatalOutOfMemoryHandler oom_handler;
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
if (SupportsCommittingGuardPages(allocator)) {
EXPECT_DEATH_IF_SUPPORTED(
access(pmr->GetPageMemory(0).writeable_region().end()[0]), "");
@ -167,7 +175,8 @@ TEST(PageMemoryRegionDeathTest, BackGuardPageAccessCrashes) {
TEST(PageMemoryRegionTreeTest, AddNormalLookupRemove) {
v8::base::PageAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator);
FatalOutOfMemoryHandler oom_handler;
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
PageMemoryRegionTree tree;
tree.Add(pmr.get());
ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().base()));
@ -181,8 +190,10 @@ TEST(PageMemoryRegionTreeTest, AddNormalLookupRemove) {
TEST(PageMemoryRegionTreeTest, AddLargeLookupRemove) {
v8::base::PageAllocator allocator;
FatalOutOfMemoryHandler oom_handler;
constexpr size_t kLargeSize = 5012;
auto pmr = std::make_unique<LargePageMemoryRegion>(allocator, kLargeSize);
auto pmr = std::make_unique<LargePageMemoryRegion>(allocator, oom_handler,
kLargeSize);
PageMemoryRegionTree tree;
tree.Add(pmr.get());
ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().base()));
@ -196,9 +207,11 @@ TEST(PageMemoryRegionTreeTest, AddLargeLookupRemove) {
TEST(PageMemoryRegionTreeTest, AddLookupRemoveMultiple) {
v8::base::PageAllocator allocator;
auto pmr1 = std::make_unique<NormalPageMemoryRegion>(allocator);
FatalOutOfMemoryHandler oom_handler;
auto pmr1 = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
constexpr size_t kLargeSize = 3127;
auto pmr2 = std::make_unique<LargePageMemoryRegion>(allocator, kLargeSize);
auto pmr2 = std::make_unique<LargePageMemoryRegion>(allocator, oom_handler,
kLargeSize);
PageMemoryRegionTree tree;
tree.Add(pmr1.get());
tree.Add(pmr2.get());
@ -223,7 +236,8 @@ TEST(NormalPageMemoryPool, ConstructorEmpty) {
TEST(NormalPageMemoryPool, AddTakeSameBucket) {
v8::base::PageAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator);
FatalOutOfMemoryHandler oom_handler;
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
const PageMemory pm = pmr->GetPageMemory(0);
NormalPageMemoryPool pool;
constexpr size_t kBucket = 0;
@ -235,7 +249,8 @@ TEST(NormalPageMemoryPool, AddTakeSameBucket) {
TEST(NormalPageMemoryPool, AddTakeNotFoundDifferentBucket) {
v8::base::PageAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator);
FatalOutOfMemoryHandler oom_handler;
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
const PageMemory pm = pmr->GetPageMemory(0);
NormalPageMemoryPool pool;
constexpr size_t kFirstBucket = 0;
@ -250,7 +265,8 @@ TEST(NormalPageMemoryPool, AddTakeNotFoundDifferentBucket) {
TEST(PageBackendTest, AllocateNormalUsesPool) {
v8::base::PageAllocator allocator;
PageBackend backend(allocator);
FatalOutOfMemoryHandler oom_handler;
PageBackend backend(allocator, oom_handler);
constexpr size_t kBucket = 0;
Address writeable_base1 = backend.AllocateNormalPageMemory(kBucket);
EXPECT_NE(nullptr, writeable_base1);
@ -262,7 +278,8 @@ TEST(PageBackendTest, AllocateNormalUsesPool) {
TEST(PageBackendTest, AllocateLarge) {
v8::base::PageAllocator allocator;
PageBackend backend(allocator);
FatalOutOfMemoryHandler oom_handler;
PageBackend backend(allocator, oom_handler);
Address writeable_base1 = backend.AllocateLargePageMemory(13731);
EXPECT_NE(nullptr, writeable_base1);
Address writeable_base2 = backend.AllocateLargePageMemory(9478);
@ -274,7 +291,8 @@ TEST(PageBackendTest, AllocateLarge) {
TEST(PageBackendTest, LookupNormal) {
v8::base::PageAllocator allocator;
PageBackend backend(allocator);
FatalOutOfMemoryHandler oom_handler;
PageBackend backend(allocator, oom_handler);
constexpr size_t kBucket = 0;
Address writeable_base = backend.AllocateNormalPageMemory(kBucket);
EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize));
@ -290,7 +308,8 @@ TEST(PageBackendTest, LookupNormal) {
TEST(PageBackendTest, LookupLarge) {
v8::base::PageAllocator allocator;
PageBackend backend(allocator);
FatalOutOfMemoryHandler oom_handler;
PageBackend backend(allocator, oom_handler);
constexpr size_t kSize = 7934;
Address writeable_base = backend.AllocateLargePageMemory(kSize);
EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize));
@ -301,9 +320,10 @@ TEST(PageBackendTest, LookupLarge) {
TEST(PageBackendDeathTest, DestructingBackendDestroysPageMemory) {
v8::base::PageAllocator allocator;
FatalOutOfMemoryHandler oom_handler;
Address base;
{
PageBackend backend(allocator);
PageBackend backend(allocator, oom_handler);
constexpr size_t kBucket = 0;
base = backend.AllocateNormalPageMemory(kBucket);
}

View File

@ -0,0 +1,46 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/platform.h"
#include "src/base/logging.h"
#include "src/base/page-allocator.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace cppgc {
namespace internal {
// Without a custom handler installed, invoking the handler must terminate
// the process.
TEST(FatalOutOfMemoryHandlerDeathTest, DefaultHandlerCrashes) {
  FatalOutOfMemoryHandler handler;
  EXPECT_DEATH_IF_SUPPORTED(handler(), "");
}
namespace {

// Sentinel heap pointer used to verify that the heap passed at handler
// construction time is forwarded to the custom callback.
constexpr uintptr_t kHeapNeedle = 0x14;

// Custom OOM callback that encodes, via its crash message, whether the
// expected heap pointer was forwarded.
[[noreturn]] void CustomHandler(const std::string&, const SourceLocation&,
                                HeapBase* heap) {
  if (heap == reinterpret_cast<HeapBase*>(kHeapNeedle)) {
    FATAL("cust0m h4ndl3r with matching heap");
  }
  FATAL("cust0m h4ndl3r");
}

}  // namespace
// An installed custom handler is invoked instead of the default crash path;
// a heap-less handler forwards nullptr, so the non-matching message is used.
TEST(FatalOutOfMemoryHandlerDeathTest, CustomHandlerCrashes) {
  FatalOutOfMemoryHandler handler;
  handler.SetCustomHandler(&CustomHandler);
  EXPECT_DEATH_IF_SUPPORTED(handler(), "cust0m h4ndl3r");
}
// The heap supplied at construction is forwarded to the custom handler,
// which is observed through the heap-specific crash message.
TEST(FatalOutOfMemoryHandlerDeathTest, CustomHandlerWithHeapState) {
  FatalOutOfMemoryHandler handler(reinterpret_cast<HeapBase*>(kHeapNeedle));
  handler.SetCustomHandler(&CustomHandler);
  EXPECT_DEATH_IF_SUPPORTED(handler(), "cust0m h4ndl3r with matching heap");
}
} // namespace internal
} // namespace cppgc