cppgc: shared-cage: Introduce shared cage for all heaps
The CL does the following:
1) Globalizes CagedHeap for all HeapBases;
2) Adds the global variable representing the cage base;
3) Changes all write-barriers to use this global variable for value/slot
   checks;
4) Removes no longer needed functionality introduced in previous CLs.

Bug: v8:12231
Change-Id: I281a7b0bf67e349c988486fc2d43ec6d703fd292
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3688050
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81027}
Parent: 36e47253a4
Commit: 897c5618aa
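The core idea of the CL: once every cppgc heap lives inside one shared, size-aligned reservation, "is this pointer managed by cppgc?" becomes pure bit arithmetic against a single global base. A minimal standalone sketch of that membership test (the names and the 4GB size below are illustrative assumptions, not the actual api-constants; 64-bit platforms assumed):

```cpp
#include <cstdint>

namespace sketch {

// Set once when the shared cage reservation is created.
inline uintptr_t g_cage_base = 0;

// Assumed cage size/alignment: 4GB, a power of two, base aligned to size.
constexpr uintptr_t kCageSize = uintptr_t{1} << 32;

// A pointer is inside the cage iff masking off the low 32 bits of its
// address yields exactly the cage base.
inline bool IsWithinCage(const void* p) {
  return (reinterpret_cast<uintptr_t>(p) & ~(kCageSize - 1)) == g_cage_base;
}

// The cage-relative offset is just the low address bits; no per-heap base
// needs to be threaded through the write barrier anymore.
inline uintptr_t OffsetFromAddress(const void* p) {
  return reinterpret_cast<uintptr_t>(p) & (kCageSize - 1);
}

}  // namespace sketch
```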
@@ -10,6 +10,7 @@
 #include <cstdint>
 
 #include "cppgc/internal/api-constants.h"
+#include "cppgc/internal/caged-heap.h"
 #include "cppgc/internal/logging.h"
 #include "cppgc/platform.h"
 #include "v8config.h"  // NOLINT(build/include_directory)
@@ -85,11 +86,17 @@ static_assert(sizeof(AgeTable) == 1 * api_constants::kMB,
 // TODO(v8:12231): Remove this class entirely so that it doesn't occupy space is
 // when CPPGC_YOUNG_GENERATION is off.
 struct CagedHeapLocalData final {
-  explicit CagedHeapLocalData(PageAllocator&);
+  V8_INLINE static CagedHeapLocalData& Get() {
+    return *reinterpret_cast<CagedHeapLocalData*>(CagedHeapBase::GetBase());
+  }
 
 #if defined(CPPGC_YOUNG_GENERATION)
   AgeTable age_table;
 #endif
+
+ private:
+  friend class CagedHeap;
+  explicit CagedHeapLocalData(PageAllocator&);
 };
 
 }  // namespace internal
@@ -5,6 +5,7 @@
 #ifndef INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_H_
 #define INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_H_
 
+#include <climits>
 #include <cstddef>
 
 #include "cppgc/internal/api-constants.h"
@@ -18,23 +19,56 @@ namespace internal {
 
 class V8_EXPORT CagedHeapBase {
  public:
-  V8_INLINE static bool IsWithinNormalPageReservation(uintptr_t heap_base,
-                                                      void* address) {
-    return (reinterpret_cast<uintptr_t>(address) - heap_base) <
+  V8_INLINE static uintptr_t OffsetFromAddress(const void* address) {
+    return reinterpret_cast<uintptr_t>(address) &
+           (api_constants::kCagedHeapReservationAlignment - 1);
+  }
+
+  V8_INLINE static bool IsWithinCage(const void* address) {
+    CPPGC_DCHECK(g_heap_base_);
+    return (reinterpret_cast<uintptr_t>(address) &
+            ~(api_constants::kCagedHeapReservationAlignment - 1)) ==
+           g_heap_base_;
+  }
+
+  V8_INLINE static bool AreWithinCage(const void* addr1, const void* addr2) {
+    static constexpr size_t kHalfWordShift = sizeof(uint32_t) * CHAR_BIT;
+    static_assert((static_cast<size_t>(1) << kHalfWordShift) ==
+                  api_constants::kCagedHeapReservationSize);
+    CPPGC_DCHECK(g_heap_base_);
+    return !(((reinterpret_cast<uintptr_t>(addr1) ^ g_heap_base_) |
+              (reinterpret_cast<uintptr_t>(addr2) ^ g_heap_base_)) >>
+             kHalfWordShift);
+  }
+
+  V8_INLINE static bool IsWithinNormalPageReservation(void* address) {
+    return (reinterpret_cast<uintptr_t>(address) - g_heap_base_) <
            api_constants::kCagedHeapNormalPageReservationSize;
   }
 
-  V8_INLINE static BasePageHandle* LookupPageFromInnerPointer(
-      uintptr_t heap_base, void* ptr) {
-    if (V8_LIKELY(IsWithinNormalPageReservation(heap_base, ptr)))
-      return BasePageHandle::FromPayload(ptr);
+  V8_INLINE static bool IsWithinLargePageReservation(const void* ptr) {
+    CPPGC_DCHECK(g_heap_base_);
+    auto uptr = reinterpret_cast<uintptr_t>(ptr);
+    return (uptr >= g_heap_base_ +
+                        api_constants::kCagedHeapNormalPageReservationSize) &&
+           (uptr < g_heap_base_ + api_constants::kCagedHeapReservationSize);
+  }
+
+  V8_INLINE static uintptr_t GetBase() { return g_heap_base_; }
+
+  V8_INLINE static BasePageHandle& LookupPageFromInnerPointer(void* ptr) {
+    if (V8_LIKELY(IsWithinNormalPageReservation(ptr)))
+      return *BasePageHandle::FromPayload(ptr);
     else
-      return LookupLargePageFromInnerPointer(heap_base, ptr);
+      return LookupLargePageFromInnerPointer(ptr);
   }
 
  private:
-  static BasePageHandle* LookupLargePageFromInnerPointer(uintptr_t heap_base,
-                                                         void* address);
+  friend class CagedHeap;
+
+  static BasePageHandle& LookupLargePageFromInnerPointer(void* address);
+
+  static uintptr_t g_heap_base_;
 };
 
 }  // namespace internal
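A note on the `AreWithinCage()` fast path above: XOR-ing a pointer with the cage base clears the high bits exactly when the pointer lies inside the 4GB, size-aligned cage, so OR-ing both XOR results and shifting out the low 32 bits is zero iff both the slot and the value are in-cage — one branch instead of two. A hedged standalone check of the same shape, using invented addresses:

```cpp
#include <cassert>
#include <cstdint>

int main() {
  // Illustrative values only: a 4GB-aligned, 4GB-sized cage.
  const uint64_t base = uint64_t{0x7f12} << 32;
  const uint64_t in1 = base + 0x1000;               // inside the cage
  const uint64_t in2 = base + 0xfedcba98;           // inside the cage
  const uint64_t out = base + (uint64_t{1} << 32);  // first byte past the cage

  // Same shape as CagedHeapBase::AreWithinCage() above.
  auto are_within_cage = [&](uint64_t a, uint64_t b) {
    return !(((a ^ base) | (b ^ base)) >> 32);
  };

  assert(are_within_cage(in1, in2));
  assert(!are_within_cage(in1, out));
  assert(!are_within_cage(out, in2));
  return 0;
}
```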
@@ -48,10 +48,6 @@ class V8_EXPORT WriteBarrier final {
   Type type = Type::kNone;
 #endif  // !V8_ENABLE_CHECKS
 #if defined(CPPGC_CAGED_HEAP)
-  uintptr_t start = 0;
-  CagedHeapLocalData& caged_heap() const {
-    return *reinterpret_cast<CagedHeapLocalData*>(start);
-  }
   uintptr_t slot_offset = 0;
   uintptr_t value_offset = 0;
 #endif  // CPPGC_CAGED_HEAP
@@ -169,9 +165,8 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
   static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
                                                 WriteBarrier::Params& params,
                                                 HeapHandleCallback) {
-    if (!TryGetCagedHeap(value, value, params)) {
-      return WriteBarrier::Type::kNone;
-    }
+    const bool within_cage = CagedHeapBase::IsWithinCage(value);
+    if (!within_cage) return WriteBarrier::Type::kNone;
 
     // We know that |value| points either within the normal page or to the
     // beginning of large-page, so extract the page header by bitmasking.
@@ -188,35 +183,6 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
 
   template <WriteBarrier::ValueMode value_mode>
   struct ValueModeDispatch;
-
-  static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
-                                        WriteBarrier::Params& params) {
-    // The compiler must fold these checks into a single one.
-    if (!value || value == kSentinelPointer) return false;
-
-    // Now we are certain that |value| points within the cage.
-    const uintptr_t real_cage_base =
-        reinterpret_cast<uintptr_t>(value) &
-        ~(api_constants::kCagedHeapReservationAlignment - 1);
-
-    const uintptr_t cage_base_from_slot =
-        reinterpret_cast<uintptr_t>(slot) &
-        ~(api_constants::kCagedHeapReservationAlignment - 1);
-
-    // If |cage_base_from_slot| is different from |real_cage_base|, the slot
-    // must be on stack, bail out.
-    if (V8_UNLIKELY(real_cage_base != cage_base_from_slot)) return false;
-
-    // Otherwise, set params.start and return.
-    params.start = real_cage_base;
-    return true;
-  }
-
-  // Returns whether marking is in progress. If marking is not in progress
-  // sets the start of the cage accordingly.
-  //
-  // TODO(chromium:1056170): Create fast path on API.
-  static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
 };
 
 template <>
@@ -229,7 +195,7 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
     if (V8_LIKELY(!WriteBarrier::IsEnabled()))
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
 
-    const bool within_cage = TryGetCagedHeap(slot, value, params);
+    const bool within_cage = CagedHeapBase::AreWithinCage(slot, value);
     if (!within_cage) return WriteBarrier::Type::kNone;
 
     // We know that |value| points either within the normal page or to the
@@ -243,8 +209,8 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
     if (!heap_handle.is_young_generation_enabled())
       return WriteBarrier::Type::kNone;
     params.heap = &heap_handle;
-    params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
-    params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
+    params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
+    params.value_offset = CagedHeapBase::OffsetFromAddress(value);
     return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
 #else  // !CPPGC_YOUNG_GENERATION
     return SetAndReturnType<WriteBarrier::Type::kNone>(params);
@@ -269,18 +235,16 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
 
 #if defined(CPPGC_YOUNG_GENERATION)
     HeapHandle& handle = callback();
-    if (V8_LIKELY(!IsMarking(handle, params))) {
-      // params.start is populated by IsMarking().
+    if (V8_LIKELY(!handle.is_incremental_marking_in_progress())) {
       if (!handle.is_young_generation_enabled()) {
         return WriteBarrier::Type::kNone;
       }
       params.heap = &handle;
-      params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
-      // params.value_offset stays 0.
-      if (params.slot_offset > api_constants::kCagedHeapReservationSize) {
-        // Check if slot is on stack.
+      // Check if slot is on stack.
+      if (V8_UNLIKELY(!CagedHeapBase::IsWithinCage(slot))) {
         return SetAndReturnType<WriteBarrier::Type::kNone>(params);
       }
+      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
       return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
     }
 #else  // !defined(CPPGC_YOUNG_GENERATION)
@@ -428,13 +392,15 @@ void WriteBarrier::SteeleMarkingBarrier(const Params& params,
 void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
   CheckParams(Type::kGenerational, params);
 
-  const CagedHeapLocalData& local_data = params.caged_heap();
+  const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
   const AgeTable& age_table = local_data.age_table;
 
   // Bail out if the slot is in young generation.
   if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
     return;
 
+  // TODO(chromium:1029379): Consider reload local_data in the slow path to
+  // reduce register pressure.
   GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset,
                           params.heap);
 }
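For the generational barrier above, the shared cage also simplifies the age-table lookup: `params.slot_offset` is now obtained by masking the slot address (`OffsetFromAddress`) instead of subtracting a per-heap `params.start`. A toy model of that card lookup — the card size and table extent are assumptions, not the cppgc constants:

```cpp
#include <array>
#include <cstdint>

enum class Age : uint8_t { kOld, kYoung, kMixed };

constexpr size_t kCageSizeLog2 = 32;  // assumed 4GB cage
constexpr size_t kCardSizeLog2 = 12;  // assumed 4KB cards

// One age entry per card covering the whole cage (2^20 entries here).
std::array<Age, (size_t{1} << (kCageSizeLog2 - kCardSizeLog2))> g_age_table{};

inline Age AgeOfSlot(const void* slot) {
  // Equivalent of OffsetFromAddress(): the offset is the low address bits.
  const uintptr_t offset = reinterpret_cast<uintptr_t>(slot) &
                           ((uintptr_t{1} << kCageSizeLog2) - 1);
  return g_age_table[offset >> kCardSizeLog2];
}
```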
@@ -444,7 +410,7 @@ void WriteBarrier::GenerationalBarrierForSourceObject(
     const Params& params, const void* inner_pointer) {
   CheckParams(Type::kGenerational, params);
 
-  const CagedHeapLocalData& local_data = params.caged_heap();
+  const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
   const AgeTable& age_table = local_data.age_table;
 
   // Assume that if the first element is in young generation, the whole range is
@@ -452,6 +418,8 @@ void WriteBarrier::GenerationalBarrierForSourceObject(
   if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
     return;
 
+  // TODO(chromium:1029379): Consider reload local_data in the slow path to
+  // reduce register pressure.
   GenerationalBarrierForSourceObjectSlow(local_data, inner_pointer,
                                          params.heap);
 }
@@ -35,17 +35,12 @@ static_assert(api_constants::kCagedHeapReservationAlignment ==
 static_assert(api_constants::kCagedHeapNormalPageReservationSize ==
               kCagedHeapNormalPageReservationSize);
 
+uintptr_t CagedHeapBase::g_heap_base_ = 0u;
+
+CagedHeap* CagedHeap::instance_ = nullptr;
+
 namespace {
 
-// TODO(v8:12231): Remove once shared cage is there. Currently it's only used
-// for large pages lookup in the write barrier.
-using Cages = std::map<uintptr_t /*cage_base*/, HeapBase*>;
-
-static Cages& global_cages() {
-  static v8::base::LeakyObject<Cages> instance;
-  return *instance.get();
-}
-
 VirtualMemory ReserveCagedHeap(PageAllocator& platform_allocator) {
   DCHECK_EQ(0u,
             kCagedHeapReservationSize % platform_allocator.AllocatePageSize());
@@ -67,10 +62,25 @@ VirtualMemory ReserveCagedHeap(PageAllocator& platform_allocator) {
 
 }  // namespace
 
-CagedHeap::CagedHeap(HeapBase& heap_base, PageAllocator& platform_allocator)
+// static
+void CagedHeap::InitializeIfNeeded(PageAllocator& platform_allocator) {
+  static v8::base::LeakyObject<CagedHeap> caged_heap(platform_allocator);
+  instance_ = caged_heap.get();
+}
+
+// static
+CagedHeap& CagedHeap::Instance() {
+  DCHECK_NOT_NULL(instance_);
+  return *instance_;
+}
+
+CagedHeap::CagedHeap(PageAllocator& platform_allocator)
     : reserved_area_(ReserveCagedHeap(platform_allocator)) {
   using CagedAddress = CagedHeap::AllocatorType::Address;
 
+  CagedHeapBase::g_heap_base_ =
+      reinterpret_cast<uintptr_t>(reserved_area_.address());
+
 #if defined(CPPGC_POINTER_COMPRESSION)
   // With pointer compression only single heap per thread is allowed.
   CHECK(!CageBaseGlobal::IsSet());
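`InitializeIfNeeded()` above leans on two idioms: a C++11 "magic static" (the first caller constructs the object; initialization is thread-safe) and a leaky wrapper that never runs the destructor, so the cage outlives every heap and no destruction order has to be managed. A hand-rolled sketch of the wrapper idea — this is not the `v8::base::LeakyObject` source:

```cpp
#include <new>
#include <utility>

// Storage for a T that is constructed once and intentionally never destroyed.
template <typename T>
class Leaky {
 public:
  template <typename... Args>
  explicit Leaky(Args&&... args) {
    new (storage_) T(std::forward<Args>(args)...);  // construct in place
  }
  // No destructor: the wrapped object deliberately leaks at process exit.
  T* get() { return reinterpret_cast<T*>(storage_); }

 private:
  alignas(T) unsigned char storage_[sizeof(T)];
};

struct Cage { /* reservation, allocators, ... */ };

Cage& GetCage() {
  static Leaky<Cage> cage;  // thread-safe, one-time "magic static" init
  return *cage.get();
}
```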
@@ -111,18 +121,6 @@ CagedHeap::CagedHeap(HeapBase& heap_base, PageAllocator& platform_allocator)
       kCagedHeapNormalPageReservationSize, kPageSize,
       v8::base::PageInitializationMode::kAllocatedPagesMustBeZeroInitialized,
       v8::base::PageFreeingMode::kMakeInaccessible);
-
-  auto is_inserted = global_cages().emplace(
-      reinterpret_cast<uintptr_t>(reserved_area_.address()), &heap_base);
-  CHECK(is_inserted.second);
-}
-
-CagedHeap::~CagedHeap() {
-#if defined(CPPGC_POINTER_COMPRESSION)
-  CHECK_EQ(reinterpret_cast<uintptr_t>(reserved_area_.address()),
-           CageBaseGlobalUpdater::GetCageBase());
-  CageBaseGlobalUpdater::UpdateCageBase(0u);
-#endif  // defined(CPPGC_POINTER_COMPRESSION)
 }
 
 void CagedHeap::NotifyLargePageCreated(LargePage* page) {
@@ -139,33 +137,33 @@ void CagedHeap::NotifyLargePageDestroyed(LargePage* page) {
   DCHECK_EQ(1u, size);
 }
 
-BasePage* CagedHeap::LookupPageFromInnerPointer(void* ptr) const {
+BasePage& CagedHeap::LookupPageFromInnerPointer(void* ptr) const {
   DCHECK(IsOnHeap(ptr));
-  if (V8_LIKELY(IsWithinNormalPageReservation(ptr))) {
-    return NormalPage::FromPayload(ptr);
+  if (V8_LIKELY(CagedHeapBase::IsWithinNormalPageReservation(ptr))) {
+    return *NormalPage::FromPayload(ptr);
   } else {
     return LookupLargePageFromInnerPointer(ptr);
   }
 }
 
-LargePage* CagedHeap::LookupLargePageFromInnerPointer(void* ptr) const {
+LargePage& CagedHeap::LookupLargePageFromInnerPointer(void* ptr) const {
   auto it = large_pages_.upper_bound(static_cast<LargePage*>(ptr));
   DCHECK_NE(large_pages_.begin(), it);
   auto* page = *std::next(it, -1);
   DCHECK(page);
   DCHECK(page->PayloadContains(static_cast<ConstAddress>(ptr)));
-  return page;
+  return *page;
+}
+
+void CagedHeap::ResetForTesting() {
+  // Clear the large pages to support tests within the same process.
+  large_pages_.clear();
 }
 
 // static
-BasePageHandle* CagedHeapBase::LookupLargePageFromInnerPointer(
-    uintptr_t heap_base, void* address) {
-  DCHECK_EQ(0, heap_base & (kCagedHeapReservationAlignment - 1));
-
-  auto it = global_cages().find(heap_base);
-  DCHECK_NE(global_cages().end(), it);
-
-  return it->second->caged_heap().LookupLargePageFromInnerPointer(address);
+BasePageHandle& CagedHeapBase::LookupLargePageFromInnerPointer(void* address) {
+  auto& page = CagedHeap::Instance().LookupLargePageFromInnerPointer(address);
+  return page;
 }
 
 }  // namespace internal
@@ -9,20 +9,25 @@
 #include <memory>
 #include <set>
 
+#include "include/cppgc/internal/caged-heap.h"
 #include "include/cppgc/platform.h"
 #include "src/base/bounded-page-allocator.h"
+#include "src/base/lazy-instance.h"
 #include "src/heap/cppgc/globals.h"
 #include "src/heap/cppgc/virtual-memory.h"
 
 namespace cppgc {
 namespace internal {
 
+namespace testing {
+class TestWithHeap;
+}
+
 struct CagedHeapLocalData;
-class HeapBase;
 class BasePage;
 class LargePage;
 
-class CagedHeap final {
+class V8_EXPORT_PRIVATE CagedHeap final {
  public:
   using AllocatorType = v8::base::BoundedPageAllocator;
 
@@ -44,8 +49,7 @@ class CagedHeap final {
     return OffsetFromAddress(address) < kCagedHeapNormalPageReservationSize;
   }
 
-  CagedHeap(HeapBase& heap, PageAllocator& platform_allocator);
-  ~CagedHeap();
+  static CagedHeap& Instance();
 
   CagedHeap(const CagedHeap&) = delete;
   CagedHeap& operator=(const CagedHeap&) = delete;
@@ -67,8 +71,8 @@ class CagedHeap final {
   void NotifyLargePageCreated(LargePage* page);
   void NotifyLargePageDestroyed(LargePage* page);
 
-  BasePage* LookupPageFromInnerPointer(void* ptr) const;
-  LargePage* LookupLargePageFromInnerPointer(void* ptr) const;
+  BasePage& LookupPageFromInnerPointer(void* ptr) const;
+  LargePage& LookupLargePageFromInnerPointer(void* ptr) const;
 
   CagedHeapLocalData& local_data() {
     return *static_cast<CagedHeapLocalData*>(reserved_area_.address());
@@ -78,6 +82,8 @@ class CagedHeap final {
   }
 
   bool IsOnHeap(const void* address) const {
+    DCHECK_EQ(reserved_area_.address(),
+              reinterpret_cast<void*>(CagedHeapBase::GetBase()));
     return reinterpret_cast<void*>(BaseFromAddress(address)) ==
            reserved_area_.address();
   }
@@ -85,6 +91,18 @@ class CagedHeap final {
   void* base() const { return reserved_area_.address(); }
 
  private:
+  friend class v8::base::LeakyObject<CagedHeap>;
+  friend class HeapBase;
+  friend class testing::TestWithHeap;
+
+  static void InitializeIfNeeded(PageAllocator&);
+
+  explicit CagedHeap(PageAllocator& platform_allocator);
+
+  void ResetForTesting();
+
+  static CagedHeap* instance_;
+
   const VirtualMemory reserved_area_;
   std::unique_ptr<AllocatorType> normal_page_bounded_allocator_;
   std::unique_ptr<AllocatorType> large_page_bounded_allocator_;
@@ -64,15 +64,7 @@ HeapBase::HeapBase(
       lsan_page_allocator_(std::make_unique<v8::base::LsanPageAllocator>(
           platform_->GetPageAllocator())),
 #endif  // LEAK_SANITIZER
-#if defined(CPPGC_CAGED_HEAP)
-      caged_heap_(*this, *page_allocator()),
-      page_backend_(std::make_unique<PageBackend>(
-          caged_heap_.normal_page_allocator(),
-          caged_heap_.large_page_allocator(), *oom_handler_.get())),
-#else  // !CPPGC_CAGED_HEAP
-      page_backend_(std::make_unique<PageBackend>(
-          *page_allocator(), *page_allocator(), *oom_handler_.get())),
-#endif  // !CPPGC_CAGED_HEAP
+      page_backend_(InitializePageBackend(*page_allocator(), *oom_handler_)),
       stats_collector_(std::make_unique<StatsCollector>(platform_.get())),
       stack_(std::make_unique<heap::base::Stack>(
          v8::base::Stack::GetStackStart())),
@@ -109,6 +101,20 @@ size_t HeapBase::ObjectPayloadSize() const {
   return ObjectSizeCounter().GetSize(const_cast<RawHeap&>(raw_heap()));
 }
 
+// static
+std::unique_ptr<PageBackend> HeapBase::InitializePageBackend(
+    PageAllocator& allocator, FatalOutOfMemoryHandler& oom_handler) {
+#if defined(CPPGC_CAGED_HEAP)
+  CagedHeap::InitializeIfNeeded(allocator);
+  auto& caged_heap = CagedHeap::Instance();
+  return std::make_unique<PageBackend>(caged_heap.normal_page_allocator(),
+                                       caged_heap.large_page_allocator(),
+                                       oom_handler);
+#else  // !CPPGC_CAGED_HEAP
+  return std::make_unique<PageBackend>(allocator, allocator, oom_handler);
+#endif  // !CPPGC_CAGED_HEAP
+}
+
 size_t HeapBase::ExecutePreFinalizers() {
 #ifdef CPPGC_ALLOW_ALLOCATIONS_IN_PREFINALIZERS
   // Allocations in pre finalizers should not trigger another GC.
@@ -157,7 +163,7 @@ void HeapBase::ResetRememberedSet() {
     return;
   }
 
-  caged_heap().local_data().age_table.Reset(page_allocator());
+  CagedHeap::Instance().local_data().age_table.Reset(page_allocator());
   remembered_set_.Reset();
   return;
 }
@@ -28,10 +28,6 @@
 #include "src/heap/cppgc/write-barrier.h"
 #include "v8config.h"  // NOLINT(build/include_directory)
 
-#if defined(CPPGC_CAGED_HEAP)
-#include "src/heap/cppgc/caged-heap.h"
-#endif
-
 #if defined(CPPGC_YOUNG_GENERATION)
 #include "src/heap/cppgc/remembered-set.h"
 #endif
@@ -112,11 +108,6 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
     return stats_collector_.get();
   }
 
-#if defined(CPPGC_CAGED_HEAP)
-  CagedHeap& caged_heap() { return caged_heap_; }
-  const CagedHeap& caged_heap() const { return caged_heap_; }
-#endif
-
   heap::base::Stack* stack() { return stack_.get(); }
 
   PreFinalizerHandler* prefinalizer_handler() {
@@ -236,6 +227,9 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
   using HeapHandle::is_incremental_marking_in_progress;
 
 protected:
+  static std::unique_ptr<PageBackend> InitializePageBackend(
+      PageAllocator& allocator, FatalOutOfMemoryHandler& oom_handler);
+
   // Used by the incremental scheduler to finalize a GC if supported.
   virtual void FinalizeIncrementalGarbageCollectionIfNeeded(
       cppgc::Heap::StackState) = 0;
@@ -262,9 +256,6 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
   std::unique_ptr<v8::base::LsanPageAllocator> lsan_page_allocator_;
 #endif  // LEAK_SANITIZER
 
-#if defined(CPPGC_CAGED_HEAP)
-  CagedHeap caged_heap_;
-#endif  // CPPGC_CAGED_HEAP
   std::unique_ptr<PageBackend> page_backend_;
 
   // HeapRegistry requires access to page_backend_.
@@ -47,8 +47,8 @@ BasePage* BasePage::FromInnerAddress(const HeapBase* heap, void* address) {
 const BasePage* BasePage::FromInnerAddress(const HeapBase* heap,
                                            const void* address) {
 #if defined(CPPGC_CAGED_HEAP)
-  return heap->caged_heap().LookupPageFromInnerPointer(
-      const_cast<void*>(address));
+  return static_cast<BasePage*>(
+      &CagedHeapBase::LookupPageFromInnerPointer(const_cast<void*>(address)));
 #else  // !defined(CPPGC_CAGED_HEAP)
   return reinterpret_cast<const BasePage*>(
       heap->page_backend()->Lookup(static_cast<ConstAddress>(address)));
@@ -243,7 +243,7 @@ LargePage* LargePage::Create(PageBackend& page_backend, LargePageSpace& space,
   LargePage* page = new (memory) LargePage(*heap, space, size);
   page->SynchronizedStore();
 #if defined(CPPGC_CAGED_HEAP)
-  heap->caged_heap().NotifyLargePageCreated(page);
+  CagedHeap::Instance().NotifyLargePageCreated(page);
 #endif  // defined(CPPGC_CAGED_HEAP)
   page->heap().stats_collector()->NotifyAllocatedMemory(allocation_size);
   return page;
@@ -267,7 +267,7 @@ void LargePage::Destroy(LargePage* page) {
   page->~LargePage();
   PageBackend* backend = heap.page_backend();
 #if defined(CPPGC_CAGED_HEAP)
-  heap.caged_heap().NotifyLargePageDestroyed(page);
+  CagedHeap::Instance().NotifyLargePageDestroyed(page);
 #endif  // defined(CPPGC_CAGED_HEAP)
   heap.stats_collector()->NotifyFreedMemory(AllocationSize(payload_size));
   backend->FreeLargePageMemory(reinterpret_cast<Address>(page));
@@ -4,7 +4,6 @@
 
 #include "src/heap/cppgc/marking-verifier.h"
 
-#include "include/cppgc/internal/caged-heap-local-data.h"
 #include "src/base/logging.h"
 #include "src/heap/cppgc/gc-info-table.h"
 #include "src/heap/cppgc/heap-object-header.h"
@@ -12,6 +11,10 @@
 #include "src/heap/cppgc/marking-visitor.h"
 #include "src/heap/cppgc/object-view.h"
 
+#if defined(CPPGC_CAGED_HEAP)
+#include "include/cppgc/internal/caged-heap-local-data.h"
+#endif  // defined(CPPGC_CAGED_HEAP)
+
 namespace cppgc {
 namespace internal {
 
@@ -106,8 +109,9 @@ bool MarkingVerifierBase::VisitHeapObjectHeader(HeapObjectHeader& header) {
 
 #if defined(CPPGC_YOUNG_GENERATION)
   if (collection_type_ == Heap::Config::CollectionType::kMinor) {
-    const auto age = heap_.caged_heap().local_data().age_table.GetAge(
-        heap_.caged_heap().OffsetFromAddress(header.ObjectStart()));
+    auto& caged_heap = CagedHeap::Instance();
+    const auto age = caged_heap.local_data().age_table.GetAge(
+        caged_heap.OffsetFromAddress(header.ObjectStart()));
     if (age == AgeTable::Age::kOld) {
       // Do not verify old objects.
       return true;
@@ -37,7 +37,7 @@ void MarkRangeAsYoung(BasePage* page, Address begin, Address end) {
   const bool new_page =
       (begin == page->PayloadStart()) && (end == page->PayloadEnd());
 
-  auto& age_table = page->heap().caged_heap().local_data().age_table;
+  auto& age_table = CagedHeap::Instance().local_data().age_table;
   age_table.SetAgeForRange(CagedHeap::OffsetFromAddress(begin),
                            CagedHeap::OffsetFromAddress(end),
                            AgeTable::Age::kYoung,
@@ -4,7 +4,6 @@
 
 #include "include/cppgc/internal/pointer-policies.h"
 
-#include "include/cppgc/internal/caged-heap-local-data.h"
 #include "include/cppgc/internal/persistent-node.h"
 #include "src/base/logging.h"
 #include "src/base/macros.h"
@@ -428,8 +428,7 @@ class SweepFinalizer final {
     SetMemoryInaccessible(header, size);
   };
 #if defined(CPPGC_CAGED_HEAP)
-  const uint64_t cage_base =
-      reinterpret_cast<uint64_t>(page->heap().caged_heap().base());
+  const uint64_t cage_base = CagedHeapBase::GetBase();
   HeapObjectHeader* next_unfinalized = nullptr;
 
   for (auto* unfinalized_header = page_state->unfinalized_objects_head;
@@ -5,7 +5,6 @@
 #include "src/heap/cppgc/visitor.h"
 
 #include "src/base/sanitizer/msan.h"
-#include "src/heap/cppgc/caged-heap.h"
 #include "src/heap/cppgc/gc-info-table.h"
 #include "src/heap/cppgc/heap-base.h"
 #include "src/heap/cppgc/heap-object-header.h"
@@ -13,6 +12,10 @@
 #include "src/heap/cppgc/object-view.h"
 #include "src/heap/cppgc/page-memory.h"
 
+#if defined(CPPGC_CAGED_HEAP)
+#include "src/heap/cppgc/caged-heap.h"
+#endif  // defined(CPPGC_CAGED_HEAP)
+
 namespace cppgc {
 
 #ifdef V8_ENABLE_CHECKS
@@ -68,7 +71,7 @@ void ConservativeTracingVisitor::TryTracePointerConservatively(
     Address pointer) {
 #if defined(CPPGC_CAGED_HEAP)
   // TODO(chromium:1056170): Add support for SIMD in stack scanning.
-  if (V8_LIKELY(!heap_.caged_heap().IsOnHeap(pointer))) return;
+  if (V8_LIKELY(!CagedHeapBase::IsWithinCage(pointer))) return;
 #endif  // defined(CPPGC_CAGED_HEAP)
 
   const BasePage* page = reinterpret_cast<const BasePage*>(
@@ -185,23 +185,6 @@ bool WriteBarrierTypeForNonCagedHeapPolicy::IsMarking(HeapHandle& heap_handle) {
   return marker && marker->IsMarking();
 }
 
-#if defined(CPPGC_CAGED_HEAP)
-
-// static
-bool WriteBarrierTypeForCagedHeapPolicy::IsMarking(
-    const HeapHandle& heap_handle, WriteBarrier::Params& params) {
-  const auto& heap_base = internal::HeapBase::From(heap_handle);
-  const bool is_marking = heap_base.marker() && heap_base.marker()->IsMarking();
-  // Also set caged heap start here to avoid another call immediately after
-  // checking IsMarking().
-#if defined(CPPGC_YOUNG_GENERATION)
-  params.start = reinterpret_cast<uintptr_t>(heap_base.caged_heap().base());
-#endif  // !CPPGC_YOUNG_GENERATION
-  return is_marking;
-}
-
-#endif  // CPPGC_CAGED_HEAP
-
 #if defined(CPPGC_YOUNG_GENERATION)
 
 // static
@@ -3,6 +3,7 @@
 // found in the LICENSE file.
 
 #include "include/cppgc/internal/caged-heap-local-data.h"
+#include "include/cppgc/internal/caged-heap.h"
 #include "test/unittests/heap/cppgc/tests.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
@@ -18,8 +19,7 @@ class AgeTableTest : public testing::TestWithHeap {
 
   AgeTableTest()
       : disallow_gc_(GetHeapHandle()),
-        age_table_(Heap::From(GetHeap())->caged_heap().local_data().age_table) {
-  }
+        age_table_(CagedHeap::Instance().local_data().age_table) {}
 
   ~AgeTableTest() override {
     age_table_.Reset(GetPlatform().GetPageAllocator());
@@ -197,9 +197,8 @@ TEST_F(AgeTableTest, SetAgeForMultipleCardsConsiderAdjacentCards) {
 }
 
 TEST_F(AgeTableTest, MarkAllCardsAsYoung) {
-  void* heap_start = Heap::From(GetHeap())->caged_heap().base();
-  void* heap_end =
-      static_cast<uint8_t*>(heap_start) + kCagedHeapReservationSize - 1;
+  uint8_t* heap_start = reinterpret_cast<uint8_t*>(CagedHeapBase::GetBase());
+  void* heap_end = heap_start + kCagedHeapReservationSize - 1;
   AssertAgeForAddressRange(heap_start, heap_end, Age::kOld);
   SetAgeForAddressRange(heap_start, heap_end, Age::kYoung,
                         AdjacentCardsPolicy::kIgnore);
@@ -248,7 +248,6 @@ TYPED_TEST(MinorGCTestForType, OldObjectIsNotVisited) {
 
 template <typename Type1, typename Type2>
 void InterGenerationalPointerTest(MinorGCTest* test, cppgc::Heap* heap) {
-  auto* internal_heap = Heap::From(heap);
   Persistent<Type1> old =
       MakeGarbageCollected<Type1>(heap->GetAllocationHandle());
   test->CollectMinor();
@@ -265,12 +264,10 @@ void InterGenerationalPointerTest(MinorGCTest* test, cppgc::Heap* heap) {
       ptr->next = young;
       young = ptr;
       EXPECT_TRUE(HeapObjectHeader::FromObject(young).IsYoung());
-      const uintptr_t offset =
-          internal_heap->caged_heap().OffsetFromAddress(young);
+      const uintptr_t offset = CagedHeap::OffsetFromAddress(young);
       // Age may be young or unknown.
-      EXPECT_NE(
-          AgeTable::Age::kOld,
-          Heap::From(heap)->caged_heap().local_data().age_table.GetAge(offset));
+      EXPECT_NE(AgeTable::Age::kOld,
+                CagedHeap::Instance().local_data().age_table.GetAge(offset));
     }
   }
 
@@ -49,6 +49,12 @@ TestWithHeap::TestWithHeap()
     : heap_(Heap::Create(platform_)),
       allocation_handle_(heap_->GetAllocationHandle()) {}
 
+TestWithHeap::~TestWithHeap() {
+#if defined(CPPGC_CAGED_HEAP)
+  CagedHeap::Instance().ResetForTesting();
+#endif  // defined(CPPGC_CAGED_HEAP)
+}
+
 void TestWithHeap::ResetLinearAllocationBuffers() {
   Heap::From(GetHeap())->object_allocator().ResetLinearAllocationBuffers();
 }
@@ -69,6 +69,7 @@ class TestWithPlatform : public ::testing::Test {
 class TestWithHeap : public TestWithPlatform {
  public:
   TestWithHeap();
+  ~TestWithHeap() override;
 
   void PreciseGC() {
     heap_->ForceGarbageCollectionSlow(