cppgc: shared-cage: Remove heap-specific metadata from cage-header

This CL is a prerequisite for the shared cage. Instead of storing
state variables (is_incremental_marking_in_progress,
is_young_generation_enabled) in the cage metadata, the CL moves them to
HeapHandle. The HeapHandle pointer is now retrieved from page headers.

To keep the write-barrier code well optimized, the HeapHandle
definition is moved into the exported headers
(include/cppgc/heap-handle.h). The part of BasePage that holds the
HeapBase (i.e. HeapHandle) pointer is likewise extracted into a header,
as the new BasePageHandle in include/cppgc/internal/base-page-handle.h.

Bug: v8:12231
Change-Id: I44bf65d99a621d9548e4250386cf87476ca186ac
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3689730
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81005}
Author: Anton Bikineev <bikineev@chromium.org>
Date: 2022-06-08 17:07:01 +02:00, committed by V8 LUCI CQ
Commit: 62159ea316 (parent: 74dbb80465)
18 changed files with 249 additions and 89 deletions
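
In essence, the write barrier used to read per-heap flags from the cage
metadata at the start of the reservation; it now masks the value pointer
down to its page header and follows the page's HeapHandle. A minimal
before/after sketch (simplified from the diffs below; access control and
include details elided):

  // Before: flags lived in CagedHeapLocalData at the cage base, so one
  // cage implied exactly one heap.
  bool was_marking =
      reinterpret_cast<CagedHeapLocalData*>(
          reinterpret_cast<uintptr_t>(value) &
          ~(api_constants::kCagedHeapReservationAlignment - 1))
          ->is_incremental_marking_in_progress;

  // After: mask down to the page header and read the flag from the owning
  // heap, which stays correct once several heaps share one cage.
  bool is_marking = BasePageHandle::FromPayload(const_cast<void*>(value))
                        ->heap_handle()
                        .is_incremental_marking_in_progress();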

BUILD.bazel

@@ -458,11 +458,14 @@ filegroup(
         "include/cppgc/garbage-collected.h",
         "include/cppgc/heap.h",
         "include/cppgc/heap-consistency.h",
+        "include/cppgc/heap-handle.h",
         "include/cppgc/heap-state.h",
         "include/cppgc/heap-statistics.h",
         "include/cppgc/internal/api-constants.h",
         "include/cppgc/internal/atomic-entry-flag.h",
+        "include/cppgc/internal/base-page-handle.h",
         "include/cppgc/internal/caged-heap-local-data.h",
+        "include/cppgc/internal/caged-heap.h",
         "include/cppgc/internal/compiler-specific.h",
         "include/cppgc/internal/finalizer-trait.h",
         "include/cppgc/internal/gc-info.h",

BUILD.gn

@@ -5667,11 +5667,13 @@ v8_header_set("cppgc_headers") {
     "include/cppgc/explicit-management.h",
     "include/cppgc/garbage-collected.h",
     "include/cppgc/heap-consistency.h",
+    "include/cppgc/heap-handle.h",
     "include/cppgc/heap-state.h",
     "include/cppgc/heap-statistics.h",
     "include/cppgc/heap.h",
     "include/cppgc/internal/api-constants.h",
     "include/cppgc/internal/atomic-entry-flag.h",
+    "include/cppgc/internal/base-page-handle.h",
     "include/cppgc/internal/compiler-specific.h",
     "include/cppgc/internal/finalizer-trait.h",
     "include/cppgc/internal/gc-info.h",
@@ -5701,6 +5703,7 @@ v8_header_set("cppgc_headers") {
 
   if (cppgc_enable_caged_heap) {
     sources += [ "include/cppgc/internal/caged-heap-local-data.h" ]
+    sources += [ "include/cppgc/internal/caged-heap.h" ]
   }
 
   deps = [

include/cppgc/heap-handle.h

@@ -0,0 +1,41 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef INCLUDE_CPPGC_HEAP_HANDLE_H_
+#define INCLUDE_CPPGC_HEAP_HANDLE_H_
+
+#include "v8config.h"  // NOLINT(build/include_directory)
+
+namespace cppgc {
+
+namespace internal {
+class HeapBase;
+class WriteBarrierTypeForCagedHeapPolicy;
+}  // namespace internal
+
+/**
+ * Opaque handle used for additional heap APIs.
+ */
+class HeapHandle {
+ private:
+  HeapHandle() = default;
+
+  V8_INLINE bool is_incremental_marking_in_progress() const {
+    return is_incremental_marking_in_progress_;
+  }
+
+  V8_INLINE bool is_young_generation_enabled() const {
+    return is_young_generation_enabled_;
+  }
+
+  bool is_incremental_marking_in_progress_ = false;
+  bool is_young_generation_enabled_ = false;
+
+  friend class internal::HeapBase;
+  friend class internal::WriteBarrierTypeForCagedHeapPolicy;
+};
+
+}  // namespace cppgc
+
+#endif  // INCLUDE_CPPGC_HEAP_HANDLE_H_

include/cppgc/heap.h

@@ -21,6 +21,7 @@
 namespace cppgc {
 
 class AllocationHandle;
+class HeapHandle;
 
 /**
  * Implementation details of cppgc. Those details are considered internal and
@@ -31,11 +32,6 @@ namespace internal {
 class Heap;
 }  // namespace internal
 
-/**
- * Used for additional heap APIs.
- */
-class HeapHandle;
-
 class V8_EXPORT Heap {
  public:
   /**

include/cppgc/internal/api-constants.h

@@ -32,6 +32,12 @@ static constexpr uint16_t kFullyConstructedBitMask = uint16_t{1};
 
 static constexpr size_t kPageSize = size_t{1} << 17;
 
+#if defined(V8_TARGET_ARCH_ARM64) && defined(V8_OS_MACOS)
+constexpr size_t kGuardPageSize = 0;
+#else
+constexpr size_t kGuardPageSize = 4096;
+#endif
+
 static constexpr size_t kLargeObjectSizeThreshold = kPageSize / 2;
 
 #if defined(CPPGC_CAGED_HEAP)

include/cppgc/internal/base-page-handle.h

@@ -0,0 +1,45 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef INCLUDE_CPPGC_INTERNAL_BASE_PAGE_HANDLE_H_
+#define INCLUDE_CPPGC_INTERNAL_BASE_PAGE_HANDLE_H_
+
+#include "cppgc/heap-handle.h"
+#include "cppgc/internal/api-constants.h"
+#include "cppgc/internal/logging.h"
+#include "v8config.h"  // NOLINT(build/include_directory)
+
+namespace cppgc {
+namespace internal {
+
+// The class is needed in the header to allow for fast access to HeapHandle in
+// the write barrier.
+class BasePageHandle {
+ public:
+  static V8_INLINE BasePageHandle* FromPayload(void* payload) {
+    return reinterpret_cast<BasePageHandle*>(
+        (reinterpret_cast<uintptr_t>(payload) &
+         ~(api_constants::kPageSize - 1)) +
+        api_constants::kGuardPageSize);
+  }
+
+  static V8_INLINE const BasePageHandle* FromPayload(const void* payload) {
+    return FromPayload(const_cast<void*>(payload));
+  }
+
+  HeapHandle& heap_handle() { return heap_handle_; }
+  const HeapHandle& heap_handle() const { return heap_handle_; }
+
+ protected:
+  explicit BasePageHandle(HeapHandle& heap_handle) : heap_handle_(heap_handle) {
+    CPPGC_DCHECK(reinterpret_cast<uintptr_t>(this) % api_constants::kPageSize ==
+                 api_constants::kGuardPageSize);
+  }
+
+  HeapHandle& heap_handle_;
+};
+
+}  // namespace internal
+}  // namespace cppgc
+
+#endif  // INCLUDE_CPPGC_INTERNAL_BASE_PAGE_HANDLE_H_
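
For illustration, with the default constants (kPageSize = 0x20000, i.e.
128 KiB, and kGuardPageSize = 0x1000) and a hypothetical payload address,
FromPayload reduces to two address operations:

  void* payload = reinterpret_cast<void*>(0x3FFE58F0);  // hypothetical
  // 0x3FFE58F0 & ~0x1FFFF == 0x3FFE0000  -> start of the 128 KiB page
  // 0x3FFE0000 + 0x1000   == 0x3FFE1000  -> the BasePageHandle object
  BasePageHandle* page = BasePageHandle::FromPayload(payload);

The CPPGC_DCHECK in the constructor asserts exactly this placement: every
page header sits one guard page past a kPageSize-aligned boundary.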

include/cppgc/internal/caged-heap-local-data.h

@@ -82,12 +82,11 @@ static_assert(sizeof(AgeTable) == 1 * api_constants::kMB,
 
 #endif  // CPPGC_YOUNG_GENERATION
 
+// TODO(v8:12231): Remove this class entirely so that it doesn't occupy space
+// when CPPGC_YOUNG_GENERATION is off.
 struct CagedHeapLocalData final {
-  CagedHeapLocalData(HeapBase&, PageAllocator&);
-
-  bool is_incremental_marking_in_progress = false;
-  bool is_young_generation_enabled = false;
-  HeapBase& heap_base;
+  explicit CagedHeapLocalData(PageAllocator&);
 
 #if defined(CPPGC_YOUNG_GENERATION)
   AgeTable age_table;
 #endif

include/cppgc/internal/caged-heap.h

@@ -0,0 +1,41 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_H_
+#define INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_H_
+
+#include <cstddef>
+
+#include "cppgc/internal/api-constants.h"
+#include "cppgc/internal/base-page-handle.h"
+#include "v8config.h"  // NOLINT(build/include_directory)
+
+namespace cppgc {
+namespace internal {
+
+class V8_EXPORT CagedHeapBase {
+ public:
+  V8_INLINE static bool IsWithinNormalPageReservation(uintptr_t heap_base,
+                                                      void* address) {
+    return (reinterpret_cast<uintptr_t>(address) - heap_base) <
+           api_constants::kCagedHeapNormalPageReservationSize;
+  }
+
+  V8_INLINE static BasePageHandle* LookupPageFromInnerPointer(
+      uintptr_t heap_base, void* ptr) {
+    if (V8_LIKELY(IsWithinNormalPageReservation(heap_base, ptr)))
+      return BasePageHandle::FromPayload(ptr);
+    else
+      return LookupLargePageFromInnerPointer(heap_base, ptr);
+  }
+
+ private:
+  static BasePageHandle* LookupLargePageFromInnerPointer(uintptr_t heap_base,
+                                                         void* address);
+};
+
+}  // namespace internal
+}  // namespace cppgc
+
+#endif  // INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_H_
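
Here heap_base is the cage base address, which a caller can recover from
any in-cage pointer by masking with the reservation alignment. A hedged
sketch of how the two helpers combine (the wrapper function is
illustrative only, not part of this CL):

  // Illustrative only: resolve an in-cage pointer to its owning HeapHandle.
  HeapHandle& HeapHandleFromInnerPointer(void* ptr) {
    const uintptr_t heap_base =
        reinterpret_cast<uintptr_t>(ptr) &
        ~(api_constants::kCagedHeapReservationAlignment - 1);
    return CagedHeapBase::LookupPageFromInnerPointer(heap_base, ptr)
        ->heap_handle();
  }

Pointers into the normal-page half of the cage resolve with pure address
arithmetic; only pointers into the large-page half pay for the out-of-line
map lookup.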

include/cppgc/internal/write-barrier.h

@@ -8,6 +8,7 @@
 #include <cstddef>
 #include <cstdint>
 
+#include "cppgc/heap-handle.h"
 #include "cppgc/heap-state.h"
 #include "cppgc/internal/api-constants.h"
 #include "cppgc/internal/atomic-entry-flag.h"
@@ -18,6 +19,7 @@
 
 #if defined(CPPGC_CAGED_HEAP)
 #include "cppgc/internal/caged-heap-local-data.h"
+#include "cppgc/internal/caged-heap.h"
 #endif
 
 namespace cppgc {
@@ -123,9 +125,11 @@ class V8_EXPORT WriteBarrier final {
   static CagedHeapLocalData& GetLocalData(HeapHandle&);
   static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
                                       const AgeTable& age_table,
-                                      const void* slot, uintptr_t value_offset);
+                                      const void* slot, uintptr_t value_offset,
+                                      HeapHandle* heap_handle);
   static void GenerationalBarrierForSourceObjectSlow(
-      const CagedHeapLocalData& local_data, const void* object);
+      const CagedHeapLocalData& local_data, const void* object,
+      HeapHandle* heap_handle);
 #endif  // CPPGC_YOUNG_GENERATION
 
   static AtomicEntryFlag write_barrier_enabled_;
@@ -168,9 +172,17 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
     if (!TryGetCagedHeap(value, value, params)) {
       return WriteBarrier::Type::kNone;
     }
-    if (V8_UNLIKELY(params.caged_heap().is_incremental_marking_in_progress)) {
+
+    // We know that |value| points either within the normal page or to the
+    // beginning of large-page, so extract the page header by bitmasking.
+    BasePageHandle* page =
+        BasePageHandle::FromPayload(const_cast<void*>(value));
+
+    HeapHandle& heap_handle = page->heap_handle();
+    if (V8_UNLIKELY(heap_handle.is_incremental_marking_in_progress())) {
       return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
     }
+
     return SetAndReturnType<WriteBarrier::Type::kNone>(params);
   }
@@ -220,12 +232,17 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
     const bool within_cage = TryGetCagedHeap(slot, value, params);
     if (!within_cage) return WriteBarrier::Type::kNone;
 
-    const auto& caged_heap = params.caged_heap();
-    if (V8_LIKELY(!caged_heap.is_incremental_marking_in_progress)) {
+    // We know that |value| points either within the normal page or to the
+    // beginning of large-page, so extract the page header by bitmasking.
+    BasePageHandle* page =
+        BasePageHandle::FromPayload(const_cast<void*>(value));
+
+    HeapHandle& heap_handle = page->heap_handle();
+    if (V8_LIKELY(!heap_handle.is_incremental_marking_in_progress())) {
 #if defined(CPPGC_YOUNG_GENERATION)
-      if (!caged_heap.is_young_generation_enabled)
+      if (!heap_handle.is_young_generation_enabled())
         return WriteBarrier::Type::kNone;
-      params.heap = reinterpret_cast<HeapHandle*>(params.start);
+      params.heap = &heap_handle;
       params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
       params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
       return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
@@ -235,7 +252,7 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
     }
 
     // Use marking barrier.
-    params.heap = reinterpret_cast<HeapHandle*>(params.start);
+    params.heap = &heap_handle;
     return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
   }
 };
@@ -254,8 +271,9 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
     HeapHandle& handle = callback();
     if (V8_LIKELY(!IsMarking(handle, params))) {
       // params.start is populated by IsMarking().
-      if (!params.caged_heap().is_young_generation_enabled)
+      if (!handle.is_young_generation_enabled()) {
         return WriteBarrier::Type::kNone;
+      }
       params.heap = &handle;
       params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
       // params.value_offset stays 0.
@@ -417,7 +435,8 @@ void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
   if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
     return;
 
-  GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset);
+  GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset,
+                          params.heap);
 }
 
 // static
@@ -433,7 +452,8 @@ void WriteBarrier::GenerationalBarrierForSourceObject(
   if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
     return;
 
-  GenerationalBarrierForSourceObjectSlow(local_data, inner_pointer);
+  GenerationalBarrierForSourceObjectSlow(local_data, inner_pointer,
+                                         params.heap);
 }
 
 #endif  // !CPPGC_YOUNG_GENERATION
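
For context, this dispatch sits behind the public subtle::HeapConsistency
API declared in include/cppgc/heap-consistency.h. A rough sketch of an
embedder-side raw store with an explicit barrier (the WriteField helper is
hypothetical; signatures as of this era of the API):

  #include "cppgc/heap-consistency.h"

  using HC = cppgc::subtle::HeapConsistency;

  void WriteField(void** slot, void* value) {
    *slot = value;  // perform the store first, as Member<> does
    HC::WriteBarrierParams params;
    switch (HC::GetWriteBarrierType(slot, value, params)) {
      case HC::WriteBarrierType::kMarking:
        HC::DijkstraWriteBarrier(params, value);
        break;
      case HC::WriteBarrierType::kGenerational:
        HC::GenerationalBarrier(params, slot);
        break;
      case HC::WriteBarrierType::kNone:
        break;
    }
  }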

src/heap/cppgc/caged-heap-local-data.cc

@@ -13,9 +13,7 @@
 namespace cppgc {
 namespace internal {
 
-CagedHeapLocalData::CagedHeapLocalData(HeapBase& heap_base,
-                                       PageAllocator& allocator)
-    : heap_base(heap_base) {
+CagedHeapLocalData::CagedHeapLocalData(PageAllocator& allocator) {
 #if defined(CPPGC_YOUNG_GENERATION)
   age_table.Reset(&allocator);
 #endif  // defined(CPPGC_YOUNG_GENERATION)

src/heap/cppgc/caged-heap.cc

@@ -2,6 +2,10 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
+#include "include/cppgc/internal/caged-heap.h"
+
+#include <map>
+
 #include "v8config.h"  // NOLINT(build/include_directory)
 
 #if !defined(CPPGC_CAGED_HEAP)
@@ -12,10 +16,12 @@
 #include "include/cppgc/member.h"
 #include "include/cppgc/platform.h"
 #include "src/base/bounded-page-allocator.h"
+#include "src/base/lazy-instance.h"
 #include "src/base/logging.h"
 #include "src/base/platform/platform.h"
 #include "src/heap/cppgc/caged-heap.h"
 #include "src/heap/cppgc/globals.h"
+#include "src/heap/cppgc/heap-base.h"
 #include "src/heap/cppgc/heap-page.h"
 #include "src/heap/cppgc/member.h"
@@ -31,6 +37,15 @@ static_assert(api_constants::kCagedHeapNormalPageReservationSize ==
 
 namespace {
 
+// TODO(v8:12231): Remove once shared cage is there. Currently it's only used
+// for large pages lookup in the write barrier.
+using Cages = std::map<uintptr_t /*cage_base*/, HeapBase*>;
+
+static Cages& global_cages() {
+  static v8::base::LeakyObject<Cages> instance;
+  return *instance.get();
+}
+
 VirtualMemory ReserveCagedHeap(PageAllocator& platform_allocator) {
   DCHECK_EQ(0u,
             kCagedHeapReservationSize % platform_allocator.AllocatePageSize());
@@ -70,8 +85,7 @@ CagedHeap::CagedHeap(HeapBase& heap_base, PageAllocator& platform_allocator)
   // Failing to commit the reservation means that we are out of memory.
   CHECK(is_not_oom);
 
-  new (reserved_area_.address())
-      CagedHeapLocalData(heap_base, platform_allocator);
+  new (reserved_area_.address()) CagedHeapLocalData(platform_allocator);
 
   const CagedAddress caged_heap_start =
       RoundUp(reinterpret_cast<CagedAddress>(reserved_area_.address()) +
@@ -97,6 +111,10 @@ CagedHeap::CagedHeap(HeapBase& heap_base, PageAllocator& platform_allocator)
       kCagedHeapNormalPageReservationSize, kPageSize,
       v8::base::PageInitializationMode::kAllocatedPagesMustBeZeroInitialized,
      v8::base::PageFreeingMode::kMakeInaccessible);
+
+  auto is_inserted = global_cages().emplace(
+      reinterpret_cast<uintptr_t>(reserved_area_.address()), &heap_base);
+  CHECK(is_inserted.second);
 }
 
 CagedHeap::~CagedHeap() {
@@ -107,12 +125,6 @@ CagedHeap::~CagedHeap() {
 #endif  // defined(CPPGC_POINTER_COMPRESSION)
 }
 
-#if defined(CPPGC_YOUNG_GENERATION)
-void CagedHeap::EnableGenerationalGC() {
-  local_data().is_young_generation_enabled = true;
-}
-#endif  // defined(CPPGC_YOUNG_GENERATION)
-
 void CagedHeap::NotifyLargePageCreated(LargePage* page) {
   DCHECK(page);
   auto result = large_pages_.insert(page);
@@ -145,5 +157,16 @@ LargePage* CagedHeap::LookupLargePageFromInnerPointer(void* ptr) const {
   return page;
 }
 
+// static
+BasePageHandle* CagedHeapBase::LookupLargePageFromInnerPointer(
+    uintptr_t heap_base, void* address) {
+  DCHECK_EQ(0, heap_base & (kCagedHeapReservationAlignment - 1));
+
+  auto it = global_cages().find(heap_base);
+  DCHECK_NE(global_cages().end(), it);
+
+  return it->second->caged_heap().LookupLargePageFromInnerPointer(address);
+}
+
 }  // namespace internal
 }  // namespace cppgc

src/heap/cppgc/caged-heap.h

@@ -50,10 +50,6 @@ class CagedHeap final {
   CagedHeap(const CagedHeap&) = delete;
   CagedHeap& operator=(const CagedHeap&) = delete;
 
-#if defined(CPPGC_YOUNG_GENERATION)
-  void EnableGenerationalGC();
-#endif  // defined(CPPGC_YOUNG_GENERATION)
-
   AllocatorType& normal_page_allocator() {
     return *normal_page_bounded_allocator_;
   }
@@ -72,6 +68,7 @@ class CagedHeap final {
   void NotifyLargePageDestroyed(LargePage* page);
 
   BasePage* LookupPageFromInnerPointer(void* ptr) const;
+  LargePage* LookupLargePageFromInnerPointer(void* ptr) const;
 
   CagedHeapLocalData& local_data() {
     return *static_cast<CagedHeapLocalData*>(reserved_area_.address());
@@ -88,8 +85,6 @@ class CagedHeap final {
   void* base() const { return reserved_area_.address(); }
 
  private:
-  LargePage* LookupLargePageFromInnerPointer(void* ptr) const;
-
   const VirtualMemory reserved_area_;
   std::unique_ptr<AllocatorType> normal_page_bounded_allocator_;
   std::unique_ptr<AllocatorType> large_page_bounded_allocator_;

src/heap/cppgc/heap-base.cc

@@ -90,8 +90,7 @@ HeapBase::HeapBase(
 #endif  // defined(CPPGC_YOUNG_GENERATION)
       stack_support_(stack_support),
       marking_support_(marking_support),
-      sweeping_support_(sweeping_support),
-      generation_support_(GenerationSupport::kSingleGeneration) {
+      sweeping_support_(sweeping_support) {
   stats_collector_->RegisterObserver(
       &allocation_observer_for_PROCESS_HEAP_STATISTICS_);
 }
@@ -128,8 +127,7 @@ void HeapBase::EnableGenerationalGC() {
   // Notify the global flag that the write barrier must always be enabled.
   YoungGenerationEnabler::Enable();
   // Enable young generation for the current heap.
-  caged_heap().EnableGenerationalGC();
-  generation_support_ = GenerationSupport::kYoungAndOldGenerations;
+  HeapHandle::is_young_generation_enabled_ = true;
 }
 
 void HeapBase::ResetRememberedSet() {
@@ -174,11 +172,8 @@ void HeapBase::Terminate() {
 
 #if defined(CPPGC_YOUNG_GENERATION)
   if (generational_gc_supported()) {
-    DCHECK(caged_heap().local_data().is_young_generation_enabled);
-    DCHECK_EQ(GenerationSupport::kYoungAndOldGenerations, generation_support_);
-    caged_heap().local_data().is_young_generation_enabled = false;
-    generation_support_ = GenerationSupport::kSingleGeneration;
+    DCHECK(is_young_generation_enabled());
+    HeapHandle::is_young_generation_enabled_ = false;
     YoungGenerationEnabler::Disable();
   }
 #endif  // defined(CPPGC_YOUNG_GENERATION)
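
The direct writes to HeapHandle::is_young_generation_enabled_ above
compile because HeapBase derives from cppgc::HeapHandle and is declared a
friend of it. A minimal standalone sketch of the pattern (names
simplified):

  class Handle {
   private:
    bool flag_ = false;
    friend class Base;
  };

  class Base : public Handle {
   public:
    void Enable() { Handle::flag_ = true; }  // OK: Base is a friend of Handle
  };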

src/heap/cppgc/heap-base.h

@@ -8,6 +8,7 @@
 #include <memory>
 #include <set>
 
+#include "include/cppgc/heap-handle.h"
 #include "include/cppgc/heap-statistics.h"
 #include "include/cppgc/heap.h"
 #include "include/cppgc/internal/persistent-node.h"
@@ -60,12 +61,6 @@ class OverrideEmbedderStackStateScope;
 
 class Platform;
 
-class V8_EXPORT HeapHandle {
- private:
-  HeapHandle() = default;
-  friend class internal::HeapBase;
-};
-
 namespace internal {
 
 class FatalOutOfMemoryHandler;
@@ -218,8 +213,7 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
   SweepingType sweeping_support() const { return sweeping_support_; }
 
   bool generational_gc_supported() const {
-    const bool supported =
-        (generation_support_ == GenerationSupport::kYoungAndOldGenerations);
+    const bool supported = is_young_generation_enabled();
 #if defined(CPPGC_YOUNG_GENERATION)
     DCHECK_IMPLIES(supported, YoungGenerationEnabler::IsEnabled());
 #endif  // defined(CPPGC_YOUNG_GENERATION)
@@ -235,12 +229,13 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
     name_for_unnamed_object_ = value;
   }
 
- protected:
-  enum class GenerationSupport : uint8_t {
-    kSingleGeneration,
-    kYoungAndOldGenerations,
-  };
+  void set_incremental_marking_in_progress(bool value) {
+    is_incremental_marking_in_progress_ = value;
+  }
+
+  using HeapHandle::is_incremental_marking_in_progress;
 
+ protected:
   // Used by the incremental scheduler to finalize a GC if supported.
   virtual void FinalizeIncrementalGarbageCollectionIfNeeded(
       cppgc::Heap::StackState) = 0;
@@ -313,7 +308,6 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
 
   MarkingType marking_support_;
   SweepingType sweeping_support_;
-  GenerationSupport generation_support_;
 
   HeapObjectNameForUnnamedObject name_for_unnamed_object_ =
       HeapObjectNameForUnnamedObject::kUseHiddenName;

src/heap/cppgc/heap-page.cc

@@ -22,6 +22,8 @@
 namespace cppgc {
 namespace internal {
 
+static_assert(api_constants::kGuardPageSize == kGuardPageSize);
+
 namespace {
 
 Address AlignAddress(Address address, size_t alignment) {
@@ -31,6 +33,10 @@ Address AlignAddress(Address address, size_t alignment) {
 
 }  // namespace
 
+HeapBase& BasePage::heap() const {
+  return static_cast<HeapBase&>(heap_handle_);
+}
+
 // static
 BasePage* BasePage::FromInnerAddress(const HeapBase* heap, void* address) {
   return const_cast<BasePage*>(
@@ -119,10 +125,10 @@ const HeapObjectHeader* BasePage::TryObjectHeaderFromInnerAddress(
 }
 
 BasePage::BasePage(HeapBase& heap, BaseSpace& space, PageType type)
-    : heap_(heap), space_(space), type_(type) {
+    : BasePageHandle(heap), space_(space), type_(type) {
   DCHECK_EQ(0u, (reinterpret_cast<uintptr_t>(this) - kGuardPageSize) &
                     kPageOffsetMask);
-  DCHECK_EQ(&heap_.raw_heap(), space_.raw_heap());
+  DCHECK_EQ(&heap.raw_heap(), space_.raw_heap());
 }
 
 // static

src/heap/cppgc/heap-page.h

@@ -5,6 +5,7 @@
 #ifndef V8_HEAP_CPPGC_HEAP_PAGE_H_
 #define V8_HEAP_CPPGC_HEAP_PAGE_H_
 
+#include "include/cppgc/internal/base-page-handle.h"
 #include "src/base/iterator.h"
 #include "src/base/macros.h"
 #include "src/heap/cppgc/globals.h"
@@ -20,7 +21,7 @@ class LargePageSpace;
 class HeapBase;
 class PageBackend;
 
-class V8_EXPORT_PRIVATE BasePage {
+class V8_EXPORT_PRIVATE BasePage : public BasePageHandle {
  public:
   static inline BasePage* FromPayload(void*);
   static inline const BasePage* FromPayload(const void*);
@@ -33,7 +34,7 @@ class V8_EXPORT_PRIVATE BasePage {
   BasePage(const BasePage&) = delete;
   BasePage& operator=(const BasePage&) = delete;
 
-  HeapBase& heap() const { return heap_; }
+  HeapBase& heap() const;
 
   BaseSpace& space() const { return space_; }
@@ -91,7 +92,6 @@ class V8_EXPORT_PRIVATE BasePage {
   BasePage(HeapBase&, BaseSpace&, PageType);
 
  private:
-  HeapBase& heap_;
   BaseSpace& space_;
   PageType type_;
   size_t discarded_memory_ = 0;
@@ -260,16 +260,12 @@ class V8_EXPORT_PRIVATE LargePage final : public BasePage {
 
 // static
 BasePage* BasePage::FromPayload(void* payload) {
-  return reinterpret_cast<BasePage*>(
-      (reinterpret_cast<uintptr_t>(payload) & kPageBaseMask) + kGuardPageSize);
+  return static_cast<BasePage*>(BasePageHandle::FromPayload(payload));
 }
 
 // static
 const BasePage* BasePage::FromPayload(const void* payload) {
-  return reinterpret_cast<const BasePage*>(
-      (reinterpret_cast<uintptr_t>(const_cast<void*>(payload)) &
-       kPageBaseMask) +
-      kGuardPageSize);
+  return static_cast<const BasePage*>(BasePageHandle::FromPayload(payload));
 }
 
 template <AccessMode mode = AccessMode::kNonAtomic>

src/heap/cppgc/marker.cc

@@ -38,9 +38,7 @@ bool EnterIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
       config.marking_type ==
           Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
     WriteBarrier::FlagUpdater::Enter();
-#if defined(CPPGC_CAGED_HEAP)
-    heap.caged_heap().local_data().is_incremental_marking_in_progress = true;
-#endif  // defined(CPPGC_CAGED_HEAP)
+    heap.set_incremental_marking_in_progress(true);
     return true;
   }
   return false;
@@ -52,9 +50,7 @@ bool ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
       config.marking_type ==
           Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
     WriteBarrier::FlagUpdater::Exit();
-#if defined(CPPGC_CAGED_HEAP)
-    heap.caged_heap().local_data().is_incremental_marking_in_progress = false;
-#endif  // defined(CPPGC_CAGED_HEAP)
+    heap.set_incremental_marking_in_progress(false);
     return true;
   }
   return false;

src/heap/cppgc/write-barrier.cc

@@ -28,12 +28,7 @@ namespace {
 template <MarkerBase::WriteBarrierType type>
 void ProcessMarkValue(HeapObjectHeader& header, MarkerBase* marker,
                       const void* value) {
-#if defined(CPPGC_CAGED_HEAP)
-  DCHECK(reinterpret_cast<CagedHeapLocalData*>(
-             reinterpret_cast<uintptr_t>(value) &
-             ~(kCagedHeapReservationAlignment - 1))
-             ->is_incremental_marking_in_progress);
-#endif
+  DCHECK(marker->heap().is_incremental_marking_in_progress());
   DCHECK(header.IsMarked<AccessMode::kAtomic>());
   DCHECK(marker);
@@ -128,31 +123,39 @@ void WriteBarrier::SteeleMarkingBarrierSlow(const void* value) {
 void WriteBarrier::GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
                                            const AgeTable& age_table,
                                            const void* slot,
-                                           uintptr_t value_offset) {
+                                           uintptr_t value_offset,
+                                           HeapHandle* heap_handle) {
   DCHECK(slot);
+  DCHECK(heap_handle);
+  DCHECK_GT(kCagedHeapReservationSize, value_offset);
+
   // A write during atomic pause (e.g. pre-finalizer) may trigger the slow path
   // of the barrier. This is a result of the order of bailouts where not marking
   // results in applying the generational barrier.
-  if (local_data.heap_base.in_atomic_pause()) return;
+  auto& heap = HeapBase::From(*heap_handle);
+  if (heap.in_atomic_pause()) return;
 
   if (value_offset > 0 && age_table.GetAge(value_offset) == AgeTable::Age::kOld)
     return;
+
   // Record slot.
-  local_data.heap_base.remembered_set().AddSlot((const_cast<void*>(slot)));
+  heap.remembered_set().AddSlot((const_cast<void*>(slot)));
 }
 
 // static
 void WriteBarrier::GenerationalBarrierForSourceObjectSlow(
-    const CagedHeapLocalData& local_data, const void* inner_pointer) {
+    const CagedHeapLocalData& local_data, const void* inner_pointer,
+    HeapHandle* heap_handle) {
   DCHECK(inner_pointer);
+  DCHECK(heap_handle);
+
+  auto& heap = HeapBase::From(*heap_handle);
 
   auto& object_header =
-      BasePage::FromInnerAddress(&local_data.heap_base, inner_pointer)
+      BasePage::FromInnerAddress(&heap, inner_pointer)
           ->ObjectHeaderFromInnerAddress<AccessMode::kAtomic>(inner_pointer);
 
   // Record the source object.
-  local_data.heap_base.remembered_set().AddSourceObject(
+  heap.remembered_set().AddSourceObject(
       const_cast<HeapObjectHeader&>(object_header));
 }
 
 #endif  // CPPGC_YOUNG_GENERATION