cppgc: Optimize MakeGarbageCollected

Annotate slow path call for creating a new GCInfo accordingly. This
path is only hit for the first object allocation for a given type.
All subsequent allocations will use the fast path.

Bug: chromium:1408821
Change-Id: Ifc1d3491a94b30dfeee1a2c9679c64939025fefe
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4161752
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85408}
This commit is contained in:
Michael Lippautz 2023-01-19 17:06:36 +01:00 committed by V8 LUCI CQ
parent 2670e3d0cc
commit 48e79783ee
4 changed files with 88 additions and 67 deletions

View File

@@ -10,6 +10,7 @@
#include <type_traits>
#include "cppgc/internal/finalizer-trait.h"
#include "cppgc/internal/logging.h"
#include "cppgc/internal/name-trait.h"
#include "cppgc/trace-trait.h"
#include "v8config.h" // NOLINT(build/include_directory)
@@ -20,12 +21,12 @@ namespace internal {
using GCInfoIndex = uint16_t;
struct V8_EXPORT EnsureGCInfoIndexTrait final {
// Acquires a new GC info object and returns the index. In addition, also
// updates `registered_index` atomically.
// Acquires a new GC info object and updates `registered_index` with the index
// that identifies that new info accordingly.
template <typename T>
V8_INLINE static GCInfoIndex EnsureIndex(
V8_INLINE static void EnsureIndex(
std::atomic<GCInfoIndex>& registered_index) {
return EnsureGCInfoIndexTraitDispatch<T>{}(registered_index);
EnsureGCInfoIndexTraitDispatch<T>{}(registered_index);
}
private:
@@ -34,38 +35,32 @@ struct V8_EXPORT EnsureGCInfoIndexTrait final {
bool = NameTrait<T>::HasNonHiddenName()>
struct EnsureGCInfoIndexTraitDispatch;
static GCInfoIndex EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&,
TraceCallback,
FinalizationCallback,
NameCallback);
static GCInfoIndex EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&,
TraceCallback,
FinalizationCallback);
static GCInfoIndex EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&,
TraceCallback, NameCallback);
static GCInfoIndex EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&,
TraceCallback);
static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
TraceCallback,
FinalizationCallback,
NameCallback);
static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
TraceCallback,
FinalizationCallback);
static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
TraceCallback,
NameCallback);
static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
TraceCallback);
static void V8_PRESERVE_MOST
EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback,
FinalizationCallback, NameCallback);
static void V8_PRESERVE_MOST EnsureGCInfoIndexPolymorphic(
std::atomic<GCInfoIndex>&, TraceCallback, FinalizationCallback);
static void V8_PRESERVE_MOST EnsureGCInfoIndexPolymorphic(
std::atomic<GCInfoIndex>&, TraceCallback, NameCallback);
static void V8_PRESERVE_MOST
EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback);
static void V8_PRESERVE_MOST
EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback,
FinalizationCallback, NameCallback);
static void V8_PRESERVE_MOST EnsureGCInfoIndexNonPolymorphic(
std::atomic<GCInfoIndex>&, TraceCallback, FinalizationCallback);
static void V8_PRESERVE_MOST EnsureGCInfoIndexNonPolymorphic(
std::atomic<GCInfoIndex>&, TraceCallback, NameCallback);
static void V8_PRESERVE_MOST
EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback);
};
#define DISPATCH(is_polymorphic, has_finalizer, has_non_hidden_name, function) \
template <typename T> \
struct EnsureGCInfoIndexTrait::EnsureGCInfoIndexTraitDispatch< \
T, is_polymorphic, has_finalizer, has_non_hidden_name> { \
V8_INLINE GCInfoIndex \
operator()(std::atomic<GCInfoIndex>& registered_index) { \
return function; \
V8_INLINE void operator()(std::atomic<GCInfoIndex>& registered_index) { \
function; \
} \
};
@@ -143,9 +138,16 @@ struct GCInfoTrait final {
static_assert(sizeof(T), "T must be fully defined");
static std::atomic<GCInfoIndex>
registered_index; // Uses zero initialization.
const GCInfoIndex index = registered_index.load(std::memory_order_acquire);
return index ? index
: EnsureGCInfoIndexTrait::EnsureIndex<T>(registered_index);
GCInfoIndex index = registered_index.load(std::memory_order_acquire);
if (V8_UNLIKELY(!index)) {
EnsureGCInfoIndexTrait::EnsureIndex<T>(registered_index);
// Slow path call uses V8_PRESERVE_MOST which does not support return
// values (also preserves RAX). Avoid out parameter by just reloading the
// value here which at this point is guaranteed to be set.
index = registered_index.load(std::memory_order_acquire);
CPPGC_DCHECK(index != 0);
}
return index;
}
};

View File

@@ -20,68 +20,68 @@ HeapObjectName GetHiddenName(const void*, HeapObjectNameForUnnamedObject) {
} // namespace
// static
GCInfoIndex EnsureGCInfoIndexTrait::EnsureGCInfoIndexPolymorphic(
void EnsureGCInfoIndexTrait::EnsureGCInfoIndexPolymorphic(
std::atomic<GCInfoIndex>& registered_index, TraceCallback trace_callback,
FinalizationCallback finalization_callback, NameCallback name_callback) {
return GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
registered_index,
{finalization_callback, trace_callback, name_callback, true});
}
// static
GCInfoIndex EnsureGCInfoIndexTrait::EnsureGCInfoIndexPolymorphic(
void EnsureGCInfoIndexTrait::EnsureGCInfoIndexPolymorphic(
std::atomic<GCInfoIndex>& registered_index, TraceCallback trace_callback,
FinalizationCallback finalization_callback) {
return GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
registered_index,
{finalization_callback, trace_callback, GetHiddenName, true});
}
// static
GCInfoIndex EnsureGCInfoIndexTrait::EnsureGCInfoIndexPolymorphic(
void EnsureGCInfoIndexTrait::EnsureGCInfoIndexPolymorphic(
std::atomic<GCInfoIndex>& registered_index, TraceCallback trace_callback,
NameCallback name_callback) {
return GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
registered_index, {nullptr, trace_callback, name_callback, true});
}
// static
GCInfoIndex EnsureGCInfoIndexTrait::EnsureGCInfoIndexPolymorphic(
void EnsureGCInfoIndexTrait::EnsureGCInfoIndexPolymorphic(
std::atomic<GCInfoIndex>& registered_index, TraceCallback trace_callback) {
return GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
registered_index, {nullptr, trace_callback, GetHiddenName, true});
}
// static
GCInfoIndex EnsureGCInfoIndexTrait::EnsureGCInfoIndexNonPolymorphic(
void EnsureGCInfoIndexTrait::EnsureGCInfoIndexNonPolymorphic(
std::atomic<GCInfoIndex>& registered_index, TraceCallback trace_callback,
FinalizationCallback finalization_callback, NameCallback name_callback) {
return GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
registered_index,
{finalization_callback, trace_callback, name_callback, false});
}
// static
GCInfoIndex EnsureGCInfoIndexTrait::EnsureGCInfoIndexNonPolymorphic(
void EnsureGCInfoIndexTrait::EnsureGCInfoIndexNonPolymorphic(
std::atomic<GCInfoIndex>& registered_index, TraceCallback trace_callback,
FinalizationCallback finalization_callback) {
return GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
registered_index,
{finalization_callback, trace_callback, GetHiddenName, false});
}
// static
GCInfoIndex EnsureGCInfoIndexTrait::EnsureGCInfoIndexNonPolymorphic(
void EnsureGCInfoIndexTrait::EnsureGCInfoIndexNonPolymorphic(
std::atomic<GCInfoIndex>& registered_index, TraceCallback trace_callback,
NameCallback name_callback) {
return GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
registered_index, {nullptr, trace_callback, name_callback, false});
}
// static
GCInfoIndex EnsureGCInfoIndexTrait::EnsureGCInfoIndexNonPolymorphic(
void EnsureGCInfoIndexTrait::EnsureGCInfoIndexNonPolymorphic(
std::atomic<GCInfoIndex>& registered_index, TraceCallback trace_callback) {
return GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
GlobalGCInfoTable::GetMutable().RegisterNewGCInfo(
registered_index, {nullptr, trace_callback, GetHiddenName, false});
}

View File

@@ -113,22 +113,23 @@ ObjectAllocator::ObjectAllocator(RawHeap& heap, PageBackend& page_backend,
oom_handler_(oom_handler),
garbage_collector_(garbage_collector) {}
void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace& space, size_t size,
AlignVal alignment,
GCInfoIndex gcinfo) {
void* memory = OutOfLineAllocateImpl(space, size, alignment, gcinfo);
void ObjectAllocator::OutOfLineAllocateGCSafePoint(NormalPageSpace& space,
size_t size,
AlignVal alignment,
GCInfoIndex gcinfo,
void** object) {
*object = OutOfLineAllocateImpl(space, size, alignment, gcinfo);
stats_collector_.NotifySafePointForConservativeCollection();
if (prefinalizer_handler_.IsInvokingPreFinalizers()) {
// Objects allocated during pre finalizers should be allocated as black
// since marking is already done. Atomics are not needed because there is
// no concurrent marking in the background.
HeapObjectHeader::FromObject(memory).MarkNonAtomic();
HeapObjectHeader::FromObject(*object).MarkNonAtomic();
// Resetting the allocation buffer forces all further allocations in pre
// finalizers to go through this slow path.
ReplaceLinearAllocationBuffer(space, stats_collector_, nullptr, 0);
prefinalizer_handler_.NotifyAllocationInPrefinalizer(size);
}
return memory;
}
void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,

View File

@@ -62,11 +62,21 @@ class V8_EXPORT_PRIVATE ObjectAllocator final : public cppgc::AllocationHandle {
inline static RawHeap::RegularSpaceType GetInitialSpaceIndexForSize(
size_t size);
inline void* AllocateObjectOnSpace(NormalPageSpace& space, size_t size,
GCInfoIndex gcinfo);
inline void* AllocateObjectOnSpace(NormalPageSpace& space, size_t size,
AlignVal alignment, GCInfoIndex gcinfo);
void* OutOfLineAllocate(NormalPageSpace&, size_t, AlignVal, GCInfoIndex);
inline void* AllocateObjectOnSpace(NormalPageSpace&, size_t, GCInfoIndex);
inline void* AllocateObjectOnSpace(NormalPageSpace&, size_t, AlignVal,
GCInfoIndex);
inline void* OutOfLineAllocate(NormalPageSpace&, size_t, AlignVal,
GCInfoIndex);
// Called from the fast path LAB allocation when the LAB capacity cannot fit
// the allocation or a large object is requested. Use out parameter as
// `V8_PRESERVE_MOST` cannot handle non-void return values.
//
// Prefer using `OutOfLineAllocate()`.
void V8_PRESERVE_MOST OutOfLineAllocateGCSafePoint(NormalPageSpace&, size_t,
AlignVal, GCInfoIndex,
void**);
// Raw allocation, does not emit safepoint for conservative GC.
void* OutOfLineAllocateImpl(NormalPageSpace&, size_t, AlignVal, GCInfoIndex);
bool TryRefillLinearAllocationBuffer(NormalPageSpace&, size_t);
@@ -136,6 +146,14 @@ RawHeap::RegularSpaceType ObjectAllocator::GetInitialSpaceIndexForSize(
return RawHeap::RegularSpaceType::kNormal4;
}
void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace& space, size_t size,
AlignVal alignment,
GCInfoIndex gcinfo) {
void* object;
OutOfLineAllocateGCSafePoint(space, size, alignment, gcinfo, &object);
return object;
}
void* ObjectAllocator::AllocateObjectOnSpace(NormalPageSpace& space,
size_t size, AlignVal alignment,
GCInfoIndex gcinfo) {
@@ -175,13 +193,13 @@ void* ObjectAllocator::AllocateObjectOnSpace(NormalPageSpace& space,
.SetBit<AccessMode::kAtomic>(reinterpret_cast<ConstAddress>(&filler));
lab_allocation_will_succeed = true;
}
if (lab_allocation_will_succeed) {
void* object = AllocateObjectOnSpace(space, size, gcinfo);
DCHECK_NOT_NULL(object);
DCHECK_EQ(0u, reinterpret_cast<uintptr_t>(object) & kAlignmentMask);
return object;
if (V8_UNLIKELY(!lab_allocation_will_succeed)) {
return OutOfLineAllocate(space, size, alignment, gcinfo);
}
return OutOfLineAllocate(space, size, alignment, gcinfo);
void* object = AllocateObjectOnSpace(space, size, gcinfo);
DCHECK_NOT_NULL(object);
DCHECK_EQ(0u, reinterpret_cast<uintptr_t>(object) & kAlignmentMask);
return object;
}
void* ObjectAllocator::AllocateObjectOnSpace(NormalPageSpace& space,
@@ -190,7 +208,7 @@ void* ObjectAllocator::AllocateObjectOnSpace(NormalPageSpace& space,
NormalPageSpace::LinearAllocationBuffer& current_lab =
space.linear_allocation_buffer();
if (current_lab.size() < size) {
if (V8_UNLIKELY(current_lab.size() < size)) {
return OutOfLineAllocate(
space, size, static_cast<AlignVal>(kAllocationGranularity), gcinfo);
}