cppgc: Fix low-level write barriers
Some of the supported low-level write barriers only require passing a slot, which may not even be part of a heap object but may live on the stack. This complicates the situation: even with the caged heap there is no way to distinguish a stack slot from a heap slot. Solve this by passing an optional callback that can be used to lazily get the heap. The embedder can use it to retrieve the heap from, e.g., TLS if needed. This aligns the barrier with Oilpan in Blink.

Bug: chromium:1056170
Change-Id: I1e5d022ab17a2614a67b6ef39ed12691bcbd0ac6
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2675924
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#72550}
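For illustration, a minimal sketch of how an embedder might use the slot-only barrier query with the new heap-handle callback. GetHeapFromTLS() and WriteToSlot() are hypothetical names, not part of this change; the cppgc calls mirror the API touched by this CL.

#include "cppgc/heap.h"
#include "cppgc/heap-consistency.h"

cppgc::Heap* GetHeapFromTLS();  // Hypothetical embedder-specific lookup.

void WriteToSlot(void** slot, void* value) {
  namespace subtle = cppgc::subtle;
  subtle::HeapConsistency::WriteBarrierParams params;
  // The slot alone does not identify the heap (it may be a stack slot), so a
  // callback is passed; it is only invoked when needed and must not allocate.
  const auto type = subtle::HeapConsistency::GetWriteBarrierType(
      slot, params,
      []() -> cppgc::HeapHandle& { return GetHeapFromTLS()->GetHeapHandle(); });
  *slot = value;
  if (type == subtle::HeapConsistency::WriteBarrierType::kMarking) {
    subtle::HeapConsistency::DijkstraWriteBarrier(params, value);
  }
  // A kGenerational result (young-generation builds) would instead use
  // HeapConsistency::GenerationalBarrier(params, slot).
}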
parent f116eb1816
commit 65893d84e5

BUILD.gn
@@ -4614,6 +4614,7 @@ v8_source_set("cppgc_base") {
     "include/cppgc/ephemeron-pair.h",
     "include/cppgc/garbage-collected.h",
     "include/cppgc/heap-consistency.h",
+    "include/cppgc/heap-state.h",
     "include/cppgc/heap.h",
     "include/cppgc/internal/api-constants.h",
     "include/cppgc/internal/atomic-entry-flag.h",
@@ -4634,6 +4635,7 @@ v8_source_set("cppgc_base") {
     "include/cppgc/persistent.h",
     "include/cppgc/platform.h",
     "include/cppgc/prefinalizer.h",
+    "include/cppgc/sentinel-pointer.h",
     "include/cppgc/source-location.h",
     "include/cppgc/trace-trait.h",
     "include/cppgc/type-traits.h",
@@ -4665,6 +4667,7 @@ v8_source_set("cppgc_base") {
     "src/heap/cppgc/heap-page.h",
     "src/heap/cppgc/heap-space.cc",
     "src/heap/cppgc/heap-space.h",
+    "src/heap/cppgc/heap-state.cc",
     "src/heap/cppgc/heap-visitor.h",
     "src/heap/cppgc/heap.cc",
     "src/heap/cppgc/heap.h",
@@ -4,6 +4,7 @@ include_rules = [
   "+cppgc/common.h",
   # Used by v8-cppgc.h to bridge to cppgc.
   "+cppgc/custom-space.h",
+  "+cppgc/internal/process-heap.h",
   "+cppgc/internal/write-barrier.h",
   "+cppgc/visitor.h",
 ]
@@ -50,17 +50,22 @@ class HeapConsistency final {
   /**
    * Gets the required write barrier type for a specific write.
    *
-   * \param slot Slot containing the pointer to some part of an object object
-   * that has been allocated using `MakeGarbageCollected()`. Does not consider
-   * the value of `slot`.
+   * \param slot Slot to some part of an object. The object must not necessarily
+   * have been allocated using `MakeGarbageCollected()` but can also live
+   * off-heap or on stack.
    * \param params Parameters that may be used for actual write barrier calls.
    * Only filled if return value indicates that a write barrier is needed. The
    * contents of the `params` are an implementation detail.
+   * \param callback Callback returning the corresponding heap handle. The
+   * callback is only invoked if the heap cannot otherwise be figured out. The
+   * callback must not allocate.
    * \returns whether a write barrier is needed and which barrier to invoke.
    */
+  template <typename HeapHandleCallback>
   static V8_INLINE WriteBarrierType
-  GetWriteBarrierType(const void* slot, WriteBarrierParams& params) {
-    return internal::WriteBarrier::GetWriteBarrierType(slot, params);
+  GetWriteBarrierType(const void* slot, WriteBarrierParams& params,
+                      HeapHandleCallback callback) {
+    return internal::WriteBarrier::GetWriteBarrierType(slot, params, callback);
   }
 
   /**
@@ -81,7 +86,6 @@ class HeapConsistency final {
    * elements if they have not yet been processed.
    *
    * \param params The parameters retrieved from `GetWriteBarrierType()`.
-   * \param heap The corresponding heap.
    * \param first_element Pointer to the first element that should be processed.
    * The slot itself must reside in an object that has been allocated using
    * `MakeGarbageCollected()`.
@@ -92,11 +96,11 @@ class HeapConsistency final {
    * element if necessary.
    */
   static V8_INLINE void DijkstraWriteBarrierRange(
-      const WriteBarrierParams& params, HeapHandle& heap,
-      const void* first_element, size_t element_size, size_t number_of_elements,
+      const WriteBarrierParams& params, const void* first_element,
+      size_t element_size, size_t number_of_elements,
       TraceCallback trace_callback) {
     internal::WriteBarrier::DijkstraMarkingBarrierRange(
-        params, heap, first_element, element_size, number_of_elements,
+        params, first_element, element_size, number_of_elements,
         trace_callback);
   }
 
@@ -132,46 +136,6 @@ class HeapConsistency final {
   HeapConsistency() = delete;
 };
 
-/**
- * Helpers to peek into heap-internal state.
- */
-class V8_EXPORT HeapState final {
- public:
-  /**
-   * Returns whether the garbage collector is marking. This API is experimental
-   * and is expected to be removed in future.
-   *
-   * \param heap_handle The corresponding heap.
-   * \returns true if the garbage collector is currently marking, and false
-   * otherwise.
-   */
-  static bool IsMarking(HeapHandle& heap_handle);
-
-  /*
-   * Returns whether the garbage collector is sweeping. This API is experimental
-   * and is expected to be removed in future.
-   *
-   * \param heap_handle The corresponding heap.
-   * \returns true if the garbage collector is currently sweeping, and false
-   * otherwise.
-   */
-  static bool IsSweeping(HeapHandle& heap_handle);
-
-  /**
-   * Returns whether the garbage collector is in the atomic pause, i.e., the
-   * mutator is stopped from running. This API is experimental and is expected
-   * to be removed in future.
-   *
-   * \param heap_handle The corresponding heap.
-   * \returns true if the garbage collector is currently in the atomic pause,
-   * and false otherwise.
-   */
-  static bool IsInAtomicPause(HeapHandle& heap_handle);
-
- private:
-  HeapState() = delete;
-};
-
 /**
  * Disallows garbage collection finalizations. Any garbage collection triggers
  * result in a crash when in this scope.
include/cppgc/heap-state.h (new file, 59 lines)
@@ -0,0 +1,59 @@
+// Copyright 2021 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef INCLUDE_CPPGC_HEAP_STATE_H_
+#define INCLUDE_CPPGC_HEAP_STATE_H_
+
+#include "v8config.h"  // NOLINT(build/include_directory)
+
+namespace cppgc {
+
+class HeapHandle;
+
+namespace subtle {
+
+/**
+ * Helpers to peek into heap-internal state.
+ */
+class V8_EXPORT HeapState final {
+ public:
+  /**
+   * Returns whether the garbage collector is marking. This API is experimental
+   * and is expected to be removed in future.
+   *
+   * \param heap_handle The corresponding heap.
+   * \returns true if the garbage collector is currently marking, and false
+   * otherwise.
+   */
+  static bool IsMarking(const HeapHandle& heap_handle);
+
+  /*
+   * Returns whether the garbage collector is sweeping. This API is experimental
+   * and is expected to be removed in future.
+   *
+   * \param heap_handle The corresponding heap.
+   * \returns true if the garbage collector is currently sweeping, and false
+   * otherwise.
+   */
+  static bool IsSweeping(const HeapHandle& heap_handle);
+
+  /**
+   * Returns whether the garbage collector is in the atomic pause, i.e., the
+   * mutator is stopped from running. This API is experimental and is expected
+   * to be removed in future.
+   *
+   * \param heap_handle The corresponding heap.
+   * \returns true if the garbage collector is currently in the atomic pause,
+   * and false otherwise.
+   */
+  static bool IsInAtomicPause(const HeapHandle& heap_handle);
+
+ private:
+  HeapState() = delete;
+};
+
+}  // namespace subtle
+}  // namespace cppgc
+
+#endif  // INCLUDE_CPPGC_HEAP_STATE_H_
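For illustration, a minimal sketch of how an embedder might consume the new public header; the `heap` pointer and the reporting are assumptions for the example, not part of this change.

#include "cppgc/heap.h"
#include "cppgc/heap-state.h"

// Purely illustrative: query the collector's current phase, e.g. for logging.
// `heap` is assumed to be the embedder's cppgc::Heap.
void ReportGCPhase(cppgc::Heap* heap) {
  cppgc::HeapHandle& handle = heap->GetHeapHandle();
  const bool marking = cppgc::subtle::HeapState::IsMarking(handle);
  const bool sweeping = cppgc::subtle::HeapState::IsSweeping(handle);
  const bool atomic_pause = cppgc::subtle::HeapState::IsInAtomicPause(handle);
  // Embedder-specific reporting elided; the helpers are experimental and may
  // be removed in future.
  (void)marking;
  (void)sweeping;
  (void)atomic_pause;
}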
@@ -136,23 +136,8 @@ template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
           typename CheckingPolicy = DefaultCheckingPolicy>
 class BasicMember;
 
-// Special tag type used to denote some sentinel member. The semantics of the
-// sentinel is defined by the embedder.
-struct SentinelPointer {
-  template <typename T>
-  operator T*() const {  // NOLINT
-    static constexpr intptr_t kSentinelValue = 1;
-    return reinterpret_cast<T*>(kSentinelValue);
-  }
-  // Hidden friends.
-  friend bool operator==(SentinelPointer, SentinelPointer) { return true; }
-  friend bool operator!=(SentinelPointer, SentinelPointer) { return false; }
-};
-
 }  // namespace internal
-
-constexpr internal::SentinelPointer kSentinelPointer;
 
 }  // namespace cppgc
 
 #endif  // INCLUDE_CPPGC_INTERNAL_POINTER_POLICIES_H_
@@ -5,8 +5,10 @@
 #ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
 #define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
 
+#include "cppgc/heap-state.h"
 #include "cppgc/internal/api-constants.h"
 #include "cppgc/internal/process-heap.h"
+#include "cppgc/sentinel-pointer.h"
 #include "cppgc/trace-trait.h"
 #include "v8config.h"  // NOLINT(build/include_directory)
 
@@ -32,17 +34,17 @@ class V8_EXPORT WriteBarrier final {
   };
 
   struct Params {
+    HeapHandle* heap = nullptr;
 #if V8_ENABLE_CHECKS
     Type type = Type::kNone;
 #endif  // !V8_ENABLE_CHECKS
 #if defined(CPPGC_CAGED_HEAP)
-    uintptr_t start;
-
+    uintptr_t start = 0;
     CagedHeapLocalData& caged_heap() const {
       return *reinterpret_cast<CagedHeapLocalData*>(start);
     }
-    uintptr_t slot_offset;
-    uintptr_t value_offset;
+    uintptr_t slot_offset = 0;
+    uintptr_t value_offset = 0;
 #endif  // CPPGC_CAGED_HEAP
   };
 
@@ -55,17 +57,19 @@ class V8_EXPORT WriteBarrier final {
   static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
                                             Params& params);
   // Returns the required write barrier for a given `slot`.
-  static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params);
+  template <typename HeapHandleCallback>
+  static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
+                                            HeapHandleCallback callback);
 
+  template <typename HeapHandleCallback>
   static V8_INLINE Type GetWriteBarrierTypeForExternallyReferencedObject(
-      const void* value, Params& params);
+      const void* value, Params& params, HeapHandleCallback callback);
 
   static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
                                                const void* object);
   static V8_INLINE void DijkstraMarkingBarrierRange(
-      const Params& params, HeapHandle& heap, const void* first_element,
-      size_t element_size, size_t number_of_elements,
-      TraceCallback trace_callback);
+      const Params& params, const void* first_element, size_t element_size,
+      size_t number_of_elements, TraceCallback trace_callback);
   static V8_INLINE void SteeleMarkingBarrier(const Params& params,
                                              const void* object);
 #if defined(CPPGC_YOUNG_GENERATION)
@@ -102,64 +106,50 @@ class V8_EXPORT WriteBarrier final {
   static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);
 
 #if defined(CPPGC_YOUNG_GENERATION)
+  static CagedHeapLocalData& GetLocalData(HeapHandle&);
   static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
                                       const AgeTable& ageTable,
                                       const void* slot, uintptr_t value_offset);
 #endif  // CPPGC_YOUNG_GENERATION
 };
 
+template <WriteBarrier::Type type>
+V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
+  if (type == WriteBarrier::Type::kNone) return WriteBarrier::Type::kNone;
+#if V8_ENABLE_CHECKS
+  params.type = type;
+#endif  // !V8_ENABLE_CHECKS
+  return type;
+}
+
 #if defined(CPPGC_CAGED_HEAP)
-class WriteBarrierTypeForCagedHeapPolicy final {
+class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
  public:
-  template <WriteBarrier::ValueMode value_mode>
+  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
   static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
-                                          WriteBarrier::Params& params) {
-    const bool have_caged_heap =
-        value_mode == WriteBarrier::ValueMode::kValuePresent
-            ? TryGetCagedHeap(slot, value, params)
-            : TryGetCagedHeap(slot, slot, params);
-    if (!have_caged_heap) {
-      return WriteBarrier::Type::kNone;
-    }
-    if (V8_UNLIKELY(params.caged_heap().is_marking_in_progress)) {
-#if V8_ENABLE_CHECKS
-      params.type = WriteBarrier::Type::kMarking;
-#endif  // !V8_ENABLE_CHECKS
-      return WriteBarrier::Type::kMarking;
-    }
-#if defined(CPPGC_YOUNG_GENERATION)
-    params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
-    if (value_mode == WriteBarrier::ValueMode::kValuePresent) {
-      params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
-    } else {
-      params.value_offset = 0;
-    }
-#if V8_ENABLE_CHECKS
-    params.type = WriteBarrier::Type::kGenerational;
-#endif  // !V8_ENABLE_CHECKS
-    return WriteBarrier::Type::kGenerational;
-#else   // !CPPGC_YOUNG_GENERATION
-    return WriteBarrier::Type::kNone;
-#endif  // !CPPGC_YOUNG_GENERATION
+                                          WriteBarrier::Params& params,
+                                          HeapHandleCallback callback) {
+    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
   }
 
+  template <typename HeapHandleCallback>
   static V8_INLINE WriteBarrier::Type GetForExternallyReferenced(
-      const void* value, WriteBarrier::Params& params) {
+      const void* value, WriteBarrier::Params& params, HeapHandleCallback) {
     if (!TryGetCagedHeap(value, value, params)) {
       return WriteBarrier::Type::kNone;
     }
     if (V8_UNLIKELY(params.caged_heap().is_marking_in_progress)) {
-#if V8_ENABLE_CHECKS
-      params.type = WriteBarrier::Type::kMarking;
-#endif  // !V8_ENABLE_CHECKS
-      return WriteBarrier::Type::kMarking;
+      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
     }
-    return WriteBarrier::Type::kNone;
+    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
   }
 
  private:
   WriteBarrierTypeForCagedHeapPolicy() = delete;
 
+  template <WriteBarrier::ValueMode value_mode>
+  struct ValueModeDispatch;
+
   static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
                                         WriteBarrier::Params& params) {
     params.start = reinterpret_cast<uintptr_t>(value) &
@@ -173,57 +163,167 @@ class WriteBarrierTypeForCagedHeapPolicy final {
     }
     return true;
   }
 
+  // Returns whether marking is in progress. If marking is not in progress
+  // sets the start of the cage accordingly.
+  //
+  // TODO(chromium:1056170): Create fast path on API.
+  static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
 };
+
+template <>
+struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
+    WriteBarrier::ValueMode::kValuePresent> {
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
+                                          WriteBarrier::Params& params,
+                                          HeapHandleCallback) {
+    bool within_cage = TryGetCagedHeap(slot, value, params);
+    if (!within_cage) {
+      return WriteBarrier::Type::kNone;
+    }
+    if (V8_LIKELY(!params.caged_heap().is_marking_in_progress)) {
+#if defined(CPPGC_YOUNG_GENERATION)
+      params.heap = reinterpret_cast<HeapHandle*>(params.start);
+      params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
+      params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
+      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
+#else   // !CPPGC_YOUNG_GENERATION
+      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+#endif  // !CPPGC_YOUNG_GENERATION
+    }
+    params.heap = reinterpret_cast<HeapHandle*>(params.start);
+    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+  }
+};
+
+template <>
+struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
+    WriteBarrier::ValueMode::kNoValuePresent> {
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
+                                          WriteBarrier::Params& params,
+                                          HeapHandleCallback callback) {
+#if defined(CPPGC_YOUNG_GENERATION)
+    HeapHandle& handle = callback();
+    if (V8_LIKELY(!IsMarking(handle, params))) {
+      // params.start is populated by IsMarking().
+      params.heap = &handle;
+      params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
+      // params.value_offset stays 0.
+      if (params.slot_offset > api_constants::kCagedHeapReservationSize) {
+        // Check if slot is on stack.
+        return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+      }
+      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
+    }
+#else   // !CPPGC_YOUNG_GENERATION
+    if (V8_LIKELY(!ProcessHeap::IsAnyIncrementalOrConcurrentMarking())) {
+      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+    }
+    HeapHandle& handle = callback();
+    if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) {
+      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+    }
+#endif  // !CPPGC_YOUNG_GENERATION
+    params.heap = &handle;
+    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+  }
+};
+
 #endif  // CPPGC_CAGED_HEAP
 
-class WriteBarrierTypeForNonCagedHeapPolicy final {
+class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
  public:
-  template <WriteBarrier::ValueMode value_mode>
+  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
   static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
-                                          WriteBarrier::Params& params) {
-    return GetInternal(params);
+                                          WriteBarrier::Params& params,
+                                          HeapHandleCallback callback) {
+    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
   }
 
+  template <typename HeapHandleCallback>
   static V8_INLINE WriteBarrier::Type GetForExternallyReferenced(
-      const void* value, WriteBarrier::Params& params) {
-    return GetInternal(params);
+      const void* value, WriteBarrier::Params& params,
+      HeapHandleCallback callback) {
+    return GetInternal(params, callback);
   }
 
  private:
-  static V8_INLINE WriteBarrier::Type GetInternal(
-      WriteBarrier::Params& params) {
-    WriteBarrier::Type type =
-        V8_LIKELY(!ProcessHeap::IsAnyIncrementalOrConcurrentMarking())
-            ? WriteBarrier::Type::kNone
-            : WriteBarrier::Type::kMarking;
-#if V8_ENABLE_CHECKS
-    params.type = type;
-#endif  // !V8_ENABLE_CHECKS
-    return type;
+  template <WriteBarrier::ValueMode value_mode>
+  struct ValueModeDispatch;
+
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type GetInternal(WriteBarrier::Params& params,
+                                                  HeapHandleCallback callback) {
+    if (V8_UNLIKELY(ProcessHeap::IsAnyIncrementalOrConcurrentMarking())) {
+      HeapHandle& handle = callback();
+      if (subtle::HeapState::IsMarking(handle)) {
+        params.heap = &handle;
+        return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+      }
+    }
+    return WriteBarrier::Type::kNone;
   }
 
+  // TODO(chromium:1056170): Create fast path on API.
+  static bool IsMarking(const void*, HeapHandle**);
+
   WriteBarrierTypeForNonCagedHeapPolicy() = delete;
 };
 
+template <>
+struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
+    WriteBarrier::ValueMode::kValuePresent> {
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
+                                          WriteBarrier::Params& params,
+                                          HeapHandleCallback callback) {
+    // The following check covers nullptr as well as sentinel pointer.
+    if (object <= static_cast<void*>(kSentinelPointer)) {
+      return WriteBarrier::Type::kNone;
+    }
+    if (IsMarking(object, &params.heap)) {
+      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+    }
+    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+  }
+};
+
+template <>
+struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
+    WriteBarrier::ValueMode::kNoValuePresent> {
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
+                                          WriteBarrier::Params& params,
+                                          HeapHandleCallback callback) {
+    return GetInternal(params, callback);
+  }
+};
+
 // static
 WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
     const void* slot, const void* value, WriteBarrier::Params& params) {
   return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
-                                                               params);
+                                                               params, []() {});
 }
 
 // static
+template <typename HeapHandleCallback>
 WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
-    const void* slot, WriteBarrier::Params& params) {
-  return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(slot, nullptr,
-                                                                 params);
+    const void* slot, WriteBarrier::Params& params,
+    HeapHandleCallback callback) {
+  return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
+      slot, nullptr, params, callback);
 }
 
 // static
+template <typename HeapHandleCallback>
 WriteBarrier::Type
 WriteBarrier::GetWriteBarrierTypeForExternallyReferencedObject(
-    const void* value, Params& params) {
-  return WriteBarrierTypePolicy::GetForExternallyReferenced(value, params);
+    const void* value, Params& params, HeapHandleCallback callback) {
+  return WriteBarrierTypePolicy::GetForExternallyReferenced(value, params,
+                                                            callback);
}
 
 // static
@@ -240,13 +340,12 @@ void WriteBarrier::DijkstraMarkingBarrier(const Params& params,
 
 // static
 void WriteBarrier::DijkstraMarkingBarrierRange(const Params& params,
-                                               HeapHandle& heap,
                                                const void* first_element,
                                                size_t element_size,
                                                size_t number_of_elements,
                                                TraceCallback trace_callback) {
   CheckParams(Type::kMarking, params);
-  DijkstraMarkingBarrierRangeSlow(heap, first_element, element_size,
+  DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
                                   number_of_elements, trace_callback);
 }
 
@@ -10,6 +10,7 @@
 #include <type_traits>
 
 #include "cppgc/internal/pointer-policies.h"
+#include "cppgc/sentinel-pointer.h"
 #include "cppgc/type-traits.h"
 #include "v8config.h"  // NOLINT(build/include_directory)
 
@@ -9,6 +9,7 @@
 
 #include "cppgc/internal/persistent-node.h"
 #include "cppgc/internal/pointer-policies.h"
+#include "cppgc/sentinel-pointer.h"
 #include "cppgc/source-location.h"
 #include "cppgc/type-traits.h"
 #include "cppgc/visitor.h"
include/cppgc/sentinel-pointer.h (new file, 32 lines)
@@ -0,0 +1,32 @@
+// Copyright 2021 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef INCLUDE_CPPGC_SENTINEL_POINTER_H_
+#define INCLUDE_CPPGC_SENTINEL_POINTER_H_
+
+#include <cstdint>
+
+namespace cppgc {
+namespace internal {
+
+// Special tag type used to denote some sentinel member. The semantics of the
+// sentinel is defined by the embedder.
+struct SentinelPointer {
+  template <typename T>
+  operator T*() const {  // NOLINT
+    static constexpr intptr_t kSentinelValue = 1;
+    return reinterpret_cast<T*>(kSentinelValue);
+  }
+  // Hidden friends.
+  friend bool operator==(SentinelPointer, SentinelPointer) { return true; }
+  friend bool operator!=(SentinelPointer, SentinelPointer) { return false; }
+};
+
+}  // namespace internal
+
+constexpr internal::SentinelPointer kSentinelPointer;
+
+}  // namespace cppgc
+
+#endif  // INCLUDE_CPPGC_SENTINEL_POINTER_H_
@@ -9,6 +9,7 @@
 #include <vector>
 
 #include "cppgc/custom-space.h"
+#include "cppgc/internal/process-heap.h"
 #include "cppgc/internal/write-barrier.h"
 #include "cppgc/visitor.h"
 #include "v8-internal.h"  // NOLINT(build/include_directory)
@@ -97,12 +98,30 @@ class V8_EXPORT JSHeapConsistency final {
    * \param params Parameters that may be used for actual write barrier calls.
    * Only filled if return value indicates that a write barrier is needed. The
    * contents of the `params` are an implementation detail.
+   * \param callback Callback returning the corresponding heap handle. The
+   * callback is only invoked if the heap cannot otherwise be figured out. The
+   * callback must not allocate.
    * \returns whether a write barrier is needed and which barrier to invoke.
    */
-  static V8_INLINE WriteBarrierType GetWriteBarrierType(
-      const TracedReferenceBase& ref, WriteBarrierParams& params) {
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrierType
+  GetWriteBarrierType(const TracedReferenceBase& ref,
+                      WriteBarrierParams& params, HeapHandleCallback callback) {
     if (ref.IsEmpty()) return WriteBarrierType::kNone;
-    return cppgc::internal::WriteBarrier::GetWriteBarrierType(&ref, params);
+
+    if (V8_LIKELY(!cppgc::internal::ProcessHeap::
+                      IsAnyIncrementalOrConcurrentMarking())) {
+      return cppgc::internal::WriteBarrier::Type::kNone;
+    }
+    cppgc::HeapHandle& handle = callback();
+    if (!cppgc::subtle::HeapState::IsMarking(handle)) {
+      return cppgc::internal::WriteBarrier::Type::kNone;
+    }
+    params.heap = &handle;
+#if V8_ENABLE_CHECKS
+    params.type = cppgc::internal::WriteBarrier::Type::kMarking;
+#endif  // !V8_ENABLE_CHECKS
+    return cppgc::internal::WriteBarrier::Type::kMarking;
   }
 
   /**
@@ -117,16 +136,21 @@ class V8_EXPORT JSHeapConsistency final {
    * \param params Parameters that may be used for actual write barrier calls.
    * Only filled if return value indicates that a write barrier is needed. The
    * contents of the `params` are an implementation detail.
+   * \param callback Callback returning the corresponding heap handle. The
+   * callback is only invoked if the heap cannot otherwise be figured out. The
+   * callback must not allocate.
    * \returns whether a write barrier is needed and which barrier to invoke.
    */
-  static V8_INLINE WriteBarrierType
-  GetWriteBarrierType(v8::Local<v8::Object>& wrapper, int wrapper_index,
-                      const void* wrappable, WriteBarrierParams& params) {
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrierType GetWriteBarrierType(
+      v8::Local<v8::Object>& wrapper, int wrapper_index, const void* wrappable,
+      WriteBarrierParams& params, HeapHandleCallback callback) {
 #if V8_ENABLE_CHECKS
     CheckWrapper(wrapper, wrapper_index, wrappable);
 #endif  // V8_ENABLE_CHECKS
     return cppgc::internal::WriteBarrier::
-        GetWriteBarrierTypeForExternallyReferencedObject(wrappable, params);
+        GetWriteBarrierTypeForExternallyReferencedObject(wrappable, params,
+                                                         callback);
   }
 
   /**
@@ -62,23 +62,5 @@ NoGarbageCollectionScope::NoGarbageCollectionScope(
 
 NoGarbageCollectionScope::~NoGarbageCollectionScope() { Leave(heap_handle_); }
 
-// static
-bool HeapState::IsMarking(HeapHandle& heap_handle) {
-  const auto& heap_base = internal::HeapBase::From(heap_handle);
-  return heap_base.marker();
-}
-
-// static
-bool HeapState::IsSweeping(HeapHandle& heap_handle) {
-  const auto& heap_base = internal::HeapBase::From(heap_handle);
-  return heap_base.sweeper().IsSweepingInProgress();
-}
-
-// static
-bool HeapState::IsInAtomicPause(HeapHandle& heap_handle) {
-  const auto& heap_base = internal::HeapBase::From(heap_handle);
-  return heap_base.in_atomic_pause();
-}
-
 }  // namespace subtle
 }  // namespace cppgc
src/heap/cppgc/heap-state.cc (new file, 31 lines)
@@ -0,0 +1,31 @@
+// Copyright 2021 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "include/cppgc/heap-state.h"
+
+#include "src/heap/cppgc/heap-base.h"
+
+namespace cppgc {
+namespace subtle {
+
+// static
+bool HeapState::IsMarking(const HeapHandle& heap_handle) {
+  const auto& heap_base = internal::HeapBase::From(heap_handle);
+  return heap_base.marker();
+}
+
+// static
+bool HeapState::IsSweeping(const HeapHandle& heap_handle) {
+  const auto& heap_base = internal::HeapBase::From(heap_handle);
+  return heap_base.sweeper().IsSweepingInProgress();
+}
+
+// static
+bool HeapState::IsInAtomicPause(const HeapHandle& heap_handle) {
+  const auto& heap_base = internal::HeapBase::From(heap_handle);
+  return heap_base.in_atomic_pause();
+}
+
+}  // namespace subtle
+}  // namespace cppgc
@@ -125,7 +125,7 @@ void WriteBarrier::GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
   // Record slot.
   local_data.heap_base->remembered_slots().insert(const_cast<void*>(slot));
 }
-#endif
+#endif  // CPPGC_YOUNG_GENERATION
 
 #if V8_ENABLE_CHECKS
 // static
@@ -134,5 +134,35 @@ void WriteBarrier::CheckParams(Type expected_type, const Params& params) {
 }
 #endif  // V8_ENABLE_CHECKS
 
+// static
+bool WriteBarrierTypeForNonCagedHeapPolicy::IsMarking(const void* object,
+                                                      HeapHandle** handle) {
+  // Large objects cannot have mixins, so we are guaranteed to always have
+  // a pointer on the same page.
+  const auto* page = BasePage::FromPayload(object);
+  *handle = page->heap();
+  return page->heap()->marker();
+}
+
+#if defined(CPPGC_CAGED_HEAP)
+
+// static
+bool WriteBarrierTypeForCagedHeapPolicy::IsMarking(
+    const HeapHandle& heap_handle, WriteBarrier::Params& params) {
+  const auto& heap_base = internal::HeapBase::From(heap_handle);
+  if (heap_base.marker()) {
+    return true;
+  }
+  // Also set caged heap start here to avoid another call immediately after
+  // checking IsMarking().
+#if defined(CPPGC_YOUNG_GENERATION)
+  params.start =
+      reinterpret_cast<uintptr_t>(&heap_base.caged_heap().local_data());
+#endif  // !CPPGC_YOUNG_GENERATION
+  return false;
+}
+
+#endif  // CPPGC_CAGED_HEAP
+
 }  // namespace internal
 }  // namespace cppgc
@@ -415,10 +415,13 @@ TEST_F(WriteBarrierTest, DijkstraWriteBarrierRangeTriggersWhenMarkingIsOn) {
     EXPECT_FALSE(object1->IsMarked());
     WriteBarrierParams params;
     EXPECT_EQ(WriteBarrierType::kMarking,
-              HeapConsistency::GetWriteBarrierType(object2->objects, params));
+              HeapConsistency::GetWriteBarrierType(
+                  object2->objects, params, [this]() -> HeapHandle& {
+                    return GetHeap()->GetHeapHandle();
+                  }));
     HeapConsistency::DijkstraWriteBarrierRange(
-        params, GetHeap()->GetHeapHandle(), object2->objects,
-        sizeof(InlinedObject), 4, TraceTrait<InlinedObject>::Trace);
+        params, object2->objects, sizeof(InlinedObject), 4,
+        TraceTrait<InlinedObject>::Trace);
     EXPECT_TRUE(object1->IsMarked());
   }
 }
@@ -432,10 +435,13 @@ TEST_F(WriteBarrierTest, DijkstraWriteBarrierRangeBailoutIfMarked) {
     ExpectNoWriteBarrierFires scope(marker(), {object1});
     WriteBarrierParams params;
    EXPECT_EQ(WriteBarrierType::kMarking,
-              HeapConsistency::GetWriteBarrierType(object2->objects, params));
+              HeapConsistency::GetWriteBarrierType(
+                  object2->objects, params, [this]() -> HeapHandle& {
+                    return GetHeap()->GetHeapHandle();
+                  }));
     HeapConsistency::DijkstraWriteBarrierRange(
-        params, GetHeap()->GetHeapHandle(), object2->objects,
-        sizeof(InlinedObject), 4, TraceTrait<InlinedObject>::Trace);
+        params, object2->objects, sizeof(InlinedObject), 4,
+        TraceTrait<InlinedObject>::Trace);
   }
 }
 
@@ -447,8 +453,8 @@ TEST_F(WriteBarrierTest, SteeleWriteBarrierTriggersWhenMarkingIsOn) {
     EXPECT_TRUE(HeapObjectHeader::FromPayload(object1).TryMarkAtomic());
     WriteBarrierParams params;
     EXPECT_EQ(WriteBarrierType::kMarking,
-              HeapConsistency::GetWriteBarrierType(object2->next_ref().Get(),
-                                                   params));
+              HeapConsistency::GetWriteBarrierType(
+                  &object2->next_ref(), object2->next_ref().Get(), params));
     HeapConsistency::SteeleWriteBarrier(params, object2->next_ref().Get());
   }
 }
@@ -460,8 +466,8 @@ TEST_F(WriteBarrierTest, SteeleWriteBarrierBailoutIfNotMarked) {
     ExpectNoWriteBarrierFires scope(marker(), {object1});
     WriteBarrierParams params;
     EXPECT_EQ(WriteBarrierType::kMarking,
-              HeapConsistency::GetWriteBarrierType(object2->next_ref().Get(),
-                                                   params));
+              HeapConsistency::GetWriteBarrierType(
+                  &object2->next_ref(), object2->next_ref().Get(), params));
     HeapConsistency::SteeleWriteBarrier(params, object2->next_ref().Get());
   }
 }
@@ -75,7 +75,8 @@ TEST_F(UnifiedHeapTest, WriteBarrierV8ToCppReference) {
   WrapperHelper::SetWrappableConnection(api_object, wrappable, wrappable);
   JSHeapConsistency::WriteBarrierParams params;
   auto barrier_type = JSHeapConsistency::GetWriteBarrierType(
-      api_object, 1, wrappable, params);
+      api_object, 1, wrappable, params,
+      [this]() -> cppgc::HeapHandle& { return cpp_heap().GetHeapHandle(); });
   EXPECT_EQ(JSHeapConsistency::WriteBarrierType::kMarking, barrier_type);
   JSHeapConsistency::DijkstraMarkingBarrier(
       params, cpp_heap().GetHeapHandle(), wrappable);
@@ -105,8 +106,9 @@ TEST_F(UnifiedHeapTest, WriteBarrierCppToV8Reference) {
   api_object->SetAlignedPointerInInternalField(1, kMagicAddress);
   wrappable->SetWrapper(v8_isolate(), api_object);
   JSHeapConsistency::WriteBarrierParams params;
-  auto barrier_type =
-      JSHeapConsistency::GetWriteBarrierType(wrappable->wrapper(), params);
+  auto barrier_type = JSHeapConsistency::GetWriteBarrierType(
+      wrappable->wrapper(), params,
+      [this]() -> cppgc::HeapHandle& { return cpp_heap().GetHeapHandle(); });
   EXPECT_EQ(JSHeapConsistency::WriteBarrierType::kMarking, barrier_type);
   JSHeapConsistency::DijkstraMarkingBarrier(
       params, cpp_heap().GetHeapHandle(), wrappable->wrapper());