// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
#define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_

#include <cstddef>
#include <cstdint>

#include "cppgc/heap-state.h"
#include "cppgc/internal/api-constants.h"
#include "cppgc/internal/atomic-entry-flag.h"
#include "cppgc/platform.h"
#include "cppgc/sentinel-pointer.h"
#include "cppgc/trace-trait.h"
#include "v8config.h"  // NOLINT(build/include_directory)

#if defined(CPPGC_CAGED_HEAP)
#include "cppgc/internal/caged-heap-local-data.h"
#endif

namespace cppgc {

class HeapHandle;

namespace internal {

#if defined(CPPGC_CAGED_HEAP)
class WriteBarrierTypeForCagedHeapPolicy;
#else   // !CPPGC_CAGED_HEAP
class WriteBarrierTypeForNonCagedHeapPolicy;
#endif  // !CPPGC_CAGED_HEAP

class V8_EXPORT WriteBarrier final {
 public:
  enum class Type : uint8_t {
    kNone,
    kMarking,
    kGenerational,
  };

  struct Params {
    HeapHandle* heap = nullptr;
#if V8_ENABLE_CHECKS
    Type type = Type::kNone;
#endif  // V8_ENABLE_CHECKS
#if defined(CPPGC_CAGED_HEAP)
    uintptr_t start = 0;
    CagedHeapLocalData& caged_heap() const {
      return *reinterpret_cast<CagedHeapLocalData*>(start);
    }
    uintptr_t slot_offset = 0;
    uintptr_t value_offset = 0;
#endif  // CPPGC_CAGED_HEAP
  };

  enum class ValueMode {
    kValuePresent,
    kNoValuePresent,
  };

  // Returns the required write barrier for a given `slot` and `value`.
  static V8_INLINE Type GetWriteBarrierType(const void* slot,
                                            const void* value, Params& params);
  // Returns the required write barrier for a given `slot`.
  template <typename HeapHandleCallback>
  static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
                                            HeapHandleCallback callback);
  // Returns the required write barrier for a given `value`.
  static V8_INLINE Type GetWriteBarrierType(const void* value, Params& params);
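
  // Example (illustrative sketch; `slot` and `value` are hypothetical and
  // this block is not part of the original header): callers are expected to
  // query the barrier type first and then dispatch on the result, with
  // `params` carrying data computed during the query:
  //
  //   WriteBarrier::Params params;
  //   const WriteBarrier::Type type =
  //       WriteBarrier::GetWriteBarrierType(slot, value, params);
  //   switch (type) {
  //     case WriteBarrier::Type::kMarking:
  //       WriteBarrier::DijkstraMarkingBarrier(params, value);
  //       break;
  //     case WriteBarrier::Type::kGenerational:
  //       WriteBarrier::GenerationalBarrier(params, slot);
  //       break;
  //     case WriteBarrier::Type::kNone:
  //       break;
  //   }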

  template <typename HeapHandleCallback>
  static V8_INLINE Type GetWriteBarrierTypeForExternallyReferencedObject(
      const void* value, Params& params, HeapHandleCallback callback);

  static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
                                               const void* object);
  static V8_INLINE void DijkstraMarkingBarrierRange(
      const Params& params, const void* first_element, size_t element_size,
      size_t number_of_elements, TraceCallback trace_callback);
  static V8_INLINE void SteeleMarkingBarrier(const Params& params,
                                             const void* object);
#if defined(CPPGC_YOUNG_GENERATION)
  static V8_INLINE void GenerationalBarrier(const Params& params,
                                            const void* slot);
#else   // !CPPGC_YOUNG_GENERATION
  static V8_INLINE void GenerationalBarrier(const Params& params,
                                            const void* slot) {}
#endif  // CPPGC_YOUNG_GENERATION

#if V8_ENABLE_CHECKS
  static void CheckParams(Type expected_type, const Params& params);
#else   // !V8_ENABLE_CHECKS
  static void CheckParams(Type expected_type, const Params& params) {}
#endif  // !V8_ENABLE_CHECKS

  // The IncrementalOrConcurrentMarkingFlagUpdater class allows cppgc
  // internals to update |incremental_or_concurrent_marking_flag_|.
  class IncrementalOrConcurrentMarkingFlagUpdater;
  static bool IsAnyIncrementalOrConcurrentMarking() {
    return incremental_or_concurrent_marking_flag_.MightBeEntered();
  }
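
  // Note (informal): AtomicEntryFlag (cppgc/internal/atomic-entry-flag.h) is
  // a relaxed atomic counter of Enter()/Exit() pairs, so MightBeEntered() may
  // return true spuriously (e.g. some other heap is marking). It is only a
  // fast filter; the dispatch code below still confirms against the actual
  // heap state on the slow path before choosing Type::kMarking.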

 private:
  WriteBarrier() = delete;

#if defined(CPPGC_CAGED_HEAP)
  using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
#else   // !CPPGC_CAGED_HEAP
  using WriteBarrierTypePolicy = WriteBarrierTypeForNonCagedHeapPolicy;
#endif  // !CPPGC_CAGED_HEAP

  static void DijkstraMarkingBarrierSlow(const void* value);
  static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value);
  static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
                                              const void* first_element,
                                              size_t element_size,
                                              size_t number_of_elements,
                                              TraceCallback trace_callback);
  static void SteeleMarkingBarrierSlow(const void* value);
  static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);

#if defined(CPPGC_YOUNG_GENERATION)
  static CagedHeapLocalData& GetLocalData(HeapHandle&);
  static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
                                      const AgeTable& age_table,
                                      const void* slot, uintptr_t value_offset);
#endif  // CPPGC_YOUNG_GENERATION

  static AtomicEntryFlag incremental_or_concurrent_marking_flag_;
};

template <WriteBarrier::Type type>
V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
  if (type == WriteBarrier::Type::kNone) return WriteBarrier::Type::kNone;
#if V8_ENABLE_CHECKS
  params.type = type;
#endif  // V8_ENABLE_CHECKS
  return type;
}
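
// Note: SetAndReturnType() is the single funnel through which every policy
// reports its decision. In debug configurations (V8_ENABLE_CHECKS) it records
// the chosen type in params.type so that WriteBarrier::CheckParams() can
// later verify that the barrier actually executed matches the type that was
// computed; in release builds it reduces to returning the template argument.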

#if defined(CPPGC_CAGED_HEAP)
class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
 public:
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return GetNoSlot(value, params, callback);
  }

  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type GetForExternallyReferenced(
      const void* value, WriteBarrier::Params& params,
      HeapHandleCallback callback) {
    return GetNoSlot(value, params, callback);
  }

 private:
  WriteBarrierTypeForCagedHeapPolicy() = delete;

  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
                                                WriteBarrier::Params& params,
                                                HeapHandleCallback) {
    if (!TryGetCagedHeap(value, value, params)) {
      return WriteBarrier::Type::kNone;
    }
    if (V8_UNLIKELY(params.caged_heap().is_incremental_marking_in_progress)) {
      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
    }
    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
  }

  template <WriteBarrier::ValueMode value_mode>
  struct ValueModeDispatch;

  static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
                                        WriteBarrier::Params& params) {
    // TODO(chromium:1056170): Check if the null check can be folded in with
    // the rest of the write barrier.
    if (!value) return false;
    params.start = reinterpret_cast<uintptr_t>(value) &
                   ~(api_constants::kCagedHeapReservationAlignment - 1);
    const uintptr_t slot_offset =
        reinterpret_cast<uintptr_t>(slot) - params.start;
    if (slot_offset > api_constants::kCagedHeapReservationSize) {
      // Check if slot is on stack or value is sentinel or nullptr. This
      // relies on the fact that kSentinelPointer is encoded as 0x1.
      return false;
    }
    return true;
  }
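
  // A worked example of the masking above, assuming a 4 GiB cage (i.e.
  // kCagedHeapReservationAlignment == kCagedHeapReservationSize ==
  // 0x1'0000'0000; see internal/api-constants.h for the actual values):
  //
  //   value        = 0x0000'7f42'1234'5678
  //   params.start = value & 0xffff'ffff'0000'0000 = 0x0000'7f42'0000'0000
  //   slot_offset  = slot - params.start
  //
  // Any slot outside [start, start + 4 GiB) produces an offset above the
  // reservation size (for slots below `start`, unsigned subtraction wraps
  // around), which filters out on-stack slots. A sentinel `value` (0x1)
  // yields start == 0, so a heap slot's offset likewise fails the bound.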

  // Returns whether marking is in progress. If marking is not in progress,
  // sets the start of the cage accordingly.
  //
  // TODO(chromium:1056170): Create fast path on API.
  static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
};

template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback) {
    bool within_cage = TryGetCagedHeap(slot, value, params);
    if (!within_cage) {
      return WriteBarrier::Type::kNone;
    }
    if (V8_LIKELY(!params.caged_heap().is_incremental_marking_in_progress)) {
#if defined(CPPGC_YOUNG_GENERATION)
      params.heap = reinterpret_cast<HeapHandle*>(params.start);
      params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
      params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
#else   // !CPPGC_YOUNG_GENERATION
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
#endif  // !CPPGC_YOUNG_GENERATION
    }
    params.heap = reinterpret_cast<HeapHandle*>(params.start);
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};

template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kNoValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
#if defined(CPPGC_YOUNG_GENERATION)
    HeapHandle& handle = callback();
    if (V8_LIKELY(!IsMarking(handle, params))) {
      // params.start is populated by IsMarking().
      params.heap = &handle;
      params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
      // params.value_offset stays 0.
      if (params.slot_offset > api_constants::kCagedHeapReservationSize) {
        // Check if slot is on stack.
        return SetAndReturnType<WriteBarrier::Type::kNone>(params);
      }
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
    }
#else   // !CPPGC_YOUNG_GENERATION
    if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
    HeapHandle& handle = callback();
    if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
#endif  // !CPPGC_YOUNG_GENERATION
    params.heap = &handle;
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};

#endif  // CPPGC_CAGED_HEAP

class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
 public:
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    // The slot will never be used in `Get()` below.
    return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
                                                       callback);
  }

  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type GetForExternallyReferenced(
      const void* value, WriteBarrier::Params& params,
      HeapHandleCallback callback) {
    // The slot will never be used in `Get()` below.
    return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
                                                       callback);
  }

 private:
  template <WriteBarrier::ValueMode value_mode>
  struct ValueModeDispatch;

  // TODO(chromium:1056170): Create fast path on API.
  static bool IsMarking(const void*, HeapHandle**);
  // TODO(chromium:1056170): Create fast path on API.
  static bool IsMarking(HeapHandle&);

  WriteBarrierTypeForNonCagedHeapPolicy() = delete;
};

template <>
struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    // The following check covers nullptr as well as sentinel pointer.
    if (object <= static_cast<void*>(kSentinelPointer)) {
      return WriteBarrier::Type::kNone;
    }
    if (IsMarking(object, &params.heap)) {
      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
    }
    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
  }
};

template <>
struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kNoValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    if (V8_UNLIKELY(WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
      HeapHandle& handle = callback();
      if (IsMarking(handle)) {
        params.heap = &handle;
        return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
      }
    }
    return WriteBarrier::Type::kNone;
  }
};

// static
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, const void* value, WriteBarrier::Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
                                                               params,
                                                               []() {});
}

// static
template <typename HeapHandleCallback>
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, WriteBarrier::Params& params,
    HeapHandleCallback callback) {
  return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
      slot, nullptr, params, callback);
}

// static
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* value, WriteBarrier::Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(value, params,
                                                               []() {});
}

// static
template <typename HeapHandleCallback>
WriteBarrier::Type
WriteBarrier::GetWriteBarrierTypeForExternallyReferencedObject(
    const void* value, Params& params, HeapHandleCallback callback) {
  return WriteBarrierTypePolicy::GetForExternallyReferenced(value, params,
                                                            callback);
}

// static
void WriteBarrier::DijkstraMarkingBarrier(const Params& params,
                                          const void* object) {
  CheckParams(Type::kMarking, params);
#if defined(CPPGC_CAGED_HEAP)
  // Caged heap already filters out sentinels.
  DijkstraMarkingBarrierSlow(object);
#else   // !CPPGC_CAGED_HEAP
  DijkstraMarkingBarrierSlowWithSentinelCheck(object);
#endif  // !CPPGC_CAGED_HEAP
}
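
// Note (informal): the two marking-barrier flavors differ in what they keep
// alive. A Dijkstra-style barrier shades the newly written value so the
// marker cannot miss it, while a Steele-style barrier conservatively
// re-visits an object so that mutations made after it was traced are observed
// again. As the #if above shows, the caged configuration has already filtered
// sentinel values during the type query, so only the non-caged build needs
// the ...WithSentinelCheck slow path.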

// static
void WriteBarrier::DijkstraMarkingBarrierRange(const Params& params,
                                               const void* first_element,
                                               size_t element_size,
                                               size_t number_of_elements,
                                               TraceCallback trace_callback) {
  CheckParams(Type::kMarking, params);
  DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
                                  number_of_elements, trace_callback);
}

// static
void WriteBarrier::SteeleMarkingBarrier(const Params& params,
                                        const void* object) {
  CheckParams(Type::kMarking, params);
#if defined(CPPGC_CAGED_HEAP)
  // Caged heap already filters out sentinels.
  SteeleMarkingBarrierSlow(object);
#else   // !CPPGC_CAGED_HEAP
  SteeleMarkingBarrierSlowWithSentinelCheck(object);
#endif  // !CPPGC_CAGED_HEAP
}

#if defined(CPPGC_YOUNG_GENERATION)
// static
void WriteBarrier::GenerationalBarrier(const Params& params,
                                       const void* slot) {
  CheckParams(Type::kGenerational, params);

  const CagedHeapLocalData& local_data = params.caged_heap();
  const AgeTable& age_table = local_data.age_table;

  // Bail out if the slot is in the young generation.
  if (V8_LIKELY(age_table[params.slot_offset] == AgeTable::Age::kYoung)) return;

  GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset);
}
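
// Note on the fast path above: params.slot_offset was computed during
// GetWriteBarrierType(), so the generational check is a single AgeTable
// lookup in cage-local data. A young slot needs no barrier because the young
// generation is scanned by the next minor GC anyway; only an old slot may
// create an old-to-young edge. The slow path (defined in the heap
// implementation) is then expected to inspect the value's age via
// params.value_offset and remember the slot only if it actually points into
// the young generation.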

#endif  // CPPGC_YOUNG_GENERATION

}  // namespace internal
}  // namespace cppgc

#endif  // INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_