api: Provide write barrier in TracedReferenceBase
TracedReferenceBase uses (traced) global handles to implement its references. Provide a write barrier in the corresponding handle methods. Doing so
- avoids bugs, as embedders no longer need to take care of write barrier management themselves;
- speeds up the barrier, as it is better integrated into the handle methods.

Drive-by: We don't need write barriers on initializing stores.

Bug: v8:12165
Change-Id: Ie49cc3783aeed576fd46c957c473c61362fefbf2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3247039
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77593}
Parent: cced52a97e
Commit: e5a509049e
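Before the diff, a short self-contained sketch of the idea (not part of the change itself): the handle layer now decides whether to emit a marking write barrier based on the kind of store, skipping it for initializing stores. StoreTracedReference and the stub MarkingFromGlobalHandle below are hypothetical stand-ins for the internal entry points (GlobalHandles::CreateTraced and WriteBarrier::MarkingFromGlobalHandle) that the diff actually touches.

#include <cstdint>

// Toy model of the new dispatch: only assigning stores (overwriting an
// existing reference while incremental marking may be running) need the
// barrier; initializing stores are exempt.
enum class GlobalHandleStoreMode { kInitializingStore, kAssigningStore };

// Hypothetical stand-in for WriteBarrier::MarkingFromGlobalHandle(value).
void MarkingFromGlobalHandle(uintptr_t /*value*/) { /* mark value, keep it alive */ }

// Hypothetical helper mirroring what the handle methods now do internally.
void StoreTracedReference(uintptr_t* slot, uintptr_t value,
                          GlobalHandleStoreMode store_mode) {
  *slot = value;
  if (store_mode != GlobalHandleStoreMode::kInitializingStore) {
    MarkingFromGlobalHandle(value);
  }
}

The same reasoning shows up below in GlobalHandles::CreateTraced (barrier only for kAssigningStore) and GlobalHandles::MoveTracedGlobal (barrier only when the destination handle is not on stack).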
@@ -26,13 +26,20 @@ namespace v8 {
 class Value;
 
 namespace internal {
-class BasicTracedReferenceExtractor;
-}  // namespace internal
 
-namespace api_internal {
+class BasicTracedReferenceExtractor;
+
+enum class GlobalHandleDestructionMode { kWithDestructor, kWithoutDestructor };
+
+enum class GlobalHandleStoreMode {
+  kInitializingStore,
+  kAssigningStore,
+};
+
 V8_EXPORT internal::Address* GlobalizeTracedReference(
     internal::Isolate* isolate, internal::Address* handle,
-    internal::Address* slot, bool has_destructor);
+    internal::Address* slot, GlobalHandleDestructionMode destruction_mode,
+    GlobalHandleStoreMode store_mode);
 V8_EXPORT void MoveTracedGlobalReference(internal::Address** from,
                                          internal::Address** to);
 V8_EXPORT void CopyTracedGlobalReference(const internal::Address* const* from,
@@ -41,7 +48,8 @@ V8_EXPORT void DisposeTracedGlobal(internal::Address* global_handle);
 V8_EXPORT void SetFinalizationCallbackTraced(
     internal::Address* location, void* parameter,
     WeakCallbackInfo<void>::Callback callback);
-}  // namespace api_internal
+
+}  // namespace internal
 
 /**
  * Deprecated. Use |TracedReference<T>| instead.
@@ -164,15 +172,15 @@ class BasicTracedReference : public TracedReferenceBase {
   }
 
  private:
-  enum DestructionMode { kWithDestructor, kWithoutDestructor };
-
   /**
    * An empty BasicTracedReference without storage cell.
    */
   BasicTracedReference() = default;
 
-  V8_INLINE static internal::Address* New(Isolate* isolate, T* that, void* slot,
-                                          DestructionMode destruction_mode);
+  V8_INLINE static internal::Address* New(
+      Isolate* isolate, T* that, void* slot,
+      internal::GlobalHandleDestructionMode destruction_mode,
+      internal::GlobalHandleStoreMode store_mode);
 
   friend class EmbedderHeapTracer;
   template <typename F>
@@ -215,8 +223,10 @@ class TracedGlobal : public BasicTracedReference<T> {
   */
  template <class S>
  TracedGlobal(Isolate* isolate, Local<S> that) : BasicTracedReference<T>() {
-    this->val_ = this->New(isolate, that.val_, &this->val_,
-                           BasicTracedReference<T>::kWithDestructor);
+    this->val_ =
+        this->New(isolate, that.val_, &this->val_,
+                  internal::GlobalHandleDestructionMode::kWithDestructor,
+                  internal::GlobalHandleStoreMode::kInitializingStore);
     static_assert(std::is_base_of<T, S>::value, "type check");
   }
 
@@ -338,8 +348,10 @@ class TracedReference : public BasicTracedReference<T> {
   */
  template <class S>
  TracedReference(Isolate* isolate, Local<S> that) : BasicTracedReference<T>() {
-    this->val_ = this->New(isolate, that.val_, &this->val_,
-                           BasicTracedReference<T>::kWithoutDestructor);
+    this->val_ =
+        this->New(isolate, that.val_, &this->val_,
+                  internal::GlobalHandleDestructionMode::kWithoutDestructor,
+                  internal::GlobalHandleStoreMode::kInitializingStore);
     static_assert(std::is_base_of<T, S>::value, "type check");
   }
 
@@ -420,18 +432,19 @@ class TracedReference : public BasicTracedReference<T> {
 // --- Implementation ---
 template <class T>
 internal::Address* BasicTracedReference<T>::New(
-    Isolate* isolate, T* that, void* slot, DestructionMode destruction_mode) {
+    Isolate* isolate, T* that, void* slot,
+    internal::GlobalHandleDestructionMode destruction_mode,
+    internal::GlobalHandleStoreMode store_mode) {
   if (that == nullptr) return nullptr;
   internal::Address* p = reinterpret_cast<internal::Address*>(that);
-  return api_internal::GlobalizeTracedReference(
+  return internal::GlobalizeTracedReference(
       reinterpret_cast<internal::Isolate*>(isolate), p,
-      reinterpret_cast<internal::Address*>(slot),
-      destruction_mode == kWithDestructor);
+      reinterpret_cast<internal::Address*>(slot), destruction_mode, store_mode);
 }
 
 void TracedReferenceBase::Reset() {
   if (IsEmpty()) return;
-  api_internal::DisposeTracedGlobal(reinterpret_cast<internal::Address*>(val_));
+  internal::DisposeTracedGlobal(reinterpret_cast<internal::Address*>(val_));
   SetSlotThreadSafe(nullptr);
 }
 
@@ -484,7 +497,8 @@ void TracedGlobal<T>::Reset(Isolate* isolate, const Local<S>& other) {
   Reset();
   if (other.IsEmpty()) return;
   this->val_ = this->New(isolate, other.val_, &this->val_,
-                         BasicTracedReference<T>::kWithDestructor);
+                         internal::GlobalHandleDestructionMode::kWithDestructor,
+                         internal::GlobalHandleStoreMode::kAssigningStore);
 }
 
 template <class T>
@@ -506,7 +520,7 @@ TracedGlobal<T>& TracedGlobal<T>::operator=(const TracedGlobal<S>& rhs) {
 template <class T>
 TracedGlobal<T>& TracedGlobal<T>::operator=(TracedGlobal&& rhs) {
   if (this != &rhs) {
-    api_internal::MoveTracedGlobalReference(
+    internal::MoveTracedGlobalReference(
         reinterpret_cast<internal::Address**>(&rhs.val_),
         reinterpret_cast<internal::Address**>(&this->val_));
   }
@@ -518,7 +532,7 @@ TracedGlobal<T>& TracedGlobal<T>::operator=(const TracedGlobal& rhs) {
   if (this != &rhs) {
     this->Reset();
     if (rhs.val_ != nullptr) {
-      api_internal::CopyTracedGlobalReference(
+      internal::CopyTracedGlobalReference(
          reinterpret_cast<const internal::Address* const*>(&rhs.val_),
          reinterpret_cast<internal::Address**>(&this->val_));
     }
@@ -534,7 +548,8 @@ void TracedReference<T>::Reset(Isolate* isolate, const Local<S>& other) {
   if (other.IsEmpty()) return;
   this->SetSlotThreadSafe(
       this->New(isolate, other.val_, &this->val_,
-                BasicTracedReference<T>::kWithoutDestructor));
+                internal::GlobalHandleDestructionMode::kWithoutDestructor,
+                internal::GlobalHandleStoreMode::kAssigningStore));
 }
 
 template <class T>
@@ -557,7 +572,7 @@ TracedReference<T>& TracedReference<T>::operator=(
 template <class T>
 TracedReference<T>& TracedReference<T>::operator=(TracedReference&& rhs) {
   if (this != &rhs) {
-    api_internal::MoveTracedGlobalReference(
+    internal::MoveTracedGlobalReference(
        reinterpret_cast<internal::Address**>(&rhs.val_),
        reinterpret_cast<internal::Address**>(&this->val_));
   }
@@ -569,7 +584,7 @@ TracedReference<T>& TracedReference<T>::operator=(const TracedReference& rhs) {
   if (this != &rhs) {
     this->Reset();
     if (rhs.val_ != nullptr) {
-      api_internal::CopyTracedGlobalReference(
+      internal::CopyTracedGlobalReference(
          reinterpret_cast<const internal::Address* const*>(&rhs.val_),
          reinterpret_cast<internal::Address**>(&this->val_));
     }
@@ -596,7 +611,7 @@ uint16_t TracedReferenceBase::WrapperClassId() const {
 template <class T>
 void TracedGlobal<T>::SetFinalizationCallback(
     void* parameter, typename WeakCallbackInfo<void>::Callback callback) {
-  api_internal::SetFinalizationCallbackTraced(
+  internal::SetFinalizationCallbackTraced(
       reinterpret_cast<internal::Address*>(this->val_), parameter, callback);
 }
 
@@ -830,17 +830,19 @@ void ResourceConstraints::ConfigureDefaults(uint64_t physical_memory,
   }
 }
 
-namespace api_internal {
-i::Address* GlobalizeTracedReference(i::Isolate* isolate, i::Address* obj,
-                                     internal::Address* slot,
-                                     bool has_destructor) {
+namespace internal {
+
+i::Address* GlobalizeTracedReference(
+    i::Isolate* isolate, i::Address* obj, internal::Address* slot,
+    GlobalHandleDestructionMode destruction_mode,
+    GlobalHandleStoreMode store_mode) {
   LOG_API(isolate, TracedGlobal, New);
 #ifdef DEBUG
   Utils::ApiCheck((slot != nullptr), "v8::GlobalizeTracedReference",
                   "the address slot must be not null");
 #endif
-  i::Handle<i::Object> result =
-      isolate->global_handles()->CreateTraced(*obj, slot, has_destructor);
+  i::Handle<i::Object> result = isolate->global_handles()->CreateTraced(
+      *obj, slot, destruction_mode, store_mode);
 #ifdef VERIFY_HEAP
   if (i::FLAG_verify_heap) {
     i::Object(*obj).ObjectVerify(isolate);
@@ -849,6 +851,30 @@ i::Address* GlobalizeTracedReference(i::Isolate* isolate, i::Address* obj,
   return result.location();
 }
 
+void MoveTracedGlobalReference(internal::Address** from,
+                               internal::Address** to) {
+  GlobalHandles::MoveTracedGlobal(from, to);
+}
+
+void CopyTracedGlobalReference(const internal::Address* const* from,
+                               internal::Address** to) {
+  GlobalHandles::CopyTracedGlobal(from, to);
+}
+
+void DisposeTracedGlobal(internal::Address* location) {
+  GlobalHandles::DestroyTraced(location);
+}
+
+void SetFinalizationCallbackTraced(internal::Address* location, void* parameter,
+                                   WeakCallbackInfo<void>::Callback callback) {
+  GlobalHandles::SetFinalizationCallbackForTraced(location, parameter,
+                                                  callback);
+}
+
+}  // namespace internal
+
+namespace api_internal {
+
 i::Address* GlobalizeReference(i::Isolate* isolate, i::Address* obj) {
   LOG_API(isolate, Persistent, New);
   i::Handle<i::Object> result = isolate->global_handles()->Create(*obj);
@@ -900,26 +926,6 @@ Value* Eternalize(Isolate* v8_isolate, Value* value) {
       isolate->eternal_handles()->Get(index).location());
 }
 
-void MoveTracedGlobalReference(internal::Address** from,
-                               internal::Address** to) {
-  i::GlobalHandles::MoveTracedGlobal(from, to);
-}
-
-void CopyTracedGlobalReference(const internal::Address* const* from,
-                               internal::Address** to) {
-  i::GlobalHandles::CopyTracedGlobal(from, to);
-}
-
-void DisposeTracedGlobal(internal::Address* location) {
-  i::GlobalHandles::DestroyTraced(location);
-}
-
-void SetFinalizationCallbackTraced(internal::Address* location, void* parameter,
-                                   WeakCallbackInfo<void>::Callback callback) {
-  i::GlobalHandles::SetFinalizationCallbackForTraced(location, parameter,
-                                                     callback);
-}
-
 void FromJustIsNothing() {
   Utils::ApiCheck(false, "v8::FromJust", "Maybe value is Nothing.");
 }
@@ -16,6 +16,7 @@
 #include "src/heap/embedder-tracing.h"
 #include "src/heap/heap-inl.h"
+#include "src/heap/heap-write-barrier-inl.h"
 #include "src/heap/heap-write-barrier.h"
 #include "src/init/v8.h"
 #include "src/logging/counters.h"
 #include "src/objects/objects-inl.h"
@@ -948,16 +949,17 @@ Handle<Object> GlobalHandles::Create(Address value) {
   return Create(Object(value));
 }
 
-Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot,
-                                           bool has_destructor) {
+Handle<Object> GlobalHandles::CreateTraced(
+    Object value, Address* slot, GlobalHandleDestructionMode destruction_mode,
+    GlobalHandleStoreMode store_mode) {
   return CreateTraced(
-      value, slot, has_destructor,
+      value, slot, destruction_mode, store_mode,
       on_stack_nodes_->IsOnStack(reinterpret_cast<uintptr_t>(slot)));
 }
 
-Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot,
-                                           bool has_destructor,
-                                           bool is_on_stack) {
+Handle<Object> GlobalHandles::CreateTraced(
+    Object value, Address* slot, GlobalHandleDestructionMode destruction_mode,
+    GlobalHandleStoreMode store_mode, bool is_on_stack) {
   GlobalHandles::TracedNode* result;
   if (is_on_stack) {
     result = on_stack_nodes_->Acquire(value, reinterpret_cast<uintptr_t>(slot));
@@ -967,15 +969,21 @@ Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot,
       traced_young_nodes_.push_back(result);
       result->set_in_young_list(true);
     }
+    if (store_mode != GlobalHandleStoreMode::kInitializingStore) {
+      WriteBarrier::MarkingFromGlobalHandle(value);
+    }
   }
+  const bool has_destructor =
+      destruction_mode == GlobalHandleDestructionMode::kWithDestructor;
   result->set_has_destructor(has_destructor);
   result->set_parameter(has_destructor ? slot : nullptr);
   return result->handle();
 }
 
-Handle<Object> GlobalHandles::CreateTraced(Address value, Address* slot,
-                                           bool has_destructor) {
-  return CreateTraced(Object(value), slot, has_destructor);
+Handle<Object> GlobalHandles::CreateTraced(
+    Address value, Address* slot, GlobalHandleDestructionMode destruction_mode,
+    GlobalHandleStoreMode store_mode) {
+  return CreateTraced(Object(value), slot, destruction_mode, store_mode);
 }
 
 Handle<Object> GlobalHandles::CopyGlobal(Address* location) {
@@ -1012,7 +1020,10 @@ void GlobalHandles::CopyTracedGlobal(const Address* const* from, Address** to) {
   GlobalHandles* global_handles =
       GlobalHandles::From(const_cast<TracedNode*>(node));
   Handle<Object> o = global_handles->CreateTraced(
-      node->object(), reinterpret_cast<Address*>(to), node->has_destructor());
+      node->object(), reinterpret_cast<Address*>(to),
+      node->has_destructor() ? GlobalHandleDestructionMode::kWithDestructor
+                             : GlobalHandleDestructionMode::kWithoutDestructor,
+      GlobalHandleStoreMode::kAssigningStore);
   SetSlotThreadSafe(to, o.location());
   TracedNode::Verify(global_handles, from);
   TracedNode::Verify(global_handles, to);
@@ -1082,7 +1093,10 @@ void GlobalHandles::MoveTracedGlobal(Address** from, Address** to) {
     DCHECK(global_handles);
     Handle<Object> o = global_handles->CreateTraced(
         from_node->object(), reinterpret_cast<Address*>(to),
-        from_node->has_destructor(), to_on_stack);
+        from_node->has_destructor()
+            ? GlobalHandleDestructionMode::kWithDestructor
+            : GlobalHandleDestructionMode::kWithoutDestructor,
+        GlobalHandleStoreMode::kAssigningStore, to_on_stack);
     SetSlotThreadSafe(to, o.location());
     to_node = TracedNode::FromLocation(*to);
     DCHECK(to_node->markbit());
@@ -1095,6 +1109,9 @@ void GlobalHandles::MoveTracedGlobal(Address** from, Address** to) {
       global_handles->traced_young_nodes_.push_back(to_node);
       to_node->set_in_young_list(true);
     }
+    if (!to_on_stack) {
+      WriteBarrier::MarkingFromGlobalHandle(to_node->object());
+    }
   }
   DestroyTraced(*from);
   SetSlotThreadSafe(from, nullptr);
@@ -1110,6 +1127,7 @@ void GlobalHandles::MoveTracedGlobal(Address** from, Address** to) {
     if (to_node->has_destructor()) {
       to_node->set_parameter(to);
     }
+    WriteBarrier::MarkingFromGlobalHandle(to_node->object());
     SetSlotThreadSafe(from, nullptr);
   }
   TracedNode::Verify(global_handles, to);
@@ -13,6 +13,7 @@
 #include "include/v8-callbacks.h"
 #include "include/v8-persistent-handle.h"
 #include "include/v8-profiler.h"
+#include "include/v8-traced-handle.h"
 #include "src/handles/handles.h"
 #include "src/heap/heap.h"
 #include "src/objects/heap-object.h"
@@ -104,11 +105,16 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
   template <typename T>
   inline Handle<T> Create(T value);
 
-  Handle<Object> CreateTraced(Object value, Address* slot, bool has_destructor,
+  Handle<Object> CreateTraced(Object value, Address* slot,
+                              GlobalHandleDestructionMode destruction_mode,
+                              GlobalHandleStoreMode store_mode,
                               bool is_on_stack);
-  Handle<Object> CreateTraced(Object value, Address* slot, bool has_destructor);
+  Handle<Object> CreateTraced(Object value, Address* slot,
+                              GlobalHandleDestructionMode destruction_mode,
+                              GlobalHandleStoreMode store_mode);
   Handle<Object> CreateTraced(Address value, Address* slot,
-                              bool has_destructor);
+                              GlobalHandleDestructionMode destruction_mode,
+                              GlobalHandleStoreMode store_mode);
 
   void RecordStats(HeapStats* stats);
 
@@ -262,6 +262,19 @@ void WriteBarrier::Marking(DescriptorArray descriptor_array,
   MarkingSlow(*heap, descriptor_array, number_of_own_descriptors);
 }
 
+// static
+void WriteBarrier::MarkingFromGlobalHandle(Object value) {
+  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
+  if (!value.IsHeapObject()) return;
+
+  HeapObject heap_value = HeapObject::cast(value);
+  // Value may be in read only space but the chunk should never be marked
+  // as marking which would result in a bail out.
+  auto heap = GetHeapIfMarking(heap_value);
+  if (!heap) return;
+  MarkingSlowFromGlobalHandle(*heap, heap_value);
+}
+
 }  // namespace internal
 }  // namespace v8
 
@@ -40,6 +40,11 @@ void WriteBarrier::MarkingSlow(Heap* heap, HeapObject host, HeapObjectSlot slot,
   marking_barrier->Write(host, slot, value);
 }
 
+// static
+void WriteBarrier::MarkingSlowFromGlobalHandle(Heap* heap, HeapObject value) {
+  heap->marking_barrier()->WriteWithoutHost(value);
+}
+
 void WriteBarrier::MarkingSlow(Heap* heap, Code host, RelocInfo* reloc_info,
                                HeapObject value) {
   MarkingBarrier* marking_barrier = current_marking_barrier
@@ -55,6 +55,8 @@ class V8_EXPORT_PRIVATE WriteBarrier {
   static inline void Marking(DescriptorArray, int number_of_own_descriptors);
   // It is invoked from generated code and has to take raw addresses.
   static int MarkingFromCode(Address raw_host, Address raw_slot);
+  // Invoked from global handles where no host object is available.
+  static inline void MarkingFromGlobalHandle(Object value);
 
   static void SetForThread(MarkingBarrier*);
   static void ClearForThread(MarkingBarrier*);

@@ -62,6 +64,8 @@ class V8_EXPORT_PRIVATE WriteBarrier {
   static MarkingBarrier* CurrentMarkingBarrier(Heap* heap);
 
  private:
+  static inline base::Optional<Heap*> GetHeapIfMarking(HeapObject object);
+
   static void MarkingSlow(Heap* heap, HeapObject host, HeapObjectSlot,
                           HeapObject value);
   static void MarkingSlow(Heap* heap, Code host, RelocInfo*, HeapObject value);

@@ -69,7 +73,7 @@ class V8_EXPORT_PRIVATE WriteBarrier {
                           ArrayBufferExtension*);
   static void MarkingSlow(Heap* heap, DescriptorArray,
                           int number_of_own_descriptors);
-  static inline base::Optional<Heap*> GetHeapIfMarking(HeapObject object);
+  static void MarkingSlowFromGlobalHandle(Heap* heap, HeapObject value);
 };
 
 }  // namespace internal

@@ -48,6 +48,17 @@ void MarkingBarrier::Write(HeapObject host, HeapObjectSlot slot,
   }
 }
 
+void MarkingBarrier::WriteWithoutHost(HeapObject value) {
+  DCHECK(is_main_thread_barrier_);
+  if (WhiteToGreyAndPush(value)) {
+    incremental_marking_->RestartIfNotMarking();
+
+    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
+      heap_->AddRetainingRoot(Root::kWriteBarrier, value);
+    }
+  }
+}
+
 void MarkingBarrier::Write(Code host, RelocInfo* reloc_info, HeapObject value) {
   DCHECK(IsCurrentMarkingBarrier());
   if (MarkValue(host, value)) {

@@ -36,6 +36,9 @@ class MarkingBarrier {
   void Write(Code host, RelocInfo*, HeapObject value);
   void Write(JSArrayBuffer host, ArrayBufferExtension*);
   void Write(DescriptorArray, int number_of_own_descriptors);
+  // Only usable when there's no valid JS host object for this write, e.g., when
+  // value is held alive from a global handle.
+  void WriteWithoutHost(HeapObject value);
 
   // Returns true if the slot needs to be recorded.
   inline bool MarkValue(HeapObject host, HeapObject value);

@@ -463,6 +463,10 @@ TEST(TracedGlobalInStdUnorderedMap) {
 }
 
 TEST(TracedGlobalToUnmodifiedJSObjectDiesOnMarkSweep) {
+  // When stressing incremental marking, a write barrier may keep the object
+  // alive.
+  if (FLAG_stress_incremental_marking) return;
+
   CcTest::InitializeVM();
   TracedGlobalTest(
       CcTest::isolate(), ConstructJSObject,

@@ -3,14 +3,17 @@
 // found in the LICENSE file.
 
 #include "include/v8-cppgc.h"
+#include "include/v8-traced-handle.h"
 #include "src/api/api-inl.h"
 #include "src/heap/cppgc/visitor.h"
+#include "test/unittests/heap/heap-utils.h"
 #include "test/unittests/test-utils.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace v8 {
 namespace internal {
 
-using TracedReferenceTest = TestWithIsolate;
+using TracedReferenceTest = TestWithHeapInternals;
 
 TEST_F(TracedReferenceTest, ResetFromLocal) {
   v8::Local<v8::Context> context = v8::Context::New(v8_isolate());

@@ -200,5 +203,148 @@ TEST_F(TracedReferenceTest, TracedReferenceTrace) {
   }
 }
 
+TEST_F(TracedReferenceTest, NoWriteBarrierOnConstruction) {
+  if (!FLAG_incremental_marking) return;
+
+  v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
+  v8::Context::Scope context_scope(context);
+  {
+    v8::HandleScope handles(v8_isolate());
+    v8::Local<v8::Object> local =
+        v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
+    SimulateIncrementalMarking();
+    MarkCompactCollector::MarkingState state;
+    ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
+    std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
+    EXPECT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
+  }
+}
+
+TEST_F(TracedReferenceTest, WriteBarrierOnHeapReset) {
+  if (!FLAG_incremental_marking) return;
+
+  v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
+  v8::Context::Scope context_scope(context);
+  {
+    v8::HandleScope handles(v8_isolate());
+    v8::Local<v8::Object> local =
+        v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
+    auto ref = std::make_unique<v8::TracedReference<v8::Object>>();
+    SimulateIncrementalMarking();
+    MarkCompactCollector::MarkingState state;
+    ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
+    ref->Reset(v8_isolate(), local);
+    EXPECT_TRUE(state.IsGrey(HeapObject::cast(*Utils::OpenHandle(*local))));
+  }
+}
+
+TEST_F(TracedReferenceTest, NoWriteBarrierOnStackReset) {
+  if (!FLAG_incremental_marking) return;
+
+  isolate()->global_handles()->SetStackStart(base::Stack::GetStackStart());
+
+  v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
+  v8::Context::Scope context_scope(context);
+  {
+    v8::HandleScope handles(v8_isolate());
+    v8::Local<v8::Object> local =
+        v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
+    v8::TracedReference<v8::Object> ref;
+    SimulateIncrementalMarking();
+    MarkCompactCollector::MarkingState state;
+    ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
+    ref.Reset(v8_isolate(), local);
+    EXPECT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
+  }
+}
+
+TEST_F(TracedReferenceTest, WriteBarrierOnHeapCopy) {
+  if (!FLAG_incremental_marking) return;
+
+  v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
+  v8::Context::Scope context_scope(context);
+  {
+    v8::HandleScope handles(v8_isolate());
+    v8::Local<v8::Object> local =
+        v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
+    auto ref_from =
+        std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
+    auto ref_to = std::make_unique<v8::TracedReference<v8::Object>>();
+    SimulateIncrementalMarking();
+    MarkCompactCollector::MarkingState state;
+    ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
+    *ref_to = *ref_from;
+    EXPECT_TRUE(!ref_from->IsEmpty());
+    EXPECT_TRUE(state.IsGrey(HeapObject::cast(*Utils::OpenHandle(*local))));
+  }
+}
+
+TEST_F(TracedReferenceTest, NoWriteBarrierOnStackCopy) {
+  if (!FLAG_incremental_marking) return;
+
+  isolate()->global_handles()->SetStackStart(base::Stack::GetStackStart());
+
+  v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
+  v8::Context::Scope context_scope(context);
+  {
+    v8::HandleScope handles(v8_isolate());
+    v8::Local<v8::Object> local =
+        v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
+    auto ref_from =
+        std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
+    v8::TracedReference<v8::Object> ref_to;
+    SimulateIncrementalMarking();
+    MarkCompactCollector::MarkingState state;
+    ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
+    ref_to = *ref_from;
+    EXPECT_TRUE(!ref_from->IsEmpty());
+    EXPECT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
+  }
+}
+
+TEST_F(TracedReferenceTest, WriteBarrierOnMove) {
+  if (!FLAG_incremental_marking) return;
+
+  v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
+  v8::Context::Scope context_scope(context);
+  {
+    v8::HandleScope handles(v8_isolate());
+    v8::Local<v8::Object> local =
+        v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
+    auto ref_from =
+        std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
+    auto ref_to = std::make_unique<v8::TracedReference<v8::Object>>();
+    SimulateIncrementalMarking();
+    MarkCompactCollector::MarkingState state;
+    ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
+    *ref_to = std::move(*ref_from);
+    ASSERT_TRUE(ref_from->IsEmpty());
+    EXPECT_TRUE(state.IsGrey(HeapObject::cast(*Utils::OpenHandle(*local))));
+  }
+}
+
+TEST_F(TracedReferenceTest, NoWriteBarrierOnStackMove) {
+  if (!FLAG_incremental_marking) return;
+
+  isolate()->global_handles()->SetStackStart(base::Stack::GetStackStart());
+
+  v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
+  v8::Context::Scope context_scope(context);
+  {
+    v8::HandleScope handles(v8_isolate());
+    v8::Local<v8::Object> local =
+        v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
+    auto ref_from =
+        std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
+    v8::TracedReference<v8::Object> ref_to;
+    SimulateIncrementalMarking();
+    MarkCompactCollector::MarkingState state;
+    ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
+    ref_to = std::move(*ref_from);
+    ASSERT_TRUE(ref_from->IsEmpty());
+    EXPECT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
+  }
+}
+
 }  // namespace internal
 }  // namespace v8