[heap] Load MarkingBarrier from thread local on main thread

Each thread has its own MarkingBarrier instance for incremental
marking. A thread local variable is used to get the current thread's
instance on background threads.

However, on main threads this thread local variable was always
set to nullptr. Instead, the main thread reached its own instance
through the heap_ field in the host object's page header. It was done
this way because setting current_marking_barrier on the main thread
seemed quite complex: multiple isolates may run on the same thread,
and isolates may even be migrated between threads.

However, with --shared-space, loading the heap_ field for a shared
object would return the main isolate's heap, and we would end up with
the wrong MarkingBarrier instance on client isolates. So this
CL makes main and background threads more uniform by setting the
thread local field on the main thread as well. The field is set by
the already existing v8::Isolate::Scope API. Some embedders might have
to add these scopes if they don't already use them correctly.

Bug: v8:13267
Change-Id: Idfdaf35073d04dd5e13ad6065ef42eae3ce6a259
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3998633
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#84144}
This commit is contained in:
Dominik Inführ 2022-11-08 20:56:26 +01:00 committed by V8 LUCI CQ
parent 06681fbd91
commit 910def9edc
16 changed files with 107 additions and 93 deletions

View File

@ -5865,8 +5865,8 @@ int Shell::Main(int argc, char* argv[]) {
{
D8Console console2(isolate2);
Initialize(isolate2, &console2);
PerIsolateData data2(isolate2);
Isolate::Scope isolate_scope(isolate2);
PerIsolateData data2(isolate2);
result = RunMain(isolate2, false);
}

View File

@ -3699,6 +3699,13 @@ void Isolate::SetIsolateThreadLocals(Isolate* isolate,
PerIsolateThreadData* data) {
g_current_isolate_ = isolate;
g_current_per_isolate_thread_data_ = data;
if (isolate && isolate->main_thread_local_isolate()) {
WriteBarrier::SetForThread(
isolate->main_thread_local_heap()->marking_barrier());
} else {
WriteBarrier::SetForThread(nullptr);
}
}
Isolate::~Isolate() {
@ -4504,6 +4511,9 @@ void Isolate::Enter() {
ThreadId::Current());
// Same thread re-enters the isolate, no need to re-init anything.
entry_stack_->entry_count++;
// Set thread local for marking barrier again. The marking barrier may now
// be set up while previously it wasn't.
SetIsolateThreadLocals(this, current_data);
return;
}
}

View File

@ -125,8 +125,7 @@ inline void CombinedWriteBarrierInternal(HeapObject host, HeapObjectSlot slot,
CodePageHeaderModificationScope rwx_write_scope(
"Marking a Code object requires write access to the Code page header");
#endif
WriteBarrier::MarkingSlow(host_chunk->GetHeap(), host, HeapObjectSlot(slot),
value);
WriteBarrier::MarkingSlow(host, HeapObjectSlot(slot), value);
}
}
@ -208,8 +207,7 @@ inline void CombinedEphemeronWriteBarrier(EphemeronHashTable host,
// Currently Code values are never stored in EphemeronTables. If this ever
// changes then the CodePageHeaderModificationScope might be required here.
DCHECK(!IsCodeSpaceObject(heap_object_value));
WriteBarrier::MarkingSlow(host_chunk->GetHeap(), host, HeapObjectSlot(slot),
heap_object_value);
WriteBarrier::MarkingSlow(host, HeapObjectSlot(slot), heap_object_value);
}
}
@ -255,20 +253,11 @@ inline bool IsCodeSpaceObject(HeapObject object) {
return chunk->InCodeSpace();
}
base::Optional<Heap*> WriteBarrier::GetHeapIfMarking(HeapObject object) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return {};
bool WriteBarrier::IsMarking(HeapObject object) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return false;
heap_internals::MemoryChunk* chunk =
heap_internals::MemoryChunk::FromHeapObject(object);
if (V8_LIKELY(!chunk->IsMarking())) return {};
return chunk->GetHeap();
}
Heap* WriteBarrier::GetHeap(HeapObject object) {
DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL);
heap_internals::MemoryChunk* chunk =
heap_internals::MemoryChunk::FromHeapObject(object);
DCHECK(!chunk->InReadOnlySpace());
return chunk->GetHeap();
return chunk->IsMarking();
}
void WriteBarrier::Marking(HeapObject host, ObjectSlot slot, Object value) {
@ -299,15 +288,13 @@ void WriteBarrier::Marking(HeapObject host, MaybeObjectSlot slot,
void WriteBarrier::Marking(HeapObject host, HeapObjectSlot slot,
HeapObject value) {
auto heap = GetHeapIfMarking(host);
if (!heap) return;
MarkingSlow(*heap, host, slot, value);
if (!IsMarking(host)) return;
MarkingSlow(host, slot, value);
}
void WriteBarrier::Marking(Code host, RelocInfo* reloc_info, HeapObject value) {
auto heap = GetHeapIfMarking(host);
if (!heap) return;
MarkingSlow(*heap, host, reloc_info, value);
if (!IsMarking(host)) return;
MarkingSlow(host, reloc_info, value);
}
void WriteBarrier::Shared(Code host, RelocInfo* reloc_info, HeapObject value) {
@ -317,51 +304,40 @@ void WriteBarrier::Shared(Code host, RelocInfo* reloc_info, HeapObject value) {
heap_internals::MemoryChunk::FromHeapObject(value);
if (!value_chunk->InSharedHeap()) return;
Heap* heap = GetHeap(host);
DCHECK_NOT_NULL(heap);
SharedSlow(heap, host, reloc_info, value);
SharedSlow(host, reloc_info, value);
}
void WriteBarrier::Marking(JSArrayBuffer host,
ArrayBufferExtension* extension) {
if (!extension) return;
auto heap = GetHeapIfMarking(host);
if (!heap) return;
MarkingSlow(*heap, host, extension);
if (!extension || !IsMarking(host)) return;
MarkingSlow(host, extension);
}
void WriteBarrier::Marking(DescriptorArray descriptor_array,
int number_of_own_descriptors) {
auto heap = GetHeapIfMarking(descriptor_array);
if (!heap) return;
MarkingSlow(*heap, descriptor_array, number_of_own_descriptors);
if (!IsMarking(descriptor_array)) return;
MarkingSlow(descriptor_array, number_of_own_descriptors);
}
// static
void WriteBarrier::MarkingFromGlobalHandle(Object value) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
if (!value.IsHeapObject()) return;
HeapObject heap_value = HeapObject::cast(value);
// Value may be in read only space but the chunk should never be marked
// as marking which would result in a bail out.
auto heap = GetHeapIfMarking(heap_value);
if (!heap) return;
MarkingSlowFromGlobalHandle(*heap, heap_value);
MarkingSlowFromGlobalHandle(HeapObject::cast(value));
}
// static
void WriteBarrier::MarkingFromInternalFields(JSObject host) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
auto heap = GetHeapIfMarking(host);
if (!heap) return;
if (CurrentMarkingBarrier(heap.value())->is_minor()) {
if (!IsMarking(host)) return;
MarkingBarrier* marking_barrier = CurrentMarkingBarrier(host);
if (marking_barrier->is_minor()) {
// TODO(v8:13012): We do not currently mark Oilpan objects while MinorMC is
// active. Once Oilpan uses a generational GC with incremental marking and
// unified heap, this barrier will be needed again.
return;
}
MarkingSlowFromInternalFields(*heap, host);
MarkingSlowFromInternalFields(marking_barrier->heap(), host);
}
#ifdef ENABLE_SLOW_DCHECKS

View File

@ -21,31 +21,38 @@ namespace {
thread_local MarkingBarrier* current_marking_barrier = nullptr;
} // namespace
MarkingBarrier* WriteBarrier::CurrentMarkingBarrier(Heap* heap) {
return current_marking_barrier
? current_marking_barrier
: heap->main_thread_local_heap()->marking_barrier();
MarkingBarrier* WriteBarrier::CurrentMarkingBarrier(
HeapObject verification_candidate) {
MarkingBarrier* marking_barrier = current_marking_barrier;
DCHECK_NOT_NULL(marking_barrier);
#if DEBUG
if (!verification_candidate.InSharedHeap()) {
Heap* host_heap =
MemoryChunk::FromHeapObject(verification_candidate)->heap();
LocalHeap* local_heap = LocalHeap::Current();
if (!local_heap) local_heap = host_heap->main_thread_local_heap();
DCHECK_EQ(marking_barrier, local_heap->marking_barrier());
}
#endif // DEBUG
return marking_barrier;
}
void WriteBarrier::SetForThread(MarkingBarrier* marking_barrier) {
DCHECK_NULL(current_marking_barrier);
MarkingBarrier* WriteBarrier::SetForThread(MarkingBarrier* marking_barrier) {
MarkingBarrier* existing = current_marking_barrier;
current_marking_barrier = marking_barrier;
return existing;
}
void WriteBarrier::ClearForThread(MarkingBarrier* marking_barrier) {
DCHECK_EQ(current_marking_barrier, marking_barrier);
current_marking_barrier = nullptr;
}
void WriteBarrier::MarkingSlow(Heap* heap, HeapObject host, HeapObjectSlot slot,
void WriteBarrier::MarkingSlow(HeapObject host, HeapObjectSlot slot,
HeapObject value) {
MarkingBarrier* marking_barrier = CurrentMarkingBarrier(heap);
MarkingBarrier* marking_barrier = CurrentMarkingBarrier(host);
marking_barrier->Write(host, slot, value);
}
// static
void WriteBarrier::MarkingSlowFromGlobalHandle(Heap* heap, HeapObject value) {
heap->main_thread_local_heap()->marking_barrier()->WriteWithoutHost(value);
void WriteBarrier::MarkingSlowFromGlobalHandle(HeapObject value) {
MarkingBarrier* marking_barrier = CurrentMarkingBarrier(value);
marking_barrier->WriteWithoutHost(value);
}
// static
@ -56,13 +63,13 @@ void WriteBarrier::MarkingSlowFromInternalFields(Heap* heap, JSObject host) {
local_embedder_heap_tracer->EmbedderWriteBarrier(heap, host);
}
void WriteBarrier::MarkingSlow(Heap* heap, Code host, RelocInfo* reloc_info,
void WriteBarrier::MarkingSlow(Code host, RelocInfo* reloc_info,
HeapObject value) {
MarkingBarrier* marking_barrier = CurrentMarkingBarrier(heap);
MarkingBarrier* marking_barrier = CurrentMarkingBarrier(host);
marking_barrier->Write(host, reloc_info, value);
}
void WriteBarrier::SharedSlow(Heap* heap, Code host, RelocInfo* reloc_info,
void WriteBarrier::SharedSlow(Code host, RelocInfo* reloc_info,
HeapObject value) {
MarkCompactCollector::RecordRelocSlotInfo info =
MarkCompactCollector::ProcessRelocInfo(host, reloc_info, value);
@ -72,15 +79,15 @@ void WriteBarrier::SharedSlow(Heap* heap, Code host, RelocInfo* reloc_info,
info.offset);
}
void WriteBarrier::MarkingSlow(Heap* heap, JSArrayBuffer host,
void WriteBarrier::MarkingSlow(JSArrayBuffer host,
ArrayBufferExtension* extension) {
MarkingBarrier* marking_barrier = CurrentMarkingBarrier(heap);
MarkingBarrier* marking_barrier = CurrentMarkingBarrier(host);
marking_barrier->Write(host, extension);
}
void WriteBarrier::MarkingSlow(Heap* heap, DescriptorArray descriptor_array,
void WriteBarrier::MarkingSlow(DescriptorArray descriptor_array,
int number_of_own_descriptors) {
MarkingBarrier* marking_barrier = CurrentMarkingBarrier(heap);
MarkingBarrier* marking_barrier = CurrentMarkingBarrier(descriptor_array);
marking_barrier->Write(descriptor_array, number_of_own_descriptors);
}

View File

@ -67,10 +67,10 @@ class V8_EXPORT_PRIVATE WriteBarrier {
static inline void MarkingFromGlobalHandle(Object value);
static inline void MarkingFromInternalFields(JSObject host);
static void SetForThread(MarkingBarrier*);
static void ClearForThread(MarkingBarrier*);
static MarkingBarrier* SetForThread(MarkingBarrier*);
static MarkingBarrier* CurrentMarkingBarrier(Heap* heap);
static MarkingBarrier* CurrentMarkingBarrier(
HeapObject verification_candidate);
#ifdef ENABLE_SLOW_DCHECKS
template <typename T>
@ -78,22 +78,18 @@ class V8_EXPORT_PRIVATE WriteBarrier {
static bool IsImmortalImmovableHeapObject(HeapObject object);
#endif
static void MarkingSlow(Heap* heap, HeapObject host, HeapObjectSlot,
HeapObject value);
static void MarkingSlow(HeapObject host, HeapObjectSlot, HeapObject value);
private:
static inline base::Optional<Heap*> GetHeapIfMarking(HeapObject object);
static inline Heap* GetHeap(HeapObject object);
static inline bool IsMarking(HeapObject object);
static void MarkingSlow(Heap* heap, Code host, RelocInfo*, HeapObject value);
static void MarkingSlow(Heap* heap, JSArrayBuffer host,
ArrayBufferExtension*);
static void MarkingSlow(Heap* heap, DescriptorArray,
int number_of_own_descriptors);
static void MarkingSlowFromGlobalHandle(Heap* heap, HeapObject value);
static void MarkingSlow(Code host, RelocInfo*, HeapObject value);
static void MarkingSlow(JSArrayBuffer host, ArrayBufferExtension*);
static void MarkingSlow(DescriptorArray, int number_of_own_descriptors);
static void MarkingSlowFromGlobalHandle(HeapObject value);
static void MarkingSlowFromInternalFields(Heap* heap, JSObject host);
static void SharedSlow(Heap* heap, Code host, RelocInfo*, HeapObject value);
static void SharedSlow(Code host, RelocInfo*, HeapObject value);
friend class Heap;
};

View File

@ -7202,7 +7202,12 @@ void Heap::WriteBarrierForRangeImpl(MemoryChunk* source_page, HeapObject object,
static_assert(!(kModeMask & kDoEvacuationSlotRecording) ||
(kModeMask & kDoMarking));
MarkingBarrier* marking_barrier = WriteBarrier::CurrentMarkingBarrier(this);
MarkingBarrier* marking_barrier = nullptr;
if (kModeMask & kDoMarking) {
marking_barrier = WriteBarrier::CurrentMarkingBarrier(object);
}
MarkCompactCollector* collector = this->mark_compact_collector();
CodeTPageHeaderModificationScope rwx_write_scope(

View File

@ -61,7 +61,8 @@ LocalHeap::LocalHeap(Heap* heap, ThreadKind kind,
heap_->safepoint()->AddLocalHeap(this, [this] {
if (!is_main_thread()) {
WriteBarrier::SetForThread(marking_barrier_.get());
saved_marking_barrier_ =
WriteBarrier::SetForThread(marking_barrier_.get());
if (heap_->incremental_marking()->IsMarking()) {
marking_barrier_->Activate(
heap_->incremental_marking()->IsCompacting(),
@ -92,7 +93,10 @@ LocalHeap::~LocalHeap() {
"Publishing of marking barrier results for Code space pages requires "
"write access to Code page headers");
marking_barrier_->Publish();
WriteBarrier::ClearForThread(marking_barrier_.get());
MarkingBarrier* overwritten =
WriteBarrier::SetForThread(saved_marking_barrier_);
DCHECK_EQ(overwritten, marking_barrier_.get());
USE(overwritten);
}
});

View File

@ -335,6 +335,8 @@ class V8_EXPORT_PRIVATE LocalHeap {
std::unique_ptr<ConcurrentAllocator> code_space_allocator_;
std::unique_ptr<ConcurrentAllocator> shared_old_space_allocator_;
MarkingBarrier* saved_marking_barrier_ = nullptr;
friend class CollectionBarrier;
friend class ConcurrentAllocator;
friend class GlobalSafepoint;

View File

@ -13,7 +13,7 @@ namespace v8 {
namespace internal {
bool MarkingBarrier::MarkValue(HeapObject host, HeapObject value) {
DCHECK(IsCurrentMarkingBarrier());
DCHECK(IsCurrentMarkingBarrier(host));
DCHECK(is_activated_);
DCHECK(!marking_state_.IsImpossible(value));
// Host may have an impossible markbit pattern if manual allocation folding

View File

@ -38,7 +38,7 @@ MarkingBarrier::~MarkingBarrier() { DCHECK(typed_slots_map_.empty()); }
void MarkingBarrier::Write(HeapObject host, HeapObjectSlot slot,
HeapObject value) {
DCHECK(IsCurrentMarkingBarrier());
DCHECK(IsCurrentMarkingBarrier(host));
if (MarkValue(host, value)) {
if (is_compacting_ && slot.address()) {
DCHECK(is_major());
@ -59,7 +59,7 @@ void MarkingBarrier::WriteWithoutHost(HeapObject value) {
}
void MarkingBarrier::Write(Code host, RelocInfo* reloc_info, HeapObject value) {
DCHECK(IsCurrentMarkingBarrier());
DCHECK(IsCurrentMarkingBarrier(host));
if (MarkValue(host, value)) {
if (is_compacting_) {
DCHECK(is_major());
@ -76,7 +76,7 @@ void MarkingBarrier::Write(Code host, RelocInfo* reloc_info, HeapObject value) {
void MarkingBarrier::Write(JSArrayBuffer host,
ArrayBufferExtension* extension) {
DCHECK(IsCurrentMarkingBarrier());
DCHECK(IsCurrentMarkingBarrier(host));
if (!V8_CONCURRENT_MARKING_BOOL && !marking_state_.IsBlack(host)) {
// The extension will be marked when the marker visits the host object.
return;
@ -92,7 +92,7 @@ void MarkingBarrier::Write(JSArrayBuffer host,
void MarkingBarrier::Write(DescriptorArray descriptor_array,
int number_of_own_descriptors) {
DCHECK(IsCurrentMarkingBarrier());
DCHECK(IsCurrentMarkingBarrier(descriptor_array));
DCHECK(IsReadOnlyHeapObject(descriptor_array.map()));
if (is_minor() && !heap_->InYoungGeneration(descriptor_array)) return;
@ -131,7 +131,7 @@ void MarkingBarrier::Write(DescriptorArray descriptor_array,
void MarkingBarrier::RecordRelocSlot(Code host, RelocInfo* rinfo,
HeapObject target) {
DCHECK(IsCurrentMarkingBarrier());
DCHECK(IsCurrentMarkingBarrier(host));
if (!MarkCompactCollector::ShouldRecordRelocSlot(host, rinfo, target)) return;
MarkCompactCollector::RecordRelocSlotInfo info =
@ -295,8 +295,9 @@ void MarkingBarrier::Activate(bool is_compacting,
}
}
bool MarkingBarrier::IsCurrentMarkingBarrier() {
return WriteBarrier::CurrentMarkingBarrier(heap_) == this;
bool MarkingBarrier::IsCurrentMarkingBarrier(
HeapObject verification_candidate) {
return WriteBarrier::CurrentMarkingBarrier(verification_candidate) == this;
}
} // namespace internal

View File

@ -49,6 +49,8 @@ class MarkingBarrier {
return marking_barrier_type_ == MarkingBarrierType::kMinor;
}
Heap* heap() const { return heap_; }
private:
inline bool ShouldMarkObject(HeapObject value) const;
inline bool WhiteToGreyAndPush(HeapObject value);
@ -61,7 +63,7 @@ class MarkingBarrier {
void DeactivateSpace(PagedSpace*);
void DeactivateSpace(NewSpace*);
bool IsCurrentMarkingBarrier();
bool IsCurrentMarkingBarrier(HeapObject verification_candidate);
template <typename TSlot>
inline void MarkRange(HeapObject value, TSlot start, TSlot end);

View File

@ -45,6 +45,7 @@ UNINITIALIZED_TEST(EagerUnmappingInCollectAllAvailableGarbage) {
v8::Isolate* isolate = v8::Isolate::New(create_params);
{
v8::Isolate::Scope isolate_scope(isolate);
v8::HandleScope handle_scope(isolate);
v8::Local<v8::Context> context = CcTest::NewContext(isolate);
v8::Context::Scope context_scope(context);

View File

@ -57,9 +57,13 @@ class MultiClientIsolateTest {
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = allocator.get();
main_isolate_ = v8::Isolate::New(create_params);
i_main_isolate()->Enter();
}
~MultiClientIsolateTest() { main_isolate_->Dispose(); }
~MultiClientIsolateTest() {
i_main_isolate()->Exit();
main_isolate_->Dispose();
}
v8::Isolate* main_isolate() const { return main_isolate_; }

View File

@ -119,6 +119,7 @@ class WasmSerializationTest {
// serialization (when the isolate is disposed).
std::weak_ptr<NativeModule> weak_native_module;
{
v8::Isolate::Scope isolate_scope(serialization_v8_isolate);
HandleScope scope(serialization_isolate);
v8::Local<v8::Context> serialization_context =
v8::Context::New(serialization_v8_isolate);
@ -269,6 +270,7 @@ UNINITIALIZED_TEST(CompiledWasmModulesTransfer) {
std::vector<v8::CompiledWasmModule> store;
std::shared_ptr<NativeModule> original_native_module;
{
v8::Isolate::Scope isolate_scope(from_isolate);
v8::HandleScope scope(from_isolate);
LocalContext env(from_isolate);
@ -292,6 +294,7 @@ UNINITIALIZED_TEST(CompiledWasmModulesTransfer) {
{
v8::Isolate* to_isolate = v8::Isolate::New(create_params);
{
v8::Isolate::Scope isolate_scope(to_isolate);
v8::HandleScope scope(to_isolate);
LocalContext env(to_isolate);

View File

@ -34,12 +34,14 @@ class SharedEngineIsolate {
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate::Initialize(isolate_, create_params);
v8_isolate()->Enter();
v8::HandleScope handle_scope(v8_isolate());
v8::Context::New(v8_isolate())->Enter();
testing::SetupIsolateForWasmModule(isolate());
zone_.reset(new Zone(isolate()->allocator(), ZONE_NAME));
}
~SharedEngineIsolate() {
v8_isolate()->Exit();
zone_.reset();
isolate_->Dispose();
}

View File

@ -62,6 +62,7 @@ InspectorIsolateData::InspectorIsolateData(
&InspectorIsolateData::PromiseRejectHandler);
inspector_ = v8_inspector::V8Inspector::create(isolate_.get(), this);
}
v8::Isolate::Scope isolate_scope(isolate_.get());
v8::HandleScope handle_scope(isolate_.get());
not_inspectable_private_.Reset(
isolate_.get(),