From a2f63f1886b8c184fddec858adc783b2cf7aedf9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Olivier=20Fl=C3=BCckiger?=
Date: Fri, 3 Feb 2023 13:43:12 +0100
Subject: [PATCH] [gc][static-roots] Don't access markbits on R/O pages
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Markbits on read-only pages are initialized BLACK and never change.
For all practical purposes they are unused. This CL prevents any access
to the markbits of these pages. This is a precursor to removing them
entirely from the page to make room for static roots.

Bug: v8:13466
Change-Id: I61d3f6d9bbca750d0f34475859b34ff44f7fec1e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4212397
Reviewed-by: Dominik Inführ
Auto-Submit: Olivier Flückiger
Commit-Queue: Olivier Flückiger
Cr-Commit-Position: refs/heads/main@{#85665}
---
 src/heap/basic-memory-chunk.cc              |  4 +-
 src/heap/basic-memory-chunk.h               |  2 +
 src/heap/concurrent-marking.cc              |  3 +-
 src/heap/mark-compact-inl.h                 |  1 +
 src/heap/mark-compact.cc                    | 45 ++++++++++---------
 src/heap/mark-compact.h                     |  1 -
 src/heap/marking-barrier-inl.h              |  2 +
 src/heap/marking-visitor-inl.h              | 14 ++++--
 src/heap/marking-visitor.h                  |  1 +
 src/heap/object-stats.cc                    | 10 +++--
 src/heap/read-only-spaces.cc                | 18 ++------
 src/objects/heap-object.h                   |  2 +
 src/objects/objects-inl.h                   |  2 +
 test/cctest/heap/test-concurrent-marking.cc | 15 ++++---
 test/cctest/heap/test-heap.cc               |  2 +-
 .../heap/marking-worklist-unittest.cc       | 35 ++++++++++++---
 16 files changed, 98 insertions(+), 59 deletions(-)
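The mechanism described above can be sketched outside V8. The following is a standalone toy model in plain C++, not V8 code, and every name in it is invented: if read-only pages are guaranteed to stay constant-black, the bitmap accessor can reject them outright (mirroring the new CHECK in marking_bitmap() below) and callers treat read-only objects as implicitly live, so the bits themselves are never read.

#include <bitset>
#include <cassert>

enum class SpaceKind { kMutable, kReadOnly };

struct ToyPage {
  SpaceKind space;
  std::bitset<64> markbits;  // One bit per object slot.

  // Mirrors the CL: hand out markbits only for mutable pages, so any
  // stale access to a read-only page's bits traps immediately.
  std::bitset<64>& marking_bitmap() {
    assert(space != SpaceKind::kReadOnly);
    return markbits;
  }
};

// Liveness queries treat read-only objects as always black instead of
// consulting the bitmap.
bool IsLive(ToyPage& page, int slot) {
  if (page.space == SpaceKind::kReadOnly) return true;
  return page.marking_bitmap()[slot];
}

int main() {
  ToyPage rw{SpaceKind::kMutable, {}};
  rw.marking_bitmap().set(3);
  assert(IsLive(rw, 3) && !IsLive(rw, 4));

  ToyPage ro{SpaceKind::kReadOnly, {}};
  assert(IsLive(ro, 0));  // Answered without touching markbits.
}

The hunks below apply exactly these two moves throughout the GC: guard the bitmap accessor, and short-circuit every liveness test on InReadOnlySpace().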
diff --git a/src/heap/basic-memory-chunk.cc b/src/heap/basic-memory-chunk.cc
index 4f4bc4814c..fc271cdb81 100644
--- a/src/heap/basic-memory-chunk.cc
+++ b/src/heap/basic-memory-chunk.cc
@@ -64,7 +64,9 @@ BasicMemoryChunk::BasicMemoryChunk(Heap* heap, BaseSpace* space,
       high_water_mark_(area_start - reinterpret_cast<Address>(this)),
       owner_(space),
       reservation_(std::move(reservation)) {
-  marking_bitmap()->Clear();
+  if (space->identity() != RO_SPACE) {
+    marking_bitmap()->Clear();
+  }
 }
 
 bool BasicMemoryChunk::InOldSpace() const {
diff --git a/src/heap/basic-memory-chunk.h b/src/heap/basic-memory-chunk.h
index dfcbdca5a8..864c803975 100644
--- a/src/heap/basic-memory-chunk.h
+++ b/src/heap/basic-memory-chunk.h
@@ -306,6 +306,8 @@ class BasicMemoryChunk {
 
   template <AccessMode mode>
   ConcurrentBitmap<mode>* marking_bitmap() const {
+    // TODO(olivf) Change to DCHECK once we have some coverage
+    CHECK(!InReadOnlySpace());
     return static_cast<ConcurrentBitmap<mode>*>(
         Bitmap::FromAddress(address() + kMarkingBitmapOffset));
   }
diff --git a/src/heap/concurrent-marking.cc b/src/heap/concurrent-marking.cc
index 73eff30596..72f60b2dcd 100644
--- a/src/heap/concurrent-marking.cc
+++ b/src/heap/concurrent-marking.cc
@@ -263,7 +263,7 @@ class YoungGenerationConcurrentMarkingVisitor final
         marking_state_(heap->isolate(), memory_chunk_data) {}
 
   bool ShouldMarkObject(HeapObject object) const {
-    return !object.InSharedHeap();
+    return !object.InSharedHeap() && !object.InReadOnlySpace();
   }
 
   void SynchronizePageAccess(HeapObject heap_object) {
@@ -787,6 +787,7 @@ void ConcurrentMarking::RunMajor(JobDelegate* delegate,
           done = true;
           break;
         }
+        DCHECK(!object.InReadOnlySpace());
         objects_processed++;
 
         Address new_space_top = kNullAddress;
diff --git a/src/heap/mark-compact-inl.h b/src/heap/mark-compact-inl.h
index 7168b7350a..da353373cc 100644
--- a/src/heap/mark-compact-inl.h
+++ b/src/heap/mark-compact-inl.h
@@ -86,6 +86,7 @@ void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
 }
 
 bool MarkCompactCollector::ShouldMarkObject(HeapObject object) const {
+  if (object.InReadOnlySpace()) return false;
   if (V8_LIKELY(!uses_shared_heap_)) return true;
   if (v8_flags.shared_space) {
     if (is_shared_heap_isolate_) return true;
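Note the position of the new test in ShouldMarkObject above: it runs before the shared-heap logic. A hedged sketch of why the ordering matters (toy types and invented names, not the real signature): anything that passes this predicate will later have its markbits read, and the accessor now CHECK-fails for read-only pages, so read-only objects must be filtered out at the earliest decision point.

struct ToyObject {
  bool in_read_only_space;
  bool in_shared_heap;
};

// Toy ShouldMarkObject: the read-only check comes first because every
// later branch may lead to a markbit access on the object's page.
bool ShouldMarkObject(const ToyObject& o, bool uses_shared_heap,
                      bool is_shared_heap_isolate) {
  if (o.in_read_only_space) return false;  // Never mark or push RO objects.
  if (!uses_shared_heap) return true;
  if (is_shared_heap_isolate) return true;
  return !o.in_shared_heap;
}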
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index c805d98b1c..06793449ce 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -293,7 +293,8 @@ class FullMarkingVerifier : public MarkingVerifier {
       CHECK(heap_->SharedHeapContains(heap_object));
     }
 
-    CHECK(marking_state_->IsBlack(heap_object));
+    CHECK(heap_object.InReadOnlySpace() ||
+          marking_state_->IsBlack(heap_object));
   }
 
   V8_INLINE bool ShouldVerifyObject(HeapObject heap_object) {
@@ -626,14 +627,6 @@ void MarkCompactCollector::CollectGarbage() {
 }
 
 #ifdef VERIFY_HEAP
-void MarkCompactCollector::VerifyMarkbitsAreDirty(ReadOnlySpace* space) {
-  ReadOnlyHeapObjectIterator iterator(space);
-  for (HeapObject object = iterator.Next(); !object.is_null();
-       object = iterator.Next()) {
-    CHECK(non_atomic_marking_state()->IsBlack(object));
-  }
-}
-
 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpaceBase* space) {
   for (Page* p : *space) {
     CHECK(non_atomic_marking_state()->bitmap(p)->IsClean());
@@ -667,9 +660,6 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
   VerifyMarkbitsAreClean(heap_->old_space());
   VerifyMarkbitsAreClean(heap_->code_space());
   VerifyMarkbitsAreClean(heap_->new_space());
-  // Read-only space should always be black since we never collect any objects
-  // in it or linked from it.
-  VerifyMarkbitsAreDirty(heap_->read_only_space());
   VerifyMarkbitsAreClean(heap_->lo_space());
   VerifyMarkbitsAreClean(heap_->code_lo_space());
   VerifyMarkbitsAreClean(heap_->new_lo_space());
@@ -1338,7 +1328,8 @@ class InternalizedStringTableCleaner final : public RootVisitor {
     if (o.IsHeapObject()) {
       HeapObject heap_object = HeapObject::cast(o);
       DCHECK(!Heap::InYoungGeneration(heap_object));
-      if (marking_state->IsWhite(heap_object)) {
+      if (!heap_object.InReadOnlySpace() &&
+          marking_state->IsWhite(heap_object)) {
        pointers_removed_++;
        // Set the entry to the_hole_value (as deleted).
        p.store(StringTable::deleted_element());
@@ -2729,7 +2720,8 @@ bool ShouldRetainMap(MarkingState* marking_state, Map map, int age) {
   }
   Object constructor = map.GetConstructor();
   if (!constructor.IsHeapObject() ||
-      marking_state->IsWhite(HeapObject::cast(constructor))) {
+      (!HeapObject::cast(constructor).InReadOnlySpace() &&
+       marking_state->IsWhite(HeapObject::cast(constructor)))) {
     // The constructor is dead, no new objects with this map can
     // be created. Do not retain this map.
     return false;
@@ -2768,7 +2760,8 @@ void MarkCompactCollector::RetainMaps() {
     }
     Object prototype = map.prototype();
     if (age > 0 && prototype.IsHeapObject() &&
-        marking_state()->IsWhite(HeapObject::cast(prototype))) {
+        (!HeapObject::cast(prototype).InReadOnlySpace() &&
+         marking_state()->IsWhite(HeapObject::cast(prototype)))) {
      // The prototype is not marked, age the map.
      new_age = age - 1;
     } else {
@@ -2980,7 +2973,10 @@ class StringForwardingTableCleaner final {
     String original_string = String::cast(original);
     if (marking_state_->IsBlack(original_string)) {
       Object forward = record->ForwardStringObjectOrHash(isolate_);
-      if (!forward.IsHeapObject()) return;
+      if (!forward.IsHeapObject() ||
+          HeapObject::cast(forward).InReadOnlySpace()) {
+        return;
+      }
       marking_state_->WhiteToBlack(HeapObject::cast(forward));
     } else {
       DisposeExternalResource(record);
@@ -3036,11 +3032,15 @@ class StringForwardingTableCleaner final {
       StringForwardingTable::Record* record) {
     if (original_string.IsInternalizedString()) return;
     Object forward = record->ForwardStringObjectOrHash(isolate_);
-    if (!forward.IsHeapObject()) return;
+    if (!forward.IsHeapObject()) {
+      return;
+    }
     String forward_string = String::cast(forward);
     // Mark the forwarded string to keep it alive.
-    marking_state_->WhiteToBlack(forward_string);
+    if (!forward_string.InReadOnlySpace()) {
+      marking_state_->WhiteToBlack(forward_string);
+    }
     // Transition the original string to a ThinString and override the
     // forwarding index with the correct hash.
     original_string.MakeThin(isolate_, forward_string);
 
@@ -3679,7 +3679,8 @@ void MarkCompactCollector::ClearJSWeakRefs() {
   JSWeakRef weak_ref;
   while (local_weak_objects()->js_weak_refs_local.Pop(&weak_ref)) {
     HeapObject target = HeapObject::cast(weak_ref.target());
-    if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
+    if (!target.InReadOnlySpace() &&
+        !non_atomic_marking_state()->IsBlackOrGrey(target)) {
       weak_ref.set_target(ReadOnlyRoots(isolate()).undefined_value());
     } else {
       // The value of the JSWeakRef is alive.
@@ -3696,7 +3697,8 @@ void MarkCompactCollector::ClearJSWeakRefs() {
      }
    };
    HeapObject target = HeapObject::cast(weak_cell.target());
-    if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
+    if (!target.InReadOnlySpace() &&
+        !non_atomic_marking_state()->IsBlackOrGrey(target)) {
      DCHECK(target.CanBeHeldWeakly());
      // The value of the WeakCell is dead.
      JSFinalizationRegistry finalization_registry =
@@ -3718,7 +3720,8 @@ void MarkCompactCollector::ClearJSWeakRefs() {
    }
 
    HeapObject unregister_token = weak_cell.unregister_token();
-    if (!non_atomic_marking_state()->IsBlackOrGrey(unregister_token)) {
+    if (!unregister_token.InReadOnlySpace() &&
+        !non_atomic_marking_state()->IsBlackOrGrey(unregister_token)) {
      DCHECK(unregister_token.CanBeHeldWeakly());
      // The unregister token is dead. Remove any corresponding entries in the
      // key map. Multiple WeakCell with the same token will have all their
diff --git a/src/heap/mark-compact.h b/src/heap/mark-compact.h
index e1fe165346..fbf460b798 100644
--- a/src/heap/mark-compact.h
+++ b/src/heap/mark-compact.h
@@ -420,7 +420,6 @@ class MarkCompactCollector final : public CollectorBase {
   void VerifyMarking();
 #ifdef VERIFY_HEAP
   void VerifyMarkbitsAreClean();
-  void VerifyMarkbitsAreDirty(ReadOnlySpace* space);
   void VerifyMarkbitsAreClean(PagedSpaceBase* space);
   void VerifyMarkbitsAreClean(NewSpace* space);
   void VerifyMarkbitsAreClean(LargeObjectSpace* space);
diff --git a/src/heap/marking-barrier-inl.h b/src/heap/marking-barrier-inl.h
index 4a9761abd4..37c31c515b 100644
--- a/src/heap/marking-barrier-inl.h
+++ b/src/heap/marking-barrier-inl.h
@@ -14,6 +14,8 @@ namespace v8 {
 namespace internal {
 
 void MarkingBarrier::MarkValue(HeapObject host, HeapObject value) {
+  if (value.InReadOnlySpace()) return;
+
   DCHECK(IsCurrentMarkingBarrier(host));
   DCHECK(is_activated_ || shared_heap_worklist_.has_value());
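MarkValue above is the write-barrier hot path, so it gets the same filter as an early return before any DCHECK or marking state is consulted. A toy sketch of the shape (invented names; the real barrier does considerably more):

#include <vector>

struct ToyObject {
  bool in_read_only_space;
  bool marked = false;
};

// Toy marking barrier: read-only values are implicitly black, so they are
// dropped before any markbit is loaded or any worklist is touched.
void MarkValue(std::vector<ToyObject*>& worklist, ToyObject* value) {
  if (value->in_read_only_space) return;
  if (!value->marked) {
    value->marked = true;  // White-to-grey/black on a mutable page.
    worklist.push_back(value);
  }
}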
diff --git a/src/heap/marking-visitor-inl.h b/src/heap/marking-visitor-inl.h
index 9f47e8f071..b877819184 100644
--- a/src/heap/marking-visitor-inl.h
+++ b/src/heap/marking-visitor-inl.h
@@ -257,6 +257,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::
     if (end < size) {
       // The object can be pushed back onto the marking worklist only after
       // progress bar was updated.
+      DCHECK(ShouldMarkObject(object));
       local_marking_worklists_->Push(object);
     }
   }
@@ -372,7 +373,8 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
     // WeakMaps and WeakSets and therefore cannot be ephemeron keys. See also
     // MarkCompactCollector::ProcessEphemeron.
     DCHECK(!key.InSharedWritableHeap());
-    if (concrete_visitor()->marking_state()->IsBlackOrGrey(key)) {
+    if (key.InReadOnlySpace() ||
+        concrete_visitor()->marking_state()->IsBlackOrGrey(key)) {
       VisitPointer(table, value_slot);
     } else {
       Object value_obj = table.ValueAt(i);
@@ -405,7 +407,8 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef(
   if (weak_ref.target().IsHeapObject()) {
     HeapObject target = HeapObject::cast(weak_ref.target());
     SynchronizePageAccess(target);
-    if (concrete_visitor()->marking_state()->IsBlackOrGrey(target)) {
+    if (target.InReadOnlySpace() ||
+        concrete_visitor()->marking_state()->IsBlackOrGrey(target)) {
       // Record the slot inside the JSWeakRef, since the
       // VisitJSObjectSubclass above didn't visit it.
       ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
@@ -432,8 +435,10 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
   HeapObject unregister_token = weak_cell.relaxed_unregister_token();
   SynchronizePageAccess(target);
   SynchronizePageAccess(unregister_token);
-  if (concrete_visitor()->marking_state()->IsBlackOrGrey(target) &&
-      concrete_visitor()->marking_state()->IsBlackOrGrey(unregister_token)) {
+  if ((target.InReadOnlySpace() ||
+       concrete_visitor()->marking_state()->IsBlackOrGrey(target)) &&
+      (unregister_token.InReadOnlySpace() ||
+       concrete_visitor()->marking_state()->IsBlackOrGrey(unregister_token))) {
     // Record the slots inside the WeakCell, since the IterateBody above
     // didn't visit it.
     ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
@@ -458,6 +463,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
 template <typename ConcreteVisitor, typename MarkingState>
 int MarkingVisitorBase<ConcreteVisitor, MarkingState>::MarkDescriptorArrayBlack(
     DescriptorArray descriptors) {
+  if (descriptors.InReadOnlySpace()) return 0;
   concrete_visitor()->marking_state()->WhiteToGrey(descriptors);
   if (concrete_visitor()->marking_state()->GreyToBlack(descriptors)) {
     VisitMapPointer(descriptors);
diff --git a/src/heap/marking-visitor.h b/src/heap/marking-visitor.h
index 58463a267c..39c502e395 100644
--- a/src/heap/marking-visitor.h
+++ b/src/heap/marking-visitor.h
@@ -122,6 +122,7 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
   }
 
   bool ShouldMarkObject(HeapObject object) const {
+    if (object.InReadOnlySpace()) return false;
     if (should_mark_shared_heap_) return true;
     return !object.InSharedHeap();
   }
diff --git a/src/heap/object-stats.cc b/src/heap/object-stats.cc
index 40e29e41db..c04fda22c2 100644
--- a/src/heap/object-stats.cc
+++ b/src/heap/object-stats.cc
@@ -846,8 +846,12 @@ bool ObjectStatsCollectorImpl::IsCowArray(FixedArrayBase array) {
 }
 
 bool ObjectStatsCollectorImpl::SameLiveness(HeapObject obj1, HeapObject obj2) {
-  return obj1.is_null() || obj2.is_null() ||
-         marking_state_->Color(obj1) == marking_state_->Color(obj2);
+  if (obj1.is_null() || obj2.is_null()) return true;
+  auto col1 = obj1.InReadOnlySpace() ? Marking::ObjectColor::BLACK_OBJECT
+                                     : marking_state_->Color(obj1);
+  auto col2 = obj2.InReadOnlySpace() ? Marking::ObjectColor::BLACK_OBJECT
+                                     : marking_state_->Color(obj2);
+  return col1 == col2;
 }
 
 void ObjectStatsCollectorImpl::RecordVirtualMapDetails(Map map) {
@@ -1093,7 +1097,7 @@ class ObjectStatsVisitor {
         phase_(phase) {}
 
   void Visit(HeapObject obj) {
-    if (marking_state_->IsBlack(obj)) {
+    if (obj.InReadOnlySpace() || marking_state_->IsBlack(obj)) {
       live_collector_->CollectStatistics(
           obj, phase_, ObjectStatsCollectorImpl::CollectFieldStats::kYes);
     } else {
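SameLiveness above compares colors rather than acting on them, so simply skipping read-only objects would change its result; instead the CL substitutes the color those pages used to carry. A toy sketch of that substitution (invented names):

enum class Color { kWhite, kGrey, kBlack };

struct ToyObject {
  bool in_read_only_space;
  Color color;  // Only meaningful for mutable-space objects.
};

// Read-only objects report BLACK, which is what their (now unread)
// markbits were initialized to, so the stats grouping is unchanged.
Color EffectiveColor(const ToyObject& o) {
  return o.in_read_only_space ? Color::kBlack : o.color;
}

bool SameLiveness(const ToyObject& a, const ToyObject& b) {
  return EffectiveColor(a) == EffectiveColor(b);
}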
diff --git a/src/heap/read-only-spaces.cc b/src/heap/read-only-spaces.cc
index e85c2bca6b..26db3d2f53 100644
--- a/src/heap/read-only-spaces.cc
+++ b/src/heap/read-only-spaces.cc
@@ -338,7 +338,6 @@ ReadOnlyPage::ReadOnlyPage(Heap* heap, BaseSpace* space, size_t chunk_size,
                            std::move(reservation)) {
   allocated_bytes_ = 0;
   SetFlags(Flag::NEVER_EVACUATE | Flag::READ_ONLY_HEAP);
-  heap->non_atomic_marking_state()->bitmap(this)->MarkAllBits();
 }
 
 void ReadOnlyPage::MakeHeaderRelocatable() {
@@ -545,11 +544,6 @@ void ReadOnlySpace::FreeLinearAllocationArea() {
     return;
   }
 
-  // Clear the bits in the unused black area.
-  ReadOnlyPage* page = pages_.back();
-  heap()->marking_state()->bitmap(page)->ClearRange(
-      page->AddressToMarkbitIndex(top_), page->AddressToMarkbitIndex(limit_));
-
   heap()->CreateFillerObjectAt(top_, static_cast<int>(limit_ - top_));
   BasicMemoryChunk::UpdateHighWaterMark(top_);
 
@@ -662,15 +656,9 @@ AllocationResult ReadOnlySpace::AllocateRawUnaligned(int size_in_bytes) {
 
 AllocationResult ReadOnlySpace::AllocateRaw(int size_in_bytes,
                                             AllocationAlignment alignment) {
-  AllocationResult result =
-      USE_ALLOCATION_ALIGNMENT_BOOL && alignment != kTaggedAligned
-          ? AllocateRawAligned(size_in_bytes, alignment)
-          : AllocateRawUnaligned(size_in_bytes);
-  HeapObject heap_obj;
-  if (result.To(&heap_obj)) {
-    DCHECK(heap()->marking_state()->IsBlack(heap_obj));
-  }
-  return result;
+  return USE_ALLOCATION_ALIGNMENT_BOOL && alignment != kTaggedAligned
+             ? AllocateRawAligned(size_in_bytes, alignment)
+             : AllocateRawUnaligned(size_in_bytes);
 }
 
 size_t ReadOnlyPage::ShrinkToHighWaterMark() {
diff --git a/src/objects/heap-object.h b/src/objects/heap-object.h
index 2e0ea5fe02..26b5b10ee9 100644
--- a/src/objects/heap-object.h
+++ b/src/objects/heap-object.h
@@ -95,6 +95,8 @@ class HeapObject : public Object {
 
   V8_INLINE bool InSharedWritableHeap() const;
 
+  V8_INLINE bool InReadOnlySpace() const;
+
 #define IS_TYPE_FUNCTION_DECL(Type) \
   V8_INLINE bool Is##Type() const;  \
   V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const;
diff --git a/src/objects/objects-inl.h b/src/objects/objects-inl.h
index 03bf938775..1aca9a80a8 100644
--- a/src/objects/objects-inl.h
+++ b/src/objects/objects-inl.h
@@ -207,6 +207,8 @@ bool HeapObject::InSharedWritableHeap() const {
   return BasicMemoryChunk::FromHeapObject(*this)->InSharedHeap();
 }
 
+bool HeapObject::InReadOnlySpace() const { return IsReadOnlyHeapObject(*this); }
+
 bool HeapObject::IsJSObjectThatCanBeTrackedAsPrototype() const {
   // Do not optimize objects in the shared heap because it is not
  // threadsafe. Objects in the shared heap have fixed layouts and their maps
diff --git a/test/cctest/heap/test-concurrent-marking.cc b/test/cctest/heap/test-concurrent-marking.cc
index e57fa68f37..c4e2089c8e 100644
--- a/test/cctest/heap/test-concurrent-marking.cc
+++ b/test/cctest/heap/test-concurrent-marking.cc
@@ -26,6 +26,11 @@ void PublishSegment(MarkingWorklist& worklist, HeapObject object) {
   local.Publish();
 }
 
+HeapObject ObjectForTesting(i::Isolate* isolate) {
+  return HeapObject::cast(
+      isolate->roots_table().slot(RootIndex::kFirstStrongRoot).load(isolate));
+}
+
 TEST(ConcurrentMarking) {
   if (!i::v8_flags.concurrent_marking) return;
   CcTest::InitializeVM();
@@ -42,7 +47,7 @@ TEST(ConcurrentMarking) {
       new ConcurrentMarking(heap, &weak_objects);
   MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
   PublishSegment(*collector->marking_worklists()->shared(),
-                 ReadOnlyRoots(heap).undefined_value());
+                 ObjectForTesting(heap->isolate()));
   concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Join();
   delete concurrent_marking;
@@ -64,11 +69,11 @@ TEST(ConcurrentMarkingReschedule) {
       new ConcurrentMarking(heap, &weak_objects);
   MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
   PublishSegment(*collector->marking_worklists()->shared(),
-                 ReadOnlyRoots(heap).undefined_value());
+                 ObjectForTesting(heap->isolate()));
   concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Join();
   PublishSegment(*collector->marking_worklists()->shared(),
-                 ReadOnlyRoots(heap).undefined_value());
+                 ObjectForTesting(heap->isolate()));
   concurrent_marking->RescheduleJobIfNeeded(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Join();
   delete concurrent_marking;
@@ -91,12 +96,12 @@ TEST(ConcurrentMarkingPreemptAndReschedule) {
   MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
   for (int i = 0; i < 5000; i++)
     PublishSegment(*collector->marking_worklists()->shared(),
-                   ReadOnlyRoots(heap).undefined_value());
+                   ObjectForTesting(heap->isolate()));
   concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Pause();
   for (int i = 0; i < 5000; i++)
     PublishSegment(*collector->marking_worklists()->shared(),
-                   ReadOnlyRoots(heap).undefined_value());
+                   ObjectForTesting(heap->isolate()));
   concurrent_marking->RescheduleJobIfNeeded(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Join();
   delete concurrent_marking;
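The test changes above follow from the new rule rather than extend it: ReadOnlyRoots(heap).undefined_value() lives on a read-only page, so pushing it onto a marking worklist would now trip the new assertions, and ObjectForTesting fetches an arbitrary strong root from the mutable heap instead. A toy sketch of the invariant the tests now respect (invented names):

#include <cassert>
#include <vector>

struct ToyObject {
  bool in_read_only_space;
};

// Toy PublishSegment: mirrors DCHECK(!object.InReadOnlySpace()) on the
// marker side; only markable (mutable-heap) objects may be queued.
void PublishSegment(std::vector<ToyObject*>& worklist, ToyObject* object) {
  assert(!object->in_read_only_space);
  worklist.push_back(object);
}

int main() {
  std::vector<ToyObject*> worklist;
  ToyObject mutable_object{false};
  PublishSegment(worklist, &mutable_object);  // OK: not read-only.
  // A read-only object would trip the assert, as in the updated tests.
}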
diff --git a/test/cctest/heap/test-heap.cc b/test/cctest/heap/test-heap.cc
index 3bd7942a16..0ed33aa57b 100644
--- a/test/cctest/heap/test-heap.cc
+++ b/test/cctest/heap/test-heap.cc
@@ -5811,7 +5811,7 @@ TEST(Regress598319) {
     // progress bar, we would fail here.
     for (int i = 0; i < arr.get().length(); i++) {
       HeapObject arr_value = HeapObject::cast(arr.get().get(i));
-      CHECK(marking_state->IsBlack(arr_value));
+      CHECK(arr_value.InReadOnlySpace() || marking_state->IsBlack(arr_value));
     }
   }
 
diff --git a/test/unittests/heap/marking-worklist-unittest.cc b/test/unittests/heap/marking-worklist-unittest.cc
index c6eabd85b4..45bbdad4be 100644
--- a/test/unittests/heap/marking-worklist-unittest.cc
+++ b/test/unittests/heap/marking-worklist-unittest.cc
@@ -22,7 +22,10 @@ TEST_F(MarkingWorklistTest, PushPop) {
   MarkingWorklists holder;
   MarkingWorklists::Local worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   worklists.Push(pushed_object);
   HeapObject popped_object;
   EXPECT_TRUE(worklists.Pop(&popped_object));
@@ -33,7 +36,10 @@ TEST_F(MarkingWorklistTest, PushPopOnHold) {
   MarkingWorklists holder;
   MarkingWorklists::Local worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   worklists.PushOnHold(pushed_object);
   HeapObject popped_object;
   EXPECT_TRUE(worklists.PopOnHold(&popped_object));
@@ -45,7 +51,10 @@ TEST_F(MarkingWorklistTest, MergeOnHold) {
   MarkingWorklists::Local main_worklists(&holder);
   MarkingWorklists::Local worker_worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   worker_worklists.PushOnHold(pushed_object);
   worker_worklists.Publish();
   main_worklists.MergeOnHold();
@@ -59,7 +68,10 @@ TEST_F(MarkingWorklistTest, ShareWorkIfGlobalPoolIsEmpty) {
   MarkingWorklists::Local main_worklists(&holder);
   MarkingWorklists::Local worker_worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   main_worklists.Push(pushed_object);
   main_worklists.ShareWork();
   HeapObject popped_object;
@@ -73,7 +85,10 @@ TEST_F(MarkingWorklistTest, ContextWorklistsPushPop) {
   holder.CreateContextWorklists({context});
   MarkingWorklists::Local worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   worklists.SwitchToContext(context);
   worklists.Push(pushed_object);
   worklists.SwitchToSharedForTesting();
@@ -89,7 +104,10 @@ TEST_F(MarkingWorklistTest, ContextWorklistsEmpty) {
   holder.CreateContextWorklists({context});
   MarkingWorklists::Local worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   worklists.SwitchToContext(context);
   worklists.Push(pushed_object);
   EXPECT_FALSE(worklists.IsEmpty());
@@ -110,7 +128,10 @@ TEST_F(MarkingWorklistTest, ContextWorklistCrossTask) {
   MarkingWorklists::Local main_worklists(&holder);
   MarkingWorklists::Local worker_worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   main_worklists.SwitchToContext(context1);
   main_worklists.Push(pushed_object);
   main_worklists.ShareWork();