diff --git a/src/heap/basic-memory-chunk.cc b/src/heap/basic-memory-chunk.cc
index fc271cdb81..4f4bc4814c 100644
--- a/src/heap/basic-memory-chunk.cc
+++ b/src/heap/basic-memory-chunk.cc
@@ -64,9 +64,7 @@ BasicMemoryChunk::BasicMemoryChunk(Heap* heap, BaseSpace* space,
high_water_mark_(area_start - reinterpret_cast<Address>(this)),
owner_(space),
reservation_(std::move(reservation)) {
- if (space->identity() != RO_SPACE) {
- marking_bitmap()->Clear();
- }
+ marking_bitmap()->Clear();
}
bool BasicMemoryChunk::InOldSpace() const {
diff --git a/src/heap/basic-memory-chunk.h b/src/heap/basic-memory-chunk.h
index 864c803975..dfcbdca5a8 100644
--- a/src/heap/basic-memory-chunk.h
+++ b/src/heap/basic-memory-chunk.h
@@ -306,8 +306,6 @@ class BasicMemoryChunk {
template <AccessMode mode>
ConcurrentBitmap<mode>* marking_bitmap() const {
- // TODO(olivf) Change to DCHECK once we have some coverage
- CHECK(!InReadOnlySpace());
return static_cast<ConcurrentBitmap<mode>*>(
Bitmap::FromAddress(address() + kMarkingBitmapOffset));
}
diff --git a/src/heap/concurrent-marking.cc b/src/heap/concurrent-marking.cc
index 72f60b2dcd..73eff30596 100644
--- a/src/heap/concurrent-marking.cc
+++ b/src/heap/concurrent-marking.cc
@@ -263,7 +263,7 @@ class YoungGenerationConcurrentMarkingVisitor final
marking_state_(heap->isolate(), memory_chunk_data) {}
bool ShouldMarkObject(HeapObject object) const {
- return !object.InSharedHeap() && !object.InReadOnlySpace();
+ return !object.InSharedHeap();
}
void SynchronizePageAccess(HeapObject heap_object) {
@@ -787,7 +787,6 @@ void ConcurrentMarking::RunMajor(JobDelegate* delegate,
done = true;
break;
}
- DCHECK(!object.InReadOnlySpace());
objects_processed++;
Address new_space_top = kNullAddress;
diff --git a/src/heap/mark-compact-inl.h b/src/heap/mark-compact-inl.h
index 2b3c4e3b9f..aebd8ec18a 100644
--- a/src/heap/mark-compact-inl.h
+++ b/src/heap/mark-compact-inl.h
@@ -86,7 +86,6 @@ void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
}
bool MarkCompactCollector::ShouldMarkObject(HeapObject object) const {
- if (object.InReadOnlySpace()) return false;
if (V8_LIKELY(!uses_shared_heap_)) return true;
if (v8_flags.shared_space) {
if (is_shared_heap_isolate_) return true;
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index e53441ea78..a69409f478 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -293,8 +293,7 @@ class FullMarkingVerifier : public MarkingVerifier {
CHECK(heap_->SharedHeapContains(heap_object));
}
- CHECK(heap_object.InReadOnlySpace() ||
- marking_state_->IsBlack(heap_object));
+ CHECK(marking_state_->IsBlack(heap_object));
}
V8_INLINE bool ShouldVerifyObject(HeapObject heap_object) {
@@ -627,6 +626,14 @@ void MarkCompactCollector::CollectGarbage() {
}
#ifdef VERIFY_HEAP
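+// Verifies that every object in the read-only space is marked black; read-only
+// pages are never collected, so their mark bits must stay set.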
+void MarkCompactCollector::VerifyMarkbitsAreDirty(ReadOnlySpace* space) {
+ ReadOnlyHeapObjectIterator iterator(space);
+ for (HeapObject object = iterator.Next(); !object.is_null();
+ object = iterator.Next()) {
+ CHECK(non_atomic_marking_state()->IsBlack(object));
+ }
+}
+
void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpaceBase* space) {
for (Page* p : *space) {
CHECK(non_atomic_marking_state()->bitmap(p)->IsClean());
@@ -660,6 +667,9 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
VerifyMarkbitsAreClean(heap_->old_space());
VerifyMarkbitsAreClean(heap_->code_space());
VerifyMarkbitsAreClean(heap_->new_space());
+ // Read-only space should always be black since we never collect any objects
+ // in it or linked from it.
+ VerifyMarkbitsAreDirty(heap_->read_only_space());
VerifyMarkbitsAreClean(heap_->lo_space());
VerifyMarkbitsAreClean(heap_->code_lo_space());
VerifyMarkbitsAreClean(heap_->new_lo_space());
@@ -1328,8 +1338,7 @@ class InternalizedStringTableCleaner final : public RootVisitor {
if (o.IsHeapObject()) {
HeapObject heap_object = HeapObject::cast(o);
DCHECK(!Heap::InYoungGeneration(heap_object));
- if (!heap_object.InReadOnlySpace() &&
- marking_state->IsWhite(heap_object)) {
+ if (marking_state->IsWhite(heap_object)) {
pointers_removed_++;
// Set the entry to the_hole_value (as deleted).
p.store(StringTable::deleted_element());
@@ -2721,8 +2730,7 @@ bool ShouldRetainMap(MarkingState* marking_state, Map map, int age) {
}
Object constructor = map.GetConstructor();
if (!constructor.IsHeapObject() ||
- (!HeapObject::cast(constructor).InReadOnlySpace() &&
- marking_state->IsWhite(HeapObject::cast(constructor)))) {
+ marking_state->IsWhite(HeapObject::cast(constructor))) {
// The constructor is dead, no new objects with this map can
// be created. Do not retain this map.
return false;
@@ -2761,8 +2769,7 @@ void MarkCompactCollector::RetainMaps() {
}
Object prototype = map.prototype();
if (age > 0 && prototype.IsHeapObject() &&
- (!HeapObject::cast(prototype).InReadOnlySpace() &&
- marking_state()->IsWhite(HeapObject::cast(prototype)))) {
+ marking_state()->IsWhite(HeapObject::cast(prototype))) {
// The prototype is not marked, age the map.
new_age = age - 1;
} else {
@@ -2974,10 +2981,7 @@ class StringForwardingTableCleaner final {
String original_string = String::cast(original);
if (marking_state_->IsBlack(original_string)) {
Object forward = record->ForwardStringObjectOrHash(isolate_);
- if (!forward.IsHeapObject() ||
- HeapObject::cast(forward).InReadOnlySpace()) {
- return;
- }
+ if (!forward.IsHeapObject()) return;
marking_state_->WhiteToBlack(HeapObject::cast(forward));
} else {
DisposeExternalResource(record);
@@ -3033,15 +3037,11 @@ class StringForwardingTableCleaner final {
StringForwardingTable::Record* record) {
if (original_string.IsInternalizedString()) return;
Object forward = record->ForwardStringObjectOrHash(isolate_);
- if (!forward.IsHeapObject()) {
- return;
- }
+ if (!forward.IsHeapObject()) return;
String forward_string = String::cast(forward);
// Mark the forwarded string to keep it alive.
- if (!forward_string.InReadOnlySpace()) {
- marking_state_->WhiteToBlack(forward_string);
- }
+ marking_state_->WhiteToBlack(forward_string);
// Transition the original string to a ThinString and override the
// forwarding index with the correct hash.
original_string.MakeThin(isolate_, forward_string);
@@ -3680,8 +3680,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
JSWeakRef weak_ref;
while (local_weak_objects()->js_weak_refs_local.Pop(&weak_ref)) {
HeapObject target = HeapObject::cast(weak_ref.target());
- if (!target.InReadOnlySpace() &&
- !non_atomic_marking_state()->IsBlackOrGrey(target)) {
+ if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
weak_ref.set_target(ReadOnlyRoots(isolate()).undefined_value());
} else {
// The value of the JSWeakRef is alive.
@@ -3698,8 +3697,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
}
};
HeapObject target = HeapObject::cast(weak_cell.target());
- if (!target.InReadOnlySpace() &&
- !non_atomic_marking_state()->IsBlackOrGrey(target)) {
+ if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
DCHECK(target.CanBeHeldWeakly());
// The value of the WeakCell is dead.
JSFinalizationRegistry finalization_registry =
@@ -3721,8 +3719,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
}
HeapObject unregister_token = weak_cell.unregister_token();
- if (!unregister_token.InReadOnlySpace() &&
- !non_atomic_marking_state()->IsBlackOrGrey(unregister_token)) {
+ if (!non_atomic_marking_state()->IsBlackOrGrey(unregister_token)) {
DCHECK(unregister_token.CanBeHeldWeakly());
// The unregister token is dead. Remove any corresponding entries in the
// key map. Multiple WeakCell with the same token will have all their
diff --git a/src/heap/mark-compact.h b/src/heap/mark-compact.h
index c2c69af431..07f21f4355 100644
--- a/src/heap/mark-compact.h
+++ b/src/heap/mark-compact.h
@@ -422,6 +422,7 @@ class MarkCompactCollector final : public CollectorBase {
void VerifyMarking();
#ifdef VERIFY_HEAP
void VerifyMarkbitsAreClean();
+ void VerifyMarkbitsAreDirty(ReadOnlySpace* space);
void VerifyMarkbitsAreClean(PagedSpaceBase* space);
void VerifyMarkbitsAreClean(NewSpace* space);
void VerifyMarkbitsAreClean(LargeObjectSpace* space);
diff --git a/src/heap/marking-barrier-inl.h b/src/heap/marking-barrier-inl.h
index 220a9a8aa8..b1eea01acc 100644
--- a/src/heap/marking-barrier-inl.h
+++ b/src/heap/marking-barrier-inl.h
@@ -14,8 +14,6 @@ namespace v8 {
namespace internal {
void MarkingBarrier::MarkValue(HeapObject host, HeapObject value) {
- if (value.InReadOnlySpace()) return;
-
DCHECK(IsCurrentMarkingBarrier(host));
DCHECK(is_activated_ || shared_heap_worklist_.has_value());
diff --git a/src/heap/marking-visitor-inl.h b/src/heap/marking-visitor-inl.h
index b877819184..9f47e8f071 100644
--- a/src/heap/marking-visitor-inl.h
+++ b/src/heap/marking-visitor-inl.h
@@ -257,7 +257,6 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::
if (end < size) {
// The object can be pushed back onto the marking worklist only after
// progress bar was updated.
- DCHECK(ShouldMarkObject(object));
local_marking_worklists_->Push(object);
}
}
@@ -373,8 +372,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
// WeakMaps and WeakSets and therefore cannot be ephemeron keys. See also
// MarkCompactCollector::ProcessEphemeron.
DCHECK(!key.InSharedWritableHeap());
- if (key.InReadOnlySpace() ||
- concrete_visitor()->marking_state()->IsBlackOrGrey(key)) {
+ if (concrete_visitor()->marking_state()->IsBlackOrGrey(key)) {
VisitPointer(table, value_slot);
} else {
Object value_obj = table.ValueAt(i);
@@ -407,8 +405,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef(
if (weak_ref.target().IsHeapObject()) {
HeapObject target = HeapObject::cast(weak_ref.target());
SynchronizePageAccess(target);
- if (target.InReadOnlySpace() ||
- concrete_visitor()->marking_state()->IsBlackOrGrey(target)) {
+ if (concrete_visitor()->marking_state()->IsBlackOrGrey(target)) {
// Record the slot inside the JSWeakRef, since the
// VisitJSObjectSubclass above didn't visit it.
ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
@@ -435,10 +432,8 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
HeapObject unregister_token = weak_cell.relaxed_unregister_token();
SynchronizePageAccess(target);
SynchronizePageAccess(unregister_token);
- if ((target.InReadOnlySpace() ||
- concrete_visitor()->marking_state()->IsBlackOrGrey(target)) &&
- (unregister_token.InReadOnlySpace() ||
- concrete_visitor()->marking_state()->IsBlackOrGrey(unregister_token))) {
+ if (concrete_visitor()->marking_state()->IsBlackOrGrey(target) &&
+ concrete_visitor()->marking_state()->IsBlackOrGrey(unregister_token)) {
// Record the slots inside the WeakCell, since the IterateBody above
// didn't visit it.
ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
@@ -463,7 +458,6 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::MarkDescriptorArrayBlack(
DescriptorArray descriptors) {
- if (descriptors.InReadOnlySpace()) return 0;
concrete_visitor()->marking_state()->WhiteToGrey(descriptors);
if (concrete_visitor()->marking_state()->GreyToBlack(descriptors)) {
VisitMapPointer(descriptors);
diff --git a/src/heap/marking-visitor.h b/src/heap/marking-visitor.h
index 39c502e395..58463a267c 100644
--- a/src/heap/marking-visitor.h
+++ b/src/heap/marking-visitor.h
@@ -122,7 +122,6 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
}
bool ShouldMarkObject(HeapObject object) const {
- if (object.InReadOnlySpace()) return false;
if (should_mark_shared_heap_) return true;
return !object.InSharedHeap();
}
diff --git a/src/heap/object-stats.cc b/src/heap/object-stats.cc
index c04fda22c2..40e29e41db 100644
--- a/src/heap/object-stats.cc
+++ b/src/heap/object-stats.cc
@@ -846,12 +846,8 @@ bool ObjectStatsCollectorImpl::IsCowArray(FixedArrayBase array) {
}
bool ObjectStatsCollectorImpl::SameLiveness(HeapObject obj1, HeapObject obj2) {
- if (obj1.is_null() || obj2.is_null()) return true;
- auto col1 = obj1.InReadOnlySpace() ? Marking::ObjectColor::BLACK_OBJECT
- : marking_state_->Color(obj1);
- auto col2 = obj2.InReadOnlySpace() ? Marking::ObjectColor::BLACK_OBJECT
- : marking_state_->Color(obj2);
- return col1 == col2;
+ return obj1.is_null() || obj2.is_null() ||
+ marking_state_->Color(obj1) == marking_state_->Color(obj2);
}
void ObjectStatsCollectorImpl::RecordVirtualMapDetails(Map map) {
@@ -1097,7 +1093,7 @@ class ObjectStatsVisitor {
phase_(phase) {}
void Visit(HeapObject obj) {
- if (obj.InReadOnlySpace() || marking_state_->IsBlack(obj)) {
+ if (marking_state_->IsBlack(obj)) {
live_collector_->CollectStatistics(
obj, phase_, ObjectStatsCollectorImpl::CollectFieldStats::kYes);
} else {
diff --git a/src/heap/read-only-spaces.cc b/src/heap/read-only-spaces.cc
index 26db3d2f53..e85c2bca6b 100644
--- a/src/heap/read-only-spaces.cc
+++ b/src/heap/read-only-spaces.cc
@@ -338,6 +338,7 @@ ReadOnlyPage::ReadOnlyPage(Heap* heap, BaseSpace* space, size_t chunk_size,
std::move(reservation)) {
allocated_bytes_ = 0;
SetFlags(Flag::NEVER_EVACUATE | Flag::READ_ONLY_HEAP);
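+ // Read-only objects are always treated as live; set every mark bit up front
+ // so they read as black without any per-object marking.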
+ heap->non_atomic_marking_state()->bitmap(this)->MarkAllBits();
}
void ReadOnlyPage::MakeHeaderRelocatable() {
@@ -544,6 +545,11 @@ void ReadOnlySpace::FreeLinearAllocationArea() {
return;
}
+ // Clear the bits in the unused black area.
+ ReadOnlyPage* page = pages_.back();
+ heap()->marking_state()->bitmap(page)->ClearRange(
+ page->AddressToMarkbitIndex(top_), page->AddressToMarkbitIndex(limit_));
+
heap()->CreateFillerObjectAt(top_, static_cast<int>(limit_ - top_));
BasicMemoryChunk::UpdateHighWaterMark(top_);
@@ -656,9 +662,15 @@ AllocationResult ReadOnlySpace::AllocateRawUnaligned(int size_in_bytes) {
AllocationResult ReadOnlySpace::AllocateRaw(int size_in_bytes,
AllocationAlignment alignment) {
- return USE_ALLOCATION_ALIGNMENT_BOOL && alignment != kTaggedAligned
- ? AllocateRawAligned(size_in_bytes, alignment)
- : AllocateRawUnaligned(size_in_bytes);
+ AllocationResult result =
+ USE_ALLOCATION_ALIGNMENT_BOOL && alignment != kTaggedAligned
+ ? AllocateRawAligned(size_in_bytes, alignment)
+ : AllocateRawUnaligned(size_in_bytes);
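+ // Sanity check: new read-only allocations must land in the pre-blackened
+ // part of the page's marking bitmap.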
+ HeapObject heap_obj;
+ if (result.To(&heap_obj)) {
+ DCHECK(heap()->marking_state()->IsBlack(heap_obj));
+ }
+ return result;
}
size_t ReadOnlyPage::ShrinkToHighWaterMark() {
diff --git a/src/objects/heap-object.h b/src/objects/heap-object.h
index 26b5b10ee9..2e0ea5fe02 100644
--- a/src/objects/heap-object.h
+++ b/src/objects/heap-object.h
@@ -95,8 +95,6 @@ class HeapObject : public Object {
V8_INLINE bool InSharedWritableHeap() const;
- V8_INLINE bool InReadOnlySpace() const;
-
#define IS_TYPE_FUNCTION_DECL(Type) \
V8_INLINE bool Is##Type() const; \
V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const;
diff --git a/src/objects/objects-inl.h b/src/objects/objects-inl.h
index 1aca9a80a8..03bf938775 100644
--- a/src/objects/objects-inl.h
+++ b/src/objects/objects-inl.h
@@ -207,8 +207,6 @@ bool HeapObject::InSharedWritableHeap() const {
return BasicMemoryChunk::FromHeapObject(*this)->InSharedHeap();
}
-bool HeapObject::InReadOnlySpace() const { return IsReadOnlyHeapObject(*this); }
-
bool HeapObject::IsJSObjectThatCanBeTrackedAsPrototype() const {
// Do not optimize objects in the shared heap because it is not
// threadsafe. Objects in the shared heap have fixed layouts and their maps
diff --git a/test/cctest/heap/test-concurrent-marking.cc b/test/cctest/heap/test-concurrent-marking.cc
index c4e2089c8e..e57fa68f37 100644
--- a/test/cctest/heap/test-concurrent-marking.cc
+++ b/test/cctest/heap/test-concurrent-marking.cc
@@ -26,11 +26,6 @@ void PublishSegment(MarkingWorklist& worklist, HeapObject object) {
local.Publish();
}
-HeapObject ObjectForTesting(i::Isolate* isolate) {
- return HeapObject::cast(
- isolate->roots_table().slot(RootIndex::kFirstStrongRoot).load(isolate));
-}
-
TEST(ConcurrentMarking) {
if (!i::v8_flags.concurrent_marking) return;
CcTest::InitializeVM();
@@ -47,7 +42,7 @@ TEST(ConcurrentMarking) {
new ConcurrentMarking(heap, &weak_objects);
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
PublishSegment(*collector->marking_worklists()->shared(),
- ObjectForTesting(heap->isolate()));
+ ReadOnlyRoots(heap).undefined_value());
concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
concurrent_marking->Join();
delete concurrent_marking;
@@ -69,11 +64,11 @@ TEST(ConcurrentMarkingReschedule) {
new ConcurrentMarking(heap, &weak_objects);
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
PublishSegment(*collector->marking_worklists()->shared(),
- ObjectForTesting(heap->isolate()));
+ ReadOnlyRoots(heap).undefined_value());
concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
concurrent_marking->Join();
PublishSegment(*collector->marking_worklists()->shared(),
- ObjectForTesting(heap->isolate()));
+ ReadOnlyRoots(heap).undefined_value());
concurrent_marking->RescheduleJobIfNeeded(GarbageCollector::MARK_COMPACTOR);
concurrent_marking->Join();
delete concurrent_marking;
@@ -96,12 +91,12 @@ TEST(ConcurrentMarkingPreemptAndReschedule) {
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
for (int i = 0; i < 5000; i++)
PublishSegment(*collector->marking_worklists()->shared(),
- ObjectForTesting(heap->isolate()));
+ ReadOnlyRoots(heap).undefined_value());
concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
concurrent_marking->Pause();
for (int i = 0; i < 5000; i++)
PublishSegment(*collector->marking_worklists()->shared(),
- ObjectForTesting(heap->isolate()));
+ ReadOnlyRoots(heap).undefined_value());
concurrent_marking->RescheduleJobIfNeeded(GarbageCollector::MARK_COMPACTOR);
concurrent_marking->Join();
delete concurrent_marking;
diff --git a/test/cctest/heap/test-heap.cc b/test/cctest/heap/test-heap.cc
index 0ed33aa57b..3bd7942a16 100644
--- a/test/cctest/heap/test-heap.cc
+++ b/test/cctest/heap/test-heap.cc
@@ -5811,7 +5811,7 @@ TEST(Regress598319) {
// progress bar, we would fail here.
for (int i = 0; i < arr.get().length(); i++) {
HeapObject arr_value = HeapObject::cast(arr.get().get(i));
- CHECK(arr_value.InReadOnlySpace() || marking_state->IsBlack(arr_value));
+ CHECK(marking_state->IsBlack(arr_value));
}
}
diff --git a/test/unittests/heap/marking-worklist-unittest.cc b/test/unittests/heap/marking-worklist-unittest.cc
index 45bbdad4be..c6eabd85b4 100644
--- a/test/unittests/heap/marking-worklist-unittest.cc
+++ b/test/unittests/heap/marking-worklist-unittest.cc
@@ -22,10 +22,7 @@ TEST_F(MarkingWorklistTest, PushPop) {
MarkingWorklists holder;
MarkingWorklists::Local worklists(&holder);
HeapObject pushed_object =
- HeapObject::cast(i_isolate()
- ->roots_table()
- .slot(RootIndex::kFirstStrongRoot)
- .load(i_isolate()));
+ ReadOnlyRoots(i_isolate()->heap()).undefined_value();
worklists.Push(pushed_object);
HeapObject popped_object;
EXPECT_TRUE(worklists.Pop(&popped_object));
@@ -36,10 +33,7 @@ TEST_F(MarkingWorklistTest, PushPopOnHold) {
MarkingWorklists holder;
MarkingWorklists::Local worklists(&holder);
HeapObject pushed_object =
- HeapObject::cast(i_isolate()
- ->roots_table()
- .slot(RootIndex::kFirstStrongRoot)
- .load(i_isolate()));
+ ReadOnlyRoots(i_isolate()->heap()).undefined_value();
worklists.PushOnHold(pushed_object);
HeapObject popped_object;
EXPECT_TRUE(worklists.PopOnHold(&popped_object));
@@ -51,10 +45,7 @@ TEST_F(MarkingWorklistTest, MergeOnHold) {
MarkingWorklists::Local main_worklists(&holder);
MarkingWorklists::Local worker_worklists(&holder);
HeapObject pushed_object =
- HeapObject::cast(i_isolate()
- ->roots_table()
- .slot(RootIndex::kFirstStrongRoot)
- .load(i_isolate()));
+ ReadOnlyRoots(i_isolate()->heap()).undefined_value();
worker_worklists.PushOnHold(pushed_object);
worker_worklists.Publish();
main_worklists.MergeOnHold();
@@ -68,10 +59,7 @@ TEST_F(MarkingWorklistTest, ShareWorkIfGlobalPoolIsEmpty) {
MarkingWorklists::Local main_worklists(&holder);
MarkingWorklists::Local worker_worklists(&holder);
HeapObject pushed_object =
- HeapObject::cast(i_isolate()
- ->roots_table()
- .slot(RootIndex::kFirstStrongRoot)
- .load(i_isolate()));
+ ReadOnlyRoots(i_isolate()->heap()).undefined_value();
main_worklists.Push(pushed_object);
main_worklists.ShareWork();
HeapObject popped_object;
@@ -85,10 +73,7 @@ TEST_F(MarkingWorklistTest, ContextWorklistsPushPop) {
holder.CreateContextWorklists({context});
MarkingWorklists::Local worklists(&holder);
HeapObject pushed_object =
- HeapObject::cast(i_isolate()
- ->roots_table()
- .slot(RootIndex::kFirstStrongRoot)
- .load(i_isolate()));
+ ReadOnlyRoots(i_isolate()->heap()).undefined_value();
worklists.SwitchToContext(context);
worklists.Push(pushed_object);
worklists.SwitchToSharedForTesting();
@@ -104,10 +89,7 @@ TEST_F(MarkingWorklistTest, ContextWorklistsEmpty) {
holder.CreateContextWorklists({context});
MarkingWorklists::Local worklists(&holder);
HeapObject pushed_object =
- HeapObject::cast(i_isolate()
- ->roots_table()
- .slot(RootIndex::kFirstStrongRoot)
- .load(i_isolate()));
+ ReadOnlyRoots(i_isolate()->heap()).undefined_value();
worklists.SwitchToContext(context);
worklists.Push(pushed_object);
EXPECT_FALSE(worklists.IsEmpty());
@@ -128,10 +110,7 @@ TEST_F(MarkingWorklistTest, ContextWorklistCrossTask) {
MarkingWorklists::Local main_worklists(&holder);
MarkingWorklists::Local worker_worklists(&holder);
HeapObject pushed_object =
- HeapObject::cast(i_isolate()
- ->roots_table()
- .slot(RootIndex::kFirstStrongRoot)
- .load(i_isolate()));
+ ReadOnlyRoots(i_isolate()->heap()).undefined_value();
main_worklists.SwitchToContext(context1);
main_worklists.Push(pushed_object);
main_worklists.ShareWork();