[gc][static-roots] Don't access markbits on R/O pages
Markbits on read-only pages are initialized BLACK and never change. For all practical purposes they are unused. This CL prevents any access to the markbits of these pages. It is a precursor to removing them from the page entirely, to make room for static roots.

Bug: v8:13466
Change-Id: I61d3f6d9bbca750d0f34475859b34ff44f7fec1e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4212397
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Auto-Submit: Olivier Flückiger <olivf@chromium.org>
Commit-Queue: Olivier Flückiger <olivf@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85665}
This commit is contained in:
parent 7edcd83d1b
commit a2f63f1886
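Note: the pattern the CL applies throughout the diff below is to short-circuit on InReadOnlySpace() before any markbit is read or written, treating read-only objects as immortal and always marked. A minimal standalone C++ sketch of that invariant (illustrative only; Page, Object, and MarkBitmap here are hypothetical stand-ins, not V8's types):

    #include <bitset>
    #include <cstddef>

    struct MarkBitmap {
      std::bitset<1024> bits;  // one mark bit per object slot on the page
    };

    struct Page {
      bool is_read_only = false;
      MarkBitmap bitmap;  // unused (and eventually absent) on R/O pages
    };

    struct Object {
      Page* page;
      std::size_t index;
      bool InReadOnlySpace() const { return page->is_read_only; }
    };

    // Treat read-only objects as always black; only touch the bitmap for
    // objects on regular pages. This mirrors checks in the diff such as
    // `key.InReadOnlySpace() || marking_state()->IsBlackOrGrey(key)`.
    bool IsMarked(const Object& o) {
      if (o.InReadOnlySpace()) return true;  // immortal, never collected
      return o.page->bitmap.bits[o.index];
    }

    void Mark(Object& o) {
      if (o.InReadOnlySpace()) return;  // never write R/O markbits
      o.page->bitmap.bits.set(o.index);
    }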
@@ -64,7 +64,9 @@ BasicMemoryChunk::BasicMemoryChunk(Heap* heap, BaseSpace* space,
       high_water_mark_(area_start - reinterpret_cast<Address>(this)),
       owner_(space),
       reservation_(std::move(reservation)) {
-  marking_bitmap<AccessMode::NON_ATOMIC>()->Clear();
+  if (space->identity() != RO_SPACE) {
+    marking_bitmap<AccessMode::NON_ATOMIC>()->Clear();
+  }
 }
 
 bool BasicMemoryChunk::InOldSpace() const {
@@ -306,6 +306,8 @@ class BasicMemoryChunk {
 
   template <AccessMode mode>
   ConcurrentBitmap<mode>* marking_bitmap() const {
+    // TODO(olivf) Change to DCHECK once we have some coverage
+    CHECK(!InReadOnlySpace());
     return static_cast<ConcurrentBitmap<mode>*>(
         Bitmap::FromAddress(address() + kMarkingBitmapOffset));
   }
@@ -263,7 +263,7 @@ class YoungGenerationConcurrentMarkingVisitor final
       marking_state_(heap->isolate(), memory_chunk_data) {}
 
   bool ShouldMarkObject(HeapObject object) const {
-    return !object.InSharedHeap();
+    return !object.InSharedHeap() && !object.InReadOnlySpace();
   }
 
   void SynchronizePageAccess(HeapObject heap_object) {
@@ -787,6 +787,7 @@ void ConcurrentMarking::RunMajor(JobDelegate* delegate,
           done = true;
           break;
         }
+        DCHECK(!object.InReadOnlySpace());
         objects_processed++;
 
         Address new_space_top = kNullAddress;
@@ -86,6 +86,7 @@ void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
 }
 
 bool MarkCompactCollector::ShouldMarkObject(HeapObject object) const {
+  if (object.InReadOnlySpace()) return false;
   if (V8_LIKELY(!uses_shared_heap_)) return true;
   if (v8_flags.shared_space) {
     if (is_shared_heap_isolate_) return true;
@@ -293,7 +293,8 @@ class FullMarkingVerifier : public MarkingVerifier {
       CHECK(heap_->SharedHeapContains(heap_object));
     }
 
-    CHECK(marking_state_->IsBlack(heap_object));
+    CHECK(heap_object.InReadOnlySpace() ||
+          marking_state_->IsBlack(heap_object));
   }
 
   V8_INLINE bool ShouldVerifyObject(HeapObject heap_object) {
@@ -626,14 +627,6 @@ void MarkCompactCollector::CollectGarbage() {
 }
 
 #ifdef VERIFY_HEAP
-void MarkCompactCollector::VerifyMarkbitsAreDirty(ReadOnlySpace* space) {
-  ReadOnlyHeapObjectIterator iterator(space);
-  for (HeapObject object = iterator.Next(); !object.is_null();
-       object = iterator.Next()) {
-    CHECK(non_atomic_marking_state()->IsBlack(object));
-  }
-}
-
 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpaceBase* space) {
   for (Page* p : *space) {
     CHECK(non_atomic_marking_state()->bitmap(p)->IsClean());
@@ -667,9 +660,6 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
   VerifyMarkbitsAreClean(heap_->old_space());
   VerifyMarkbitsAreClean(heap_->code_space());
   VerifyMarkbitsAreClean(heap_->new_space());
-  // Read-only space should always be black since we never collect any objects
-  // in it or linked from it.
-  VerifyMarkbitsAreDirty(heap_->read_only_space());
   VerifyMarkbitsAreClean(heap_->lo_space());
   VerifyMarkbitsAreClean(heap_->code_lo_space());
   VerifyMarkbitsAreClean(heap_->new_lo_space());
@@ -1338,7 +1328,8 @@ class InternalizedStringTableCleaner final : public RootVisitor {
       if (o.IsHeapObject()) {
         HeapObject heap_object = HeapObject::cast(o);
         DCHECK(!Heap::InYoungGeneration(heap_object));
-        if (marking_state->IsWhite(heap_object)) {
+        if (!heap_object.InReadOnlySpace() &&
+            marking_state->IsWhite(heap_object)) {
          pointers_removed_++;
          // Set the entry to the_hole_value (as deleted).
          p.store(StringTable::deleted_element());
@@ -2729,7 +2720,8 @@ bool ShouldRetainMap(MarkingState* marking_state, Map map, int age) {
   }
   Object constructor = map.GetConstructor();
   if (!constructor.IsHeapObject() ||
-      marking_state->IsWhite(HeapObject::cast(constructor))) {
+      (!HeapObject::cast(constructor).InReadOnlySpace() &&
+       marking_state->IsWhite(HeapObject::cast(constructor)))) {
     // The constructor is dead, no new objects with this map can
     // be created. Do not retain this map.
     return false;
@@ -2768,7 +2760,8 @@ void MarkCompactCollector::RetainMaps() {
     }
     Object prototype = map.prototype();
     if (age > 0 && prototype.IsHeapObject() &&
-        marking_state()->IsWhite(HeapObject::cast(prototype))) {
+        (!HeapObject::cast(prototype).InReadOnlySpace() &&
+         marking_state()->IsWhite(HeapObject::cast(prototype)))) {
       // The prototype is not marked, age the map.
       new_age = age - 1;
     } else {
@@ -2980,7 +2973,10 @@ class StringForwardingTableCleaner final {
     String original_string = String::cast(original);
     if (marking_state_->IsBlack(original_string)) {
       Object forward = record->ForwardStringObjectOrHash(isolate_);
-      if (!forward.IsHeapObject()) return;
+      if (!forward.IsHeapObject() ||
+          HeapObject::cast(forward).InReadOnlySpace()) {
+        return;
+      }
       marking_state_->WhiteToBlack(HeapObject::cast(forward));
     } else {
       DisposeExternalResource(record);
@@ -3036,11 +3032,15 @@ class StringForwardingTableCleaner final {
       StringForwardingTable::Record* record) {
     if (original_string.IsInternalizedString()) return;
     Object forward = record->ForwardStringObjectOrHash(isolate_);
-    if (!forward.IsHeapObject()) return;
+    if (!forward.IsHeapObject()) {
+      return;
+    }
     String forward_string = String::cast(forward);
 
     // Mark the forwarded string to keep it alive.
-    marking_state_->WhiteToBlack(forward_string);
+    if (!forward_string.InReadOnlySpace()) {
+      marking_state_->WhiteToBlack(forward_string);
+    }
     // Transition the original string to a ThinString and override the
     // forwarding index with the correct hash.
     original_string.MakeThin(isolate_, forward_string);
@@ -3679,7 +3679,8 @@ void MarkCompactCollector::ClearJSWeakRefs() {
   JSWeakRef weak_ref;
   while (local_weak_objects()->js_weak_refs_local.Pop(&weak_ref)) {
     HeapObject target = HeapObject::cast(weak_ref.target());
-    if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
+    if (!target.InReadOnlySpace() &&
+        !non_atomic_marking_state()->IsBlackOrGrey(target)) {
       weak_ref.set_target(ReadOnlyRoots(isolate()).undefined_value());
     } else {
       // The value of the JSWeakRef is alive.
@@ -3696,7 +3697,8 @@ void MarkCompactCollector::ClearJSWeakRefs() {
      }
    };
    HeapObject target = HeapObject::cast(weak_cell.target());
-    if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
+    if (!target.InReadOnlySpace() &&
+        !non_atomic_marking_state()->IsBlackOrGrey(target)) {
      DCHECK(target.CanBeHeldWeakly());
      // The value of the WeakCell is dead.
      JSFinalizationRegistry finalization_registry =
@@ -3718,7 +3720,8 @@ void MarkCompactCollector::ClearJSWeakRefs() {
    }
 
    HeapObject unregister_token = weak_cell.unregister_token();
-    if (!non_atomic_marking_state()->IsBlackOrGrey(unregister_token)) {
+    if (!unregister_token.InReadOnlySpace() &&
+        !non_atomic_marking_state()->IsBlackOrGrey(unregister_token)) {
      DCHECK(unregister_token.CanBeHeldWeakly());
      // The unregister token is dead. Remove any corresponding entries in the
      // key map. Multiple WeakCell with the same token will have all their
@@ -420,7 +420,6 @@ class MarkCompactCollector final : public CollectorBase {
   void VerifyMarking();
 #ifdef VERIFY_HEAP
   void VerifyMarkbitsAreClean();
-  void VerifyMarkbitsAreDirty(ReadOnlySpace* space);
   void VerifyMarkbitsAreClean(PagedSpaceBase* space);
   void VerifyMarkbitsAreClean(NewSpace* space);
   void VerifyMarkbitsAreClean(LargeObjectSpace* space);
@@ -14,6 +14,8 @@ namespace v8 {
 namespace internal {
 
 void MarkingBarrier::MarkValue(HeapObject host, HeapObject value) {
+  if (value.InReadOnlySpace()) return;
+
   DCHECK(IsCurrentMarkingBarrier(host));
   DCHECK(is_activated_ || shared_heap_worklist_.has_value());
 
@@ -257,6 +257,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::
   if (end < size) {
     // The object can be pushed back onto the marking worklist only after
     // progress bar was updated.
+    DCHECK(ShouldMarkObject(object));
     local_marking_worklists_->Push(object);
   }
 }
@@ -372,7 +373,8 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
     // WeakMaps and WeakSets and therefore cannot be ephemeron keys. See also
     // MarkCompactCollector::ProcessEphemeron.
     DCHECK(!key.InSharedWritableHeap());
-    if (concrete_visitor()->marking_state()->IsBlackOrGrey(key)) {
+    if (key.InReadOnlySpace() ||
+        concrete_visitor()->marking_state()->IsBlackOrGrey(key)) {
      VisitPointer(table, value_slot);
    } else {
      Object value_obj = table.ValueAt(i);
@@ -405,7 +407,8 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef(
   if (weak_ref.target().IsHeapObject()) {
     HeapObject target = HeapObject::cast(weak_ref.target());
     SynchronizePageAccess(target);
-    if (concrete_visitor()->marking_state()->IsBlackOrGrey(target)) {
+    if (target.InReadOnlySpace() ||
+        concrete_visitor()->marking_state()->IsBlackOrGrey(target)) {
      // Record the slot inside the JSWeakRef, since the
      // VisitJSObjectSubclass above didn't visit it.
      ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
@@ -432,8 +435,10 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
   HeapObject unregister_token = weak_cell.relaxed_unregister_token();
   SynchronizePageAccess(target);
   SynchronizePageAccess(unregister_token);
-  if (concrete_visitor()->marking_state()->IsBlackOrGrey(target) &&
-      concrete_visitor()->marking_state()->IsBlackOrGrey(unregister_token)) {
+  if ((target.InReadOnlySpace() ||
+       concrete_visitor()->marking_state()->IsBlackOrGrey(target)) &&
+      (unregister_token.InReadOnlySpace() ||
+       concrete_visitor()->marking_state()->IsBlackOrGrey(unregister_token))) {
     // Record the slots inside the WeakCell, since the IterateBody above
     // didn't visit it.
     ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
@@ -458,6 +463,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
 template <typename ConcreteVisitor, typename MarkingState>
 int MarkingVisitorBase<ConcreteVisitor, MarkingState>::MarkDescriptorArrayBlack(
     DescriptorArray descriptors) {
+  if (descriptors.InReadOnlySpace()) return 0;
   concrete_visitor()->marking_state()->WhiteToGrey(descriptors);
   if (concrete_visitor()->marking_state()->GreyToBlack(descriptors)) {
     VisitMapPointer(descriptors);
@@ -122,6 +122,7 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
   }
 
   bool ShouldMarkObject(HeapObject object) const {
+    if (object.InReadOnlySpace()) return false;
     if (should_mark_shared_heap_) return true;
     return !object.InSharedHeap();
   }
@@ -846,8 +846,12 @@ bool ObjectStatsCollectorImpl::IsCowArray(FixedArrayBase array) {
 }
 
 bool ObjectStatsCollectorImpl::SameLiveness(HeapObject obj1, HeapObject obj2) {
-  return obj1.is_null() || obj2.is_null() ||
-         marking_state_->Color(obj1) == marking_state_->Color(obj2);
+  if (obj1.is_null() || obj2.is_null()) return true;
+  auto col1 = obj1.InReadOnlySpace() ? Marking::ObjectColor::BLACK_OBJECT
+                                     : marking_state_->Color(obj1);
+  auto col2 = obj2.InReadOnlySpace() ? Marking::ObjectColor::BLACK_OBJECT
+                                     : marking_state_->Color(obj2);
+  return col1 == col2;
 }
 
 void ObjectStatsCollectorImpl::RecordVirtualMapDetails(Map map) {
@@ -1093,7 +1097,7 @@ class ObjectStatsVisitor {
         phase_(phase) {}
 
   void Visit(HeapObject obj) {
-    if (marking_state_->IsBlack(obj)) {
+    if (obj.InReadOnlySpace() || marking_state_->IsBlack(obj)) {
       live_collector_->CollectStatistics(
           obj, phase_, ObjectStatsCollectorImpl::CollectFieldStats::kYes);
     } else {
@@ -338,7 +338,6 @@ ReadOnlyPage::ReadOnlyPage(Heap* heap, BaseSpace* space, size_t chunk_size,
                      std::move(reservation)) {
   allocated_bytes_ = 0;
   SetFlags(Flag::NEVER_EVACUATE | Flag::READ_ONLY_HEAP);
-  heap->non_atomic_marking_state()->bitmap(this)->MarkAllBits();
 }
 
 void ReadOnlyPage::MakeHeaderRelocatable() {
@@ -545,11 +544,6 @@ void ReadOnlySpace::FreeLinearAllocationArea() {
     return;
   }
 
-  // Clear the bits in the unused black area.
-  ReadOnlyPage* page = pages_.back();
-  heap()->marking_state()->bitmap(page)->ClearRange(
-      page->AddressToMarkbitIndex(top_), page->AddressToMarkbitIndex(limit_));
-
   heap()->CreateFillerObjectAt(top_, static_cast<int>(limit_ - top_));
 
   BasicMemoryChunk::UpdateHighWaterMark(top_);
@@ -662,15 +656,9 @@ AllocationResult ReadOnlySpace::AllocateRawUnaligned(int size_in_bytes) {
 
 AllocationResult ReadOnlySpace::AllocateRaw(int size_in_bytes,
                                             AllocationAlignment alignment) {
-  AllocationResult result =
-      USE_ALLOCATION_ALIGNMENT_BOOL && alignment != kTaggedAligned
-          ? AllocateRawAligned(size_in_bytes, alignment)
-          : AllocateRawUnaligned(size_in_bytes);
-  HeapObject heap_obj;
-  if (result.To(&heap_obj)) {
-    DCHECK(heap()->marking_state()->IsBlack(heap_obj));
-  }
-  return result;
+  return USE_ALLOCATION_ALIGNMENT_BOOL && alignment != kTaggedAligned
+             ? AllocateRawAligned(size_in_bytes, alignment)
+             : AllocateRawUnaligned(size_in_bytes);
 }
 
 size_t ReadOnlyPage::ShrinkToHighWaterMark() {
@@ -95,6 +95,8 @@ class HeapObject : public Object {
 
   V8_INLINE bool InSharedWritableHeap() const;
 
+  V8_INLINE bool InReadOnlySpace() const;
+
 #define IS_TYPE_FUNCTION_DECL(Type) \
   V8_INLINE bool Is##Type() const;  \
   V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const;
@@ -207,6 +207,8 @@ bool HeapObject::InSharedWritableHeap() const {
   return BasicMemoryChunk::FromHeapObject(*this)->InSharedHeap();
 }
 
+bool HeapObject::InReadOnlySpace() const { return IsReadOnlyHeapObject(*this); }
+
 bool HeapObject::IsJSObjectThatCanBeTrackedAsPrototype() const {
   // Do not optimize objects in the shared heap because it is not
   // threadsafe. Objects in the shared heap have fixed layouts and their maps
@@ -26,6 +26,11 @@ void PublishSegment(MarkingWorklist& worklist, HeapObject object) {
   local.Publish();
 }
 
+HeapObject ObjectForTesting(i::Isolate* isolate) {
+  return HeapObject::cast(
+      isolate->roots_table().slot(RootIndex::kFirstStrongRoot).load(isolate));
+}
+
 TEST(ConcurrentMarking) {
   if (!i::v8_flags.concurrent_marking) return;
   CcTest::InitializeVM();
@@ -42,7 +47,7 @@ TEST(ConcurrentMarking) {
       new ConcurrentMarking(heap, &weak_objects);
   MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
   PublishSegment(*collector->marking_worklists()->shared(),
-                 ReadOnlyRoots(heap).undefined_value());
+                 ObjectForTesting(heap->isolate()));
   concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Join();
   delete concurrent_marking;
@@ -64,11 +69,11 @@ TEST(ConcurrentMarkingReschedule) {
       new ConcurrentMarking(heap, &weak_objects);
   MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
   PublishSegment(*collector->marking_worklists()->shared(),
-                 ReadOnlyRoots(heap).undefined_value());
+                 ObjectForTesting(heap->isolate()));
   concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Join();
   PublishSegment(*collector->marking_worklists()->shared(),
-                 ReadOnlyRoots(heap).undefined_value());
+                 ObjectForTesting(heap->isolate()));
   concurrent_marking->RescheduleJobIfNeeded(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Join();
   delete concurrent_marking;
@@ -91,12 +96,12 @@ TEST(ConcurrentMarkingPreemptAndReschedule) {
   MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
   for (int i = 0; i < 5000; i++)
     PublishSegment(*collector->marking_worklists()->shared(),
-                   ReadOnlyRoots(heap).undefined_value());
+                   ObjectForTesting(heap->isolate()));
   concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Pause();
   for (int i = 0; i < 5000; i++)
     PublishSegment(*collector->marking_worklists()->shared(),
-                   ReadOnlyRoots(heap).undefined_value());
+                   ObjectForTesting(heap->isolate()));
   concurrent_marking->RescheduleJobIfNeeded(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Join();
   delete concurrent_marking;
|
@ -5811,7 +5811,7 @@ TEST(Regress598319) {
|
||||
// progress bar, we would fail here.
|
||||
for (int i = 0; i < arr.get().length(); i++) {
|
||||
HeapObject arr_value = HeapObject::cast(arr.get().get(i));
|
||||
CHECK(marking_state->IsBlack(arr_value));
|
||||
CHECK(arr_value.InReadOnlySpace() || marking_state->IsBlack(arr_value));
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -22,7 +22,10 @@ TEST_F(MarkingWorklistTest, PushPop) {
   MarkingWorklists holder;
   MarkingWorklists::Local worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   worklists.Push(pushed_object);
   HeapObject popped_object;
   EXPECT_TRUE(worklists.Pop(&popped_object));
@@ -33,7 +36,10 @@ TEST_F(MarkingWorklistTest, PushPopOnHold) {
   MarkingWorklists holder;
   MarkingWorklists::Local worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   worklists.PushOnHold(pushed_object);
   HeapObject popped_object;
   EXPECT_TRUE(worklists.PopOnHold(&popped_object));
@@ -45,7 +51,10 @@ TEST_F(MarkingWorklistTest, MergeOnHold) {
   MarkingWorklists::Local main_worklists(&holder);
   MarkingWorklists::Local worker_worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   worker_worklists.PushOnHold(pushed_object);
   worker_worklists.Publish();
   main_worklists.MergeOnHold();
@@ -59,7 +68,10 @@ TEST_F(MarkingWorklistTest, ShareWorkIfGlobalPoolIsEmpty) {
   MarkingWorklists::Local main_worklists(&holder);
   MarkingWorklists::Local worker_worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   main_worklists.Push(pushed_object);
   main_worklists.ShareWork();
   HeapObject popped_object;
@@ -73,7 +85,10 @@ TEST_F(MarkingWorklistTest, ContextWorklistsPushPop) {
   holder.CreateContextWorklists({context});
   MarkingWorklists::Local worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   worklists.SwitchToContext(context);
   worklists.Push(pushed_object);
   worklists.SwitchToSharedForTesting();
@@ -89,7 +104,10 @@ TEST_F(MarkingWorklistTest, ContextWorklistsEmpty) {
   holder.CreateContextWorklists({context});
   MarkingWorklists::Local worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   worklists.SwitchToContext(context);
   worklists.Push(pushed_object);
   EXPECT_FALSE(worklists.IsEmpty());
@@ -110,7 +128,10 @@ TEST_F(MarkingWorklistTest, ContextWorklistCrossTask) {
   MarkingWorklists::Local main_worklists(&holder);
   MarkingWorklists::Local worker_worklists(&holder);
   HeapObject pushed_object =
-      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
+      HeapObject::cast(i_isolate()
+                           ->roots_table()
+                           .slot(RootIndex::kFirstStrongRoot)
+                           .load(i_isolate()));
   main_worklists.SwitchToContext(context1);
   main_worklists.Push(pushed_object);
   main_worklists.ShareWork();