Revert "[gc][static-roots] Don't access markbits on R/O pages"
This reverts commit a2f63f1886.
Reason for revert: It breaks Chromium integration tests: https://ci.chromium.org/ui/p/v8/builders/ci/V8%20Blink%20Linux/22574/overview
Original change's description:
> [gc][static-roots] Don't access markbits on R/O pages
>
> Markbits on read only pages are initialized BLACK and never change. For
> any practical purposes they are unused.
>
> This CL prevents any access to the markbits of these pages. This is a
> precursor to removing them entirely from the page to make room for
> static roots.
>
> Bug: v8:13466
> Change-Id: I61d3f6d9bbca750d0f34475859b34ff44f7fec1e
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4212397
> Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
> Auto-Submit: Olivier Flückiger <olivf@chromium.org>
> Commit-Queue: Olivier Flückiger <olivf@chromium.org>
> Cr-Commit-Position: refs/heads/main@{#85665}
Bug: v8:13466
Change-Id: Icda4be38da984fdefd40301238c361a86f912141
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4225673
Bot-Commit: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Auto-Submit: Ilya Rezvov <irezvov@chromium.org>
Owners-Override: Ilya Rezvov <irezvov@chromium.org>
Commit-Queue: Ilya Rezvov <irezvov@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85695}
Parent: 908fc3c89c
Commit: a37302d8f7
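
Throughout the hunks below, the recurring edit is the removal of an InReadOnlySpace() short-circuit in front of a markbit query. For context, a minimal sketch of the two shapes of liveness check (simplified, hypothetical helpers, not the exact V8 code):

    // With the reverted CL applied: read-only objects were treated as
    // implicitly live, and their markbits were never consulted.
    bool IsLiveWithCL(HeapObject object, MarkingState* state) {
      if (object.InReadOnlySpace()) return true;
      return state->IsBlackOrGrey(object);
    }

    // After this revert: read-only pages again have every markbit set to
    // BLACK when the page is created, so the plain bitmap check suffices.
    bool IsLiveReverted(HeapObject object, MarkingState* state) {
      return state->IsBlackOrGrey(object);
    }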
@@ -64,9 +64,7 @@ BasicMemoryChunk::BasicMemoryChunk(Heap* heap, BaseSpace* space,
       high_water_mark_(area_start - reinterpret_cast<Address>(this)),
       owner_(space),
       reservation_(std::move(reservation)) {
-  if (space->identity() != RO_SPACE) {
-    marking_bitmap<AccessMode::NON_ATOMIC>()->Clear();
-  }
+  marking_bitmap<AccessMode::NON_ATOMIC>()->Clear();
 }
 
 bool BasicMemoryChunk::InOldSpace() const {
@@ -306,8 +306,6 @@ class BasicMemoryChunk {
 
   template <AccessMode mode>
   ConcurrentBitmap<mode>* marking_bitmap() const {
-    // TODO(olivf) Change to DCHECK once we have some coverage
-    CHECK(!InReadOnlySpace());
     return static_cast<ConcurrentBitmap<mode>*>(
         Bitmap::FromAddress(address() + kMarkingBitmapOffset));
   }
@@ -263,7 +263,7 @@ class YoungGenerationConcurrentMarkingVisitor final
       marking_state_(heap->isolate(), memory_chunk_data) {}
 
   bool ShouldMarkObject(HeapObject object) const {
-    return !object.InSharedHeap() && !object.InReadOnlySpace();
+    return !object.InSharedHeap();
   }
 
   void SynchronizePageAccess(HeapObject heap_object) {
@@ -787,7 +787,6 @@ void ConcurrentMarking::RunMajor(JobDelegate* delegate,
         done = true;
         break;
       }
-      DCHECK(!object.InReadOnlySpace());
       objects_processed++;
 
       Address new_space_top = kNullAddress;
@@ -86,7 +86,6 @@ void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
 }
 
 bool MarkCompactCollector::ShouldMarkObject(HeapObject object) const {
-  if (object.InReadOnlySpace()) return false;
   if (V8_LIKELY(!uses_shared_heap_)) return true;
   if (v8_flags.shared_space) {
     if (is_shared_heap_isolate_) return true;
@@ -293,8 +293,7 @@ class FullMarkingVerifier : public MarkingVerifier {
       CHECK(heap_->SharedHeapContains(heap_object));
     }
 
-    CHECK(heap_object.InReadOnlySpace() ||
-          marking_state_->IsBlack(heap_object));
+    CHECK(marking_state_->IsBlack(heap_object));
   }
 
   V8_INLINE bool ShouldVerifyObject(HeapObject heap_object) {
@@ -627,6 +626,14 @@ void MarkCompactCollector::CollectGarbage() {
 }
 
 #ifdef VERIFY_HEAP
+void MarkCompactCollector::VerifyMarkbitsAreDirty(ReadOnlySpace* space) {
+  ReadOnlyHeapObjectIterator iterator(space);
+  for (HeapObject object = iterator.Next(); !object.is_null();
+       object = iterator.Next()) {
+    CHECK(non_atomic_marking_state()->IsBlack(object));
+  }
+}
+
 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpaceBase* space) {
   for (Page* p : *space) {
     CHECK(non_atomic_marking_state()->bitmap(p)->IsClean());
@@ -660,6 +667,9 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
   VerifyMarkbitsAreClean(heap_->old_space());
   VerifyMarkbitsAreClean(heap_->code_space());
   VerifyMarkbitsAreClean(heap_->new_space());
+  // Read-only space should always be black since we never collect any objects
+  // in it or linked from it.
+  VerifyMarkbitsAreDirty(heap_->read_only_space());
   VerifyMarkbitsAreClean(heap_->lo_space());
   VerifyMarkbitsAreClean(heap_->code_lo_space());
   VerifyMarkbitsAreClean(heap_->new_lo_space());
@@ -1328,8 +1338,7 @@ class InternalizedStringTableCleaner final : public RootVisitor {
       if (o.IsHeapObject()) {
         HeapObject heap_object = HeapObject::cast(o);
         DCHECK(!Heap::InYoungGeneration(heap_object));
-        if (!heap_object.InReadOnlySpace() &&
-            marking_state->IsWhite(heap_object)) {
+        if (marking_state->IsWhite(heap_object)) {
           pointers_removed_++;
           // Set the entry to the_hole_value (as deleted).
           p.store(StringTable::deleted_element());
@@ -2721,8 +2730,7 @@ bool ShouldRetainMap(MarkingState* marking_state, Map map, int age) {
   }
   Object constructor = map.GetConstructor();
   if (!constructor.IsHeapObject() ||
-      (!HeapObject::cast(constructor).InReadOnlySpace() &&
-       marking_state->IsWhite(HeapObject::cast(constructor)))) {
+      marking_state->IsWhite(HeapObject::cast(constructor))) {
     // The constructor is dead, no new objects with this map can
     // be created. Do not retain this map.
     return false;
@@ -2761,8 +2769,7 @@ void MarkCompactCollector::RetainMaps() {
     }
     Object prototype = map.prototype();
     if (age > 0 && prototype.IsHeapObject() &&
-        (!HeapObject::cast(prototype).InReadOnlySpace() &&
-         marking_state()->IsWhite(HeapObject::cast(prototype)))) {
+        marking_state()->IsWhite(HeapObject::cast(prototype))) {
       // The prototype is not marked, age the map.
       new_age = age - 1;
     } else {
@@ -2974,10 +2981,7 @@ class StringForwardingTableCleaner final {
     String original_string = String::cast(original);
     if (marking_state_->IsBlack(original_string)) {
       Object forward = record->ForwardStringObjectOrHash(isolate_);
-      if (!forward.IsHeapObject() ||
-          HeapObject::cast(forward).InReadOnlySpace()) {
-        return;
-      }
+      if (!forward.IsHeapObject()) return;
       marking_state_->WhiteToBlack(HeapObject::cast(forward));
     } else {
       DisposeExternalResource(record);
@@ -3033,15 +3037,11 @@ class StringForwardingTableCleaner final {
                              StringForwardingTable::Record* record) {
     if (original_string.IsInternalizedString()) return;
     Object forward = record->ForwardStringObjectOrHash(isolate_);
-    if (!forward.IsHeapObject()) {
-      return;
-    }
+    if (!forward.IsHeapObject()) return;
     String forward_string = String::cast(forward);
 
     // Mark the forwarded string to keep it alive.
-    if (!forward_string.InReadOnlySpace()) {
-      marking_state_->WhiteToBlack(forward_string);
-    }
+    marking_state_->WhiteToBlack(forward_string);
     // Transition the original string to a ThinString and override the
     // forwarding index with the correct hash.
     original_string.MakeThin(isolate_, forward_string);
@@ -3680,8 +3680,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
   JSWeakRef weak_ref;
   while (local_weak_objects()->js_weak_refs_local.Pop(&weak_ref)) {
     HeapObject target = HeapObject::cast(weak_ref.target());
-    if (!target.InReadOnlySpace() &&
-        !non_atomic_marking_state()->IsBlackOrGrey(target)) {
+    if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
       weak_ref.set_target(ReadOnlyRoots(isolate()).undefined_value());
     } else {
       // The value of the JSWeakRef is alive.
@@ -3698,8 +3697,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
      }
    };
    HeapObject target = HeapObject::cast(weak_cell.target());
-   if (!target.InReadOnlySpace() &&
-       !non_atomic_marking_state()->IsBlackOrGrey(target)) {
+   if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
      DCHECK(target.CanBeHeldWeakly());
      // The value of the WeakCell is dead.
      JSFinalizationRegistry finalization_registry =
@@ -3721,8 +3719,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
    }
 
    HeapObject unregister_token = weak_cell.unregister_token();
-   if (!unregister_token.InReadOnlySpace() &&
-       !non_atomic_marking_state()->IsBlackOrGrey(unregister_token)) {
+   if (!non_atomic_marking_state()->IsBlackOrGrey(unregister_token)) {
      DCHECK(unregister_token.CanBeHeldWeakly());
      // The unregister token is dead. Remove any corresponding entries in the
      // key map. Multiple WeakCell with the same token will have all their
@@ -422,6 +422,7 @@ class MarkCompactCollector final : public CollectorBase {
   void VerifyMarking();
 #ifdef VERIFY_HEAP
   void VerifyMarkbitsAreClean();
+  void VerifyMarkbitsAreDirty(ReadOnlySpace* space);
   void VerifyMarkbitsAreClean(PagedSpaceBase* space);
   void VerifyMarkbitsAreClean(NewSpace* space);
   void VerifyMarkbitsAreClean(LargeObjectSpace* space);
@@ -14,8 +14,6 @@ namespace v8 {
 namespace internal {
 
 void MarkingBarrier::MarkValue(HeapObject host, HeapObject value) {
-  if (value.InReadOnlySpace()) return;
-
   DCHECK(IsCurrentMarkingBarrier(host));
   DCHECK(is_activated_ || shared_heap_worklist_.has_value());
 
@@ -257,7 +257,6 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::
     if (end < size) {
       // The object can be pushed back onto the marking worklist only after
       // progress bar was updated.
-      DCHECK(ShouldMarkObject(object));
       local_marking_worklists_->Push(object);
     }
   }
@@ -373,8 +372,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
     // WeakMaps and WeakSets and therefore cannot be ephemeron keys. See also
     // MarkCompactCollector::ProcessEphemeron.
     DCHECK(!key.InSharedWritableHeap());
-    if (key.InReadOnlySpace() ||
-        concrete_visitor()->marking_state()->IsBlackOrGrey(key)) {
+    if (concrete_visitor()->marking_state()->IsBlackOrGrey(key)) {
       VisitPointer(table, value_slot);
     } else {
       Object value_obj = table.ValueAt(i);
@@ -407,8 +405,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef(
   if (weak_ref.target().IsHeapObject()) {
     HeapObject target = HeapObject::cast(weak_ref.target());
     SynchronizePageAccess(target);
-    if (target.InReadOnlySpace() ||
-        concrete_visitor()->marking_state()->IsBlackOrGrey(target)) {
+    if (concrete_visitor()->marking_state()->IsBlackOrGrey(target)) {
       // Record the slot inside the JSWeakRef, since the
       // VisitJSObjectSubclass above didn't visit it.
       ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
@@ -435,10 +432,8 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
   HeapObject unregister_token = weak_cell.relaxed_unregister_token();
   SynchronizePageAccess(target);
   SynchronizePageAccess(unregister_token);
-  if ((target.InReadOnlySpace() ||
-       concrete_visitor()->marking_state()->IsBlackOrGrey(target)) &&
-      (unregister_token.InReadOnlySpace() ||
-       concrete_visitor()->marking_state()->IsBlackOrGrey(unregister_token))) {
+  if (concrete_visitor()->marking_state()->IsBlackOrGrey(target) &&
+      concrete_visitor()->marking_state()->IsBlackOrGrey(unregister_token)) {
     // Record the slots inside the WeakCell, since the IterateBody above
     // didn't visit it.
     ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
@@ -463,7 +458,6 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
 template <typename ConcreteVisitor, typename MarkingState>
 int MarkingVisitorBase<ConcreteVisitor, MarkingState>::MarkDescriptorArrayBlack(
     DescriptorArray descriptors) {
-  if (descriptors.InReadOnlySpace()) return 0;
   concrete_visitor()->marking_state()->WhiteToGrey(descriptors);
   if (concrete_visitor()->marking_state()->GreyToBlack(descriptors)) {
     VisitMapPointer(descriptors);
@@ -122,7 +122,6 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
   }
 
   bool ShouldMarkObject(HeapObject object) const {
-    if (object.InReadOnlySpace()) return false;
     if (should_mark_shared_heap_) return true;
     return !object.InSharedHeap();
   }
@@ -846,12 +846,8 @@ bool ObjectStatsCollectorImpl::IsCowArray(FixedArrayBase array) {
 }
 
 bool ObjectStatsCollectorImpl::SameLiveness(HeapObject obj1, HeapObject obj2) {
-  if (obj1.is_null() || obj2.is_null()) return true;
-  auto col1 = obj1.InReadOnlySpace() ? Marking::ObjectColor::BLACK_OBJECT
-                                     : marking_state_->Color(obj1);
-  auto col2 = obj2.InReadOnlySpace() ? Marking::ObjectColor::BLACK_OBJECT
-                                     : marking_state_->Color(obj2);
-  return col1 == col2;
+  return obj1.is_null() || obj2.is_null() ||
+         marking_state_->Color(obj1) == marking_state_->Color(obj2);
 }
 
 void ObjectStatsCollectorImpl::RecordVirtualMapDetails(Map map) {
@@ -1097,7 +1093,7 @@ class ObjectStatsVisitor {
         phase_(phase) {}
 
   void Visit(HeapObject obj) {
-    if (obj.InReadOnlySpace() || marking_state_->IsBlack(obj)) {
+    if (marking_state_->IsBlack(obj)) {
       live_collector_->CollectStatistics(
           obj, phase_, ObjectStatsCollectorImpl::CollectFieldStats::kYes);
     } else {
@@ -338,6 +338,7 @@ ReadOnlyPage::ReadOnlyPage(Heap* heap, BaseSpace* space, size_t chunk_size,
                       std::move(reservation)) {
   allocated_bytes_ = 0;
   SetFlags(Flag::NEVER_EVACUATE | Flag::READ_ONLY_HEAP);
+  heap->non_atomic_marking_state()->bitmap(this)->MarkAllBits();
 }
 
 void ReadOnlyPage::MakeHeaderRelocatable() {
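
The restored MarkAllBits() call above is what makes every read-only object read as BLACK from page creation onward. Its effect amounts to setting every cell of the page's mark bitmap; a self-contained sketch (the real ConcurrentBitmap has more machinery):

    #include <cstddef>
    #include <cstdint>

    // Sketch: set every bit in the bitmap's cells, so any object whose
    // bits fall on this page is considered marked (black).
    void MarkAllBitsSketch(uint32_t* cells, size_t cell_count) {
      for (size_t i = 0; i < cell_count; ++i) cells[i] = ~uint32_t{0};
    }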
@@ -544,6 +545,11 @@ void ReadOnlySpace::FreeLinearAllocationArea() {
     return;
   }
 
+  // Clear the bits in the unused black area.
+  ReadOnlyPage* page = pages_.back();
+  heap()->marking_state()->bitmap(page)->ClearRange(
+      page->AddressToMarkbitIndex(top_), page->AddressToMarkbitIndex(limit_));
+
   heap()->CreateFillerObjectAt(top_, static_cast<int>(limit_ - top_));
 
   BasicMemoryChunk::UpdateHighWaterMark(top_);
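
Because the page was blackened wholesale by MarkAllBits(), the unused tail between top_ and limit_ has to have its bits cleared again before the filler object is written there. The restored ClearRange call maps addresses to bit positions via AddressToMarkbitIndex; with one mark bit per tagged word, that mapping is roughly (a sketch; kTaggedSizeLog2 is V8's real constant, the standalone helper is hypothetical):

    // Sketch: index of the mark bit covering a tagged-aligned address,
    // relative to the start of its page.
    uint32_t AddressToMarkbitIndexSketch(Address addr, Address page_start) {
      return static_cast<uint32_t>(addr - page_start) >> kTaggedSizeLog2;
    }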
@@ -656,9 +662,15 @@ AllocationResult ReadOnlySpace::AllocateRawUnaligned(int size_in_bytes) {
 
 AllocationResult ReadOnlySpace::AllocateRaw(int size_in_bytes,
                                             AllocationAlignment alignment) {
-  return USE_ALLOCATION_ALIGNMENT_BOOL && alignment != kTaggedAligned
-             ? AllocateRawAligned(size_in_bytes, alignment)
-             : AllocateRawUnaligned(size_in_bytes);
+  AllocationResult result =
+      USE_ALLOCATION_ALIGNMENT_BOOL && alignment != kTaggedAligned
+          ? AllocateRawAligned(size_in_bytes, alignment)
+          : AllocateRawUnaligned(size_in_bytes);
+  HeapObject heap_obj;
+  if (result.To(&heap_obj)) {
+    DCHECK(heap()->marking_state()->IsBlack(heap_obj));
+  }
+  return result;
 }
 
 size_t ReadOnlyPage::ShrinkToHighWaterMark() {
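
The DCHECK added back in AllocateRaw ties the two ends together: since ReadOnlyPage's constructor calls MarkAllBits() again, any object successfully allocated in read-only space must already be black. A usage sketch of the AllocationResult::To pattern (the surrounding setup is hypothetical):

    // Sketch: To() stores the object and returns true on success, so the
    // invariant can be asserted right at the allocation site.
    AllocationResult result = read_only_space->AllocateRaw(16, kTaggedAligned);
    HeapObject obj;
    if (result.To(&obj)) {
      DCHECK(heap->marking_state()->IsBlack(obj));  // holds by construction
    }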
@@ -95,8 +95,6 @@ class HeapObject : public Object {
 
   V8_INLINE bool InSharedWritableHeap() const;
 
-  V8_INLINE bool InReadOnlySpace() const;
-
 #define IS_TYPE_FUNCTION_DECL(Type) \
   V8_INLINE bool Is##Type() const;  \
   V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const;
@@ -207,8 +207,6 @@ bool HeapObject::InSharedWritableHeap() const {
   return BasicMemoryChunk::FromHeapObject(*this)->InSharedHeap();
 }
 
-bool HeapObject::InReadOnlySpace() const { return IsReadOnlyHeapObject(*this); }
-
 bool HeapObject::IsJSObjectThatCanBeTrackedAsPrototype() const {
   // Do not optimize objects in the shared heap because it is not
   // threadsafe. Objects in the shared heap have fixed layouts and their maps
@@ -26,11 +26,6 @@ void PublishSegment(MarkingWorklist& worklist, HeapObject object) {
   local.Publish();
 }
 
-HeapObject ObjectForTesting(i::Isolate* isolate) {
-  return HeapObject::cast(
-      isolate->roots_table().slot(RootIndex::kFirstStrongRoot).load(isolate));
-}
-
 TEST(ConcurrentMarking) {
   if (!i::v8_flags.concurrent_marking) return;
   CcTest::InitializeVM();
@@ -47,7 +42,7 @@ TEST(ConcurrentMarking) {
       new ConcurrentMarking(heap, &weak_objects);
   MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
   PublishSegment(*collector->marking_worklists()->shared(),
-                 ObjectForTesting(heap->isolate()));
+                 ReadOnlyRoots(heap).undefined_value());
   concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Join();
   delete concurrent_marking;
@@ -69,11 +64,11 @@ TEST(ConcurrentMarkingReschedule) {
       new ConcurrentMarking(heap, &weak_objects);
   MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
   PublishSegment(*collector->marking_worklists()->shared(),
-                 ObjectForTesting(heap->isolate()));
+                 ReadOnlyRoots(heap).undefined_value());
   concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Join();
   PublishSegment(*collector->marking_worklists()->shared(),
-                 ObjectForTesting(heap->isolate()));
+                 ReadOnlyRoots(heap).undefined_value());
   concurrent_marking->RescheduleJobIfNeeded(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Join();
   delete concurrent_marking;
@@ -96,12 +91,12 @@ TEST(ConcurrentMarkingPreemptAndReschedule) {
   MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
   for (int i = 0; i < 5000; i++)
     PublishSegment(*collector->marking_worklists()->shared(),
-                   ObjectForTesting(heap->isolate()));
+                   ReadOnlyRoots(heap).undefined_value());
   concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Pause();
   for (int i = 0; i < 5000; i++)
     PublishSegment(*collector->marking_worklists()->shared(),
-                   ObjectForTesting(heap->isolate()));
+                   ReadOnlyRoots(heap).undefined_value());
   concurrent_marking->RescheduleJobIfNeeded(GarbageCollector::MARK_COMPACTOR);
   concurrent_marking->Join();
   delete concurrent_marking;
@@ -5811,7 +5811,7 @@ TEST(Regress598319) {
   // progress bar, we would fail here.
   for (int i = 0; i < arr.get().length(); i++) {
     HeapObject arr_value = HeapObject::cast(arr.get().get(i));
-    CHECK(arr_value.InReadOnlySpace() || marking_state->IsBlack(arr_value));
+    CHECK(marking_state->IsBlack(arr_value));
   }
 }
 
@@ -22,10 +22,7 @@ TEST_F(MarkingWorklistTest, PushPop) {
   MarkingWorklists holder;
   MarkingWorklists::Local worklists(&holder);
   HeapObject pushed_object =
-      HeapObject::cast(i_isolate()
-                           ->roots_table()
-                           .slot(RootIndex::kFirstStrongRoot)
-                           .load(i_isolate()));
+      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
   worklists.Push(pushed_object);
   HeapObject popped_object;
   EXPECT_TRUE(worklists.Pop(&popped_object));
@@ -36,10 +33,7 @@ TEST_F(MarkingWorklistTest, PushPopOnHold) {
   MarkingWorklists holder;
   MarkingWorklists::Local worklists(&holder);
   HeapObject pushed_object =
-      HeapObject::cast(i_isolate()
-                           ->roots_table()
-                           .slot(RootIndex::kFirstStrongRoot)
-                           .load(i_isolate()));
+      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
   worklists.PushOnHold(pushed_object);
   HeapObject popped_object;
   EXPECT_TRUE(worklists.PopOnHold(&popped_object));
@@ -51,10 +45,7 @@ TEST_F(MarkingWorklistTest, MergeOnHold) {
   MarkingWorklists::Local main_worklists(&holder);
   MarkingWorklists::Local worker_worklists(&holder);
   HeapObject pushed_object =
-      HeapObject::cast(i_isolate()
-                           ->roots_table()
-                           .slot(RootIndex::kFirstStrongRoot)
-                           .load(i_isolate()));
+      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
   worker_worklists.PushOnHold(pushed_object);
   worker_worklists.Publish();
   main_worklists.MergeOnHold();
@@ -68,10 +59,7 @@ TEST_F(MarkingWorklistTest, ShareWorkIfGlobalPoolIsEmpty) {
   MarkingWorklists::Local main_worklists(&holder);
   MarkingWorklists::Local worker_worklists(&holder);
   HeapObject pushed_object =
-      HeapObject::cast(i_isolate()
-                           ->roots_table()
-                           .slot(RootIndex::kFirstStrongRoot)
-                           .load(i_isolate()));
+      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
   main_worklists.Push(pushed_object);
   main_worklists.ShareWork();
   HeapObject popped_object;
@@ -85,10 +73,7 @@ TEST_F(MarkingWorklistTest, ContextWorklistsPushPop) {
   holder.CreateContextWorklists({context});
   MarkingWorklists::Local worklists(&holder);
   HeapObject pushed_object =
-      HeapObject::cast(i_isolate()
-                           ->roots_table()
-                           .slot(RootIndex::kFirstStrongRoot)
-                           .load(i_isolate()));
+      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
   worklists.SwitchToContext(context);
   worklists.Push(pushed_object);
   worklists.SwitchToSharedForTesting();
@@ -104,10 +89,7 @@ TEST_F(MarkingWorklistTest, ContextWorklistsEmpty) {
   holder.CreateContextWorklists({context});
   MarkingWorklists::Local worklists(&holder);
   HeapObject pushed_object =
-      HeapObject::cast(i_isolate()
                            ->roots_table()
-                           .slot(RootIndex::kFirstStrongRoot)
-                           .load(i_isolate()));
+      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
   worklists.SwitchToContext(context);
   worklists.Push(pushed_object);
   EXPECT_FALSE(worklists.IsEmpty());
@@ -128,10 +110,7 @@ TEST_F(MarkingWorklistTest, ContextWorklistCrossTask) {
   MarkingWorklists::Local main_worklists(&holder);
   MarkingWorklists::Local worker_worklists(&holder);
   HeapObject pushed_object =
-      HeapObject::cast(i_isolate()
-                           ->roots_table()
-                           .slot(RootIndex::kFirstStrongRoot)
-                           .load(i_isolate()));
+      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
   main_worklists.SwitchToContext(context1);
   main_worklists.Push(pushed_object);
   main_worklists.ShareWork();