diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 5e2d10b5b7..bfb422c273 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -3325,7 +3325,7 @@ HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
     FreeSpace::cast(filler)->relaxed_write_size(size);
   }
   if (mode == ClearRecordedSlots::kYes) {
-    UNREACHABLE();
+    ClearRecordedSlotRange(addr, addr + size);
   }
 
   // At this point, we may be deserializing the heap from a snapshot, and
@@ -3407,8 +3407,7 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
   // Technically in new space this write might be omitted (except for
   // debug mode which iterates through the heap), but to play safer
   // we still do it.
-  // Recorded slots will be cleared by the sweeper.
-  CreateFillerObjectAt(old_start, bytes_to_trim, ClearRecordedSlots::kNo);
+  CreateFillerObjectAt(old_start, bytes_to_trim, ClearRecordedSlots::kYes);
 
   // Initialize header of the trimmed array. Since left trimming is only
   // performed on pages which are not concurrently swept creating a filler
@@ -3477,9 +3476,8 @@ void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) {
   // TODO(hpayer): We should shrink the large object page if the size
   // of the object changed significantly.
   if (!lo_space()->Contains(object)) {
-    // Recorded slots will be cleared by the sweeper.
     HeapObject* filler =
-        CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kNo);
+        CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kYes);
     DCHECK_NOT_NULL(filler);
     // Clear the mark bits of the black area that belongs now to the filler.
     // This is an optimization. The sweeper will release black fillers anyway.
diff --git a/src/objects.cc b/src/objects.cc
index f14e465b97..8fae6cc697 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -4030,11 +4030,8 @@ void MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
 
   if (instance_size_delta > 0) {
     Address address = object->address();
-    // The object has shrunk and is not going to use these slots again.
-    // Since there will be no untagged stores in these slots,
-    // we can just let the sweeper remove slots in the filler.
     heap->CreateFillerObjectAt(address + new_instance_size, instance_size_delta,
-                               ClearRecordedSlots::kNo);
+                               ClearRecordedSlots::kYes);
     heap->AdjustLiveBytes(*object, -instance_size_delta);
   }
 
@@ -4116,11 +4113,8 @@ void MigrateFastToSlow(Handle<JSObject> object, Handle<Map> new_map,
   DCHECK(instance_size_delta >= 0);
 
   if (instance_size_delta > 0) {
-    // The object has shrunk and is not going to use these slots again.
-    // Since there will be no untagged stores in these slots,
-    // we can just let the sweeper remove slots in the filler.
     heap->CreateFillerObjectAt(object->address() + new_instance_size,
-                               instance_size_delta, ClearRecordedSlots::kNo);
+                               instance_size_delta, ClearRecordedSlots::kYes);
     heap->AdjustLiveBytes(*object, -instance_size_delta);
   }
 
@@ -4161,7 +4155,6 @@ void MigrateFastToSlow(Handle<JSObject> object, Handle<Map> new_map,
 // static
 void JSObject::NotifyMapChange(Handle<Map> old_map, Handle<Map> new_map,
                                Isolate* isolate) {
-  DCHECK_LE(new_map->instance_size(), old_map->instance_size());
   if (!old_map->is_prototype_map()) return;
 
   InvalidatePrototypeChains(*old_map);
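
Why the eager clearing matters: when an object shrinks, slots inside the
trimmed tail may still be recorded in the remembered set. If that memory is
later reused for untagged values before the sweeper runs, a scavenge visiting
the stale slot would misread raw data as a heap pointer. Below is a minimal
standalone sketch of that invariant, not V8's implementation: the
RememberedSet class, the Address alias, and the body of ClearRecordedSlotRange
here are assumptions for illustration (V8 keeps per-page slot sets, not a
std::set).

// Toy model of the invariant the patch enforces: recorded slots in a
// trimmed range are dropped at filler creation, not left to the sweeper.
#include <cassert>
#include <cstdint>
#include <iostream>
#include <set>

using Address = std::uintptr_t;

// A toy remembered set: addresses of slots holding old-to-new pointers.
class RememberedSet {
 public:
  void Insert(Address slot) { slots_.insert(slot); }

  // Drop every recorded slot in [start, end). This mirrors what the patch
  // does when CreateFillerObjectAt runs with ClearRecordedSlots::kYes.
  void ClearRecordedSlotRange(Address start, Address end) {
    slots_.erase(slots_.lower_bound(start), slots_.lower_bound(end));
  }

  bool Contains(Address slot) const { return slots_.count(slot) != 0; }

 private:
  std::set<Address> slots_;
};

int main() {
  RememberedSet rs;

  // An object occupies [0x1000, 0x1040); the slot at 0x1020 was recorded
  // because it holds a pointer into the young generation.
  rs.Insert(0x1020);

  // The object is trimmed to [0x1000, 0x1010) and the tail becomes a
  // filler. The recorded slot at 0x1020 now lies in dead memory: clearing
  // it eagerly keeps the next scavenge from chasing a stale slot.
  rs.ClearRecordedSlotRange(0x1010, 0x1040);

  assert(!rs.Contains(0x1020));
  std::cout << "stale slot cleared\n";
}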