diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index d2ee53dd44..da5fe3501a 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -3320,10 +3320,9 @@ int MarkCompactCollector::Sweeper::RawSweep(
     Page* p, FreeListRebuildingMode free_list_mode,
     FreeSpaceTreatmentMode free_space_mode) {
   Space* space = p->owner();
-  AllocationSpace identity = space->identity();
   DCHECK_NOT_NULL(space);
-  DCHECK(free_list_mode == IGNORE_FREE_LIST || identity == OLD_SPACE ||
-         identity == CODE_SPACE || identity == MAP_SPACE);
+  DCHECK(free_list_mode == IGNORE_FREE_LIST || space->identity() == OLD_SPACE ||
+         space->identity() == CODE_SPACE || space->identity() == MAP_SPACE);
   DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone());
 
   // Before we sweep objects on the page, we free dead array buffers which
@@ -3352,8 +3351,6 @@ int MarkCompactCollector::Sweeper::RawSweep(
 
   LiveObjectIterator<kBlackObjects> it(p);
   HeapObject* object = NULL;
-  bool clear_slots =
-      p->old_to_new_slots() && (identity == OLD_SPACE || identity == MAP_SPACE);
   while ((object = it.Next()) != NULL) {
     DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
     Address free_end = object->address();
@@ -3371,11 +3368,6 @@ int MarkCompactCollector::Sweeper::RawSweep(
         p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
                                         ClearRecordedSlots::kNo);
       }
-
-      if (clear_slots) {
-        RememberedSet<OLD_TO_NEW>::RemoveRange(p, free_start, free_end,
-                                               SlotSet::KEEP_EMPTY_BUCKETS);
-      }
     }
     Map* map = object->synchronized_map();
     int size = object->SizeFromMap(map);
@@ -3391,6 +3383,9 @@ int MarkCompactCollector::Sweeper::RawSweep(
     free_start = free_end + size;
   }
 
+  // Clear the mark bits of that page and reset live bytes count.
+  p->ClearLiveness();
+
   if (free_start != p->area_end()) {
     CHECK_GT(p->area_end(), free_start);
     size_t size = static_cast<size_t>(p->area_end() - free_start);
@@ -3405,16 +3400,7 @@ int MarkCompactCollector::Sweeper::RawSweep(
       p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
                                       ClearRecordedSlots::kNo);
     }
-
-    if (clear_slots) {
-      RememberedSet<OLD_TO_NEW>::RemoveRange(p, free_start, p->area_end(),
-                                             SlotSet::KEEP_EMPTY_BUCKETS);
-    }
   }
-
-  // Clear the mark bits of that page and reset live bytes count.
-  p->ClearLiveness();
-
   p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
   if (free_list_mode == IGNORE_FREE_LIST) return 0;
   return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
@@ -3830,7 +3816,9 @@ int MarkCompactCollector::Sweeper::ParallelSweepPage(Page* page,
    if (identity == NEW_SPACE) {
      RawSweep(page, IGNORE_FREE_LIST, free_space_mode);
    } else {
-      if (identity == CODE_SPACE) {
+      if (identity == OLD_SPACE || identity == MAP_SPACE) {
+        RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(heap_, page);
+      } else {
        RememberedSet<OLD_TO_OLD>::ClearInvalidTypedSlots(heap_, page);
      }
      max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode);
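Note (not part of the patch): with the RemoveRange calls gone, RawSweep is again a pure sweeper. It walks the black objects on the page and turns the gaps between them into free-list entries or filler, while stale old-to-new slots are now filtered up front in ParallelSweepPage. The following is a minimal standalone sketch of that gap computation, with Page/HeapObject replaced by hypothetical stand-in types.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <utility>
#include <vector>

using Address = uintptr_t;

struct LiveObject {
  Address address;  // start address of a black (live) object
  size_t size;      // object size in bytes
};

// Given black objects sorted by address within [area_start, area_end),
// compute the free gaps a sweeper would turn into free-list entries or filler.
std::vector<std::pair<Address, Address>> ComputeFreeRanges(
    Address area_start, Address area_end,
    const std::vector<LiveObject>& live_objects) {
  std::vector<std::pair<Address, Address>> free_ranges;
  Address free_start = area_start;
  for (const LiveObject& object : live_objects) {
    Address free_end = object.address;
    if (free_end != free_start) {
      free_ranges.emplace_back(free_start, free_end);
    }
    free_start = free_end + object.size;
  }
  if (free_start != area_end) {
    free_ranges.emplace_back(free_start, area_end);
  }
  return free_ranges;
}

int main() {
  // Two live objects on a page whose usable area is [0x10000, 0x90000).
  std::vector<LiveObject> live = {{0x10000, 0x40}, {0x20000, 0x100}};
  for (const auto& range : ComputeFreeRanges(0x10000, 0x90000, live)) {
    std::printf("free range [%#llx, %#llx)\n",
                static_cast<unsigned long long>(range.first),
                static_cast<unsigned long long>(range.second));
  }
}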
diff --git a/src/heap/remembered-set.cc b/src/heap/remembered-set.cc
index 786e195076..4e621611ac 100644
--- a/src/heap/remembered-set.cc
+++ b/src/heap/remembered-set.cc
@@ -14,6 +14,23 @@ namespace v8 {
 namespace internal {
 
+template <PointerDirection direction>
+void RememberedSet<direction>::ClearInvalidSlots(Heap* heap,
+                                                 MemoryChunk* chunk) {
+  STATIC_ASSERT(direction == OLD_TO_NEW);
+  DCHECK(chunk->owner()->identity() == OLD_SPACE ||
+         chunk->owner()->identity() == MAP_SPACE);
+  SlotSet* slots = GetSlotSet(chunk);
+  if (slots != nullptr) {
+    slots->Iterate(
+        [heap, chunk](Address addr) {
+          Object** slot = reinterpret_cast<Object**>(addr);
+          return IsValidSlot(heap, chunk, slot) ? KEEP_SLOT : REMOVE_SLOT;
+        },
+        SlotSet::KEEP_EMPTY_BUCKETS);
+  }
+}
+
 template <PointerDirection direction>
 void RememberedSet<direction>::ClearInvalidTypedSlots(Heap* heap,
                                                       MemoryChunk* chunk) {
@@ -43,6 +60,8 @@ bool RememberedSet<direction>::IsValidSlot(Heap* heap, MemoryChunk* chunk,
       chunk, reinterpret_cast<Address>(slot));
 }
 
+template void RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(Heap* heap,
+                                                           MemoryChunk* chunk);
 template void RememberedSet<OLD_TO_OLD>::ClearInvalidTypedSlots(
     Heap* heap, MemoryChunk* chunk);
 
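Note (not part of the patch): ClearInvalidSlots visits every recorded old-to-new slot on the chunk and keeps it only if IsValidSlot says it still lies inside a live (black) object. The sketch below shows just that keep/remove callback pattern; ToySlotSet is a hypothetical stand-in that uses a std::set instead of V8's bucketed, atomic SlotSet.

#include <cstdint>
#include <cstdio>
#include <functional>
#include <iterator>
#include <set>

using Address = uintptr_t;
enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

// Hypothetical stand-in for V8's SlotSet: stores raw slot addresses in a
// std::set so the filtering pattern stays easy to see.
class ToySlotSet {
 public:
  void Insert(Address addr) { slots_.insert(addr); }
  bool Lookup(Address addr) const { return slots_.count(addr) != 0; }

  // Mirrors the shape of SlotSet::Iterate: the callback decides per slot
  // whether the entry is kept or dropped.
  void Iterate(const std::function<SlotCallbackResult(Address)>& callback) {
    for (auto it = slots_.begin(); it != slots_.end();) {
      it = (callback(*it) == KEEP_SLOT) ? std::next(it) : slots_.erase(it);
    }
  }

 private:
  std::set<Address> slots_;
};

int main() {
  ToySlotSet slots;
  for (Address a = 0x1000; a < 0x1040; a += 8) slots.Insert(a);

  // Stand-in for IsValidSlot: pretend only addresses below 0x1020 still lie
  // inside live objects after marking.
  auto is_valid = [](Address addr) { return addr < 0x1020; };

  slots.Iterate([&](Address addr) {
    return is_valid(addr) ? KEEP_SLOT : REMOVE_SLOT;
  });

  std::printf("0x1008 kept: %d, 0x1030 kept: %d\n",
              static_cast<int>(slots.Lookup(0x1008)),
              static_cast<int>(slots.Lookup(0x1030)));
}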
diff --git a/src/heap/remembered-set.h b/src/heap/remembered-set.h
index a625b13dbf..d9770025e3 100644
--- a/src/heap/remembered-set.h
+++ b/src/heap/remembered-set.h
@@ -202,8 +202,13 @@ class RememberedSet {
   // slots that are not part of live objects anymore. This method must be
   // called after marking, when the whole transitive closure is known and
   // must be called before sweeping when mark bits are still intact.
+  static void ClearInvalidSlots(Heap* heap);
+
+  static void ClearInvalidSlots(Heap* heap, MemoryChunk* chunk);
   static void ClearInvalidTypedSlots(Heap* heap, MemoryChunk* chunk);
 
+  static void VerifyValidSlots(Heap* heap);
+
  private:
   static SlotSet* GetSlotSet(MemoryChunk* chunk) {
     if (direction == OLD_TO_OLD) {
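Note (not part of the patch): the slot-set.h hunks below manipulate SlotSet's bucket/cell/bit layout, where each bit marks one pointer-aligned slot on the page. The sketch shows the offset-to-indices arithmetic such a layout implies; the constants are illustrative assumptions (8-byte slots, 32 cells per bucket), not values copied from slot-set.h.

#include <cstdint>
#include <cstdio>

// Illustrative constants (assumptions, not copied from slot-set.h):
// 8-byte slots, 32-bit cells, 32 cells per bucket => 1024 slots per bucket.
constexpr int kPointerSizeLog2 = 3;
constexpr int kBitsPerCell = 32;
constexpr int kCellsPerBucket = 32;
constexpr int kSlotsPerBucket = kCellsPerBucket * kBitsPerCell;

struct SlotIndices {
  int bucket;     // which bucket covers the slot
  int cell;       // which 32-bit cell inside that bucket
  uint32_t mask;  // single bit for the slot inside that cell
};

SlotIndices IndicesFor(uint32_t slot_offset) {
  uint32_t slot = slot_offset >> kPointerSizeLog2;  // byte offset -> slot number
  SlotIndices result;
  result.bucket = static_cast<int>(slot / kSlotsPerBucket);
  result.cell = static_cast<int>((slot % kSlotsPerBucket) / kBitsPerCell);
  result.mask = 1u << (slot % kBitsPerCell);
  return result;
}

int main() {
  // The 12345th slot on the page.
  SlotIndices idx = IndicesFor(12345u << kPointerSizeLog2);
  std::printf("bucket=%d cell=%d mask=0x%08x\n", idx.bucket, idx.cell, idx.mask);
}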
diff --git a/src/heap/slot-set.h b/src/heap/slot-set.h
index eb2fbb9e5b..7868865436 100644
--- a/src/heap/slot-set.h
+++ b/src/heap/slot-set.h
@@ -80,6 +80,15 @@ class SlotSet : public Malloced {
     }
   }
 
+  void PreFreeEmptyBucket(int bucket_index) {
+    base::AtomicValue<uint32_t>* bucket_ptr = bucket[bucket_index].Value();
+    if (bucket_ptr != nullptr) {
+      base::LockGuard<base::Mutex> guard(&to_be_freed_buckets_mutex_);
+      to_be_freed_buckets_.push(bucket_ptr);
+      bucket[bucket_index].SetValue(nullptr);
+    }
+  }
+
   // The slot offsets specify a range of slots at addresses:
   // [page_start_ + start_offset ... page_start_ + end_offset).
   void RemoveRange(int start_offset, int end_offset, EmptyBucketMode mode) {
@@ -99,10 +108,12 @@
     int current_cell = start_cell;
     ClearCell(current_bucket, current_cell, ~start_mask);
     current_cell++;
-    base::AtomicValue<uint32_t>* bucket_ptr = bucket[current_bucket].Value();
     if (current_bucket < end_bucket) {
-      if (bucket_ptr != nullptr) {
-        ClearBucket(bucket_ptr, current_cell, kCellsPerBucket);
+      if (bucket[current_bucket].Value() != nullptr) {
+        while (current_cell < kCellsPerBucket) {
+          bucket[current_bucket].Value()[current_cell].SetValue(0);
+          current_cell++;
+        }
       }
       // The rest of the current bucket is cleared.
       // Move on to the next bucket.
@@ -116,23 +127,17 @@
         PreFreeEmptyBucket(current_bucket);
       } else if (mode == FREE_EMPTY_BUCKETS) {
         ReleaseBucket(current_bucket);
-      } else {
-        DCHECK(mode == KEEP_EMPTY_BUCKETS);
-        bucket_ptr = bucket[current_bucket].Value();
-        if (bucket_ptr) {
-          ClearBucket(bucket_ptr, 0, kCellsPerBucket);
-        }
       }
       current_bucket++;
     }
     // All buckets between start_bucket and end_bucket are cleared.
-    bucket_ptr = bucket[current_bucket].Value();
     DCHECK(current_bucket == end_bucket && current_cell <= end_cell);
-    if (current_bucket == kBuckets || bucket_ptr == nullptr) {
+    if (current_bucket == kBuckets ||
+        bucket[current_bucket].Value() == nullptr) {
       return;
     }
     while (current_cell < end_cell) {
-      bucket_ptr[current_cell].SetValue(0);
+      bucket[current_bucket].Value()[current_cell].SetValue(0);
       current_cell++;
     }
     // All cells between start_cell and end_cell are cleared.
@@ -237,26 +242,6 @@ class SlotSet : public Malloced {
     return result;
   }
 
-  void ClearBucket(base::AtomicValue<uint32_t>* bucket, int start_cell,
-                   int end_cell) {
-    DCHECK_GE(start_cell, 0);
-    DCHECK_LE(end_cell, kCellsPerBucket);
-    int current_cell = start_cell;
-    while (current_cell < kCellsPerBucket) {
-      bucket[current_cell].SetValue(0);
-      current_cell++;
-    }
-  }
-
-  void PreFreeEmptyBucket(int bucket_index) {
-    base::AtomicValue<uint32_t>* bucket_ptr = bucket[bucket_index].Value();
-    if (bucket_ptr != nullptr) {
-      base::LockGuard<base::Mutex> guard(&to_be_freed_buckets_mutex_);
-      to_be_freed_buckets_.push(bucket_ptr);
-      bucket[bucket_index].SetValue(nullptr);
-    }
-  }
-
   void ReleaseBucket(int bucket_index) {
     DeleteArray<base::AtomicValue<uint32_t>>(bucket[bucket_index].Value());
     bucket[bucket_index].SetValue(nullptr);
diff --git a/test/unittests/heap/slot-set-unittest.cc b/test/unittests/heap/slot-set-unittest.cc
index c9b1464d67..d24ed1322b 100644
--- a/test/unittests/heap/slot-set-unittest.cc
+++ b/test/unittests/heap/slot-set-unittest.cc
@@ -101,21 +101,18 @@ void CheckRemoveRangeOn(uint32_t start, uint32_t end) {
   set.SetPageStart(0);
   uint32_t first = start == 0 ? 0 : start - kPointerSize;
   uint32_t last = end == Page::kPageSize ? end - kPointerSize : end;
-  for (const auto mode :
-       {SlotSet::FREE_EMPTY_BUCKETS, SlotSet::KEEP_EMPTY_BUCKETS}) {
-    for (uint32_t i = first; i <= last; i += kPointerSize) {
-      set.Insert(i);
-    }
-    set.RemoveRange(start, end, mode);
-    if (first != start) {
-      EXPECT_TRUE(set.Lookup(first));
-    }
-    if (last == end) {
-      EXPECT_TRUE(set.Lookup(last));
-    }
-    for (uint32_t i = start; i < end; i += kPointerSize) {
-      EXPECT_FALSE(set.Lookup(i));
-    }
+  for (uint32_t i = first; i <= last; i += kPointerSize) {
+    set.Insert(i);
+  }
+  set.RemoveRange(start, end, SlotSet::FREE_EMPTY_BUCKETS);
+  if (first != start) {
+    EXPECT_TRUE(set.Lookup(first));
+  }
+  if (last == end) {
+    EXPECT_TRUE(set.Lookup(last));
+  }
+  for (uint32_t i = start; i < end; i += kPointerSize) {
+    EXPECT_FALSE(set.Lookup(i));
   }
 }
 
@@ -137,13 +134,10 @@ TEST(SlotSet, RemoveRange) {
   }
   SlotSet set;
   set.SetPageStart(0);
-  for (const auto mode :
-       {SlotSet::FREE_EMPTY_BUCKETS, SlotSet::KEEP_EMPTY_BUCKETS}) {
-    set.Insert(Page::kPageSize / 2);
-    set.RemoveRange(0, Page::kPageSize, mode);
-    for (uint32_t i = 0; i < Page::kPageSize; i += kPointerSize) {
-      EXPECT_FALSE(set.Lookup(i));
-    }
+  set.Insert(Page::kPageSize / 2);
+  set.RemoveRange(0, Page::kPageSize, SlotSet::FREE_EMPTY_BUCKETS);
+  for (uint32_t i = 0; i < Page::kPageSize; i += kPointerSize) {
+    EXPECT_FALSE(set.Lookup(i));
   }
 }
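Note (not part of the patch): the reinstated PreFreeEmptyBucket does not delete an emptied bucket immediately; it detaches the pointer and queues it in to_be_freed_buckets_ under a mutex, so the memory can be released later, once no concurrent sweeper task can still be reading it. Below is a self-contained sketch of that deferred-free pattern with simplified, hypothetical names; the later release step stands in for the companion call that is not shown in this diff.

#include <cstdint>
#include <mutex>
#include <stack>

class ToyBucketedSet {
 public:
  static constexpr int kBuckets = 64;
  static constexpr int kCellsPerBucket = 32;

  ~ToyBucketedSet() {
    for (int i = 0; i < kBuckets; i++) delete[] bucket_[i];
    FreeToBeFreedBuckets();
  }

  // Lazily allocate a bucket and return it (single-threaded sketch;
  // the real code publishes the pointer with atomics).
  uint32_t* EnsureBucket(int i) {
    if (bucket_[i] == nullptr) bucket_[i] = new uint32_t[kCellsPerBucket]();
    return bucket_[i];
  }

  // Detach an empty bucket and queue it for later deletion instead of
  // deleting it while other threads might still hold the old pointer.
  void PreFreeEmptyBucket(int i) {
    uint32_t* bucket_ptr = bucket_[i];
    if (bucket_ptr != nullptr) {
      std::lock_guard<std::mutex> guard(to_be_freed_mutex_);
      to_be_freed_.push(bucket_ptr);
      bucket_[i] = nullptr;
    }
  }

  // Called later, when no concurrent readers remain.
  void FreeToBeFreedBuckets() {
    std::lock_guard<std::mutex> guard(to_be_freed_mutex_);
    while (!to_be_freed_.empty()) {
      delete[] to_be_freed_.top();
      to_be_freed_.pop();
    }
  }

 private:
  uint32_t* bucket_[kBuckets] = {};
  std::mutex to_be_freed_mutex_;
  std::stack<uint32_t*> to_be_freed_;
};

int main() {
  ToyBucketedSet set;
  set.EnsureBucket(3);
  set.PreFreeEmptyBucket(3);   // bucket detached, not yet freed
  set.FreeToBeFreedBuckets();  // actual deletion happens here
}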