diff --git a/src/heap/cppgc/compactor.cc b/src/heap/cppgc/compactor.cc index c300793515..450020a21a 100644 --- a/src/heap/cppgc/compactor.cc +++ b/src/heap/cppgc/compactor.cc @@ -292,6 +292,7 @@ class CompactionState final { page->PayloadSize() - used_bytes_in_current_page_); } #endif + page->object_start_bitmap().MarkAsFullyPopulated(); } private: diff --git a/src/heap/cppgc/object-start-bitmap.h b/src/heap/cppgc/object-start-bitmap.h index da5df3932e..44af79b21c 100644 --- a/src/heap/cppgc/object-start-bitmap.h +++ b/src/heap/cppgc/object-start-bitmap.h @@ -66,6 +66,11 @@ class V8_EXPORT_PRIVATE ObjectStartBitmap { // Clear the object start bitmap. inline void Clear(); + // Marks the bitmap as fully populated. Unpopulated bitmaps are in an + // inconsistent state and must be populated before they can be used to find + // object headers. + inline void MarkAsFullyPopulated(); + private: template inline void store(size_t cell_index, uint8_t value); @@ -83,6 +88,17 @@ class V8_EXPORT_PRIVATE ObjectStartBitmap { inline void ObjectStartIndexAndBit(ConstAddress, size_t*, size_t*) const; const Address offset_; + // `fully_populated_` is used to denote that the bitmap is populated with all + // currently allocated objects on the page and is in a consistent state. It is + // used to guard against using the bitmap for finding headers during + // concurrent sweeping. + // + // Although this flag can be used by both the main thread and concurrent + // sweeping threads, it is not atomic. The flag should never be accessed by + // multiple threads at the same time. If data races are observed on this flag, + // it likely means that the bitmap is queried while concurrent sweeping is + // active, which is not supported and should be avoided. + bool fully_populated_ = false; // The bitmap contains a bit for every kGranularity aligned address on a // a NormalPage, i.e., for a page of size kBlinkPageSize. 
std::array object_start_bit_map_; @@ -90,11 +106,13 @@ class V8_EXPORT_PRIVATE ObjectStartBitmap { ObjectStartBitmap::ObjectStartBitmap(Address offset) : offset_(offset) { Clear(); + MarkAsFullyPopulated(); } template HeapObjectHeader* ObjectStartBitmap::FindHeader( ConstAddress address_maybe_pointing_to_the_middle_of_object) const { + DCHECK(fully_populated_); DCHECK_LE(offset_, address_maybe_pointing_to_the_middle_of_object); size_t object_offset = address_maybe_pointing_to_the_middle_of_object - offset_; @@ -187,7 +205,13 @@ inline void ObjectStartBitmap::Iterate(Callback callback) const { } } +void ObjectStartBitmap::MarkAsFullyPopulated() { + DCHECK(!fully_populated_); + fully_populated_ = true; +} + void ObjectStartBitmap::Clear() { + fully_populated_ = false; std::fill(object_start_bit_map_.begin(), object_start_bit_map_.end(), 0); } diff --git a/src/heap/cppgc/sweeper.cc b/src/heap/cppgc/sweeper.cc index 0aa12a614a..59167a04db 100644 --- a/src/heap/cppgc/sweeper.cc +++ b/src/heap/cppgc/sweeper.cc @@ -335,6 +335,7 @@ typename FinalizationBuilder::ResultType SweepNormalPage( bitmap.SetBit(start_of_gap); } page->SetAllocatedBytesAtLastGC(live_bytes); + bitmap.MarkAsFullyPopulated(); const bool is_empty = (start_of_gap == page->PayloadStart()); return builder.GetResult(is_empty, largest_new_free_list_entry); diff --git a/test/unittests/heap/cppgc/allocation-unittest.cc b/test/unittests/heap/cppgc/allocation-unittest.cc index a7b2be0b11..9a18c49a2c 100644 --- a/test/unittests/heap/cppgc/allocation-unittest.cc +++ b/test/unittests/heap/cppgc/allocation-unittest.cc @@ -184,11 +184,9 @@ TEST_F(CppgcAllocationTest, LargeDoubleWordAlignedAllocation) { TEST_F(CppgcAllocationTest, AlignToDoubleWordFromUnaligned) { static constexpr size_t kAlignmentMask = kDoubleWord - 1; auto* padding_object = - MakeGarbageCollected>(GetAllocationHandle()); - // First allocation is not aligned. 
- ASSERT_EQ(kWord, - reinterpret_cast(padding_object) & kAlignmentMask); - // The end should also not be properly aligned. + MakeGarbageCollected>(GetAllocationHandle()); + // The address from which the next object can be allocated, i.e. the end of + // |padding_object|, should not be properly aligned. ASSERT_EQ(kWord, (reinterpret_cast(padding_object) + sizeof(*padding_object)) & kAlignmentMask); @@ -204,11 +202,9 @@ TEST_F(CppgcAllocationTest, AlignToDoubleWordFromUnaligned) { TEST_F(CppgcAllocationTest, AlignToDoubleWordFromAligned) { static constexpr size_t kAlignmentMask = kDoubleWord - 1; auto* padding_object = - MakeGarbageCollected>(GetAllocationHandle()); - // First allocation is not aligned. - ASSERT_EQ(kWord, - reinterpret_cast(padding_object) & kAlignmentMask); - // The end should be properly aligned. + MakeGarbageCollected>(GetAllocationHandle()); + // The address from which the next object can be allocated, i.e. the end of + // |padding_object|, should be properly aligned. ASSERT_EQ(0u, (reinterpret_cast(padding_object) + sizeof(*padding_object)) & kAlignmentMask);