diff --git a/include/cppgc/internal/caged-heap-local-data.h b/include/cppgc/internal/caged-heap-local-data.h
index fd275a982f..5669b2df2f 100644
--- a/include/cppgc/internal/caged-heap-local-data.h
+++ b/include/cppgc/internal/caged-heap-local-data.h
@@ -14,6 +14,10 @@
 #include "cppgc/platform.h"
 #include "v8config.h"  // NOLINT(build/include_directory)
 
+#if __cpp_lib_bitopts
+#include <bit>
+#endif  // __cpp_lib_bitopts
+
 namespace cppgc {
 namespace internal {
 
@@ -57,7 +61,15 @@ class V8_EXPORT AgeTable final {
  private:
   V8_INLINE size_t card(uintptr_t offset) const {
     constexpr size_t kGranularityBits =
+#if __cpp_lib_bitopts
+        std::countr_zero(static_cast<uint32_t>(kCardSizeInBytes));
+#elif V8_HAS_BUILTIN_CTZ
         __builtin_ctz(static_cast<uint32_t>(kCardSizeInBytes));
+#else   //! V8_HAS_BUILTIN_CTZ
+        // Hardcode and check with assert.
+        9;
+#endif  // !V8_HAS_BUILTIN_CTZ
+    static_assert((1 << kGranularityBits) == kCardSizeInBytes);
     const size_t entry = offset >> kGranularityBits;
     CPPGC_DCHECK(table_.size() > entry);
     return entry;
diff --git a/src/heap/cppgc/write-barrier.cc b/src/heap/cppgc/write-barrier.cc
index 3f67881421..58bb99ba7a 100644
--- a/src/heap/cppgc/write-barrier.cc
+++ b/src/heap/cppgc/write-barrier.cc
@@ -23,13 +23,6 @@ namespace internal {
 // static
 AtomicEntryFlag WriteBarrier::write_barrier_enabled_;
 
-#if defined(CPPGC_YOUNG_GENERATION)
-// static
-size_t YoungGenerationEnabler::is_enabled_;
-// static
-v8::base::LeakyObject<v8::base::Mutex> YoungGenerationEnabler::mutex_;
-#endif  // defined(CPPGC_YOUNG_GENERATION)
-
 namespace {
 
 template <MarkerBase::WriteBarrierType type>
@@ -207,9 +200,17 @@ bool WriteBarrierTypeForCagedHeapPolicy::IsMarking(
 #endif  // CPPGC_CAGED_HEAP
 
 #if defined(CPPGC_YOUNG_GENERATION)
+
+// static
+YoungGenerationEnabler& YoungGenerationEnabler::Instance() {
+  static v8::base::LeakyObject<YoungGenerationEnabler> instance;
+  return *instance.get();
+}
+
 void YoungGenerationEnabler::Enable() {
-  v8::base::LockGuard _(mutex_.get());
-  if (++is_enabled_ == 1) {
+  auto& instance = Instance();
+  v8::base::LockGuard _(&instance.mutex_);
+  if (++instance.is_enabled_ == 1) {
     // Enter the flag so that the check in the write barrier will always trigger
     // when young generation is enabled.
     WriteBarrier::FlagUpdater::Enter();
@@ -217,17 +218,20 @@ void YoungGenerationEnabler::Enable() {
 }
 
 void YoungGenerationEnabler::Disable() {
-  v8::base::LockGuard _(mutex_.get());
-  DCHECK_LT(0, is_enabled_);
-  if (--is_enabled_ == 0) {
+  auto& instance = Instance();
+  v8::base::LockGuard _(&instance.mutex_);
+  DCHECK_LT(0, instance.is_enabled_);
+  if (--instance.is_enabled_ == 0) {
     WriteBarrier::FlagUpdater::Exit();
   }
 }
 
 bool YoungGenerationEnabler::IsEnabled() {
-  v8::base::LockGuard _(mutex_.get());
-  return is_enabled_;
+  auto& instance = Instance();
+  v8::base::LockGuard _(&instance.mutex_);
+  return instance.is_enabled_;
 }
+
 #endif  // defined(CPPGC_YOUNG_GENERATION)
 
 }  // namespace internal
diff --git a/src/heap/cppgc/write-barrier.h b/src/heap/cppgc/write-barrier.h
index 8c1d91b2de..3bfbcb2f4a 100644
--- a/src/heap/cppgc/write-barrier.h
+++ b/src/heap/cppgc/write-barrier.h
@@ -30,10 +30,15 @@ class V8_EXPORT_PRIVATE YoungGenerationEnabler final {
   static bool IsEnabled();
 
  private:
-  YoungGenerationEnabler() = delete;
+  template <typename T>
+  friend class v8::base::LeakyObject;
 
-  static size_t is_enabled_;
-  static v8::base::LeakyObject<v8::base::Mutex> mutex_;
+  static YoungGenerationEnabler& Instance();
+
+  YoungGenerationEnabler() = default;
+
+  size_t is_enabled_;
+  v8::base::Mutex mutex_;
 };
 
 #endif  // defined(CPPGC_YOUNG_GENERATION)
diff --git a/test/unittests/heap/cppgc/concurrent-sweeper-unittest.cc b/test/unittests/heap/cppgc/concurrent-sweeper-unittest.cc
index a94e5357dd..fd511c7be5 100644
--- a/test/unittests/heap/cppgc/concurrent-sweeper-unittest.cc
+++ b/test/unittests/heap/cppgc/concurrent-sweeper-unittest.cc
@@ -158,13 +158,14 @@ TEST_F(ConcurrentSweeperTest, BackgroundSweepOfNormalPage) {
   // Wait for concurrent sweeping to finish.
   WaitForConcurrentSweeping();
 
-#if !defined(CPPGC_YOUNG_GENERATION)
-  // Check that the marked object was unmarked.
-  EXPECT_FALSE(HeapObjectHeader::FromObject(marked_object).IsMarked());
-#else
-  // Check that the marked object is still marked.
-  EXPECT_TRUE(HeapObjectHeader::FromObject(marked_object).IsMarked());
-#endif
+  const auto& hoh = HeapObjectHeader::FromObject(marked_object);
+  if (Heap::From(GetHeap())->generational_gc_supported()) {
+    // Check that the marked object is still marked.
+    EXPECT_TRUE(hoh.IsMarked());
+  } else {
+    // Check that the marked object was unmarked.
+    EXPECT_FALSE(hoh.IsMarked());
+  }
 
   // Check that free list entries are created right away for non-finalizable
   // objects, but not immediately returned to the space's freelist.
@@ -198,13 +199,14 @@ TEST_F(ConcurrentSweeperTest, BackgroundSweepOfLargePage) {
   // Wait for concurrent sweeping to finish.
   WaitForConcurrentSweeping();
 
-#if !defined(CPPGC_YOUNG_GENERATION)
-  // Check that the marked object was unmarked.
-  EXPECT_FALSE(HeapObjectHeader::FromObject(marked_object).IsMarked());
-#else
-  // Check that the marked object is still marked.
-  EXPECT_TRUE(HeapObjectHeader::FromObject(marked_object).IsMarked());
-#endif
+  const auto& hoh = HeapObjectHeader::FromObject(marked_object);
+  if (Heap::From(GetHeap())->generational_gc_supported()) {
+    // Check that the marked object is still marked.
+    EXPECT_TRUE(hoh.IsMarked());
+  } else {
+    // Check that the marked object was unmarked.
+    EXPECT_FALSE(hoh.IsMarked());
+  }
 
   // The page should not have been removed on the background threads.
   EXPECT_TRUE(PageInBackend(unmarked_page));
@@ -341,13 +343,14 @@ TEST_F(ConcurrentSweeperTest, IncrementalSweeping) {
   GetPlatform().RunAllForegroundTasks();
 
   EXPECT_EQ(2u, g_destructor_callcount);
-#if !defined(CPPGC_YOUNG_GENERATION)
-  EXPECT_FALSE(marked_normal_header.IsMarked());
-  EXPECT_FALSE(marked_large_header.IsMarked());
-#else
-  EXPECT_TRUE(marked_normal_header.IsMarked());
-  EXPECT_TRUE(marked_large_header.IsMarked());
-#endif
+
+  if (Heap::From(GetHeap())->generational_gc_supported()) {
+    EXPECT_TRUE(marked_normal_header.IsMarked());
+    EXPECT_TRUE(marked_large_header.IsMarked());
+  } else {
+    EXPECT_FALSE(marked_normal_header.IsMarked());
+    EXPECT_FALSE(marked_large_header.IsMarked());
+  }
 
   FinishSweeping();
 }
diff --git a/test/unittests/heap/cppgc/explicit-management-unittest.cc b/test/unittests/heap/cppgc/explicit-management-unittest.cc
index 2458f67381..4084004887 100644
--- a/test/unittests/heap/cppgc/explicit-management-unittest.cc
+++ b/test/unittests/heap/cppgc/explicit-management-unittest.cc
@@ -47,7 +47,6 @@ class DynamicallySized final : public GarbageCollected<DynamicallySized> {
 }  // namespace
 
 TEST_F(ExplicitManagementTest, FreeRegularObjectToLAB) {
-#if !defined(CPPGC_YOUNG_GENERATION)
   auto* o =
       MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
   const auto& space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
@@ -65,11 +64,9 @@ TEST_F(ExplicitManagementTest, FreeRegularObjectToLAB) {
   // LAB is included in allocated object size, so no change is expected.
   EXPECT_EQ(allocated_size_before, AllocatedObjectSize());
   EXPECT_FALSE(space.free_list().ContainsForTesting({needle, size}));
-#endif  //! defined(CPPGC_YOUNG_GENERATION)
 }
 
 TEST_F(ExplicitManagementTest, FreeRegularObjectToFreeList) {
-#if !defined(CPPGC_YOUNG_GENERATION)
   auto* o =
       MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
   const auto& space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
@@ -85,11 +82,9 @@ TEST_F(ExplicitManagementTest, FreeRegularObjectToFreeList) {
   EXPECT_EQ(lab.start(), nullptr);
   EXPECT_EQ(allocated_size_before - size, AllocatedObjectSize());
   EXPECT_TRUE(space.free_list().ContainsForTesting({needle, size}));
-#endif  //! defined(CPPGC_YOUNG_GENERATION)
 }
 
 TEST_F(ExplicitManagementTest, FreeLargeObject) {
-#if !defined(CPPGC_YOUNG_GENERATION)
   auto* o = MakeGarbageCollected<DynamicallySized>(
       GetHeap()->GetAllocationHandle(),
       AdditionalBytes(kLargeObjectSizeThreshold));
@@ -103,11 +98,9 @@ TEST_F(ExplicitManagementTest, FreeLargeObject) {
   subtle::FreeUnreferencedObject(GetHeapHandle(), *o);
   EXPECT_FALSE(heap.page_backend()->Lookup(needle));
   EXPECT_EQ(allocated_size_before - size, AllocatedObjectSize());
-#endif  //! defined(CPPGC_YOUNG_GENERATION)
 }
 
 TEST_F(ExplicitManagementTest, FreeBailsOutDuringGC) {
-#if !defined(CPPGC_YOUNG_GENERATION)
   const size_t snapshot_before = AllocatedObjectSize();
   auto* o =
       MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
@@ -120,7 +113,6 @@ TEST_F(ExplicitManagementTest, FreeBailsOutDuringGC) {
   ResetLinearAllocationBuffers();
   subtle::FreeUnreferencedObject(GetHeapHandle(), *o);
   EXPECT_EQ(snapshot_before, AllocatedObjectSize());
-#endif  //! defined(CPPGC_YOUNG_GENERATION)
 }
 
 TEST_F(ExplicitManagementTest, GrowAtLAB) {
diff --git a/test/unittests/heap/cppgc/sweeper-unittest.cc b/test/unittests/heap/cppgc/sweeper-unittest.cc
index 5d071ad130..97fff9bc92 100644
--- a/test/unittests/heap/cppgc/sweeper-unittest.cc
+++ b/test/unittests/heap/cppgc/sweeper-unittest.cc
@@ -261,13 +261,13 @@ TEST_F(SweeperTest, UnmarkObjects) {
 
   Sweep();
 
-#if !defined(CPPGC_YOUNG_GENERATION)
-  EXPECT_FALSE(normal_object_header.IsMarked());
-  EXPECT_FALSE(large_object_header.IsMarked());
-#else
-  EXPECT_TRUE(normal_object_header.IsMarked());
-  EXPECT_TRUE(large_object_header.IsMarked());
-#endif
+  if (Heap::From(GetHeap())->generational_gc_supported()) {
+    EXPECT_TRUE(normal_object_header.IsMarked());
+    EXPECT_TRUE(large_object_header.IsMarked());
+  } else {
+    EXPECT_FALSE(normal_object_header.IsMarked());
+    EXPECT_FALSE(large_object_header.IsMarked());
+  }
 }
 
 TEST_F(SweeperTest, LazySweepingDuringAllocation) {
diff --git a/test/unittests/heap/cppgc/write-barrier-unittest.cc b/test/unittests/heap/cppgc/write-barrier-unittest.cc
index 015fca9a67..845e580ccd 100644
--- a/test/unittests/heap/cppgc/write-barrier-unittest.cc
+++ b/test/unittests/heap/cppgc/write-barrier-unittest.cc
@@ -351,11 +351,10 @@ TEST_F(NoWriteBarrierTest, WriteBarrierBailoutWhenMarkingIsOff) {
   {
     EXPECT_FALSE(object1->IsMarked());
     WriteBarrierParams params;
-#if defined(CPPGC_YOUNG_GENERATION)
-    WriteBarrierType expected = WriteBarrierType::kGenerational;
-#else   // !CPPGC_YOUNG_GENERATION
-    WriteBarrierType expected = WriteBarrierType::kNone;
-#endif  // !CPPGC_YOUNG_GENERATION
+    const WriteBarrierType expected =
+        Heap::From(GetHeap())->generational_gc_supported()
+            ? WriteBarrierType::kGenerational
+            : WriteBarrierType::kNone;
     EXPECT_EQ(expected, HeapConsistency::GetWriteBarrierType(
                             object2->next_ref().GetSlotForTesting(),
                             object2->next_ref().Get(), params));
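
Note on the kGranularityBits hunk in caged-heap-local-data.h: all three branches compute log2(kCardSizeInBytes) at compile time, and the new static_assert pins the hardcoded fallback to the real card size. Below is a minimal standalone sketch of the same trick, not part of the patch; the 512-byte card size is an assumption inferred from the 9-bit fallback plus the assert, and the names are illustrative.

    // Standalone sketch of the compile-time log2 chain used in AgeTable::card().
    // Assumption: kCardSizeInBytes == 512, inferred from the hardcoded 9-bit
    // fallback and the static_assert in the patch.
    #include <cstddef>
    #include <cstdint>
    #if __cpp_lib_bitopts
    #include <bit>
    #endif

    constexpr std::size_t kCardSizeInBytes = 512;

    constexpr std::size_t GranularityBits() {
    #if __cpp_lib_bitopts
      // C++20: the trailing zero count of a power of two is its log2.
      return std::countr_zero(static_cast<std::uint32_t>(kCardSizeInBytes));
    #elif defined(__GNUC__) || defined(__clang__)
      // Compiler builtin fallback, usable in constant expressions on GCC/Clang.
      return __builtin_ctz(static_cast<std::uint32_t>(kCardSizeInBytes));
    #else
      return 9;  // hardcoded fallback, validated by the assert below
    #endif
    }

    // Mirrors the patch: catches a stale hardcoded value if the card size changes.
    static_assert((std::size_t{1} << GranularityBits()) == kCardSizeInBytes);

    // card() reduces a byte offset to a card index with a shift instead of a
    // division.
    constexpr std::size_t CardIndex(std::uintptr_t offset) {
      return offset >> GranularityBits();
    }

    static_assert(CardIndex(1023) == 1);  // bytes 512..1023 map to card 1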
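
Note on the YoungGenerationEnabler change: the static is_enabled_/mutex_ members become instance members behind a lazily created, never-destroyed singleton, which avoids static-initialization and destruction-order issues across translation units. Below is a minimal self-contained sketch of that pattern; LeakyObject here is a stand-in for v8::base::LeakyObject (assumed semantics: constructs T in place and never runs ~T), not the real V8 implementation.

    #include <cstddef>
    #include <mutex>
    #include <new>
    #include <utility>

    template <typename T>
    class LeakyObject {
     public:
      template <typename... Args>
      explicit LeakyObject(Args&&... args) {
        // Construct T in place; the destructor is intentionally never run, so
        // the instance stays alive until process exit ("leaky" by design).
        new (storage_) T(std::forward<Args>(args)...);
      }
      T* get() { return reinterpret_cast<T*>(storage_); }

     private:
      alignas(T) unsigned char storage_[sizeof(T)];
    };

    class YoungGenerationEnabler {
     public:
      static void Enable() {
        auto& i = Instance();
        std::lock_guard<std::mutex> guard(i.mutex_);
        ++i.is_enabled_;  // the real code flips the write-barrier flag at 0 -> 1
      }

      static bool IsEnabled() {
        auto& i = Instance();
        std::lock_guard<std::mutex> guard(i.mutex_);
        return i.is_enabled_ > 0;
      }

     private:
      // LeakyObject must be able to call the private constructor.
      template <typename U>
      friend class LeakyObject;

      // Function-local static: initialized on first use, thread-safe since
      // C++11, and only constructed when young generation is actually used.
      static YoungGenerationEnabler& Instance() {
        static LeakyObject<YoungGenerationEnabler> instance;
        return *instance.get();
      }

      YoungGenerationEnabler() = default;

      std::size_t is_enabled_ = 0;
      std::mutex mutex_;
    };

    int main() {
      YoungGenerationEnabler::Enable();  // first call constructs the singleton
      return YoungGenerationEnabler::IsEnabled() ? 0 : 1;
    }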