132727fd46

This CL adds a basic implementation of incremental marking for standalone
GC. Follow-up CLs include:
* Use bytes instead of time as deadline
* Port the incremental marking schedule from Blink
* Mark on allocation
* Guarantees for progress/termination for standalone GC
* etc.

Calling StartIncrementalGarbageCollection triggers StartMarking, which
schedules incremental marking as non-nestable tasks. For unified heap,
marking will continue running until it runs out of work, but it won't
finalize independently. For standalone GC, when incremental marking runs out
of work it will schedule a new task in which it finalizes marking and
triggers the rest of the GC. Users of standalone GC can also force
finalization before incremental marking has finished using
FinalizeIncrementalGarbageCollectionIfRunning. Calling CollectGarbage will
also finalize an ongoing incremental GC if one exists; otherwise it will
trigger an atomic GC.

See the following doc for an explanation of the various methods:
https://docs.google.com/document/d/1ZhJY2fOoD8sH53ZxMh2927Zl8sXqA7azJgcQTWx-YKs/edit?usp=sharing

Bug: chromium:1056170
Change-Id: I75ead414eb9da9f8b7f71c4638b9830fce7708ca
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2298009
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69480}
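A minimal sketch of the standalone usage flow described above, assuming a
heap object exposing the methods named in this message (the exact signatures
and the config argument are assumptions for illustration, not the verbatim
API):

  // Kick off incremental marking; marking steps are scheduled as
  // non-nestable tasks. For standalone GC, finalization is scheduled
  // automatically once marking runs out of work.
  heap->StartIncrementalGarbageCollection(config);

  // ... mutator code runs while marking makes progress ...

  // Optionally force the collection to finish before it finalizes on its
  // own; a no-op if no incremental GC is currently running.
  heap->FinalizeIncrementalGarbageCollectionIfRunning(config);

  // Alternatively, CollectGarbage() finalizes an ongoing incremental GC if
  // one exists and otherwise performs an atomic GC.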
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/cppgc/sweeper.h"

#include <algorithm>

#include "include/cppgc/allocation.h"
#include "include/cppgc/persistent.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/stats-collector.h"
#include "test/unittests/heap/cppgc/tests.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace cppgc {
namespace internal {

namespace {

size_t g_destructor_callcount;

template <size_t Size>
class GCed : public GarbageCollected<GCed<Size>> {
 public:
  virtual ~GCed() { ++g_destructor_callcount; }

  virtual void Trace(cppgc::Visitor*) const {}

 private:
  char array[Size];
};

class SweeperTest : public testing::TestWithHeap {
 public:
  SweeperTest() { g_destructor_callcount = 0; }

  void Sweep() {
    Heap* heap = Heap::From(GetHeap());
    ResetLinearAllocationBuffers();
    Sweeper& sweeper = heap->sweeper();
    // Pretend to finish marking, as the StatsCollector verifies that the
    // Notify* methods are called in the right order.
    heap->stats_collector()->NotifyMarkingStarted();
    heap->stats_collector()->NotifyMarkingCompleted(0);
    sweeper.Start(Sweeper::Config::kAtomic);
    sweeper.FinishIfRunning();
  }

  void MarkObject(void* payload) {
    HeapObjectHeader& header = HeapObjectHeader::FromPayload(payload);
    header.TryMarkAtomic();
  }

  PageBackend* GetBackend() { return Heap::From(GetHeap())->page_backend(); }
};

}  // namespace

TEST_F(SweeperTest, SweepUnmarkedNormalObject) {
  constexpr size_t kObjectSize = 8;
  using Type = GCed<kObjectSize>;

  MakeGarbageCollected<Type>(GetAllocationHandle());

  EXPECT_EQ(0u, g_destructor_callcount);

  Sweep();

  EXPECT_EQ(1u, g_destructor_callcount);
}

TEST_F(SweeperTest, DontSweepMarkedNormalObject) {
  constexpr size_t kObjectSize = 8;
  using Type = GCed<kObjectSize>;

  auto* object = MakeGarbageCollected<Type>(GetAllocationHandle());
  MarkObject(object);
  BasePage* page = BasePage::FromPayload(object);
  BaseSpace* space = page->space();

  EXPECT_EQ(0u, g_destructor_callcount);

  Sweep();

  EXPECT_EQ(0u, g_destructor_callcount);
  // Check that the page is returned to the space.
  EXPECT_NE(space->end(), std::find(space->begin(), space->end(), page));
  EXPECT_NE(nullptr, GetBackend()->Lookup(reinterpret_cast<Address>(object)));
}

TEST_F(SweeperTest, SweepUnmarkedLargeObject) {
  constexpr size_t kObjectSize = kLargeObjectSizeThreshold * 2;
  using Type = GCed<kObjectSize>;

  auto* object = MakeGarbageCollected<Type>(GetAllocationHandle());
  BasePage* page = BasePage::FromPayload(object);
  BaseSpace* space = page->space();

  EXPECT_EQ(0u, g_destructor_callcount);

  Sweep();

  EXPECT_EQ(1u, g_destructor_callcount);
  // Check that the page is gone.
  EXPECT_EQ(space->end(), std::find(space->begin(), space->end(), page));
  EXPECT_EQ(nullptr, GetBackend()->Lookup(reinterpret_cast<Address>(object)));
}

TEST_F(SweeperTest, DontSweepMarkedLargeObject) {
  constexpr size_t kObjectSize = kLargeObjectSizeThreshold * 2;
  using Type = GCed<kObjectSize>;

  auto* object = MakeGarbageCollected<Type>(GetAllocationHandle());
  MarkObject(object);
  BasePage* page = BasePage::FromPayload(object);
  BaseSpace* space = page->space();

  EXPECT_EQ(0u, g_destructor_callcount);

  Sweep();

  EXPECT_EQ(0u, g_destructor_callcount);
  // Check that the page is returned to the space.
  EXPECT_NE(space->end(), std::find(space->begin(), space->end(), page));
  EXPECT_NE(nullptr, GetBackend()->Lookup(reinterpret_cast<Address>(object)));
}

TEST_F(SweeperTest, SweepMultipleObjectsOnPage) {
  constexpr size_t kObjectSize = 8;
  using Type = GCed<kObjectSize>;
  const size_t kNumberOfObjects =
      NormalPage::PayloadSize() / (sizeof(Type) + sizeof(HeapObjectHeader));

  for (size_t i = 0; i < kNumberOfObjects; ++i) {
    MakeGarbageCollected<Type>(GetAllocationHandle());
  }

  EXPECT_EQ(0u, g_destructor_callcount);

  Sweep();

  EXPECT_EQ(kNumberOfObjects, g_destructor_callcount);
}

TEST_F(SweeperTest, SweepObjectsOnAllArenas) {
  MakeGarbageCollected<GCed<1>>(GetAllocationHandle());
  MakeGarbageCollected<GCed<32>>(GetAllocationHandle());
  MakeGarbageCollected<GCed<64>>(GetAllocationHandle());
  MakeGarbageCollected<GCed<128>>(GetAllocationHandle());
  MakeGarbageCollected<GCed<2 * kLargeObjectSizeThreshold>>(
      GetAllocationHandle());

  EXPECT_EQ(0u, g_destructor_callcount);

  Sweep();

  EXPECT_EQ(5u, g_destructor_callcount);
}

TEST_F(SweeperTest, SweepMultiplePagesInSingleSpace) {
  MakeGarbageCollected<GCed<2 * kLargeObjectSizeThreshold>>(
      GetAllocationHandle());
  MakeGarbageCollected<GCed<2 * kLargeObjectSizeThreshold>>(
      GetAllocationHandle());
  MakeGarbageCollected<GCed<2 * kLargeObjectSizeThreshold>>(
      GetAllocationHandle());

  EXPECT_EQ(0u, g_destructor_callcount);

  Sweep();

  EXPECT_EQ(3u, g_destructor_callcount);
}

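// Allocates four neighboring objects on one page and marks only the first
// and the last one. Sweeping should finalize the two unmarked objects in the
// middle and coalesce their memory into a single free-list block spanning
// both, from the start of object2's header to the end of object3.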
TEST_F(SweeperTest, CoalesceFreeListEntries) {
  constexpr size_t kObjectSize = 32;
  using Type = GCed<kObjectSize>;

  auto* object1 = MakeGarbageCollected<Type>(GetAllocationHandle());
  auto* object2 = MakeGarbageCollected<Type>(GetAllocationHandle());
  auto* object3 = MakeGarbageCollected<Type>(GetAllocationHandle());
  auto* object4 = MakeGarbageCollected<Type>(GetAllocationHandle());

  MarkObject(object1);
  MarkObject(object4);

  Address object2_start =
      reinterpret_cast<Address>(&HeapObjectHeader::FromPayload(object2));
  Address object3_end =
      reinterpret_cast<Address>(&HeapObjectHeader::FromPayload(object3)) +
      HeapObjectHeader::FromPayload(object3).GetSize();

  const BasePage* page = BasePage::FromPayload(object2);
  const FreeList& freelist = NormalPageSpace::From(page->space())->free_list();

  const FreeList::Block coalesced_block = {
      object2_start, static_cast<size_t>(object3_end - object2_start)};

  EXPECT_EQ(0u, g_destructor_callcount);
  EXPECT_FALSE(freelist.Contains(coalesced_block));

  Sweep();

  EXPECT_EQ(2u, g_destructor_callcount);
  EXPECT_TRUE(freelist.Contains(coalesced_block));
}

namespace {

class GCInDestructor final : public GarbageCollected<GCInDestructor> {
 public:
  explicit GCInDestructor(Heap* heap) : heap_(heap) {}
  ~GCInDestructor() {
    // Instead of directly calling GC, allocations should be supported here as
    // well.
    heap_->CollectGarbage(
        internal::GarbageCollector::Config::ConservativeAtomicConfig());
  }
  void Trace(Visitor*) const {}

 private:
  Heap* heap_;
};

}  // namespace

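// GCInDestructor's destructor requests another GC while sweeping is still in
// progress. The heap's epoch counter appears to advance once per collection
// cycle, so a recursively triggered GC would show up as a second increment;
// the test therefore expects exactly saved_epoch + 1 after one precise GC.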
TEST_F(SweeperTest, SweepDoesNotTriggerRecursiveGC) {
  auto* internal_heap = internal::Heap::From(GetHeap());
  size_t saved_epoch = internal_heap->epoch();
  MakeGarbageCollected<GCInDestructor>(GetAllocationHandle(), internal_heap);
  PreciseGC();
  EXPECT_EQ(saved_epoch + 1, internal_heap->epoch());
}

TEST_F(SweeperTest, UnmarkObjects) {
  auto* normal_object = MakeGarbageCollected<GCed<32>>(GetAllocationHandle());
  auto* large_object =
      MakeGarbageCollected<GCed<kLargeObjectSizeThreshold * 2>>(
          GetAllocationHandle());

  auto& normal_object_header = HeapObjectHeader::FromPayload(normal_object);
  auto& large_object_header = HeapObjectHeader::FromPayload(large_object);

  normal_object_header.TryMarkAtomic();
  large_object_header.TryMarkAtomic();

  EXPECT_TRUE(normal_object_header.IsMarked());
  EXPECT_TRUE(large_object_header.IsMarked());

  Sweep();

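  // Without young-generation support, sweeping clears mark bits so the next
  // cycle starts from a clean state. With CPPGC_YOUNG_GENERATION, mark bits
  // are expected to survive sweeping, as they are used to identify old
  // objects across minor collections.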
#if !defined(CPPGC_YOUNG_GENERATION)
  EXPECT_FALSE(normal_object_header.IsMarked());
  EXPECT_FALSE(large_object_header.IsMarked());
#else
  EXPECT_TRUE(normal_object_header.IsMarked());
  EXPECT_TRUE(large_object_header.IsMarked());
#endif
}

}  // namespace internal
}  // namespace cppgc