cppgc: Add initial implementation of young generation
This adds the following things:
- age table for 4K regions;
- generational barrier for mixed 4K regions;
- unmarking for major collections;
- young generation flags.

Bug: chromium:1029379
Change-Id: Ief1229f0dac5f90c5f06d3168c8ffb4b7d1f1b53
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2246566
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68379}
BUILD.gn (10 changed lines)
@@ -239,6 +239,9 @@ declare_args() {
  # Enable heap reservation of size 4GB. Only possible for 64bit archs.
  cppgc_enable_caged_heap = v8_current_cpu == "x64" || v8_current_cpu == "arm64"

+  # Enable young generation in cppgc.
+  cppgc_enable_young_generation = false
+
  # Enable V8 heap sandbox experimental feature.
  # Sets -DV8_HEAP_SANDBOX.
  v8_enable_heap_sandbox = ""

@@ -320,6 +323,9 @@ assert(!cppgc_enable_caged_heap || v8_current_cpu == "x64" ||
           v8_current_cpu == "arm64",
       "CppGC caged heap requires 64bit platforms")

+assert(!cppgc_enable_young_generation || cppgc_enable_caged_heap,
+       "Young generation in CppGC requires caged heap")
+
v8_random_seed = "314159265"
v8_toolset_for_shell = "host"

@@ -393,6 +399,9 @@ config("cppgc_base_config") {
  if (cppgc_enable_caged_heap) {
    defines += [ "CPPGC_CAGED_HEAP" ]
  }
+  if (cppgc_enable_young_generation) {
+    defines += [ "CPPGC_YOUNG_GENERATION" ]
+  }
}

# This config should be applied to code using the libsampler.

@@ -4189,6 +4198,7 @@ v8_source_set("cppgc_base") {
  if (cppgc_enable_caged_heap) {
    sources += [
      "include/cppgc/internal/caged-heap-local-data.h",
+      "src/heap/cppgc/caged-heap-local-data.cc",
      "src/heap/cppgc/caged-heap.cc",
      "src/heap/cppgc/caged-heap.h",
    ]
@@ -5,11 +5,60 @@
#ifndef INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_LOCAL_DATA_H_
#define INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_LOCAL_DATA_H_

+#include <array>
+
#include "cppgc/internal/api-constants.h"
+#include "cppgc/internal/logging.h"
#include "cppgc/platform.h"

namespace cppgc {
namespace internal {

+class HeapBase;
+
+#if defined(CPPGC_YOUNG_GENERATION)
+
+// AgeTable contains entries that correspond to 4KB memory regions. Each entry
+// can be in one of three states: kOld, kYoung or kUnknown.
+class AgeTable final {
+  static constexpr size_t kGranularityBits = 12;  // 4KiB per byte.
+
+ public:
+  enum class Age : uint8_t { kOld, kYoung, kUnknown };
+
+  static constexpr size_t kEntrySizeInBytes = 1 << kGranularityBits;
+
+  Age& operator[](uintptr_t offset) { return table_[entry(offset)]; }
+  Age operator[](uintptr_t offset) const { return table_[entry(offset)]; }
+
+  void Reset(PageAllocator* allocator);
+
+ private:
+  static constexpr size_t kAgeTableSize =
+      api_constants::kCagedHeapReservationSize >> kGranularityBits;
+
+  size_t entry(uintptr_t offset) const {
+    const size_t entry = offset >> kGranularityBits;
+    CPPGC_DCHECK(table_.size() > entry);
+    return entry;
+  }
+
+  std::array<Age, kAgeTableSize> table_;
+};
+
+static_assert(sizeof(AgeTable) == 1 * api_constants::kMB,
+              "Size of AgeTable is 1MB");
+
+#endif  // CPPGC_YOUNG_GENERATION
+
struct CagedHeapLocalData final {
+  explicit CagedHeapLocalData(HeapBase* heap_base) : heap_base(heap_base) {}
+
  bool is_marking_in_progress = false;
+  HeapBase* heap_base = nullptr;
+#if defined(CPPGC_YOUNG_GENERATION)
+  AgeTable age_table;
+#endif
};

}  // namespace internal
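One byte of the table covers one 4 KiB region, so for the 4 GiB cage the table is exactly 4 GiB >> 12 = 1 MiB, which the static_assert above pins down; indexing is a plain shift of the cage offset. A self-contained model of that mapping (simplified, not the cppgc API):

#include <array>
#include <cassert>
#include <cstddef>
#include <cstdint>

namespace {

// One byte tracks the age of one 4 KiB region of a 4 GiB cage.
constexpr size_t kGranularityBits = 12;
constexpr size_t kCageSize = size_t{4} * 1024 * 1024 * 1024;
constexpr size_t kTableSize = kCageSize >> kGranularityBits;
static_assert(kTableSize == size_t{1} * 1024 * 1024, "1 MiB of age entries");

enum class Age : uint8_t { kOld, kYoung, kUnknown };

Age& AgeEntry(std::array<Age, kTableSize>& table, uintptr_t offset_in_cage) {
  const size_t entry = offset_in_cage >> kGranularityBits;  // 4 KiB granules.
  assert(entry < table.size());
  return table[entry];
}

}  // namespace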
@@ -7,6 +7,12 @@

namespace cppgc {

+#if defined(__has_attribute)
+#define CPPGC_HAS_ATTRIBUTE(FEATURE) __has_attribute(FEATURE)
+#else
+#define CPPGC_HAS_ATTRIBUTE(FEATURE) 0
+#endif
+
#if defined(__has_cpp_attribute)
#define CPPGC_HAS_CPP_ATTRIBUTE(FEATURE) __has_cpp_attribute(FEATURE)
#else

@@ -21,6 +27,12 @@ namespace cppgc {
#define CPPGC_NO_UNIQUE_ADDRESS
#endif

+#if CPPGC_HAS_ATTRIBUTE(unused)  // NOLINTNEXTLINE
+#define CPPGC_UNUSED __attribute__((unused))
+#else
+#define CPPGC_UNUSED
+#endif
+
} // namespace cppgc

#endif // INCLUDE_CPPGC_INTERNAL_COMPILER_SPECIFIC_H_
@@ -117,7 +117,7 @@ class BasicMember;
struct SentinelPointer {
  template <typename T>
  operator T*() const {  // NOLINT
-    static constexpr intptr_t kSentinelValue = -1;
+    static constexpr intptr_t kSentinelValue = 1;
    return reinterpret_cast<T*>(kSentinelValue);
  }
  // Hidden friends.
@@ -25,15 +25,21 @@ class V8_EXPORT WriteBarrier final {
        ~(api_constants::kCagedHeapReservationAlignment - 1);
    const uintptr_t slot_offset = reinterpret_cast<uintptr_t>(slot) - start;
    if (slot_offset > api_constants::kCagedHeapReservationSize) {
-      // Check if slot is on stack or value is sentinel or nullptr.
+      // Check if slot is on stack or value is sentinel or nullptr. This relies
+      // on the fact that kSentinelPointer is encoded as 0x1.
      return;
    }

    CagedHeapLocalData* local_data =
        reinterpret_cast<CagedHeapLocalData*>(start);
-    if (V8_LIKELY(!local_data->is_marking_in_progress)) return;
-
-    MarkingBarrierSlow(value);
+    if (V8_UNLIKELY(local_data->is_marking_in_progress)) {
+      MarkingBarrierSlow(value);
+      return;
+    }
+#if defined(CPPGC_YOUNG_GENERATION)
+    GenerationalBarrier(local_data, slot, slot_offset,
+                        reinterpret_cast<uintptr_t>(value) - start);
+#endif
#else
    if (V8_LIKELY(!ProcessHeap::IsAnyIncrementalOrConcurrentMarking())) return;

@@ -46,6 +52,24 @@ class V8_EXPORT WriteBarrier final {

  static void MarkingBarrierSlow(const void* value);
  static void MarkingBarrierSlowWithSentinelCheck(const void* value);

+#if defined(CPPGC_YOUNG_GENERATION)
+  static V8_INLINE void GenerationalBarrier(CagedHeapLocalData* local_data,
+                                            const void* slot,
+                                            uintptr_t slot_offset,
+                                            uintptr_t value_offset) {
+    const AgeTable& age_table = local_data->age_table;
+
+    // Bail out if the slot is in young generation.
+    if (V8_LIKELY(age_table[slot_offset] == AgeTable::Age::kYoung)) return;
+
+    GenerationalBarrierSlow(local_data, age_table, slot, value_offset);
+  }
+
+  static void GenerationalBarrierSlow(CagedHeapLocalData* local_data,
+                                      const AgeTable& ageTable,
+                                      const void* slot, uintptr_t value_offset);
+#endif
};

} // namespace internal
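A note on why the sentinel encoding matters here: the caged fast path derives the cage base by masking the value pointer, so a single unsigned comparison rejects stack slots, nullptr, and the sentinel in one go; with the old kSentinelValue of -1 the masked base would be bogus but non-zero. A standalone sketch of the trick (hypothetical constants, not the cppgc API):

#include <cstdint>

namespace {

constexpr uintptr_t kCageSize = uintptr_t{4} << 30;  // 4 GiB reservation.
constexpr uintptr_t kCageAlignment = kCageSize;      // Cage is self-aligned.

// Mirrors the fast-path filter above: derive the cage base from |value| and
// bail unless |slot| falls inside that cage.
bool NeedsCagedBarrier(const void* slot, const void* value) {
  const uintptr_t base =
      reinterpret_cast<uintptr_t>(value) & ~(kCageAlignment - 1);
  // value == nullptr (0x0) and value == sentinel (0x1) both yield base == 0,
  // so slot_offset becomes the raw slot address; on-stack slots likewise fall
  // outside [0, kCageSize]. All three cases skip the barrier.
  const uintptr_t slot_offset = reinterpret_cast<uintptr_t>(slot) - base;
  return slot_offset <= kCageSize;
}

}  // namespace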
@@ -5,16 +5,18 @@
#ifndef INCLUDE_CPPGC_MACROS_H_
#define INCLUDE_CPPGC_MACROS_H_

+#include "cppgc/internal/compiler-specific.h"
+
namespace cppgc {

// Use if the object is only stack allocated.
-#define CPPGC_STACK_ALLOCATED() \
- public:                        \
-  using IsStackAllocatedTypeMarker = int; \
-                                          \
- private:                                 \
-  void* operator new(size_t) = delete;    \
-  void* operator new(size_t, void*) = delete; \
+#define CPPGC_STACK_ALLOCATED()                        \
+ public:                                               \
+  using IsStackAllocatedTypeMarker CPPGC_UNUSED = int; \
+                                                       \
+ private:                                              \
+  void* operator new(size_t) = delete;                 \
+  void* operator new(size_t, void*) = delete;          \
  static_assert(true, "Force semicolon.")

} // namespace cppgc
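CPPGC_UNUSED is presumably there so the marker alias injected by CPPGC_STACK_ALLOCATED() does not trip unused-typedef warnings in classes that never reference it (e.g. -Wunused-local-typedef when the macro is used in a function-local class). A minimal reproduction of the pattern, with demo macro names:

#if defined(__has_attribute)
#if __has_attribute(unused)
#define DEMO_UNUSED __attribute__((unused))
#endif
#endif
#ifndef DEMO_UNUSED
#define DEMO_UNUSED
#endif

class OnStackOnly {
 public:
  // Without the attribute, a never-referenced marker alias can provoke
  // unused-typedef warnings in some build configurations.
  using IsStackAllocatedTypeMarker DEMO_UNUSED = int;
};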
src/heap/cppgc/caged-heap-local-data.cc (new file, 36 lines)
@@ -0,0 +1,36 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "include/cppgc/internal/caged-heap-local-data.h"

#include <algorithm>
#include <type_traits>

#include "include/cppgc/platform.h"
#include "src/base/macros.h"

namespace cppgc {
namespace internal {

#if defined(CPPGC_YOUNG_GENERATION)

static_assert(
    std::is_trivially_default_constructible<AgeTable>::value,
    "To support lazy committing, AgeTable must be trivially constructible");

void AgeTable::Reset(PageAllocator* allocator) {
  // TODO(chromium:1029379): Consider MADV_DONTNEED instead of MADV_FREE on
  // POSIX platforms.
  std::fill(table_.begin(), table_.end(), Age::kOld);
  const uintptr_t begin = RoundUp(reinterpret_cast<uintptr_t>(table_.begin()),
                                  allocator->CommitPageSize());
  const uintptr_t end = RoundDown(reinterpret_cast<uintptr_t>(table_.end()),
                                  allocator->CommitPageSize());
  allocator->DiscardSystemPages(reinterpret_cast<void*>(begin), end - begin);
}

#endif

}  // namespace internal
}  // namespace cppgc
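Reset depends on two facts: Age::kOld is the zero enumerator, and AgeTable is trivially constructible (the static_assert above). Discarding the interior pages therefore returns them to zero-filled, lazily committed memory that still reads back as kOld, so an idle heap does not keep the full 1 MiB resident. Only whole commit pages can be discarded, hence rounding the range inward; a sketch of that arithmetic with an assumed 4 KiB commit page size:

#include <cstdint>

namespace {

constexpr uintptr_t kCommitPageSize = 4096;  // Assumed; the real code queries
                                             // the PageAllocator at runtime.

constexpr uintptr_t RoundUp(uintptr_t x) {
  return (x + kCommitPageSize - 1) & ~(kCommitPageSize - 1);
}
constexpr uintptr_t RoundDown(uintptr_t x) { return x & ~(kCommitPageSize - 1); }

// The discardable interval is the largest page-aligned subrange of the table.
static_assert(RoundUp(0x1001) == 0x2000 && RoundDown(0x2fff) == 0x2000,
              "partial pages at either end stay committed");

}  // namespace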
@@ -10,14 +10,21 @@

+#include "include/cppgc/internal/caged-heap-local-data.h"
#include "src/base/bounded-page-allocator.h"
#include "src/base/logging.h"
#include "src/heap/cppgc/globals.h"

namespace cppgc {
namespace internal {

+STATIC_ASSERT(api_constants::kCagedHeapReservationSize ==
+              kCagedHeapReservationSize);
+STATIC_ASSERT(api_constants::kCagedHeapReservationAlignment ==
+              kCagedHeapReservationAlignment);
+
namespace {

VirtualMemory ReserveCagedHeap(PageAllocator* platform_allocator) {
  DCHECK_NOT_NULL(platform_allocator);
  DCHECK_EQ(0u,
            kCagedHeapReservationSize % platform_allocator->AllocatePageSize());

@@ -49,15 +56,23 @@ std::unique_ptr<CagedHeap::AllocatorType> CreateBoundedAllocator(

} // namespace

-CagedHeap::CagedHeap(PageAllocator* platform_allocator)
+CagedHeap::CagedHeap(HeapBase* heap_base, PageAllocator* platform_allocator)
    : reserved_area_(ReserveCagedHeap(platform_allocator)) {
+  DCHECK_NOT_NULL(heap_base);
+
  void* caged_heap_start = reserved_area_.address();
  CHECK(platform_allocator->SetPermissions(
      reserved_area_.address(),
      RoundUp(sizeof(CagedHeapLocalData), platform_allocator->CommitPageSize()),
      PageAllocator::kReadWrite));

-  new (reserved_area_.address()) CagedHeapLocalData;
+  auto* local_data =
+      new (reserved_area_.address()) CagedHeapLocalData(heap_base);
+#if defined(CPPGC_YOUNG_GENERATION)
+  local_data->age_table.Reset(platform_allocator);
+#endif
+  USE(local_data);

  caged_heap_start = reinterpret_cast<void*>(
      RoundUp(reinterpret_cast<uintptr_t>(caged_heap_start) +
                  sizeof(CagedHeapLocalData),
@@ -16,12 +16,13 @@ namespace cppgc {
namespace internal {

struct CagedHeapLocalData;
+class HeapBase;

class CagedHeap final {
 public:
  using AllocatorType = v8::base::BoundedPageAllocator;

-  explicit CagedHeap(PageAllocator* platform_allocator);
+  CagedHeap(HeapBase* heap, PageAllocator* platform_allocator);

  CagedHeap(const CagedHeap&) = delete;
  CagedHeap& operator=(const CagedHeap&) = delete;

@@ -36,6 +37,11 @@ class CagedHeap final {
    return *static_cast<CagedHeapLocalData*>(reserved_area_.address());
  }

+  static uintptr_t OffsetFromAddress(void* address) {
+    return reinterpret_cast<uintptr_t>(address) &
+           (kCagedHeapReservationAlignment - 1);
+  }
+
 private:
  VirtualMemory reserved_area_;
  std::unique_ptr<AllocatorType> bounded_allocator_;
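Since the reservation is aligned to its own size, the low 32 bits of any in-cage address are exactly its offset from the cage base, and that offset is what indexes the age table. Worked example with a hypothetical cage base:

#include <cstdint>

namespace {

constexpr uint64_t kAlignment = uint64_t{4} << 30;  // Cage size == alignment.

constexpr uint64_t OffsetFromAddress(uint64_t address) {
  return address & (kAlignment - 1);
}

// With a (hypothetical) cage base of 0x200000000, the address 0x200001234
// yields offset 0x1234, i.e. age-table entry 0x1234 >> 12 == 1.
static_assert(OffsetFromAddress(0x200001234u) == 0x1234, "mask == offset");

}  // namespace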
@@ -16,20 +16,27 @@ namespace internal {
class GarbageCollector {
 public:
  struct Config {
+    using CollectionType = Marker::MarkingConfig::CollectionType;
    using StackState = cppgc::Heap::StackState;
    using MarkingType = Marker::MarkingConfig::MarkingType;
    using SweepingType = Sweeper::Config;

    static constexpr Config ConservativeAtomicConfig() {
-      return {StackState::kMayContainHeapPointers, MarkingType::kAtomic,
-              SweepingType::kAtomic};
+      return {CollectionType::kMajor, StackState::kMayContainHeapPointers,
+              MarkingType::kAtomic, SweepingType::kAtomic};
    }

    static constexpr Config PreciseAtomicConfig() {
-      return {StackState::kNoHeapPointers, MarkingType::kAtomic,
-              SweepingType::kAtomic};
+      return {CollectionType::kMajor, StackState::kNoHeapPointers,
+              MarkingType::kAtomic, SweepingType::kAtomic};
    }

+    static constexpr Config MinorPreciseAtomicConfig() {
+      return {CollectionType::kMinor, StackState::kNoHeapPointers,
+              MarkingType::kAtomic, SweepingType::kAtomic};
+    }
+
+    CollectionType collection_type = CollectionType::kMajor;
    StackState stack_state = StackState::kMayContainHeapPointers;
    MarkingType marking_type = MarkingType::kAtomic;
    SweepingType sweeping_type = SweepingType::kAtomic;
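Making CollectionType the first Config member means every brace-initialized preset (and call site) has to state the collection type explicitly, so a minor collection can never be selected by accident. A reduced model of the aggregate-plus-presets pattern (illustrative only):

#include <cstdint>

namespace {

enum class CollectionType : uint8_t { kMinor, kMajor };
enum class StackState : uint8_t { kMayContainHeapPointers, kNoHeapPointers };

struct Config {
  static constexpr Config MinorPreciseAtomic() {
    return {CollectionType::kMinor, StackState::kNoHeapPointers};
  }
  // Aggregate members; the first field selects the collection type.
  CollectionType collection_type = CollectionType::kMajor;
  StackState stack_state = StackState::kMayContainHeapPointers;
};

static_assert(
    Config::MinorPreciseAtomic().collection_type == CollectionType::kMinor,
    "the preset is the explicit way to request a minor collection");

}  // namespace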
@@ -46,10 +46,8 @@ constexpr size_t kLargeObjectSizeThreshold = kPageSize / 2;
constexpr GCInfoIndex kFreeListGCInfoIndex = 0;
constexpr size_t kFreeListEntrySize = 2 * sizeof(uintptr_t);

-#if defined(CPPGC_CAGED_HEAP)
constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(4) * kGB;
constexpr size_t kCagedHeapReservationAlignment = kCagedHeapReservationSize;
-#endif

} // namespace internal
} // namespace cppgc
@@ -57,7 +57,7 @@ HeapBase::HeapBase(std::shared_ptr<cppgc::Platform> platform,
    : raw_heap_(this, custom_spaces),
      platform_(std::move(platform)),
#if defined(CPPGC_CAGED_HEAP)
-      caged_heap_(platform_->GetPageAllocator()),
+      caged_heap_(this, platform_->GetPageAllocator()),
      page_backend_(std::make_unique<PageBackend>(&caged_heap_.allocator())),
#else
      page_backend_(
@@ -6,6 +6,7 @@
#define V8_HEAP_CPPGC_HEAP_BASE_H_

#include <memory>
+#include <set>

#include "include/cppgc/internal/persistent-node.h"
#include "include/cppgc/macros.h"

@@ -103,6 +104,10 @@ class V8_EXPORT_PRIVATE HeapBase {
    return weak_persistent_region_;
  }

+#if defined(CPPGC_YOUNG_GENERATION)
+  std::set<void*>& remembered_slots() { return remembered_slots_; }
+#endif
+
  size_t ObjectPayloadSize() const;

 protected:

@@ -126,6 +131,10 @@ class V8_EXPORT_PRIVATE HeapBase {
  PersistentRegion strong_persistent_region_;
  PersistentRegion weak_persistent_region_;

+#if defined(CPPGC_YOUNG_GENERATION)
+  std::set<void*> remembered_slots_;
+#endif
+
  size_t no_gc_scope_ = 0;

  friend class testing::TestWithHeap;
@@ -112,6 +112,11 @@ bool HeapObjectHeader::TryMarkAtomic() {
      std::memory_order_relaxed);
}

+template <HeapObjectHeader::AccessMode mode>
+bool HeapObjectHeader::IsYoung() const {
+  return !IsMarked<mode>();
+}
+
template <HeapObjectHeader::AccessMode mode>
bool HeapObjectHeader::IsFree() const {
  return GetGCInfoIndex() == kFreeListGCInfoIndex;
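IsYoung is simply the inverse of the mark bit: with sticky mark bits the bit is not cleared after marking, so anything that survived a collection reads as old, while freshly allocated (unmarked) objects read as young. A toy model of the lifecycle:

#include <cassert>

namespace {

// Toy model: under sticky mark bits the mark bit doubles as the age bit.
struct ToyHeader {
  bool marked = false;
  bool IsYoung() const { return !marked; }
};

}  // namespace

int main() {
  ToyHeader h;      // Fresh allocation: unmarked, hence young.
  assert(h.IsYoung());
  h.marked = true;  // Survived a minor GC: the bit sticks, now old.
  assert(!h.IsYoung());
  h.marked = false; // A major GC clears all bits up front...
  assert(h.IsYoung());  // ...so marking re-proves liveness from the roots.
  return 0;
}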
@@ -79,6 +79,9 @@ class HeapObjectHeader {
  void Unmark();
  inline bool TryMarkAtomic();

+  template <AccessMode = AccessMode::kNonAtomic>
+  bool IsYoung() const;
+
  template <AccessMode = AccessMode::kNonAtomic>
  bool IsFree() const;
@@ -66,6 +66,24 @@ void BasePage::Destroy(BasePage* page) {
  }
}

+Address BasePage::PayloadStart() {
+  return is_large() ? LargePage::From(this)->PayloadStart()
+                    : NormalPage::From(this)->PayloadStart();
+}
+
+ConstAddress BasePage::PayloadStart() const {
+  return const_cast<BasePage*>(this)->PayloadStart();
+}
+
+Address BasePage::PayloadEnd() {
+  return is_large() ? LargePage::From(this)->PayloadEnd()
+                    : NormalPage::From(this)->PayloadEnd();
+}
+
+ConstAddress BasePage::PayloadEnd() const {
+  return const_cast<BasePage*>(this)->PayloadEnd();
+}
+
HeapObjectHeader& BasePage::ObjectHeaderFromInnerAddress(void* address) const {
  return const_cast<HeapObjectHeader&>(
      ObjectHeaderFromInnerAddress(const_cast<const void*>(address)));
@@ -42,6 +42,11 @@ class V8_EXPORT_PRIVATE BasePage {

  bool is_large() const { return type_ == PageType::kLarge; }

+  Address PayloadStart();
+  ConstAddress PayloadStart() const;
+  Address PayloadEnd();
+  ConstAddress PayloadEnd() const;
+
  // |address| must refer to real object.
  HeapObjectHeader& ObjectHeaderFromInnerAddress(void* address) const;
  const HeapObjectHeader& ObjectHeaderFromInnerAddress(
@@ -4,7 +4,10 @@

#include "src/heap/cppgc/heap.h"

+#include "src/heap/cppgc/garbage-collector.h"
#include "src/heap/cppgc/gc-invoker.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
+#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/marker.h"
+#include "src/heap/cppgc/prefinalizer-handler.h"
#include "src/heap/cppgc/stack.h"

@@ -36,7 +39,9 @@ std::unique_ptr<Heap> Heap::Create(std::shared_ptr<cppgc::Platform> platform,

void Heap::ForceGarbageCollectionSlow(const char* source, const char* reason,
                                      Heap::StackState stack_state) {
-  internal::Heap::From(this)->CollectGarbage({stack_state});
+  internal::Heap::From(this)->CollectGarbage(
+      {internal::GarbageCollector::Config::CollectionType::kMajor,
+       stack_state});
}

AllocationHandle& Heap::GetAllocationHandle() {

@@ -45,6 +50,30 @@ AllocationHandle& Heap::GetAllocationHandle() {

namespace internal {

+namespace {
+
+class Unmarker final : private HeapVisitor<Unmarker> {
+  friend class HeapVisitor<Unmarker>;
+
+ public:
+  explicit Unmarker(RawHeap* heap) { Traverse(heap); }
+
+ private:
+  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
+    if (header->IsMarked()) header->Unmark();
+    return true;
+  }
+};
+
+void CheckConfig(Heap::Config config) {
+  CHECK_WITH_MSG(
+      (config.collection_type != Heap::Config::CollectionType::kMinor) ||
+          (config.stack_state == Heap::Config::StackState::kNoHeapPointers),
+      "Minor GCs with stack is currently not supported");
+}
+
+} // namespace
+
// static
cppgc::LivenessBroker LivenessBrokerFactory::Create() {
  return cppgc::LivenessBroker();

@@ -64,14 +93,21 @@ Heap::~Heap() {
}

void Heap::CollectGarbage(Config config) {
+  CheckConfig(config);
+
  if (in_no_gc_scope()) return;

  epoch_++;

+#if defined(CPPGC_YOUNG_GENERATION)
+  if (config.collection_type == Config::CollectionType::kMajor)
+    Unmarker unmarker(&raw_heap());
+#endif
+
  // "Marking".
  marker_ = std::make_unique<Marker>(AsBase());
-  const Marker::MarkingConfig marking_config{config.stack_state,
-                                             config.marking_type};
+  const Marker::MarkingConfig marking_config{
+      config.collection_type, config.stack_state, config.marking_type};
  marker_->StartMarking(marking_config);
  marker_->FinishMarking(marking_config);
  // "Sweeping and finalization".
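One subtlety in CollectGarbage above: the Unmarker does all of its work in the constructor, and C++ permits a declaration as the lone body of an if, so the object is created, traverses the heap, and is destroyed on the same line. A more explicit spelling of the same step, assuming the surrounding method:

#if defined(CPPGC_YOUNG_GENERATION)
  // Sticky mark bits are only valid input for a minor GC; before a major GC
  // every mark bit must be cleared so marking can re-prove liveness.
  if (config.collection_type == Config::CollectionType::kMajor) {
    Unmarker unmarker(&raw_heap());  // Traverses and unmarks in the ctor.
  }
#endif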
@@ -47,6 +47,35 @@ void ExitIncrementalMarkingIfNeeded(
#endif
}

+// Visit remembered set that was recorded in the generational barrier.
+void VisitRememberedSlots(
+    HeapBase& heap, MarkingVisitor* visitor) {  // NOLINT(runtime/references)
+#if defined(CPPGC_YOUNG_GENERATION)
+  for (void* slot : heap.remembered_slots()) {
+    auto& slot_header = BasePage::FromInnerAddress(&heap, slot)
+                            ->ObjectHeaderFromInnerAddress(slot);
+    if (slot_header.IsYoung()) continue;
+    // The design of young generation requires collections to be executed at the
+    // top level (with the guarantee that no objects are currently being in
+    // construction). This can be ensured by running young GCs from safe points
+    // or by reintroducing nested allocation scopes that avoid finalization.
+    DCHECK(!MarkingVisitor::IsInConstruction(slot_header));
+
+    void* value = *reinterpret_cast<void**>(slot);
+    visitor->DynamicallyMarkAddress(static_cast<Address>(value));
+  }
+#endif
+}
+
+// Assumes that all spaces have their LABs reset.
+void ResetRememberedSet(HeapBase& heap) {  // NOLINT(runtime/references)
+#if defined(CPPGC_YOUNG_GENERATION)
+  auto& local_data = heap.caged_heap().local_data();
+  local_data.age_table.Reset(&heap.caged_heap().allocator());
+  heap.remembered_slots().clear();
+#endif
+}
+
template <typename Worklist, typename Callback>
bool DrainWorklistWithDeadline(v8::base::TimeTicks deadline, Worklist* worklist,
                               Callback callback, int task_id) {

@@ -66,6 +95,7 @@ bool DrainWorklistWithDeadline(v8::base::TimeTicks deadline, Worklist* worklist,
  }
  return true;
}

} // namespace

constexpr int Marker::kMutatorThreadId;

@@ -118,6 +148,7 @@ void Marker::EnterAtomicPause(MarkingConfig config) {
}

void Marker::LeaveAtomicPause() {
+  ResetRememberedSet(heap());
  heap().stats_collector()->NotifyMarkingCompleted(
      marking_visitor_->marked_bytes());
}

@@ -151,6 +182,9 @@ void Marker::VisitRoots() {
  if (config_.stack_state != MarkingConfig::StackState::kNoHeapPointers) {
    heap().stack()->IteratePointers(marking_visitor_.get());
  }
+  if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
+    VisitRememberedSlots(heap(), marking_visitor_.get());
+  }
}

std::unique_ptr<MutatorThreadMarkingVisitor>
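A minor collection's roots are therefore the persistent handles plus the recorded old-to-young slots (conservative stack scanning is rejected by CheckConfig for now). Each slot is re-read at GC time, since the mutator may have overwritten it after the barrier fired. A condensed model of the visit, with toy types rather than the cppgc visitor API:

#include <set>
#include <vector>

namespace {

struct ToyObject {
  bool marked = false;
};

// Re-read every recorded slot and treat its current referent as a root.
void VisitRememberedSlots(const std::set<ToyObject**>& remembered_slots,
                          std::vector<ToyObject*>* marking_worklist) {
  for (ToyObject** slot : remembered_slots) {
    ToyObject* value = *slot;  // May differ from the barrier-time value.
    if (value != nullptr && !value->marked) {
      value->marked = true;
      marking_worklist->push_back(value);
    }
  }
}

}  // namespace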
@@ -55,6 +55,10 @@ class V8_EXPORT_PRIVATE Marker {
      Worklist<HeapObjectHeader*, 64 /*local entries */, kNumMarkers>;

  struct MarkingConfig {
+    enum class CollectionType : uint8_t {
+      kMinor,
+      kMajor,
+    };
    using StackState = cppgc::Heap::StackState;
    enum MarkingType : uint8_t {
      kAtomic,

@@ -64,6 +68,7 @@ class V8_EXPORT_PRIVATE Marker {

    static constexpr MarkingConfig Default() { return {}; }

+    CollectionType collection_type = CollectionType::kMajor;
    StackState stack_state = StackState::kMayContainHeapPointers;
    MarkingType marking_type = MarkingType::kAtomic;
  };
@@ -24,6 +24,37 @@ namespace cppgc {
namespace internal {
namespace {

+void MarkRangeAsYoung(BasePage* page, Address begin, Address end) {
+#if defined(CPPGC_YOUNG_GENERATION)
+  DCHECK_LT(begin, end);
+
+  static constexpr auto kEntrySize = AgeTable::kEntrySizeInBytes;
+
+  const uintptr_t offset_begin = CagedHeap::OffsetFromAddress(begin);
+  const uintptr_t offset_end = CagedHeap::OffsetFromAddress(end);
+
+  const uintptr_t young_offset_begin = (begin == page->PayloadStart())
+                                           ? RoundDown(offset_begin, kEntrySize)
+                                           : RoundUp(offset_begin, kEntrySize);
+  const uintptr_t young_offset_end = (end == page->PayloadEnd())
+                                         ? RoundUp(offset_end, kEntrySize)
+                                         : RoundDown(offset_end, kEntrySize);
+
+  auto& age_table = page->heap()->caged_heap().local_data().age_table;
+  for (auto offset = young_offset_begin; offset < young_offset_end;
+       offset += AgeTable::kEntrySizeInBytes) {
+    age_table[offset] = AgeTable::Age::kYoung;
+  }
+
+  // Set to kUnknown the first and the last regions of the newly allocated
+  // linear buffer.
+  if (begin != page->PayloadStart() && !IsAligned(offset_begin, kEntrySize))
+    age_table[offset_begin] = AgeTable::Age::kUnknown;
+  if (end != page->PayloadEnd() && !IsAligned(offset_end, kEntrySize))
+    age_table[offset_end] = AgeTable::Age::kUnknown;
+#endif
+}
+
void AddToFreeList(NormalPageSpace* space, Address start, size_t size) {
  auto& free_list = space->free_list();
  free_list.Add({start, size});

@@ -48,9 +79,9 @@ void ReplaceLinearAllocationBuffer(NormalPageSpace* space,
  if (new_size) {
    DCHECK_NOT_NULL(new_buffer);
    stats_collector->NotifyAllocation(new_size);
-    NormalPage::From(BasePage::FromPayload(new_buffer))
-        ->object_start_bitmap()
-        .ClearBit(new_buffer);
+    auto* page = NormalPage::From(BasePage::FromPayload(new_buffer));
+    page->object_start_bitmap().ClearBit(new_buffer);
+    MarkRangeAsYoung(page, new_buffer, new_buffer + new_size);
  }
}

@@ -64,6 +95,8 @@ void* AllocateLargeObject(PageBackend* page_backend, LargePageSpace* space,
      HeapObjectHeader(HeapObjectHeader::kLargeObjectSizeInHeader, gcinfo);

  stats_collector->NotifyAllocation(size);
+  MarkRangeAsYoung(page, page->PayloadStart(), page->PayloadEnd());

  return header->Payload();
}

@@ -110,11 +143,15 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
  // 5. Add a new page to this heap.
  auto* new_page = NormalPage::Create(page_backend_, space);
  space->AddPage(new_page);
-  AddToFreeList(space, new_page->PayloadStart(), new_page->PayloadSize());

-  // 6. Try to allocate from the freelist. This allocation must succeed.
-  void* result = AllocateFromFreeList(space, size, gcinfo);
-  CPPGC_CHECK(result);
+  // 6. Set linear allocation buffer to new page.
+  ReplaceLinearAllocationBuffer(space, stats_collector_,
+                                new_page->PayloadStart(),
+                                new_page->PayloadSize());
+
+  // 7. Allocate from it. The allocation must succeed.
+  void* result = AllocateObjectOnSpace(space, size, gcinfo);
+  CHECK(result);

  return result;
}
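MarkRangeAsYoung only flips entries whose 4 KiB region is fully covered by the new buffer; a straddled region at either end is demoted to kUnknown because it can hold both old and young objects, unless the buffer boundary coincides with the page payload boundary. A worked example with hypothetical offsets:

#include <cstdint>

namespace {

constexpr uintptr_t kEntrySize = 4096;  // AgeTable::kEntrySizeInBytes.

constexpr uintptr_t RoundUp(uintptr_t x) {
  return (x + kEntrySize - 1) & ~(kEntrySize - 1);
}
constexpr uintptr_t RoundDown(uintptr_t x) { return x & ~(kEntrySize - 1); }

// A buffer [0x1100, 0x3100) that starts and ends mid-payload: only the fully
// covered range [0x2000, 0x3000) becomes kYoung, while the straddled entries
// for offsets 0x1100 (region 0x1000) and 0x3100 (region 0x3000) get kUnknown.
static_assert(RoundUp(0x1100) == 0x2000, "first fully covered region");
static_assert(RoundDown(0x3100) == 0x3000, "end of the fully covered range");

}  // namespace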
@@ -113,6 +113,13 @@ struct SpaceState {

using SpaceStates = std::vector<SpaceState>;

+void StickyUnmark(HeapObjectHeader* header) {
+  // Young generation in Oilpan uses sticky mark bits.
+#if !defined(CPPGC_YOUNG_GENERATION)
+  header->Unmark<HeapObjectHeader::AccessMode::kAtomic>();
+#endif
+}
+
// Builder that finalizes objects and adds freelist entries right away.
class InlinedFinalizationBuilder final {
 public:

@@ -203,7 +210,7 @@ typename FinalizationBuilder::ResultType SweepNormalPage(NormalPage* page) {
          start_of_gap, static_cast<size_t>(header_address - start_of_gap));
      bitmap.SetBit(start_of_gap);
    }
-    header->Unmark<kAtomicAccess>();
+    StickyUnmark(header);
    bitmap.SetBit(begin);
    begin += size;
    start_of_gap = begin;

@@ -366,7 +373,7 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
  bool VisitLargePage(LargePage* page) {
    HeapObjectHeader* header = page->ObjectHeader();
    if (header->IsMarked()) {
-      header->Unmark();
+      StickyUnmark(header);
      page->space()->AddPage(page);
    } else {
      header->Finalize();

@@ -414,7 +421,7 @@ class ConcurrentSweepTask final : public v8::JobTask,
  bool VisitLargePage(LargePage* page) {
    HeapObjectHeader* header = page->ObjectHeader();
    if (header->IsMarked()) {
-      header->Unmark();
+      StickyUnmark(header);
      page->space()->AddPage(page);
      return true;
    }
@@ -69,5 +69,16 @@ void WriteBarrier::MarkingBarrierSlow(const void* value) {
  MarkValue(page, heap->marker(), value);
}

+#if defined(CPPGC_YOUNG_GENERATION)
+void WriteBarrier::GenerationalBarrierSlow(CagedHeapLocalData* local_data,
+                                           const AgeTable& age_table,
+                                           const void* slot,
+                                           uintptr_t value_offset) {
+  if (age_table[value_offset] == AgeTable::Age::kOld) return;
+  // Record slot.
+  local_data->heap_base->remembered_slots().insert(const_cast<void*>(slot));
+}
+#endif
+
} // namespace internal
} // namespace cppgc
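Taken together, the inline fast path and this slow path implement a two-level filter: stores into young regions are ignored outright, stores of pointers to known-old objects are ignored here, and everything else conservatively lands in the remembered set. A condensed model:

#include <set>

namespace {

enum class Age { kOld, kYoung, kUnknown };

// Toy version of the two-level filter; the real code splits it into an
// inlined fast path (slot age) and an out-of-line slow path (value age).
void ToyGenerationalBarrier(Age slot_age, Age value_age, void* slot,
                            std::set<void*>* remembered_slots) {
  if (slot_age == Age::kYoung) return;  // Young-to-anything: no record needed.
  if (value_age == Age::kOld) return;   // Anything-to-old: no record needed.
  remembered_slots->insert(slot);       // Old/unknown slot -> young/unknown.
}

}  // namespace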
@ -59,6 +59,7 @@ v8_source_set("cppgc_unittests_sources") {
|
||||
"heap/cppgc/marker-unittest.cc",
|
||||
"heap/cppgc/marking-visitor-unittest.cc",
|
||||
"heap/cppgc/member-unittest.cc",
|
||||
"heap/cppgc/minor-gc-unittest.cc",
|
||||
"heap/cppgc/object-start-bitmap-unittest.cc",
|
||||
"heap/cppgc/page-memory-unittest.cc",
|
||||
"heap/cppgc/persistent-unittest.cc",
|
||||
|
@@ -147,8 +147,13 @@ TEST_F(ConcurrentSweeperTest, BackgroundSweepOfNormalPage) {
  // Wait for concurrent sweeping to finish.
  GetPlatform().WaitAllBackgroundTasks();

+#if !defined(CPPGC_YOUNG_GENERATION)
  // Check that the marked object was unmarked.
  EXPECT_FALSE(HeapObjectHeader::FromPayload(marked_object).IsMarked());
+#else
+  // Check that the marked object is still marked.
+  EXPECT_TRUE(HeapObjectHeader::FromPayload(marked_object).IsMarked());
+#endif

  // Check that free list entries are created right away for non-finalizable
  // objects, but not immediately returned to the space's freelist.

@@ -181,8 +186,13 @@ TEST_F(ConcurrentSweeperTest, BackgroundSweepOfLargePage) {
  // Wait for concurrent sweeping to finish.
  GetPlatform().WaitAllBackgroundTasks();

+#if !defined(CPPGC_YOUNG_GENERATION)
  // Check that the marked object was unmarked.
  EXPECT_FALSE(HeapObjectHeader::FromPayload(marked_object).IsMarked());
+#else
+  // Check that the marked object is still marked.
+  EXPECT_TRUE(HeapObjectHeader::FromPayload(marked_object).IsMarked());
+#endif

  // Check that free list entries are created right away for non-finalizable
  // objects, but not immediately returned to the space's freelist.

@@ -295,8 +305,13 @@ TEST_F(ConcurrentSweeperTest, IncrementalSweeping) {
  GetPlatform().WaitAllForegroundTasks();

  EXPECT_EQ(2u, g_destructor_callcount);
+#if !defined(CPPGC_YOUNG_GENERATION)
  EXPECT_FALSE(marked_normal_header.IsMarked());
  EXPECT_FALSE(marked_large_header.IsMarked());
+#else
+  EXPECT_TRUE(marked_normal_header.IsMarked());
+  EXPECT_TRUE(marked_large_header.IsMarked());
+#endif

  FinishSweeping();
}
@@ -22,7 +22,9 @@ class MarkerTest : public testing::TestWithHeap {
 public:
  using MarkingConfig = Marker::MarkingConfig;

-  void DoMarking(MarkingConfig config) {
+  void DoMarking(MarkingConfig::StackState stack_state) {
+    const MarkingConfig config = {MarkingConfig::CollectionType::kMajor,
+                                  stack_state};
    auto* heap = Heap::From(GetHeap());
    Marker marker(heap->AsBase());
    marker.StartMarking(config);

@@ -61,7 +63,7 @@ TEST_F(MarkerTest, PersistentIsMarked) {
  Persistent<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle());
  HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
  EXPECT_FALSE(header.IsMarked());
-  DoMarking({MarkingConfig::StackState::kNoHeapPointers});
+  DoMarking(MarkingConfig::StackState::kNoHeapPointers);
  EXPECT_TRUE(header.IsMarked());
}

@@ -70,7 +72,7 @@ TEST_F(MarkerTest, ReachableMemberIsMarked) {
  parent->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
  HeapObjectHeader& header = HeapObjectHeader::FromPayload(parent->child());
  EXPECT_FALSE(header.IsMarked());
-  DoMarking({MarkingConfig::StackState::kNoHeapPointers});
+  DoMarking(MarkingConfig::StackState::kNoHeapPointers);
  EXPECT_TRUE(header.IsMarked());
}

@@ -78,14 +80,14 @@ TEST_F(MarkerTest, UnreachableMemberIsNotMarked) {
  Member<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle());
  HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
  EXPECT_FALSE(header.IsMarked());
-  DoMarking({MarkingConfig::StackState::kNoHeapPointers});
+  DoMarking(MarkingConfig::StackState::kNoHeapPointers);
  EXPECT_FALSE(header.IsMarked());
}

TEST_F(MarkerTest, ObjectReachableFromStackIsMarked) {
  GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle());
  EXPECT_FALSE(HeapObjectHeader::FromPayload(object).IsMarked());
-  DoMarking({MarkingConfig::StackState::kMayContainHeapPointers});
+  DoMarking(MarkingConfig::StackState::kMayContainHeapPointers);
  EXPECT_TRUE(HeapObjectHeader::FromPayload(object).IsMarked());
  access(object);
}

@@ -94,7 +96,7 @@ TEST_F(MarkerTest, ObjectReachableOnlyFromStackIsNotMarkedIfStackIsEmpty) {
  GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle());
  HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
  EXPECT_FALSE(header.IsMarked());
-  DoMarking({MarkingConfig::StackState::kNoHeapPointers});
+  DoMarking(MarkingConfig::StackState::kNoHeapPointers);
  EXPECT_FALSE(header.IsMarked());
  access(object);
}

@@ -104,14 +106,14 @@ TEST_F(MarkerTest, WeakReferenceToUnreachableObjectIsCleared) {
    WeakPersistent<GCed> weak_object =
        MakeGarbageCollected<GCed>(GetAllocationHandle());
    EXPECT_TRUE(weak_object);
-    DoMarking({MarkingConfig::StackState::kNoHeapPointers});
+    DoMarking(MarkingConfig::StackState::kNoHeapPointers);
    EXPECT_FALSE(weak_object);
  }
  {
    Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
    parent->SetWeakChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
    EXPECT_TRUE(parent->weak_child());
-    DoMarking({MarkingConfig::StackState::kNoHeapPointers});
+    DoMarking(MarkingConfig::StackState::kNoHeapPointers);
    EXPECT_FALSE(parent->weak_child());
  }
}

@@ -122,7 +124,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
    Persistent<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle());
    WeakPersistent<GCed> weak_object(object);
    EXPECT_TRUE(weak_object);
-    DoMarking({MarkingConfig::StackState::kNoHeapPointers});
+    DoMarking(MarkingConfig::StackState::kNoHeapPointers);
    EXPECT_TRUE(weak_object);
  }
  {

@@ -130,7 +132,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
    Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
    parent->SetWeakChild(object);
    EXPECT_TRUE(parent->weak_child());
-    DoMarking({MarkingConfig::StackState::kNoHeapPointers});
+    DoMarking(MarkingConfig::StackState::kNoHeapPointers);
    EXPECT_TRUE(parent->weak_child());
  }
  // Reachable from Member

@@ -140,7 +142,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
        MakeGarbageCollected<GCed>(GetAllocationHandle()));
    parent->SetChild(weak_object);
    EXPECT_TRUE(weak_object);
-    DoMarking({MarkingConfig::StackState::kNoHeapPointers});
+    DoMarking(MarkingConfig::StackState::kNoHeapPointers);
    EXPECT_TRUE(weak_object);
  }
  {

@@ -148,7 +150,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
    parent->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
    parent->SetWeakChild(parent->child());
    EXPECT_TRUE(parent->weak_child());
-    DoMarking({MarkingConfig::StackState::kNoHeapPointers});
+    DoMarking(MarkingConfig::StackState::kNoHeapPointers);
    EXPECT_TRUE(parent->weak_child());
  }
  // Reachable from stack

@@ -156,7 +158,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
    GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle());
    WeakPersistent<GCed> weak_object(object);
    EXPECT_TRUE(weak_object);
-    DoMarking({MarkingConfig::StackState::kMayContainHeapPointers});
+    DoMarking(MarkingConfig::StackState::kMayContainHeapPointers);
    EXPECT_TRUE(weak_object);
    access(object);
  }

@@ -165,7 +167,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
    Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
    parent->SetWeakChild(object);
    EXPECT_TRUE(parent->weak_child());
-    DoMarking({MarkingConfig::StackState::kMayContainHeapPointers});
+    DoMarking(MarkingConfig::StackState::kMayContainHeapPointers);
    EXPECT_TRUE(parent->weak_child());
    access(object);
  }

@@ -180,7 +182,7 @@ TEST_F(MarkerTest, DeepHierarchyIsMarked) {
    parent->SetWeakChild(parent->child());
    parent = parent->child();
  }
-  DoMarking({MarkingConfig::StackState::kNoHeapPointers});
+  DoMarking(MarkingConfig::StackState::kNoHeapPointers);
  EXPECT_TRUE(HeapObjectHeader::FromPayload(root).IsMarked());
  parent = root;
  for (int i = 0; i < kHierarchyDepth; ++i) {

@@ -194,7 +196,7 @@ TEST_F(MarkerTest, NestedObjectsOnStackAreMarked) {
  GCed* root = MakeGarbageCollected<GCed>(GetAllocationHandle());
  root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
  root->child()->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
-  DoMarking({MarkingConfig::StackState::kMayContainHeapPointers});
+  DoMarking(MarkingConfig::StackState::kMayContainHeapPointers);
  EXPECT_TRUE(HeapObjectHeader::FromPayload(root).IsMarked());
  EXPECT_TRUE(HeapObjectHeader::FromPayload(root->child()).IsMarked());
  EXPECT_TRUE(HeapObjectHeader::FromPayload(root->child()->child()).IsMarked());

@@ -214,27 +216,31 @@ class GCedWithCallback : public GarbageCollected<GCedWithCallback> {

TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
  Marker marker(Heap::From(GetHeap())->AsBase());
-  marker.StartMarking({MarkingConfig::StackState::kMayContainHeapPointers});
+  marker.StartMarking({MarkingConfig::CollectionType::kMajor,
+                       MarkingConfig::StackState::kMayContainHeapPointers});
  GCedWithCallback* object = MakeGarbageCollected<GCedWithCallback>(
      GetAllocationHandle(), [&marker](GCedWithCallback* obj) {
        Member<GCedWithCallback> member(obj);
        marker.GetMarkingVisitorForTesting()->Trace(member);
      });
  EXPECT_FALSE(HeapObjectHeader::FromPayload(object).IsMarked());
-  marker.FinishMarking({MarkingConfig::StackState::kNoHeapPointers});
+  marker.FinishMarking({MarkingConfig::CollectionType::kMajor,
+                        MarkingConfig::StackState::kMayContainHeapPointers});
  EXPECT_TRUE(HeapObjectHeader::FromPayload(object).IsMarked());
}

TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) {
  Marker marker(Heap::From(GetHeap())->AsBase());
-  marker.StartMarking({MarkingConfig::StackState::kMayContainHeapPointers});
+  static const Marker::MarkingConfig config = {
+      MarkingConfig::CollectionType::kMajor,
+      MarkingConfig::StackState::kMayContainHeapPointers};
+  marker.StartMarking(config);
  MakeGarbageCollected<GCedWithCallback>(
      GetAllocationHandle(), [&marker](GCedWithCallback* obj) {
        Member<GCedWithCallback> member(obj);
        marker.GetMarkingVisitorForTesting()->Trace(member);
        EXPECT_FALSE(HeapObjectHeader::FromPayload(obj).IsMarked());
-        marker.FinishMarking(
-            {MarkingConfig::StackState::kMayContainHeapPointers});
+        marker.FinishMarking(config);
        EXPECT_TRUE(HeapObjectHeader::FromPayload(obj).IsMarked());
      });
}
test/unittests/heap/cppgc/minor-gc-unittest.cc (new file, 239 lines)
@@ -0,0 +1,239 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if defined(CPPGC_YOUNG_GENERATION)

#include "include/cppgc/allocation.h"
#include "include/cppgc/persistent.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
#include "src/heap/cppgc/heap.h"
#include "test/unittests/heap/cppgc/tests.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace cppgc {
namespace internal {

namespace {

class SimpleGCedBase : public GarbageCollected<SimpleGCedBase> {
 public:
  static size_t destructed_objects;

  virtual ~SimpleGCedBase() { ++destructed_objects; }

  virtual void Trace(Visitor* v) const { v->Trace(next); }

  Member<SimpleGCedBase> next;
};

size_t SimpleGCedBase::destructed_objects;

template <size_t Size>
class SimpleGCed final : public SimpleGCedBase {
  char array[Size];
};

using Small = SimpleGCed<64>;
using Large = SimpleGCed<kLargeObjectSizeThreshold * 2>;

template <typename Type>
struct OtherType;
template <>
struct OtherType<Small> {
  using Type = Large;
};
template <>
struct OtherType<Large> {
  using Type = Small;
};

class MinorGCTest : public testing::TestWithHeap {
 public:
  MinorGCTest() {
    CollectMajor();
    SimpleGCedBase::destructed_objects = 0;
  }

  static size_t DestructedObjects() {
    return SimpleGCedBase::destructed_objects;
  }

  void CollectMinor() {
    Heap::From(GetHeap())->CollectGarbage(
        Heap::Config::MinorPreciseAtomicConfig());
  }
  void CollectMajor() {
    Heap::From(GetHeap())->CollectGarbage(Heap::Config::PreciseAtomicConfig());
  }
};

template <typename SmallOrLarge>
class MinorGCTestForType : public MinorGCTest {
 public:
  using Type = SmallOrLarge;
};

}  // namespace

using ObjectTypes = ::testing::Types<Small, Large>;
TYPED_TEST_SUITE(MinorGCTestForType, ObjectTypes);

TYPED_TEST(MinorGCTestForType, MinorCollection) {
  using Type = typename TestFixture::Type;

  MakeGarbageCollected<Type>(this->GetAllocationHandle());
  EXPECT_EQ(0u, TestFixture::DestructedObjects());
  MinorGCTest::CollectMinor();
  EXPECT_EQ(1u, TestFixture::DestructedObjects());

  {
    Heap::NoGCScope no_gc_scope_(*Heap::From(this->GetHeap()));

    Type* prev = nullptr;
    for (size_t i = 0; i < 64; ++i) {
      auto* ptr = MakeGarbageCollected<Type>(this->GetAllocationHandle());
      ptr->next = prev;
      prev = ptr;
    }
  }

  MinorGCTest::CollectMinor();
  EXPECT_EQ(65u, TestFixture::DestructedObjects());
}

TYPED_TEST(MinorGCTestForType, StickyBits) {
  using Type = typename TestFixture::Type;

  Persistent<Type> p1 = MakeGarbageCollected<Type>(this->GetAllocationHandle());
  TestFixture::CollectMinor();
  EXPECT_FALSE(HeapObjectHeader::FromPayload(p1.Get()).IsYoung());
  TestFixture::CollectMajor();
  EXPECT_FALSE(HeapObjectHeader::FromPayload(p1.Get()).IsYoung());
  EXPECT_EQ(0u, TestFixture::DestructedObjects());
}

TYPED_TEST(MinorGCTestForType, OldObjectIsNotVisited) {
  using Type = typename TestFixture::Type;

  Persistent<Type> p = MakeGarbageCollected<Type>(this->GetAllocationHandle());
  TestFixture::CollectMinor();
  EXPECT_EQ(0u, TestFixture::DestructedObjects());
  EXPECT_FALSE(HeapObjectHeader::FromPayload(p.Get()).IsYoung());

  // Check that the old deleted object won't be visited during minor GC.
  Type* raw = p.Release();
  TestFixture::CollectMinor();
  EXPECT_EQ(0u, TestFixture::DestructedObjects());
  EXPECT_FALSE(HeapObjectHeader::FromPayload(raw).IsYoung());
  EXPECT_FALSE(HeapObjectHeader::FromPayload(raw).IsFree());

  // Check that the old deleted object will be revisited in major GC.
  TestFixture::CollectMajor();
  EXPECT_EQ(1u, TestFixture::DestructedObjects());
}

template <typename Type1, typename Type2>
void InterGenerationalPointerTest(MinorGCTest* test, cppgc::Heap* heap) {
  Persistent<Type1> old =
      MakeGarbageCollected<Type1>(heap->GetAllocationHandle());
  test->CollectMinor();
  EXPECT_FALSE(HeapObjectHeader::FromPayload(old.Get()).IsYoung());

  Type2* young = nullptr;

  {
    Heap::NoGCScope no_gc_scope_(*Heap::From(heap));

    // Allocate young objects.
    for (size_t i = 0; i < 64; ++i) {
      auto* ptr = MakeGarbageCollected<Type2>(heap->GetAllocationHandle());
      ptr->next = young;
      young = ptr;
      EXPECT_TRUE(HeapObjectHeader::FromPayload(young).IsYoung());
    }
  }

  const auto& set = Heap::From(heap)->remembered_slots();
  auto set_size_before = set.size();

  // Issue generational barrier.
  old->next = young;

  EXPECT_EQ(set_size_before + 1u, set.size());

  // Check that the remembered set is visited.
  test->CollectMinor();

  EXPECT_EQ(0u, MinorGCTest::DestructedObjects());
  EXPECT_TRUE(set.empty());

  for (size_t i = 0; i < 64; ++i) {
    EXPECT_FALSE(HeapObjectHeader::FromPayload(young).IsFree());
    EXPECT_FALSE(HeapObjectHeader::FromPayload(young).IsYoung());
    young = static_cast<Type2*>(young->next.Get());
  }

  old.Release();
  test->CollectMajor();
  EXPECT_EQ(65u, MinorGCTest::DestructedObjects());
}

TYPED_TEST(MinorGCTestForType, InterGenerationalPointerForSamePageTypes) {
  using Type = typename TestFixture::Type;
  InterGenerationalPointerTest<Type, Type>(this, this->GetHeap());
}

TYPED_TEST(MinorGCTestForType, InterGenerationalPointerForDifferentPageTypes) {
  using Type = typename TestFixture::Type;
  InterGenerationalPointerTest<Type, typename OtherType<Type>::Type>(
      this, this->GetHeap());
}

TYPED_TEST(MinorGCTestForType, OmitGenerationalBarrierForOnStackObject) {
  using Type = typename TestFixture::Type;

  class StackAllocated : GarbageCollected<StackAllocated> {
    CPPGC_STACK_ALLOCATED();

   public:
    Type* ptr = nullptr;
  } stack_object;

  auto* new_object = MakeGarbageCollected<Type>(this->GetAllocationHandle());

  const auto& set = Heap::From(this->GetHeap())->remembered_slots();
  const size_t set_size_before_barrier = set.size();

  // Try issuing generational barrier for on-stack object.
  stack_object.ptr = new_object;
  WriteBarrier::MarkingBarrier(reinterpret_cast<void*>(&stack_object.ptr),
                               new_object);

  EXPECT_EQ(set_size_before_barrier, set.size());
}

TYPED_TEST(MinorGCTestForType, OmitGenerationalBarrierForSentinels) {
  using Type = typename TestFixture::Type;

  Persistent<Type> old =
      MakeGarbageCollected<Type>(this->GetAllocationHandle());

  TestFixture::CollectMinor();
  EXPECT_FALSE(HeapObjectHeader::FromPayload(old.Get()).IsYoung());

  const auto& set = Heap::From(this->GetHeap())->remembered_slots();
  const size_t set_size_before_barrier = set.size();

  // Try issuing generational barrier for nullptr.
  old->next = static_cast<Type*>(nullptr);
  EXPECT_EQ(set_size_before_barrier, set.size());

  // Try issuing generational barrier for sentinel.
  old->next = static_cast<Type*>(kSentinelPointer);
  EXPECT_EQ(set_size_before_barrier, set.size());
}

}  // namespace internal
}  // namespace cppgc

#endif
@@ -257,8 +257,13 @@ TEST_F(SweeperTest, UnmarkObjects) {

  Sweep();

+#if !defined(CPPGC_YOUNG_GENERATION)
  EXPECT_FALSE(normal_object_header.IsMarked());
  EXPECT_FALSE(large_object_header.IsMarked());
+#else
+  EXPECT_TRUE(normal_object_header.IsMarked());
+  EXPECT_TRUE(large_object_header.IsMarked());
+#endif
}

} // namespace internal
@@ -32,6 +32,7 @@ class IncrementalMarkingScope {

 private:
  static constexpr Marker::MarkingConfig kIncrementalConfig{
+      Marker::MarkingConfig::CollectionType::kMajor,
      Marker::MarkingConfig::StackState::kNoHeapPointers,
      Marker::MarkingConfig::MarkingType::kIncremental};