cppgc: Add explicit FreeUnreferencedObject() call
Add an explicit FreeUnreferencedObject() call that can be used to reclaim objects that are guaranteed to not be referenced anymore by the embedder. It is up to the embedder to ensure correctness. Change-Id: I7f2d86d9639e8b805f79a8fd0a346903f63171e5 Bug: chromium:1056170 Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2737301 Commit-Queue: Michael Lippautz <mlippautz@chromium.org> Reviewed-by: Omer Katz <omerkatz@chromium.org> Reviewed-by: Ulan Degenbaev <ulan@chromium.org> Cr-Commit-Position: refs/heads/master@{#73262}
This commit is contained in:
parent
2cd2c4885a
commit
0fe9c8358a
2
BUILD.gn
2
BUILD.gn
@ -4797,6 +4797,7 @@ v8_header_set("cppgc_headers") {
|
||||
"include/cppgc/custom-space.h",
|
||||
"include/cppgc/default-platform.h",
|
||||
"include/cppgc/ephemeron-pair.h",
|
||||
"include/cppgc/explicit-management.h",
|
||||
"include/cppgc/garbage-collected.h",
|
||||
"include/cppgc/heap-consistency.h",
|
||||
"include/cppgc/heap-state.h",
|
||||
@ -4849,6 +4850,7 @@ v8_source_set("cppgc_base") {
|
||||
"src/heap/cppgc/concurrent-marker.cc",
|
||||
"src/heap/cppgc/concurrent-marker.h",
|
||||
"src/heap/cppgc/default-platform.cc",
|
||||
"src/heap/cppgc/explicit-management.cc",
|
||||
"src/heap/cppgc/free-list.cc",
|
||||
"src/heap/cppgc/free-list.h",
|
||||
"src/heap/cppgc/garbage-collector.h",
|
||||
|
45
include/cppgc/explicit-management.h
Normal file
45
include/cppgc/explicit-management.h
Normal file
@ -0,0 +1,45 @@
|
||||
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_CPPGC_EXPLICIT_MANAGEMENT_H_
#define INCLUDE_CPPGC_EXPLICIT_MANAGEMENT_H_

#include <cstddef>

#include "cppgc/internal/logging.h"
#include "cppgc/type-traits.h"

namespace cppgc {
namespace internal {

// Untyped implementation; the public typed wrapper below performs the
// GarbageCollected type check before delegating here.
V8_EXPORT void FreeUnreferencedObject(void*);

}  // namespace internal

namespace subtle {

/**
 * Informs the garbage collector that `object` can be immediately reclaimed. The
 * destructor may not be invoked immediately but only on next garbage
 * collection.
 *
 * It is up to the embedder to guarantee that no other object holds a reference
 * to `object` after calling `FreeUnreferencedObject()`. In case such a
 * reference exists, its use results in a use-after-free.
 *
 * \param object Reference to an object that is of type `GarbageCollected` and
 *   should be immediately reclaimed. May be null, in which case the call is a
 *   no-op.
 */
template <typename T>
void FreeUnreferencedObject(T* object) {
  static_assert(IsGarbageCollectedTypeV<T>,
                "Object must be of type GarbageCollected.");
  // Freeing nullptr is a no-op, mirroring `delete nullptr`.
  if (!object) return;
  internal::FreeUnreferencedObject(object);
}

}  // namespace subtle
}  // namespace cppgc

#endif  // INCLUDE_CPPGC_EXPLICIT_MANAGEMENT_H_
|
66
src/heap/cppgc/explicit-management.cc
Normal file
66
src/heap/cppgc/explicit-management.cc
Normal file
@ -0,0 +1,66 @@
|
||||
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "include/cppgc/explicit-management.h"

#include <tuple>

#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"

namespace cppgc {
namespace internal {

namespace {

// Determines whether `object` may be reclaimed right now and returns the page
// it lives on. Reclamation is refused while any GC phase (atomic pause,
// marking, or sweeping) is active, since the collector may depend on the
// object's current state.
std::pair<bool, BasePage*> CanExplicitlyFree(void* object) {
  // object is guaranteed to be of type GarbageCollected, so getting the
  // BasePage is okay for regular and large objects.
  auto* const page = BasePage::FromPayload(object);
  auto* const heap = page->heap();
  const bool gc_in_progress = heap->in_atomic_pause() || heap->marker() ||
                              heap->sweeper().IsSweepingInProgress();
  return {!gc_in_progress, page};
}

}  // namespace

// Immediately reclaims `object`. If a GC is in progress the request is
// silently dropped and the object is left for the collector to handle.
void FreeUnreferencedObject(void* object) {
  const std::pair<bool, BasePage*> result = CanExplicitlyFree(object);
  if (!result.first) {
    return;
  }
  BasePage* const base_page = result.second;

  auto& header = HeapObjectHeader::FromPayload(object);
  // Run the finalizer eagerly; the backing memory is reclaimed below.
  header.Finalize();

  if (base_page->is_large()) {
    // Large objects are backed by a dedicated page; return it wholesale.
    LargePage* const large_page = LargePage::From(base_page);
    base_page->space()->RemovePage(base_page);
    base_page->heap()->stats_collector()->NotifyExplicitFree(
        large_page->PayloadSize());
    LargePage::Destroy(large_page);
    return;
  }

  // Regular object on a normal page.
  const size_t freed_size = header.GetSize();
  auto* const normal_page = NormalPage::From(base_page);
  auto& normal_space = *static_cast<NormalPageSpace*>(base_page->space());
  auto& lab = normal_space.linear_allocation_buffer();
  const ConstAddress payload_end = header.PayloadEnd();
  SET_MEMORY_INACCESSIBLE(&header, freed_size);
  if (payload_end == lab.start()) {
    // The object directly precedes the LAB: grow the LAB backwards over it
    // instead of routing the memory through the free list.
    lab.Set(reinterpret_cast<Address>(&header), lab.size() + freed_size);
    normal_page->object_start_bitmap().ClearBit(lab.start());
  } else {
    // Hand the memory back to the space's free list.
    base_page->heap()->stats_collector()->NotifyExplicitFree(freed_size);
    normal_space.free_list().Add({&header, freed_size});
  }
}

}  // namespace internal
}  // namespace cppgc
|
@ -170,6 +170,8 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
|
||||
stack_state_of_prev_gc_ = stack_state;
|
||||
}
|
||||
|
||||
void SetInAtomicPauseForTesting(bool value) { in_atomic_pause_ = value; }
|
||||
|
||||
protected:
|
||||
// Used by the incremental scheduler to finalize a GC if supported.
|
||||
virtual void FinalizeIncrementalGarbageCollectionIfNeeded(
|
||||
|
@ -64,6 +64,8 @@ class HeapObjectHeader {
|
||||
|
||||
// The payload starts directly after the HeapObjectHeader.
|
||||
inline Address Payload() const;
|
||||
template <AccessMode mode = AccessMode::kNonAtomic>
|
||||
inline Address PayloadEnd() const;
|
||||
|
||||
template <AccessMode mode = AccessMode::kNonAtomic>
|
||||
inline GCInfoIndex GetGCInfoIndex() const;
|
||||
@ -182,6 +184,12 @@ Address HeapObjectHeader::Payload() const {
|
||||
sizeof(HeapObjectHeader);
|
||||
}
|
||||
|
||||
template <AccessMode mode>
|
||||
Address HeapObjectHeader::PayloadEnd() const {
|
||||
return reinterpret_cast<Address>(const_cast<HeapObjectHeader*>(this)) +
|
||||
GetSize<mode>();
|
||||
}
|
||||
|
||||
template <AccessMode mode>
|
||||
GCInfoIndex HeapObjectHeader::GetGCInfoIndex() const {
|
||||
const uint16_t encoded =
|
||||
|
@ -57,6 +57,10 @@ void StatsCollector::NotifySafePointForConservativeCollection() {
|
||||
}
|
||||
}
|
||||
|
||||
void StatsCollector::NotifySafePointForTesting() {
|
||||
AllocatedObjectSizeSafepointImpl();
|
||||
}
|
||||
|
||||
void StatsCollector::AllocatedObjectSizeSafepointImpl() {
|
||||
allocated_bytes_since_end_of_marking_ +=
|
||||
static_cast<int64_t>(allocated_bytes_since_safepoint_) -
|
||||
|
@ -261,6 +261,8 @@ class V8_EXPORT_PRIVATE StatsCollector final {
|
||||
// their actual allocation/reclamation as possible.
|
||||
void NotifySafePointForConservativeCollection();
|
||||
|
||||
void NotifySafePointForTesting();
|
||||
|
||||
// Indicates a new garbage collection cycle.
|
||||
void NotifyMarkingStarted(CollectionType, IsForcedGC);
|
||||
// Indicates that marking of the current garbage collection cycle is
|
||||
|
@ -92,6 +92,7 @@ v8_source_set("cppgc_unittests_sources") {
|
||||
"heap/cppgc/cross-thread-persistent-unittest.cc",
|
||||
"heap/cppgc/custom-spaces-unittest.cc",
|
||||
"heap/cppgc/ephemeron-pair-unittest.cc",
|
||||
"heap/cppgc/explicit-management-unittest.cc",
|
||||
"heap/cppgc/finalizer-trait-unittest.cc",
|
||||
"heap/cppgc/free-list-unittest.cc",
|
||||
"heap/cppgc/garbage-collected-unittest.cc",
|
||||
|
119
test/unittests/heap/cppgc/explicit-management-unittest.cc
Normal file
119
test/unittests/heap/cppgc/explicit-management-unittest.cc
Normal file
@ -0,0 +1,119 @@
|
||||
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "include/cppgc/explicit-management.h"

#include "include/cppgc/garbage-collected.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-space.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/sweeper.h"
#include "test/unittests/heap/cppgc/tests.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace cppgc {
namespace internal {

// Fixture providing helpers to observe the heap's allocated object size and to
// flush linear allocation buffers (LABs) back to their spaces.
class ExplicitManagementTest : public testing::TestSupportingAllocationOnly {
 public:
  // Returns the allocated object size after forcing a stats safepoint so that
  // pending allocation deltas are applied.
  size_t AllocatedObjectSize() const {
    auto* heap = Heap::From(GetHeap());
    heap->stats_collector()->NotifySafePointForTesting();
    return heap->stats_collector()->allocated_object_size();
  }

  // Flushes all LABs so that subsequent frees go through the free list.
  void ResetLinearAllocationBuffers() const {
    return Heap::From(GetHeap())
        ->object_allocator()
        .ResetLinearAllocationBuffers();
  }
};

namespace {

class DynamicallySized final : public GarbageCollected<DynamicallySized> {
 public:
  void Trace(Visitor*) const {}
};

}  // namespace

TEST_F(ExplicitManagementTest, FreeRegularObjectToLAB) {
  auto* object =
      MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
  auto& header = HeapObjectHeader::FromPayload(object);
  const auto* space =
      NormalPageSpace::From(BasePage::FromPayload(object)->space());
  const auto& lab = space->linear_allocation_buffer();
  const size_t object_size = header.GetSize();
  Address freed_start = reinterpret_cast<Address>(&header);
  // The object must directly precede the LAB so that the LAB-merge path is
  // exercised.
  ASSERT_EQ(lab.start(), header.PayloadEnd());
  const size_t lab_size_before = lab.size();
  const size_t allocated_before = AllocatedObjectSize();
  subtle::FreeUnreferencedObject(object);
  // The LAB grew backwards over the freed object.
  EXPECT_EQ(lab.start(), freed_start);
  EXPECT_EQ(lab_size_before + object_size, lab.size());
  // LAB is included in allocated object size, so no change is expected.
  EXPECT_EQ(allocated_before, AllocatedObjectSize());
  // The memory must not have been routed through the free list.
  EXPECT_FALSE(
      space->free_list().ContainsForTesting({freed_start, object_size}));
}

TEST_F(ExplicitManagementTest, FreeRegularObjectToFreeList) {
  auto* object =
      MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
  auto& header = HeapObjectHeader::FromPayload(object);
  const auto* space =
      NormalPageSpace::From(BasePage::FromPayload(object)->space());
  const auto& lab = space->linear_allocation_buffer();
  const size_t object_size = header.GetSize();
  Address freed_start = reinterpret_cast<Address>(&header);
  // Flush LABs so that the free must go through the free list.
  ResetLinearAllocationBuffers();
  ASSERT_EQ(lab.start(), nullptr);
  const size_t allocated_before = AllocatedObjectSize();
  subtle::FreeUnreferencedObject(object);
  EXPECT_EQ(lab.start(), nullptr);
  EXPECT_EQ(allocated_before - object_size, AllocatedObjectSize());
  EXPECT_TRUE(
      space->free_list().ContainsForTesting({freed_start, object_size}));
}

TEST_F(ExplicitManagementTest, FreeLargeObject) {
  auto* object = MakeGarbageCollected<DynamicallySized>(
      GetHeap()->GetAllocationHandle(),
      AdditionalBytes(kLargeObjectSizeThreshold));
  const auto* page = BasePage::FromPayload(object);
  ASSERT_TRUE(page->is_large());
  auto* heap = page->heap();
  ConstAddress payload = reinterpret_cast<ConstAddress>(object);
  const size_t payload_size = LargePage::From(page)->PayloadSize();
  EXPECT_TRUE(heap->page_backend()->Lookup(payload));
  const size_t allocated_before = AllocatedObjectSize();
  subtle::FreeUnreferencedObject(object);
  // The whole page was handed back to the backend.
  EXPECT_FALSE(heap->page_backend()->Lookup(payload));
  EXPECT_EQ(allocated_before - payload_size, AllocatedObjectSize());
}

TEST_F(ExplicitManagementTest, FreeBailsOutDuringGC) {
  const size_t snapshot_before = AllocatedObjectSize();
  auto* object =
      MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
  auto* heap = BasePage::FromPayload(object)->heap();
  // Simulate a running GC: the free request must be ignored.
  heap->SetInAtomicPauseForTesting(true);
  const size_t allocated_before = AllocatedObjectSize();
  subtle::FreeUnreferencedObject(object);
  EXPECT_EQ(allocated_before, AllocatedObjectSize());
  // Once the GC is "done", the same request succeeds.
  heap->SetInAtomicPauseForTesting(false);
  ResetLinearAllocationBuffers();
  subtle::FreeUnreferencedObject(object);
  EXPECT_EQ(snapshot_before, AllocatedObjectSize());
}

TEST_F(ExplicitManagementTest, FreeNull) {
  DynamicallySized* object = nullptr;
  // Noop.
  subtle::FreeUnreferencedObject(object);
}

}  // namespace internal
}  // namespace cppgc
|
@ -35,6 +35,14 @@ TEST(HeapObjectHeaderTest, Payload) {
|
||||
header.Payload());
|
||||
}
|
||||
|
||||
TEST(HeapObjectHeaderTest, PayloadEnd) {
|
||||
constexpr GCInfoIndex kGCInfoIndex = 17;
|
||||
constexpr size_t kSize = kAllocationGranularity;
|
||||
HeapObjectHeader header(kSize, kGCInfoIndex);
|
||||
EXPECT_EQ(reinterpret_cast<ConstAddress>(&header) + kSize,
|
||||
header.PayloadEnd());
|
||||
}
|
||||
|
||||
TEST(HeapObjectHeaderTest, GetGCInfoIndex) {
|
||||
constexpr GCInfoIndex kGCInfoIndex = 17;
|
||||
constexpr size_t kSize = kAllocationGranularity;
|
||||
|
Loading…
Reference in New Issue
Block a user