cppgc: Add StatsCollector

This ports StatsCollector (formerly ThreadHeapStatsCollector) from
Blink. The CL only ports accounting of the allocated object size, which
is needed for a simple growing strategy in a follow-up.

StatsCollector is a global dependency for most subcomponents, as it
provides the infrastructure for measuring time (through trace scopes)
and space.

The general idea of StatsCollector is to act as a sink into which all
subcomponents push time and space information. This information is then
gathered and made available via an event that is implemented as a POD.
Time-dependent info is available through regular getters (pull) and
observers (push); a usage sketch follows the commit metadata below.

Change-Id: I40b4d76e1a40c56e5df1a7353622318cde730e26
Bug: chromium:1056170
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2225902
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68150}
Author: Michael Lippautz <mlippautz@chromium.org>
Date: 2020-06-03 17:33:09 +02:00 (committed by Commit Bot)
Parent: bdc4b6763e
Commit: ab671ee816
15 changed files with 477 additions and 9 deletions
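
A minimal usage sketch of the pull/push interface described above (illustrative
only, not part of the CL; it assumes the StatsCollector API added in
src/heap/cppgc/stats-collector.h below and uses made-up byte values):

#include "src/heap/cppgc/stats-collector.h"

namespace cppgc {
namespace internal {

// One full GC cycle as seen by the collector: subcomponents push their
// measurements, consumers pull the aggregated result.
void ExampleCycle() {
  StatsCollector stats;
  stats.NotifyMarkingStarted();           // Pushed by the marker on start.
  stats.NotifyMarkingCompleted(4096);     // Marked bytes pushed by the marker.
  const StatsCollector::Event& event =
      stats.NotifySweepingCompleted();    // POD event gathered per cycle.
  size_t live = stats.allocated_object_size();  // Pull-style getter.
  // Here live == event.marked_bytes == 4096, since nothing was allocated
  // after marking.
  (void)event;
  (void)live;
}

}  // namespace internal
}  // namespace cppgc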

View File

@ -4158,6 +4158,8 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/source-location.cc",
"src/heap/cppgc/stack.cc",
"src/heap/cppgc/stack.h",
"src/heap/cppgc/stats-collector.cc",
"src/heap/cppgc/stats-collector.h",
"src/heap/cppgc/sweeper.cc",
"src/heap/cppgc/sweeper.h",
"src/heap/cppgc/virtual-memory.cc",

View File

@ -15,6 +15,7 @@
#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/stack.h"
#include "src/heap/cppgc/stats-collector.h"
#include "src/heap/cppgc/sweeper.h"
#include "src/heap/cppgc/virtual-memory.h"
@ -132,8 +133,9 @@ Heap::Heap(std::shared_ptr<cppgc::Platform> platform, size_t custom_spaces)
page_backend_(
std::make_unique<PageBackend>(platform_->GetPageAllocator())),
#endif
object_allocator_(&raw_heap_),
sweeper_(&raw_heap_, platform_.get()),
stats_collector_(std::make_unique<StatsCollector>()),
object_allocator_(&raw_heap_, stats_collector_.get()),
sweeper_(&raw_heap_, platform_.get(), stats_collector_.get()),
stack_(std::make_unique<Stack>(v8::base::Stack::GetStackStart())),
prefinalizer_handler_(std::make_unique<PreFinalizerHandler>()) {
}

View File

@ -34,6 +34,7 @@ namespace testing {
class TestWithHeap;
}
class StatsCollector;
class Stack;
class V8_EXPORT_PRIVATE LivenessBrokerFactory {
@ -119,6 +120,11 @@ class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap {
RawHeap& raw_heap() { return raw_heap_; }
const RawHeap& raw_heap() const { return raw_heap_; }
StatsCollector* stats_collector() { return stats_collector_.get(); }
const StatsCollector* stats_collector() const {
return stats_collector_.get();
}
Stack* stack() { return stack_.get(); }
PageBackend* page_backend() { return page_backend_.get(); }
@ -144,6 +150,7 @@ class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap {
std::unique_ptr<v8::base::BoundedPageAllocator> bounded_allocator_;
#endif
std::unique_ptr<PageBackend> page_backend_;
std::unique_ptr<StatsCollector> stats_collector_;
ObjectAllocator object_allocator_;
Sweeper sweeper_;

View File

@ -9,6 +9,7 @@
#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/marking-visitor.h"
#include "src/heap/cppgc/stats-collector.h"
namespace cppgc {
namespace internal {
@ -97,6 +98,8 @@ Marker::~Marker() {
}
void Marker::StartMarking(MarkingConfig config) {
heap()->stats_collector()->NotifyMarkingStarted();
config_ = config;
VisitRoots();
EnterIncrementalMarkingIfNeeded(config);
@ -115,6 +118,9 @@ void Marker::FinishMarking(MarkingConfig config) {
MarkNotFullyConstructedObjects();
}
AdvanceMarkingWithDeadline(v8::base::TimeDelta::Max());
heap()->stats_collector()->NotifyMarkingCompleted(
marking_visitor_->marked_bytes());
}
void Marker::ProcessWeakness() {

View File

@ -13,6 +13,7 @@
#include "src/heap/cppgc/object-allocator-inl.h"
#include "src/heap/cppgc/object-start-bitmap.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/stats-collector.h"
#include "src/heap/cppgc/sweeper.h"
namespace cppgc {
@ -30,10 +31,18 @@ void* AllocateLargeObject(RawHeap* raw_heap, LargePageSpace* space, size_t size,
} // namespace
ObjectAllocator::ObjectAllocator(RawHeap* heap) : raw_heap_(heap) {}
ObjectAllocator::ObjectAllocator(RawHeap* heap, StatsCollector* stats_collector)
: raw_heap_(heap), stats_collector_(stats_collector) {}
void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace* space, size_t size,
GCInfoIndex gcinfo) {
void* memory = OutOfLineAllocateImpl(space, size, gcinfo);
stats_collector_->NotifySafePointForConservativeCollection();
return memory;
}
void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
size_t size, GCInfoIndex gcinfo) {
DCHECK_EQ(0, size & kAllocationMask);
DCHECK_LE(kFreeListEntrySize, size);
@ -74,9 +83,11 @@ void* ObjectAllocator::AllocateFromFreeList(NormalPageSpace* space, size_t size,
auto& current_lab = space->linear_allocation_buffer();
if (current_lab.size()) {
space->AddToFreeList(current_lab.start(), current_lab.size());
stats_collector_->NotifyExplicitFree(current_lab.size());
}
current_lab.Set(static_cast<Address>(entry.address), entry.size);
stats_collector_->NotifyAllocation(current_lab.size());
NormalPage::From(BasePage::FromPayload(current_lab.start()))
->object_start_bitmap()
.ClearBit(current_lab.start());

View File

@ -12,9 +12,11 @@
namespace cppgc {
namespace internal {
class StatsCollector;
class V8_EXPORT_PRIVATE ObjectAllocator final {
public:
explicit ObjectAllocator(RawHeap* heap);
ObjectAllocator(RawHeap* heap, StatsCollector* stats_collector);
inline void* AllocateObject(size_t size, GCInfoIndex gcinfo);
inline void* AllocateObject(size_t size, GCInfoIndex gcinfo,
@ -29,9 +31,11 @@ class V8_EXPORT_PRIVATE ObjectAllocator final {
inline void* AllocateObjectOnSpace(NormalPageSpace* space, size_t size,
GCInfoIndex gcinfo);
void* OutOfLineAllocate(NormalPageSpace*, size_t, GCInfoIndex);
void* OutOfLineAllocateImpl(NormalPageSpace*, size_t, GCInfoIndex);
void* AllocateFromFreeList(NormalPageSpace*, size_t, GCInfoIndex);
RawHeap* raw_heap_;
StatsCollector* stats_collector_;
};
} // namespace internal

View File

@ -0,0 +1,110 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/stats-collector.h"
#include <algorithm>
#include <cmath>
#include "src/base/logging.h"
namespace cppgc {
namespace internal {
// static
constexpr size_t StatsCollector::kAllocationThresholdBytes;
void StatsCollector::RegisterObserver(AllocationObserver* observer) {
DCHECK_EQ(allocation_observers_.end(),
std::find(allocation_observers_.begin(),
allocation_observers_.end(), observer));
allocation_observers_.push_back(observer);
}
void StatsCollector::UnregisterObserver(AllocationObserver* observer) {
auto it = std::find(allocation_observers_.begin(),
allocation_observers_.end(), observer);
DCHECK_NE(allocation_observers_.end(), it);
allocation_observers_.erase(it);
}
void StatsCollector::NotifyAllocation(size_t bytes) {
// The current GC may not have been started. This is ok as recording considers
// the whole time range between garbage collections.
allocated_bytes_since_safepoint_ += bytes;
}
void StatsCollector::NotifyExplicitFree(size_t bytes) {
// See NotifyAllocation for the lifetime of the counter.
explicitly_freed_bytes_since_safepoint_ += bytes;
}
void StatsCollector::NotifySafePointForConservativeCollection() {
if (std::abs(allocated_bytes_since_safepoint_ -
explicitly_freed_bytes_since_safepoint_) >=
static_cast<int64_t>(kAllocationThresholdBytes)) {
AllocatedObjectSizeSafepointImpl();
}
}
void StatsCollector::AllocatedObjectSizeSafepointImpl() {
allocated_bytes_since_end_of_marking_ +=
static_cast<int64_t>(allocated_bytes_since_safepoint_) -
static_cast<int64_t>(explicitly_freed_bytes_since_safepoint_);
// These observer methods may start or finalize GC. In case they trigger a
// final GC pause, the delta counters are reset there and the following
// observers are notified with '0' updates.
ForAllAllocationObservers([this](AllocationObserver* observer) {
// Recompute delta here so that a GC finalization is able to clear the
// delta for other observer calls.
int64_t delta = allocated_bytes_since_safepoint_ -
explicitly_freed_bytes_since_safepoint_;
if (delta < 0) {
observer->AllocatedObjectSizeDecreased(static_cast<size_t>(-delta));
} else {
observer->AllocatedObjectSizeIncreased(static_cast<size_t>(delta));
}
});
allocated_bytes_since_safepoint_ = 0;
explicitly_freed_bytes_since_safepoint_ = 0;
}
void StatsCollector::NotifyMarkingStarted() {
DCHECK_EQ(GarbageCollectionState::kNotRunning, gc_state_);
gc_state_ = GarbageCollectionState::kMarking;
}
void StatsCollector::NotifyMarkingCompleted(size_t marked_bytes) {
DCHECK_EQ(GarbageCollectionState::kMarking, gc_state_);
gc_state_ = GarbageCollectionState::kSweeping;
current_.marked_bytes = marked_bytes;
allocated_bytes_since_end_of_marking_ = 0;
allocated_bytes_since_safepoint_ = 0;
explicitly_freed_bytes_since_safepoint_ = 0;
}
const StatsCollector::Event& StatsCollector::NotifySweepingCompleted() {
DCHECK_EQ(GarbageCollectionState::kSweeping, gc_state_);
gc_state_ = GarbageCollectionState::kNotRunning;
previous_ = std::move(current_);
current_ = Event();
return previous_;
}
size_t StatsCollector::allocated_object_size() const {
// During sweeping we refer to the current Event as that already holds the
// correct marking information. In all other phases, the previous event holds
// the most up-to-date marking information.
const Event& event =
gc_state_ == GarbageCollectionState::kSweeping ? current_ : previous_;
DCHECK_GE(static_cast<int64_t>(event.marked_bytes) +
allocated_bytes_since_end_of_marking_,
0);
return static_cast<size_t>(static_cast<int64_t>(event.marked_bytes) +
allocated_bytes_since_end_of_marking_);
}
} // namespace internal
} // namespace cppgc
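
To illustrate the safepoint threshold logic above, here is a small test-style
sketch (hypothetical, not part of the CL; it mirrors the fixture in
stats-collector-unittest.cc further down and assumes the default
kAllocationThresholdBytes of 1024):

#include "src/heap/cppgc/stats-collector.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace cppgc {
namespace internal {

TEST(StatsCollectorExample, ThresholdGatesObserverAndGetterUpdates) {
  StatsCollector stats;
  stats.NotifyAllocation(800);
  stats.NotifySafePointForConservativeCollection();
  // |800 - 0| < kAllocationThresholdBytes: the delta is still buffered.
  EXPECT_EQ(0u, stats.allocated_object_size());
  stats.NotifyAllocation(300);
  stats.NotifySafePointForConservativeCollection();
  // |1100 - 0| >= kAllocationThresholdBytes: observers are notified and the
  // accumulated 1100 bytes become visible through the getter.
  EXPECT_EQ(1100u, stats.allocated_object_size());
}

}  // namespace internal
}  // namespace cppgc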

View File

@ -0,0 +1,122 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_STATS_COLLECTOR_H_
#define V8_HEAP_CPPGC_STATS_COLLECTOR_H_
#include <stddef.h>
#include <stdint.h>
#include <vector>
#include "src/base/macros.h"
namespace cppgc {
namespace internal {
// Sink for various time and memory statistics.
class V8_EXPORT_PRIVATE StatsCollector final {
public:
// POD to hold interesting data accumulated during a garbage collection cycle.
//
// The event is always fully populated when looking at previous events but
// may only be partially populated when looking at the current event.
struct Event final {
// Marked bytes collected during marking.
size_t marked_bytes = 0;
};
class AllocationObserver {
public:
// Called after observing at least
// StatsCollector::kAllocationThresholdBytes changed bytes through
// allocation or explicit free. Reports both negative and positive
// increments, allowing the observer to decide whether absolute values or
// only deltas are of interest.
//
// May trigger GC.
virtual void AllocatedObjectSizeIncreased(size_t) = 0;
virtual void AllocatedObjectSizeDecreased(size_t) = 0;
};
// Observers are implemented using virtual calls. Avoid notifications below
// reasonably interesting sizes.
static constexpr size_t kAllocationThresholdBytes = 1024;
StatsCollector() = default;
StatsCollector(const StatsCollector&) = delete;
StatsCollector& operator=(const StatsCollector&) = delete;
void RegisterObserver(AllocationObserver*);
void UnregisterObserver(AllocationObserver*);
void NotifyAllocation(size_t);
void NotifyExplicitFree(size_t);
// Safepoints should only be invoked when garbage collections are possible.
// This is necessary as increments and decrements are reported as close to
// their actual allocation/reclamation as possible.
void NotifySafePointForConservativeCollection();
// Indicates a new garbage collection cycle.
void NotifyMarkingStarted();
// Indicates that marking of the current garbage collection cycle is
// completed.
void NotifyMarkingCompleted(size_t marked_bytes);
// Indicates the end of a garbage collection cycle. This means that sweeping
// is finished at this point.
const Event& NotifySweepingCompleted();
// Size of live objects in bytes on the heap. Based on the most recent marked
// bytes and the bytes allocated since last marking.
size_t allocated_object_size() const;
private:
enum class GarbageCollectionState : uint8_t {
kNotRunning,
kMarking,
kSweeping
};
// Invokes |callback| for all registered observers.
template <typename Callback>
void ForAllAllocationObservers(Callback callback);
void AllocatedObjectSizeSafepointImpl();
// Allocated bytes since the end of marking. These bytes are reset after
// marking, as they are then accounted for in marked_bytes. May be negative
// if an object that was marked as live in the previous cycle is explicitly
// freed.
int64_t allocated_bytes_since_end_of_marking_ = 0;
// Counters for allocation and free. The individual values are never
// negative but their delta may be, for the same reason that
// allocated_bytes_since_end_of_marking_ may be negative. Keep integer
// arithmetic for simplicity.
int64_t allocated_bytes_since_safepoint_ = 0;
int64_t explicitly_freed_bytes_since_safepoint_ = 0;
// Vector to allow fast iteration of observers. Registration/unregistration
// only happens on startup/teardown.
std::vector<AllocationObserver*> allocation_observers_;
GarbageCollectionState gc_state_ = GarbageCollectionState::kNotRunning;
// The event being filled by the current GC cycle between NotifyMarkingStarted
// and NotifySweepingCompleted.
Event current_;
// The previous GC event which is populated at NotifySweepingCompleted.
Event previous_;
};
template <typename Callback>
void StatsCollector::ForAllAllocationObservers(Callback callback) {
for (AllocationObserver* observer : allocation_observers_) {
callback(observer);
}
}
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_STATS_COLLECTOR_H_
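
The growing strategy mentioned in the commit message is only a follow-up; a
rough sketch of how such an observer could plug into the interface above might
look as follows (HeapGrowing, kInitialLimit, and the GC trigger are made-up
names for illustration and not part of this CL):

#include <cstddef>

#include "src/heap/cppgc/stats-collector.h"

namespace cppgc {
namespace internal {

// Hypothetical observer that watches the allocated object size and decides
// when a garbage collection should be triggered.
class HeapGrowing final : public StatsCollector::AllocationObserver {
 public:
  static constexpr size_t kInitialLimit = 1024 * 1024;  // Made-up value.

  explicit HeapGrowing(StatsCollector* stats_collector)
      : stats_collector_(stats_collector) {
    stats_collector_->RegisterObserver(this);
  }
  ~HeapGrowing() { stats_collector_->UnregisterObserver(this); }

  void AllocatedObjectSizeIncreased(size_t) final {
    if (stats_collector_->allocated_object_size() > limit_) {
      // A real implementation would request a garbage collection here and
      // recompute limit_ once the cycle finishes.
    }
  }
  void AllocatedObjectSizeDecreased(size_t) final {}

 private:
  StatsCollector* const stats_collector_;
  size_t limit_ = kInitialLimit;
};

}  // namespace internal
}  // namespace cppgc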

View File

@ -22,6 +22,7 @@
#include "src/heap/cppgc/object-start-bitmap.h"
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/sanitizers.h"
#include "src/heap/cppgc/stats-collector.h"
namespace cppgc {
namespace internal {
@ -466,8 +467,10 @@ class PrepareForSweepVisitor final
class Sweeper::SweeperImpl final {
public:
explicit SweeperImpl(RawHeap* heap, cppgc::Platform* platform)
SweeperImpl(RawHeap* heap, cppgc::Platform* platform,
StatsCollector* stats_collector)
: heap_(heap),
stats_collector_(stats_collector),
space_states_(heap->size()),
platform_(platform),
foreground_task_runner_(platform_->GetForegroundTaskRunner()) {}
@ -505,6 +508,8 @@ class Sweeper::SweeperImpl final {
SynchronizeAndFinalizeConcurrentSweeping();
is_in_progress_ = false;
stats_collector_->NotifySweepingCompleted();
}
private:
@ -597,6 +602,7 @@ class Sweeper::SweeperImpl final {
}
RawHeap* heap_;
StatsCollector* stats_collector_;
SpaceStates space_states_;
cppgc::Platform* platform_;
std::shared_ptr<v8::TaskRunner> foreground_task_runner_;
@ -605,8 +611,9 @@ class Sweeper::SweeperImpl final {
bool is_in_progress_ = false;
};
Sweeper::Sweeper(RawHeap* heap, cppgc::Platform* platform)
: impl_(std::make_unique<SweeperImpl>(heap, platform)) {}
Sweeper::Sweeper(RawHeap* heap, cppgc::Platform* platform,
StatsCollector* stats_collector)
: impl_(std::make_unique<SweeperImpl>(heap, platform, stats_collector)) {}
Sweeper::~Sweeper() = default;

View File

@ -15,13 +15,14 @@ class Platform;
namespace internal {
class StatsCollector;
class RawHeap;
class V8_EXPORT_PRIVATE Sweeper final {
public:
enum class Config { kAtomic, kIncrementalAndConcurrent };
explicit Sweeper(RawHeap*, cppgc::Platform*);
Sweeper(RawHeap*, cppgc::Platform*, StatsCollector*);
~Sweeper();
Sweeper(const Sweeper&) = delete;

View File

@ -63,6 +63,7 @@ v8_source_set("cppgc_unittests_sources") {
"heap/cppgc/prefinalizer-unittest.cc",
"heap/cppgc/source-location-unittest.cc",
"heap/cppgc/stack-unittest.cc",
"heap/cppgc/stats-collector-unittest.cc",
"heap/cppgc/sweeper-unittest.cc",
"heap/cppgc/test-platform.cc",
"heap/cppgc/test-platform.h",

View File

@ -16,6 +16,7 @@
#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/page-memory-inl.h"
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/stats-collector.h"
#include "src/heap/cppgc/sweeper.h"
#include "test/unittests/heap/cppgc/test-platform.h"
#include "test/unittests/heap/cppgc/tests.h"
@ -82,6 +83,10 @@ class ConcurrentSweeperTest : public testing::TestWithHeap {
void StartSweeping() {
Heap* heap = Heap::From(GetHeap());
ResetLocalAllocationBuffers(heap);
// Pretend to finish marking, as StatsCollector verifies that Notify*
// methods are called in the right order.
heap->stats_collector()->NotifyMarkingStarted();
heap->stats_collector()->NotifyMarkingCompleted(0);
Sweeper& sweeper = heap->sweeper();
sweeper.Start(Sweeper::Config::kIncrementalAndConcurrent);
}

View File

@ -9,6 +9,7 @@
#include "include/cppgc/persistent.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
#include "src/heap/cppgc/marking-visitor.h"
#include "src/heap/cppgc/stats-collector.h"
#include "test/unittests/heap/cppgc/tests.h"
#include "testing/gtest/include/gtest/gtest.h"
@ -22,10 +23,14 @@ class MarkerTest : public testing::TestWithHeap {
using MarkingConfig = Marker::MarkingConfig;
void DoMarking(MarkingConfig config) {
Marker marker(Heap::From(GetHeap()));
auto* heap = Heap::From(GetHeap());
Marker marker(heap);
marker.StartMarking(config);
marker.FinishMarking(config);
marker.ProcessWeakness();
// Pretend to finish sweeping, as StatsCollector verifies that Notify*
// methods are called in the right order.
heap->stats_collector()->NotifySweepingCompleted();
}
};

View File

@ -0,0 +1,180 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/stats-collector.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace cppgc {
namespace internal {
namespace {
constexpr size_t kNoMarkedBytes = 0;
constexpr size_t kMinReportedSize = StatsCollector::kAllocationThresholdBytes;
class StatsCollectorTest : public ::testing::Test {
public:
void FakeAllocate(size_t bytes) {
stats.NotifyAllocation(bytes);
stats.NotifySafePointForConservativeCollection();
}
void FakeFree(size_t bytes) {
stats.NotifyExplicitFree(bytes);
stats.NotifySafePointForConservativeCollection();
}
StatsCollector stats;
};
} // namespace
TEST_F(StatsCollectorTest, NoMarkedBytes) {
stats.NotifyMarkingStarted();
stats.NotifyMarkingCompleted(kNoMarkedBytes);
auto event = stats.NotifySweepingCompleted();
EXPECT_EQ(0u, event.marked_bytes);
}
TEST_F(StatsCollectorTest, EventPrevGCMarkedObjectSize) {
stats.NotifyMarkingStarted();
stats.NotifyMarkingCompleted(1024);
auto event = stats.NotifySweepingCompleted();
EXPECT_EQ(1024u, event.marked_bytes);
}
TEST_F(StatsCollectorTest, AllocationNoReportBelowAllocationThresholdBytes) {
constexpr size_t kObjectSize = 17;
EXPECT_LT(kObjectSize, StatsCollector::kAllocationThresholdBytes);
FakeAllocate(kObjectSize);
EXPECT_EQ(0u, stats.allocated_object_size());
}
TEST_F(StatsCollectorTest, AllocationReportAboveAllocationThresholdBytes) {
constexpr size_t kObjectSize = StatsCollector::kAllocationThresholdBytes;
EXPECT_GE(kObjectSize, StatsCollector::kAllocationThresholdBytes);
FakeAllocate(kObjectSize);
EXPECT_EQ(kObjectSize, stats.allocated_object_size());
}
TEST_F(StatsCollectorTest, InitialAllocatedObjectSize) {
stats.NotifyMarkingStarted();
EXPECT_EQ(0u, stats.allocated_object_size());
stats.NotifyMarkingCompleted(kNoMarkedBytes);
EXPECT_EQ(0u, stats.allocated_object_size());
stats.NotifySweepingCompleted();
EXPECT_EQ(0u, stats.allocated_object_size());
}
TEST_F(StatsCollectorTest, AllocatedObjectSize) {
stats.NotifyMarkingStarted();
FakeAllocate(kMinReportedSize);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
stats.NotifyMarkingCompleted(kMinReportedSize);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
stats.NotifySweepingCompleted();
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
}
TEST_F(StatsCollectorTest, AllocatedObjectSizeNoMarkedBytes) {
stats.NotifyMarkingStarted();
FakeAllocate(kMinReportedSize);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
stats.NotifyMarkingCompleted(kNoMarkedBytes);
EXPECT_EQ(0u, stats.allocated_object_size());
stats.NotifySweepingCompleted();
EXPECT_EQ(0u, stats.allocated_object_size());
}
TEST_F(StatsCollectorTest, AllocatedObjectSizeAllocateAfterMarking) {
stats.NotifyMarkingStarted();
FakeAllocate(kMinReportedSize);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
stats.NotifyMarkingCompleted(kMinReportedSize);
FakeAllocate(kMinReportedSize);
EXPECT_EQ(2 * kMinReportedSize, stats.allocated_object_size());
stats.NotifySweepingCompleted();
EXPECT_EQ(2 * kMinReportedSize, stats.allocated_object_size());
}
class MockAllocationObserver : public StatsCollector::AllocationObserver {
public:
MOCK_METHOD1(AllocatedObjectSizeIncreased, void(size_t));
MOCK_METHOD1(AllocatedObjectSizeDecreased, void(size_t));
};
TEST_F(StatsCollectorTest, RegisterUnregisterObserver) {
MockAllocationObserver observer;
stats.RegisterObserver(&observer);
stats.UnregisterObserver(&observer);
}
TEST_F(StatsCollectorTest, ObserveAllocatedObjectSize) {
MockAllocationObserver observer;
stats.RegisterObserver(&observer);
EXPECT_CALL(observer, AllocatedObjectSizeIncreased(kMinReportedSize));
FakeAllocate(kMinReportedSize);
EXPECT_CALL(observer, AllocatedObjectSizeDecreased(kMinReportedSize));
FakeFree(kMinReportedSize);
stats.UnregisterObserver(&observer);
}
namespace {
void FakeGC(StatsCollector* stats, size_t marked_bytes) {
stats->NotifyMarkingStarted();
stats->NotifyMarkingCompleted(marked_bytes);
stats->NotifySweepingCompleted();
}
class AllocationObserverTriggeringGC final
: public StatsCollector::AllocationObserver {
public:
explicit AllocationObserverTriggeringGC(StatsCollector* stats)
: stats(stats) {}
void AllocatedObjectSizeIncreased(size_t bytes) final {
increase_call_count++;
increased_size_bytes += bytes;
if (increase_call_count == 1) {
FakeGC(stats, bytes);
}
}
// Mock out the rest to trigger warnings if used.
MOCK_METHOD1(AllocatedObjectSizeDecreased, void(size_t));
size_t increase_call_count = 0;
size_t increased_size_bytes = 0;
StatsCollector* stats;
};
} // namespace
TEST_F(StatsCollectorTest, ObserverTriggersGC) {
AllocationObserverTriggeringGC gc_observer(&stats);
MockAllocationObserver mock_observer;
// Internal detail: First registered observer is also notified first.
stats.RegisterObserver(&gc_observer);
stats.RegisterObserver(&mock_observer);
// Since the GC clears counters, it should see an increase call with a delta
// of zero bytes.
EXPECT_CALL(mock_observer, AllocatedObjectSizeIncreased(0));
// Trigger scenario.
FakeAllocate(kMinReportedSize);
EXPECT_EQ(1u, gc_observer.increase_call_count);
EXPECT_EQ(kMinReportedSize, gc_observer.increased_size_bytes);
stats.UnregisterObserver(&gc_observer);
stats.UnregisterObserver(&mock_observer);
}
} // namespace internal
} // namespace cppgc

View File

@ -16,6 +16,7 @@
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/page-memory-inl.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/stats-collector.h"
#include "test/unittests/heap/cppgc/tests.h"
#include "testing/gtest/include/gtest/gtest.h"
@ -60,6 +61,10 @@ class SweeperTest : public testing::TestWithHeap {
Heap* heap = Heap::From(GetHeap());
ResetLocalAllocationBuffers(heap);
Sweeper& sweeper = heap->sweeper();
// Pretend to finish marking, as StatsCollector verifies that Notify*
// methods are called in the right order.
heap->stats_collector()->NotifyMarkingStarted();
heap->stats_collector()->NotifyMarkingCompleted(0);
sweeper.Start(Sweeper::Config::kAtomic);
sweeper.Finish();
}