[heap] Moving scheduling GCs from LocalEmbedderHeapTracer to CppHeap
Bug: v8:13207
Change-Id: Id595a34677cc58319043c0e784beb5eed9be7411
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4128506
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85073}
Parent: 1fe5f0f8e1
Commit: dd68531420
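In short: the used_size/allocated_size accounting and the allocated_size_limit_for_check threshold move from LocalEmbedderHeapTracer's RemoteStatistics into CppHeap itself, which now calls Heap::StartIncrementalMarkingIfAllocationLimitIsReached() directly once enough new embedder memory has been reported. The core pattern is an amortized limit check: count bytes cheaply on every report, but only consult the comparatively expensive heap limits once per 128 KB of new allocation. A standalone sketch of that pattern (all identifiers here are illustrative, not V8's; MaybeStartGC() is a hypothetical stand-in for the real limit check):

    #include <cstddef>
    #include <cstdio>

    // 128 KB, the same interval the patch uses for
    // kIncrementalMarkingCheckInterval.
    constexpr size_t kCheckInterval = 128 * 1024;

    class AllocationDrivenScheduler {
     public:
      void ReportAllocated(size_t bytes) {
        allocated_size_ += bytes;
        // Amortized check: skip the limit test until another kCheckInterval
        // bytes have been allocated since the last test.
        if (allocated_size_ > allocated_size_limit_for_check_) {
          MaybeStartGC();
          allocated_size_limit_for_check_ = allocated_size_ + kCheckInterval;
        }
      }

      void OnGCFinished() {
        // Force a check on the next report so the limit can be re-derived
        // close to the actual heap size; the patch does the same in
        // TraceEpilogue by setting allocated_size_limit_for_check_ = 0.
        allocated_size_limit_for_check_ = 0;
      }

     private:
      void MaybeStartGC() {  // hypothetical stand-in for the Heap call
        std::printf("checking GC limits at %zu bytes\n", allocated_size_);
      }

      size_t allocated_size_ = 0;
      size_t allocated_size_limit_for_check_ = 0;
    };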
src/base/platform/time.h

@@ -91,10 +91,10 @@ class V8_BASE_EXPORT TimeDelta final {
     return TimeDelta(nanoseconds / TimeConstants::kNanosecondsPerMicrosecond);
   }

-  static TimeDelta FromSecondsD(double seconds) {
+  static constexpr TimeDelta FromSecondsD(double seconds) {
     return FromDouble(seconds * TimeConstants::kMicrosecondsPerSecond);
   }
-  static TimeDelta FromMillisecondsD(double milliseconds) {
+  static constexpr TimeDelta FromMillisecondsD(double milliseconds) {
     return FromDouble(milliseconds *
                       TimeConstants::kMicrosecondsPerMillisecond);
   }
@@ -210,8 +210,7 @@ class V8_BASE_EXPORT TimeDelta final {
   }

  private:
-  // TODO(v8:10620): constexpr requires constexpr saturated_cast.
-  static inline TimeDelta FromDouble(double value);
+  static constexpr inline TimeDelta FromDouble(double value);

   template <class TimeClass> friend class time_internal::TimeBase;
   // Constructs a delta given the duration in microseconds. This is private
@@ -224,7 +223,7 @@ class V8_BASE_EXPORT TimeDelta final {
 };

 // static
-TimeDelta TimeDelta::FromDouble(double value) {
+constexpr TimeDelta TimeDelta::FromDouble(double value) {
   return TimeDelta(saturated_cast<int64_t>(value));
 }

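The point of making FromDouble() constexpr (the removed TODO tracked v8:10620, which blocked this on a constexpr saturated_cast) is that TimeDelta thresholds can now be true compile-time constants; the new RecordEmbedderSpeed() helper in cpp-heap.cc below relies on exactly that with `constexpr auto kMinReportingTime = base::TimeDelta::FromMillisecondsD(0.5);`. A minimal standalone sketch of the pattern (not V8's actual header; a plain static_cast replaces saturated_cast so the sketch compiles on its own):

    #include <cstdint>

    class TimeDelta {
     public:
      static constexpr TimeDelta FromMillisecondsD(double milliseconds) {
        return FromDouble(milliseconds * 1000.0);  // microseconds per ms
      }
      constexpr int64_t InMicroseconds() const { return delta_; }
      constexpr bool operator>(const TimeDelta& other) const {
        return delta_ > other.delta_;
      }

     private:
      // V8 uses saturated_cast<int64_t> here; a plain cast keeps this
      // sketch self-contained.
      static constexpr TimeDelta FromDouble(double value) {
        return TimeDelta(static_cast<int64_t>(value));
      }
      constexpr explicit TimeDelta(int64_t delta) : delta_(delta) {}
      int64_t delta_;
    };

    // Now usable in constant expressions:
    constexpr TimeDelta kMinReportingTime = TimeDelta::FromMillisecondsD(0.5);
    static_assert(kMinReportingTime.InMicroseconds() == 500,
                  "0.5 ms == 500 us");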
src/heap/cppgc-js/cpp-heap.cc

@@ -588,6 +588,8 @@ bool ShouldReduceMemory(CppHeap::GarbageCollectionFlags flags) {
   return IsMemoryReducingGC(flags) || IsForceGC(flags);
 }

+constexpr size_t kIncrementalMarkingCheckInterval = 128 * KB;
+
 }  // namespace

 CppHeap::MarkingType CppHeap::SelectMarkingType() const {
@@ -761,6 +763,18 @@ bool CppHeap::FinishConcurrentMarkingIfNeeded() {
   return marker_->JoinConcurrentMarkingIfNeeded();
 }

+namespace {
+
+void RecordEmbedderSpeed(GCTracer* tracer, base::TimeDelta marking_time,
+                         size_t marked_bytes) {
+  constexpr auto kMinReportingTime = base::TimeDelta::FromMillisecondsD(0.5);
+  if (marking_time > kMinReportingTime) {
+    tracer->RecordEmbedderSpeed(marked_bytes, marking_time.InMillisecondsF());
+  }
+}
+
+}  // namespace
+
 void CppHeap::TraceEpilogue() {
   CHECK(in_atomic_pause_);
   CHECK(marking_done_);
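RecordEmbedderSpeed, new here and replacing the logic removed from LocalEmbedderHeapTracer::UpdateRemoteStats further down, only feeds a sample into V8's embedder-speed estimate when marking took longer than 0.5 ms: bytes-per-millisecond computed over shorter pauses is dominated by timer noise. The same guard as a self-contained sketch (std::chrono stands in for base::TimeDelta, and RecordSpeedSample() is a hypothetical sink for GCTracer::RecordEmbedderSpeed()):

    #include <chrono>
    #include <cstddef>
    #include <cstdio>
    #include <ratio>

    // Hypothetical sink standing in for GCTracer::RecordEmbedderSpeed().
    void RecordSpeedSample(size_t bytes, double ms) {
      std::printf("embedder marking speed: %.1f bytes/ms\n", bytes / ms);
    }

    void MaybeRecordEmbedderSpeed(
        std::chrono::duration<double, std::milli> marking_time,
        size_t marked_bytes) {
      // Mirrors kMinReportingTime above: drop samples under 0.5 ms, where
      // the division by elapsed time would be mostly noise.
      constexpr std::chrono::duration<double, std::milli> kMinReportingTime{
          0.5};
      if (marking_time > kMinReportingTime) {
        RecordSpeedSample(marked_bytes, marking_time.count());
      }
    }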
@@ -785,11 +799,13 @@ void CppHeap::TraceEpilogue() {
   }
   marker_.reset();
   if (isolate_) {
-    auto* tracer = isolate_->heap()->local_embedder_heap_tracer();
-    DCHECK_NOT_NULL(tracer);
-    tracer->UpdateRemoteStats(
-        stats_collector_->marked_bytes(),
-        stats_collector_->marking_time().InMillisecondsF());
+    used_size_ = stats_collector_->marked_bytes();
+    // Force a check next time increased memory is reported. This allows for
+    // setting limits close to actual heap sizes.
+    allocated_size_limit_for_check_ = 0;
+
+    RecordEmbedderSpeed(isolate_->heap()->tracer(),
+                        stats_collector_->marking_time(), used_size_);
   }
   // The allocated bytes counter in v8 was reset to the current marked bytes, so
   // any pending allocated bytes updates should be discarded.
@@ -858,18 +874,36 @@ void CppHeap::ReportBufferedAllocationSizeIfPossible() {
     return;
   }

+  // We are in attached state.
+  DCHECK_NOT_NULL(isolate_);
+
   // The calls below may trigger full GCs that are synchronous and also execute
   // epilogue callbacks. Since such callbacks may allocate, the counter must
   // already be zeroed by that time.
   const int64_t bytes_to_report = buffered_allocated_bytes_;
   buffered_allocated_bytes_ = 0;

-  auto* const tracer = isolate_->heap()->local_embedder_heap_tracer();
-  DCHECK_NOT_NULL(tracer);
   if (bytes_to_report < 0) {
-    tracer->DecreaseAllocatedSize(static_cast<size_t>(-bytes_to_report));
+    DCHECK_GE(used_size_.load(std::memory_order_relaxed), bytes_to_report);
+    used_size_.fetch_sub(bytes_to_report, std::memory_order_relaxed);
   } else {
-    tracer->IncreaseAllocatedSize(static_cast<size_t>(bytes_to_report));
+    used_size_.fetch_add(bytes_to_report, std::memory_order_relaxed);
+    allocated_size_ += bytes_to_report;
+
+    if (v8_flags.global_gc_scheduling && v8_flags.incremental_marking) {
+      if (allocated_size_ > allocated_size_limit_for_check_) {
+        Heap* heap = isolate_->heap();
+        heap->StartIncrementalMarkingIfAllocationLimitIsReached(
+            heap->GCFlagsForIncrementalMarking(),
+            kGCCallbackScheduleIdleGarbageCollection);
+        if (heap->AllocationLimitOvershotByLargeMargin()) {
+          heap->FinalizeIncrementalMarkingAtomically(
+              i::GarbageCollectionReason::kExternalFinalize);
+        }
+        allocated_size_limit_for_check_ =
+            allocated_size_ + kIncrementalMarkingCheckInterval;
+      }
+    }
   }
 }

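Two details in this hunk are worth noting: allocations and frees are buffered in a single signed counter (buffered_allocated_bytes_) that is zeroed before reporting, because reporting can trigger a synchronous GC whose epilogue callbacks allocate and re-enter this path; and used_size_ only needs relaxed atomic ordering, since it is a statistic consumed by the heap growing strategy rather than a synchronization point. A standalone sketch of the same pattern, assuming a single mutator thread owns the buffer (identifiers are illustrative, and the casts are spelled out where the patch relies on implicit conversion):

    #include <atomic>
    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    class UsedSizeTracker {
     public:
      void Allocated(size_t bytes) {
        buffered_bytes_ += static_cast<int64_t>(bytes);
      }
      void Freed(size_t bytes) {
        buffered_bytes_ -= static_cast<int64_t>(bytes);
      }

      // Flush the signed buffer into the atomic counter, mirroring
      // ReportBufferedAllocationSizeIfPossible() above.
      void Flush() {
        // Zero the buffer first: flushing may run callbacks that allocate
        // and re-enter Allocated()/Flush(); those updates must not be lost.
        const int64_t to_report = buffered_bytes_;
        buffered_bytes_ = 0;
        if (to_report < 0) {
          assert(used_size_.load(std::memory_order_relaxed) >=
                 static_cast<size_t>(-to_report));
          used_size_.fetch_sub(static_cast<size_t>(-to_report),
                               std::memory_order_relaxed);
        } else {
          used_size_.fetch_add(static_cast<size_t>(to_report),
                               std::memory_order_relaxed);
        }
      }

      // Relaxed is enough: a statistic, not a synchronization point.
      size_t used_size() const {
        return used_size_.load(std::memory_order_relaxed);
      }

     private:
      int64_t buffered_bytes_ = 0;        // single mutator thread only
      std::atomic<size_t> used_size_{0};  // may be read from other threads
    };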
@@ -1069,6 +1103,7 @@ const cppgc::EmbedderStackState* CppHeap::override_stack_state() const {
 void CppHeap::StartIncrementalGarbageCollection(cppgc::internal::GCConfig) {
   UNIMPLEMENTED();
 }

 size_t CppHeap::epoch() const { UNIMPLEMENTED(); }

 void CppHeap::ResetCrossHeapRememberedSet() {
src/heap/cppgc-js/cpp-heap.h

@@ -158,6 +158,11 @@ class V8_EXPORT_PRIVATE CppHeap final

   Isolate* isolate() const { return isolate_; }

+  size_t used_size() const {
+    return used_size_.load(std::memory_order_relaxed);
+  }
+  size_t allocated_size() const { return allocated_size_; }
+
   ::heap::base::Stack* stack() final;

   std::unique_ptr<CppMarkingState> CreateCppMarkingState();
@@ -223,6 +228,15 @@ class V8_EXPORT_PRIVATE CppHeap final
   bool force_incremental_marking_for_testing_ = false;
   bool is_in_v8_marking_step_ = false;

+  // Used size of objects. Reported to V8's regular heap growing strategy.
+  std::atomic<size_t> used_size_{0};
+  // Total bytes allocated since the last GC. Monotonically increasing value.
+  // Used to approximate allocation rate.
+  size_t allocated_size_ = 0;
+  // Limit for |allocated_size| in bytes to avoid checking for starting a GC
+  // on each increment.
+  size_t allocated_size_limit_for_check_ = 0;
+
   friend class MetricRecorderAdapter;
 };

src/heap/embedder-tracing.cc

@@ -56,18 +56,6 @@ void LocalEmbedderHeapTracer::TraceEpilogue() {
   cpp_heap()->TraceEpilogue();
 }

-void LocalEmbedderHeapTracer::UpdateRemoteStats(size_t allocated_size,
-                                                double time) {
-  remote_stats_.used_size = allocated_size;
-  // Force a check next time increased memory is reported. This allows for
-  // setting limits close to actual heap sizes.
-  remote_stats_.allocated_size_limit_for_check = 0;
-  constexpr double kMinReportingTimeMs = 0.5;
-  if (time > kMinReportingTimeMs) {
-    isolate_->heap()->tracer()->RecordEmbedderSpeed(allocated_size, time);
-  }
-}
-
 void LocalEmbedderHeapTracer::EnterFinalPause() {
   if (!InUse()) return;

@@ -93,19 +81,6 @@ LocalEmbedderHeapTracer::ExtractWrapperInfo(Isolate* isolate,
   return {nullptr, nullptr};
 }

-void LocalEmbedderHeapTracer::StartIncrementalMarkingIfNeeded() {
-  if (!v8_flags.global_gc_scheduling || !v8_flags.incremental_marking) return;
-
-  Heap* heap = isolate_->heap();
-  heap->StartIncrementalMarkingIfAllocationLimitIsReached(
-      heap->GCFlagsForIncrementalMarking(),
-      kGCCallbackScheduleIdleGarbageCollection);
-  if (heap->AllocationLimitOvershotByLargeMargin()) {
-    heap->FinalizeIncrementalMarkingAtomically(
-        i::GarbageCollectionReason::kExternalFinalize);
-  }
-}
-
 void LocalEmbedderHeapTracer::EmbedderWriteBarrier(Heap* heap,
                                                    JSObject js_object) {
   DCHECK(InUse());

src/heap/embedder-tracing.h

@@ -87,33 +87,8 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
     embedder_worklist_empty_ = is_empty;
   }

-  void IncreaseAllocatedSize(size_t bytes) {
-    remote_stats_.used_size.fetch_add(bytes, std::memory_order_relaxed);
-    remote_stats_.allocated_size += bytes;
-    if (remote_stats_.allocated_size >
-        remote_stats_.allocated_size_limit_for_check) {
-      StartIncrementalMarkingIfNeeded();
-      remote_stats_.allocated_size_limit_for_check =
-          remote_stats_.allocated_size + kEmbedderAllocatedThreshold;
-    }
-  }
-
-  void DecreaseAllocatedSize(size_t bytes) {
-    DCHECK_GE(remote_stats_.used_size.load(std::memory_order_relaxed), bytes);
-    remote_stats_.used_size.fetch_sub(bytes, std::memory_order_relaxed);
-  }
-
-  void StartIncrementalMarkingIfNeeded();
-
-  size_t used_size() const {
-    return remote_stats_.used_size.load(std::memory_order_relaxed);
-  }
-  size_t allocated_size() const { return remote_stats_.allocated_size; }
-
   WrapperInfo ExtractWrapperInfo(Isolate* isolate, JSObject js_object);

-  void UpdateRemoteStats(size_t, double);
-
   cppgc::EmbedderStackState embedder_stack_state() const {
     return embedder_stack_state_;
   }
@@ -121,8 +96,6 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
   void EmbedderWriteBarrier(Heap*, JSObject);

  private:
-  static constexpr size_t kEmbedderAllocatedThreshold = 128 * KB;
-
   CppHeap* cpp_heap() {
     DCHECK_NOT_NULL(cpp_heap_);
     DCHECK_IMPLIES(isolate_, cpp_heap_ == isolate_->heap()->cpp_heap());
@@ -143,19 +116,6 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
   // segments of potential embedder fields to move to the main thread.
   bool embedder_worklist_empty_ = false;

-  struct RemoteStatistics {
-    // Used size of objects in bytes reported by the embedder. Updated via
-    // TraceSummary at the end of tracing and incrementally when the GC is not
-    // in progress.
-    std::atomic<size_t> used_size{0};
-    // Totally bytes allocated by the embedder. Monotonically
-    // increasing value. Used to approximate allocation rate.
-    size_t allocated_size = 0;
-    // Limit for |allocated_size| in bytes to avoid checking for starting a GC
-    // on each increment.
-    size_t allocated_size_limit_for_check = 0;
-  } remote_stats_;
-
   friend class EmbedderStackStateScope;
 };

src/heap/heap.cc

@@ -5123,9 +5123,7 @@ size_t Heap::OldGenerationSizeOfObjects() {
 }

 size_t Heap::EmbedderSizeOfObjects() const {
-  return local_embedder_heap_tracer()
-             ? local_embedder_heap_tracer()->used_size()
-             : 0;
+  return cpp_heap_ ? CppHeap::From(cpp_heap_)->used_size() : 0;
 }

 size_t Heap::GlobalSizeOfObjects() {
@@ -6837,9 +6835,7 @@ bool Heap::AllowedToBeMigrated(Map map, HeapObject obj, AllocationSpace dst) {
 }

 size_t Heap::EmbedderAllocationCounter() const {
-  return local_embedder_heap_tracer()
-             ? local_embedder_heap_tracer()->allocated_size()
-             : 0;
+  return cpp_heap_ ? CppHeap::From(cpp_heap_)->allocated_size() : 0;
 }

 void Heap::CreateObjectStats() {