Reland "[cppgc-js] Allow overriding marking support"

This is a reland of commit 2115ba5053.

Adds flags to allow overriding marking support. This adds
compatibility with EmbedderHeapTracer which allows for disabling
incremental marking support with `--no-incremental-marking-wrappers`.

The corresponding CppHeap flags are
* `--cppheap-incremental-marking`
* `--cppheap-concurrent-marking`

This allows embedders that use types that do not support incremental
and concurrent marking to switch from EmbedderHeapTracer to CppHeap.

Bug: v8:13207
Change-Id: I43a47d7d035bff5d4b437c5bf01336a895b61217
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3851543
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82693}
This commit is contained in:
Michael Lippautz 2022-08-24 14:44:21 +02:00 committed by V8 LUCI CQ
parent 8c8f65984a
commit d75462ecee
8 changed files with 68 additions and 33 deletions

View File

@ -79,6 +79,18 @@ struct WrapperDescriptor final {
/**
 * Construction-time parameters for a v8::CppHeap. The two *_support fields
 * are upper bounds: per the comments below, runtime flags may further reduce
 * them when the heap is attached to an Isolate.
 */
struct V8_EXPORT CppHeapCreateParams {
std::vector<std::unique_ptr<cppgc::CustomSpaceBase>> custom_spaces;
WrapperDescriptor wrapper_descriptor;
/**
 * Specifies which kinds of marking are supported by the heap. The type may be
 * further reduced via runtime flags when attaching the heap to an Isolate.
 */
cppgc::Heap::MarkingType marking_support =
cppgc::Heap::MarkingType::kIncrementalAndConcurrent;
/**
 * Specifies which kind of sweeping is supported by the heap. The type may be
 * further reduced via runtime flags when attaching the heap to an Isolate.
 */
cppgc::Heap::SweepingType sweeping_support =
cppgc::Heap::SweepingType::kIncrementalAndConcurrent;
};
/**

View File

@ -388,6 +388,9 @@ DEFINE_BOOL_READONLY(disable_write_barriers, V8_DISABLE_WRITE_BARRIERS_BOOL,
// Disable incremental marking barriers
DEFINE_NEG_IMPLICATION(disable_write_barriers, incremental_marking)
DEFINE_NEG_IMPLICATION(disable_write_barriers, concurrent_marking)
DEFINE_NEG_IMPLICATION(disable_write_barriers, cppheap_incremental_marking)
DEFINE_NEG_IMPLICATION(disable_write_barriers, cppheap_concurrent_marking)
#ifdef V8_ENABLE_UNCONDITIONAL_WRITE_BARRIERS
#define V8_ENABLE_UNCONDITIONAL_WRITE_BARRIERS_BOOL true
@ -1420,6 +1423,18 @@ DEFINE_BOOL(clear_free_memory, false, "initialize free memory with 0")
DEFINE_BOOL(crash_on_aborted_evacuation, false,
"crash when evacuation of page fails")
// v8::CppHeap flags that allow fine-grained control of how C++ memory is
// reclaimed in the garbage collector.
DEFINE_BOOL(cppheap_incremental_marking, false,
"use incremental marking for CppHeap")
// NOTE(review): the NEG_NEG/WEAK pair below presumably makes
// --cppheap-incremental-marking follow --incremental-marking unless the user
// set it explicitly — confirm against the implication-macro definitions.
DEFINE_NEG_NEG_IMPLICATION(incremental_marking, cppheap_incremental_marking)
DEFINE_WEAK_IMPLICATION(incremental_marking, cppheap_incremental_marking)
DEFINE_BOOL(cppheap_concurrent_marking, false,
"use concurrent marking for CppHeap")
// Concurrent CppHeap marking is only meaningful on top of incremental
// marking (see the CHECK_IMPLIES in CppHeap::ReduceGCCapabilititesFromFlags),
// and weakly follows the top-level --concurrent-marking flag.
DEFINE_NEG_NEG_IMPLICATION(cppheap_incremental_marking,
cppheap_concurrent_marking)
DEFINE_WEAK_IMPLICATION(concurrent_marking, cppheap_concurrent_marking)
// assembler-ia32.cc / assembler-arm.cc / assembler-arm64.cc / assembler-x64.cc
#ifdef V8_ENABLE_DEBUG_CODE
DEFINE_BOOL(debug_code, DEBUG_BOOL,
@ -2240,6 +2255,7 @@ DEFINE_NEG_IMPLICATION(single_threaded_gc, parallel_pointer_update)
DEFINE_NEG_IMPLICATION(single_threaded_gc, parallel_scavenge)
DEFINE_NEG_IMPLICATION(single_threaded_gc, concurrent_array_buffer_sweeping)
DEFINE_NEG_IMPLICATION(single_threaded_gc, stress_concurrent_allocation)
DEFINE_NEG_IMPLICATION(single_threaded_gc, cppheap_concurrent_marking)
// Web snapshots: 1) expose WebSnapshot.* API 2) interpret scripts as web
// snapshots if they start with a magic number.

View File

@ -118,8 +118,9 @@ constexpr uint16_t WrapperDescriptor::kUnknownEmbedderId;
// static
std::unique_ptr<CppHeap> CppHeap::Create(v8::Platform* platform,
const CppHeapCreateParams& params) {
return std::make_unique<internal::CppHeap>(platform, params.custom_spaces,
params.wrapper_descriptor);
return std::make_unique<internal::CppHeap>(
platform, params.custom_spaces, params.wrapper_descriptor,
params.marking_support, params.sweeping_support);
}
cppgc::AllocationHandle& CppHeap::GetAllocationHandle() {
@ -477,15 +478,14 @@ void CppHeap::InitializeOncePerProcess() {
CppHeap::CppHeap(
v8::Platform* platform,
const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces,
const v8::WrapperDescriptor& wrapper_descriptor)
const v8::WrapperDescriptor& wrapper_descriptor,
cppgc::Heap::MarkingType marking_support,
cppgc::Heap::SweepingType sweeping_support)
: cppgc::internal::HeapBase(
std::make_shared<CppgcPlatformAdapter>(platform), custom_spaces,
cppgc::internal::HeapBase::StackSupport::
kSupportsConservativeStackScan,
// Default marking and sweeping types are only incremental. The types
// are updated respecting flags only on GC as the flags are not set
// properly during heap setup.
MarkingType::kIncremental, SweepingType::kIncremental),
marking_support, sweeping_support),
wrapper_descriptor_(wrapper_descriptor) {
CHECK_NE(WrapperDescriptor::kUnknownEmbedderId,
wrapper_descriptor_.embedder_id_for_garbage_collected);
@ -521,6 +521,7 @@ void CppHeap::AttachIsolate(Isolate* isolate) {
SetMetricRecorder(std::make_unique<MetricRecorderAdapter>(*this));
isolate_->heap()->SetStackStart(base::Stack::GetStackStart());
oom_handler().SetCustomHandler(&FatalOutOfMemoryHandlerImpl);
ReduceGCCapabilititesFromFlags();
no_gc_scope_--;
}
@ -582,13 +583,17 @@ CppHeap::SweepingType CppHeap::SelectSweepingType() const {
return sweeping_support();
}
void CppHeap::UpdateSupportedGCTypesFromFlags() {
// Keep the selection simple for now as production configurations do not turn
// off parallel and/or concurrent marking independently.
if (!FLAG_parallel_marking || !FLAG_concurrent_marking) {
marking_support_ = MarkingType::kIncremental;
void CppHeap::ReduceGCCapabilititesFromFlags() {
CHECK_IMPLIES(FLAG_cppheap_concurrent_marking,
FLAG_cppheap_incremental_marking);
if (FLAG_cppheap_concurrent_marking) {
marking_support_ = static_cast<MarkingType>(
std::min(marking_support_, MarkingType::kIncrementalAndConcurrent));
} else if (FLAG_cppheap_incremental_marking) {
marking_support_ = static_cast<MarkingType>(
std::min(marking_support_, MarkingType::kIncremental));
} else {
marking_support_ = MarkingType::kIncrementalAndConcurrent;
marking_support_ = MarkingType::kAtomic;
}
sweeping_support_ = FLAG_single_threaded_gc
@ -600,8 +605,6 @@ void CppHeap::InitializeTracing(CollectionType collection_type,
GarbageCollectionFlags gc_flags) {
CHECK(!sweeper_.IsSweepingInProgress());
UpdateSupportedGCTypesFromFlags();
// Check that previous cycle metrics for the same collection type have been
// reported.
if (GetMetricRecorder()) {

View File

@ -110,10 +110,10 @@ class V8_EXPORT_PRIVATE CppHeap final
return static_cast<const CppHeap*>(heap);
}
CppHeap(
v8::Platform* platform,
const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces,
const v8::WrapperDescriptor& wrapper_descriptor);
CppHeap(v8::Platform*,
const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>&,
const v8::WrapperDescriptor&, cppgc::Heap::MarkingType,
cppgc::Heap::SweepingType);
~CppHeap() final;
CppHeap(const CppHeap&) = delete;
@ -167,7 +167,7 @@ class V8_EXPORT_PRIVATE CppHeap final
std::unique_ptr<CppMarkingState> CreateCppMarkingStateForMutatorThread();
private:
void UpdateSupportedGCTypesFromFlags();
void ReduceGCCapabilititesFromFlags();
void FinalizeIncrementalGarbageCollectionIfNeeded(
cppgc::Heap::StackState) final {

View File

@ -102,10 +102,8 @@ void LocalEmbedderHeapTracer::EnterFinalPause() {
bool LocalEmbedderHeapTracer::Trace(double max_duration) {
if (!InUse()) return true;
if (cpp_heap_)
return cpp_heap()->AdvanceTracing(max_duration);
else
return remote_tracer_->AdvanceTracing(max_duration);
return cpp_heap_ ? cpp_heap_->AdvanceTracing(max_duration)
: remote_tracer_->AdvanceTracing(max_duration);
}
bool LocalEmbedderHeapTracer::IsRemoteTracingDone() {

View File

@ -110,8 +110,18 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
bool IsRemoteTracingDone();
bool ShouldFinalizeIncrementalMarking() {
return !FLAG_incremental_marking_wrappers || !InUse() ||
(IsRemoteTracingDone() && embedder_worklist_empty_);
// Covers cases where no remote tracer is in use or the flags for
// incremental marking have been disabled.
if (!SupportsIncrementalEmbedderSteps()) return true;
return IsRemoteTracingDone() && embedder_worklist_empty_;
}
// Returns true if incremental embedder marking steps may be performed.
// Requires a tracer to be in use at all; then consults the flag matching
// the active mechanism: --cppheap-incremental-marking when a CppHeap is
// attached, --incremental-marking-wrappers for a remote EmbedderHeapTracer.
bool SupportsIncrementalEmbedderSteps() const {
if (!InUse()) return false;
return cpp_heap_ ? FLAG_cppheap_incremental_marking
: FLAG_incremental_marking_wrappers;
}
void SetEmbedderWorklistEmpty(bool is_empty) {

View File

@ -455,7 +455,9 @@ void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
void IncrementalMarking::EmbedderStep(double expected_duration_ms,
double* duration_ms) {
if (!ShouldDoEmbedderStep()) {
DCHECK(IsMarking());
if (!heap_->local_embedder_heap_tracer()
->SupportsIncrementalEmbedderSteps()) {
*duration_ms = 0.0;
return;
}
@ -609,11 +611,6 @@ bool IncrementalMarking::TryInitializeTaskTimeout() {
}
}
bool IncrementalMarking::ShouldDoEmbedderStep() {
return IsMarking() && FLAG_incremental_marking_wrappers &&
heap_->local_embedder_heap_tracer()->InUse();
}
void IncrementalMarking::FastForwardSchedule() {
if (scheduled_bytes_to_mark_ < bytes_marked_) {
scheduled_bytes_to_mark_ = bytes_marked_;

View File

@ -174,7 +174,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
void StartMarking();
bool ShouldDoEmbedderStep();
void EmbedderStep(double expected_duration_ms, double* duration_ms);
void StartBlackAllocation();