[heap] Rework external callbacks in PerformGarbageCollection

- Bail out of the callback bottlenecks early when no callbacks are
  registered, without emitting GC tracer scopes (see the sketch below).
- Unify the blocks that invoke external callbacks.

Change-Id: I30744ef8b2f537ecb84b22babcae6df1442018bf
Bug: v8:12612
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4208930
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85591}
Michael Lippautz 2023-01-31 20:34:25 +01:00 committed by V8 LUCI CQ
parent 12ecfa78cd
commit fb1c8489f6
8 changed files with 161 additions and 124 deletions
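In concrete terms, the two bullets above amount to one pattern: each callback bottleneck (Heap::CallGCPrologueCallbacks and Heap::CallGCEpilogueCallbacks in the diff below) returns before emitting any scope when nothing is registered, and owns its own reentrancy guard so that a GC triggered from inside a callback does not re-run the callbacks. A minimal, standalone sketch of that pattern; CallbackBottleneck and TraceScope are illustrative stand-ins, not V8 classes (V8 uses GCCallbacksScope, RCS_SCOPE and TRACE_GC for this):

  #include <cstdio>
  #include <functional>
  #include <vector>

  // Illustrative stand-in for a tracer scope (TRACE_GC in V8).
  struct TraceScope {
    explicit TraceScope(const char* name) { std::printf("enter %s\n", name); }
    ~TraceScope() { std::printf("leave scope\n"); }
  };

  class CallbackBottleneck {
   public:
    void Add(std::function<void()> cb) { callbacks_.push_back(std::move(cb)); }

    // The pattern from the commit: bail out before emitting any scope when
    // nothing is registered, and skip reentrant invocations so that a GC
    // triggered from inside a callback does not start over.
    void Invoke(const char* scope_name) {
      if (callbacks_.empty()) return;  // No scope is emitted on this path.
      if (depth_ > 0) return;          // Reentrant call: outermost run only.
      ++depth_;
      {
        TraceScope scope(scope_name);
        for (auto& cb : callbacks_) cb();
      }
      --depth_;
    }

   private:
    std::vector<std::function<void()>> callbacks_;
    int depth_ = 0;
  };

  int main() {
    CallbackBottleneck prologue;
    prologue.Invoke("HEAP_EXTERNAL_PROLOGUE");  // Empty: returns, no scope.
    prologue.Add([] { std::printf("embedder prologue callback\n"); });
    prologue.Invoke("HEAP_EXTERNAL_PROLOGUE");  // Non-empty: scope + callback.
  }

With an empty callback list the first Invoke does no scope bookkeeping at all, which is the common case for embedders that register no GC callbacks.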

View File

@@ -22,6 +22,8 @@ include_rules = [
"+src/heap/factory-inl.h",
# TODO(v8:10496): Don't expose so much (through transitive includes) outside
# of heap/.
"+src/heap/gc-tracer.h",
"+src/heap/gc-tracer-inl.h",
"+src/heap/heap.h",
"+src/heap/heap-verifier.h",
"+src/heap/heap-inl.h",

View File

@@ -14,9 +14,12 @@
#include "src/base/compiler-specific.h"
#include "src/base/logging.h"
#include "src/base/sanitizer/asan.h"
#include "src/common/assert-scope.h"
#include "src/common/globals.h"
#include "src/execution/vm-state-inl.h"
#include "src/heap/base/stack.h"
#include "src/heap/gc-tracer-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/heap/heap-write-barrier.h"
@@ -735,19 +738,24 @@ void GlobalHandles::ProcessWeakYoungObjects(
}
void GlobalHandles::InvokeSecondPassPhantomCallbacks() {
DCHECK(AllowJavascriptExecution::IsAllowed(isolate()));
DCHECK(AllowGarbageCollection::IsAllowed());
if (second_pass_callbacks_.empty()) return;
GCCallbacksScope scope(isolate()->heap());
// The callbacks may execute JS, which in turn may lead to another GC run.
// If we are already processing the callbacks, we do not want to start over
// from within the inner GC. Newly added callbacks will always be run by the
// outermost GC run only.
GCCallbacksScope scope(isolate()->heap());
if (scope.CheckReenter()) {
TRACE_EVENT0("v8", "V8.GCPhantomHandleProcessingCallback");
isolate()->heap()->CallGCPrologueCallbacks(
GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags,
GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
{
AllowJavascriptExecution allow_js(isolate());
TRACE_GC(isolate_->heap()->tracer(),
GCTracer::Scope::HEAP_EXTERNAL_SECOND_PASS_CALLBACKS);
while (!second_pass_callbacks_.empty()) {
auto callback = second_pass_callbacks_.back();
second_pass_callbacks_.pop_back();
@@ -755,7 +763,8 @@ void GlobalHandles::InvokeSecondPassPhantomCallbacks() {
}
}
isolate()->heap()->CallGCEpilogueCallbacks(
GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags);
GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags,
GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
}
}
@@ -853,35 +862,35 @@ void GlobalHandles::PendingPhantomCallback::Invoke(Isolate* isolate,
}
void GlobalHandles::PostGarbageCollectionProcessing(
GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
v8::GCCallbackFlags gc_callback_flags) {
// Process weak global handle callbacks. This must be done after the
// GC is completely done, because the callbacks may invoke arbitrary
// API functions.
DCHECK_EQ(Heap::NOT_IN_GC, isolate_->heap()->gc_state());
if (second_pass_callbacks_.empty()) return;
const bool synchronous_second_pass =
v8_flags.optimize_for_size || v8_flags.predictable ||
isolate_->heap()->IsTearingDown() ||
(gc_callback_flags &
(kGCCallbackFlagForced | kGCCallbackFlagCollectAllAvailableGarbage |
kGCCallbackFlagSynchronousPhantomCallbackProcessing)) != 0;
if (synchronous_second_pass) {
InvokeSecondPassPhantomCallbacks();
return;
}
if (second_pass_callbacks_.empty() || second_pass_callbacks_task_posted_)
return;
second_pass_callbacks_task_posted_ = true;
V8::GetCurrentPlatform()
->GetForegroundTaskRunner(reinterpret_cast<v8::Isolate*>(isolate()))
->PostTask(MakeCancelableTask(isolate(), [this] {
DCHECK(second_pass_callbacks_task_posted_);
second_pass_callbacks_task_posted_ = false;
InvokeSecondPassPhantomCallbacks();
}));
if (!second_pass_callbacks_task_posted_) {
second_pass_callbacks_task_posted_ = true;
V8::GetCurrentPlatform()
->GetForegroundTaskRunner(reinterpret_cast<v8::Isolate*>(isolate()))
->PostTask(MakeCancelableTask(isolate(), [this] {
DCHECK(second_pass_callbacks_task_posted_);
second_pass_callbacks_task_posted_ = false;
InvokeSecondPassPhantomCallbacks();
}));
}
}
void GlobalHandles::IterateStrongRoots(RootVisitor* v) {
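For context on what InvokeSecondPassPhantomCallbacks above drains: a first-pass weak callback may only reset its handle and can request a second pass, which GlobalHandles runs after the GC, either synchronously or from the posted foreground task shown above. A minimal sketch of how an embedder sets up such a callback pair through the public API; it assumes an already initialized isolate, and WeakData, FirstPass, SecondPass and MakeWeak are illustrative names:

  #include <cstdio>
  #include <v8.h>

  struct WeakData {
    v8::Global<v8::Object> handle;  // Reset in the first pass.
  };

  void SecondPass(const v8::WeakCallbackInfo<WeakData>& info) {
    // Runs from GlobalHandles::InvokeSecondPassPhantomCallbacks(), either
    // synchronously after the GC or from a posted foreground task.
    std::printf("second pass: free embedder resources\n");
    delete info.GetParameter();
  }

  void FirstPass(const v8::WeakCallbackInfo<WeakData>& info) {
    // The first pass may only reset the handle; heavy work is deferred.
    info.GetParameter()->handle.Reset();
    info.SetSecondPassCallback(SecondPass);
  }

  void MakeWeak(v8::Isolate* isolate, v8::Local<v8::Object> object) {
    auto* data = new WeakData();
    data->handle.Reset(isolate, object);
    data->handle.SetWeak(data, FirstPass, v8::WeakCallbackType::kParameter);
  }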

View File

@@ -83,8 +83,7 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
void InvokeSecondPassPhantomCallbacks();
// Schedule or invoke second pass weak callbacks.
void PostGarbageCollectionProcessing(
GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags);
void PostGarbageCollectionProcessing(v8::GCCallbackFlags gc_callback_flags);
void IterateStrongRoots(RootVisitor* v);
void IterateWeakRoots(RootVisitor* v);

View File

@@ -160,12 +160,12 @@ void Heap::SetFunctionsMarkedForManualOptimization(Object hash_table) {
hash_table.ptr();
}
PagedSpace* Heap::paged_space(int idx) {
PagedSpace* Heap::paged_space(int idx) const {
DCHECK(idx == OLD_SPACE || idx == CODE_SPACE || idx == SHARED_SPACE);
return static_cast<PagedSpace*>(space_[idx].get());
}
Space* Heap::space(int idx) { return space_[idx].get(); }
Space* Heap::space(int idx) const { return space_[idx].get(); }
Address* Heap::NewSpaceAllocationTopAddress() {
return new_space_ ? new_space_->allocation_top_address() : nullptr;

View File

@@ -256,7 +256,7 @@ Heap::Heap()
Heap::~Heap() = default;
size_t Heap::MaxReserved() {
size_t Heap::MaxReserved() const {
const size_t kMaxNewLargeObjectSpaceSize = max_semi_space_size_;
return static_cast<size_t>(2 * max_semi_space_size_ +
kMaxNewLargeObjectSpaceSize +
@@ -368,7 +368,7 @@ size_t Heap::Capacity() {
return NewSpaceCapacity() + OldGenerationCapacity();
}
size_t Heap::OldGenerationCapacity() {
size_t Heap::OldGenerationCapacity() const {
if (!HasBeenSetUp()) return 0;
PagedSpaceIterator spaces(this);
size_t total = 0;
@@ -452,7 +452,7 @@ size_t Heap::Available() {
return total;
}
bool Heap::CanExpandOldGeneration(size_t size) {
bool Heap::CanExpandOldGeneration(size_t size) const {
if (force_oom_ || force_gc_on_next_allocation_) return false;
if (OldGenerationCapacity() + size > max_old_generation_size()) return false;
// The OldGenerationCapacity does not account compaction spaces used
@@ -478,7 +478,7 @@ bool Heap::CanExpandOldGenerationBackground(LocalHeap* local_heap,
memory_allocator()->Size() + size <= MaxReserved();
}
bool Heap::CanPromoteYoungAndExpandOldGeneration(size_t size) {
bool Heap::CanPromoteYoungAndExpandOldGeneration(size_t size) const {
size_t new_space_capacity = NewSpaceCapacity();
size_t new_lo_space_capacity = new_lo_space_ ? new_lo_space_->Size() : 0;
@@ -494,7 +494,7 @@ bool Heap::HasBeenSetUp() const {
GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
GarbageCollectionReason gc_reason,
const char** reason) {
const char** reason) const {
if (gc_reason == GarbageCollectionReason::kFinalizeMinorMC) {
DCHECK(new_space());
*reason = "finalize MinorMC";
@@ -1620,6 +1620,24 @@ Heap::DevToolsTraceEventScope::~DevToolsTraceEventScope() {
heap_->SizeOfObjects());
}
namespace {
template <typename Callback>
void InvokeExternalCallbacks(Isolate* isolate, Callback callback) {
AllowGarbageCollection allow_gc;
AllowJavascriptExecution allow_js(isolate);
// Temporary override any embedder stack state as callbacks may create
// their own state on the stack and recursively trigger GC.
EmbedderStackStateScope embedder_scope(
isolate->heap(), EmbedderStackStateScope::kExplicitInvocation,
StackState::kMayContainHeapPointers);
VMState<EXTERNAL> callback_state(isolate);
callback();
}
} // namespace
bool Heap::CollectGarbage(AllocationSpace space,
GarbageCollectionReason gc_reason,
const v8::GCCallbackFlags gc_callback_flags) {
@@ -1645,11 +1663,9 @@ bool Heap::CollectGarbage(AllocationSpace space,
DCHECK(AllowGarbageCollection::IsAllowed());
GarbageCollector collector;
const char* collector_reason = nullptr;
collector = SelectGarbageCollector(space, gc_reason, &collector_reason);
const GarbageCollector collector =
SelectGarbageCollector(space, gc_reason, &collector_reason);
current_or_last_garbage_collector_ = collector;
if (collector == GarbageCollector::MARK_COMPACTOR &&
@@ -1657,30 +1673,28 @@ bool Heap::CollectGarbage(AllocationSpace space,
CollectGarbage(NEW_SPACE, GarbageCollectionReason::kFinalizeMinorMC);
}
// Ensure that all pending phantom callbacks are invoked.
isolate()->global_handles()->InvokeSecondPassPhantomCallbacks();
const GCType gc_type = GetGCTypeFromGarbageCollector(collector);
GCType gc_type = GetGCTypeFromGarbageCollector(collector);
{
GCCallbacksScope scope(this);
// Temporary override any embedder stack state as callbacks may create
// their own state on the stack and recursively trigger GC.
EmbedderStackStateScope embedder_scope(
this, EmbedderStackStateScope::kExplicitInvocation,
StackState::kMayContainHeapPointers);
if (scope.CheckReenter()) {
AllowGarbageCollection allow_gc;
AllowJavascriptExecution allow_js(isolate());
TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
VMState<EXTERNAL> callback_state(isolate_);
HandleScope handle_scope(isolate_);
CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags);
}
}
// Prologue callbacks. These callbacks may trigger GC themselves and thus
// cannot be related exactly to garbage collection cycles.
//
// GCTracer scopes are managed by callees.
InvokeExternalCallbacks(isolate(), [this, gc_callback_flags, gc_type]() {
// Ensure that all pending phantom callbacks are invoked.
isolate()->global_handles()->InvokeSecondPassPhantomCallbacks();
// Part 2: The main garbage collection phase.
// Prologue callbacks registered with Heap.
CallGCPrologueCallbacks(gc_type, gc_callback_flags,
GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
});
// The main garbage collection phase.
DisallowGarbageCollection no_gc_during_gc;
if (force_shared_gc_with_empty_stack_for_testing_) {
embedder_stack_state_ = StackState::kNoHeapPointers;
}
size_t freed_global_handles = 0;
size_t committed_memory_before = collector == GarbageCollector::MARK_COMPACTOR
? CommittedOldGenerationMemory()
@@ -1769,31 +1783,18 @@ bool Heap::CollectGarbage(AllocationSpace space,
}
}
// Part 3: Invoke all callbacks which should happen after the actual garbage
// collection is triggered. Note that these callbacks may trigger another
// garbage collection since they may allocate.
// Epilogue callbacks. These callbacks may trigger GC themselves and thus
// cannot be related exactly to garbage collection cycles.
//
// GCTracer scopes are managed by callees.
InvokeExternalCallbacks(isolate(), [this, gc_callback_flags, gc_type]() {
// Epilogue callbacks registered with Heap.
CallGCEpilogueCallbacks(gc_type, gc_callback_flags,
GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
{
TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);
{
AllowGarbageCollection allow_gc;
AllowJavascriptExecution allow_js(isolate());
isolate_->global_handles()->PostGarbageCollectionProcessing(
collector, gc_callback_flags);
}
}
{
GCCallbacksScope scope(this);
if (scope.CheckReenter()) {
AllowGarbageCollection allow_gc;
AllowJavascriptExecution allow_js(isolate());
TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
VMState<EXTERNAL> callback_state(isolate_);
HandleScope handle_scope(isolate_);
CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
}
}
isolate()->global_handles()->PostGarbageCollectionProcessing(
gc_callback_flags);
});
if (collector == GarbageCollector::MARK_COMPACTOR &&
(gc_callback_flags & (kGCCallbackFlagForced |
@@ -2517,14 +2518,30 @@ void Heap::RecomputeLimits(GarbageCollector collector) {
}
}
void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
RCS_SCOPE(isolate(), RuntimeCallCounterId::kGCPrologueCallback);
gc_prologue_callbacks_.Invoke(gc_type, flags);
void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags,
GCTracer::Scope::ScopeId scope_id) {
if (gc_prologue_callbacks_.IsEmpty()) return;
GCCallbacksScope scope(this);
if (scope.CheckReenter()) {
RCS_SCOPE(isolate(), RuntimeCallCounterId::kGCPrologueCallback);
TRACE_GC(tracer(), scope_id);
HandleScope handle_scope(isolate());
gc_prologue_callbacks_.Invoke(gc_type, flags);
}
}
void Heap::CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags) {
RCS_SCOPE(isolate(), RuntimeCallCounterId::kGCEpilogueCallback);
gc_epilogue_callbacks_.Invoke(gc_type, flags);
void Heap::CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags,
GCTracer::Scope::ScopeId scope_id) {
if (gc_epilogue_callbacks_.IsEmpty()) return;
GCCallbacksScope scope(this);
if (scope.CheckReenter()) {
RCS_SCOPE(isolate(), RuntimeCallCounterId::kGCEpilogueCallback);
TRACE_GC(tracer(), scope_id);
HandleScope handle_scope(isolate());
gc_epilogue_callbacks_.Invoke(gc_type, flags);
}
}
void Heap::MarkCompact() {
@@ -3791,7 +3808,7 @@ void Heap::ReduceNewSpaceSize() {
size_t Heap::NewSpaceSize() { return new_space() ? new_space()->Size() : 0; }
size_t Heap::NewSpaceCapacity() {
size_t Heap::NewSpaceCapacity() const {
return new_space() ? new_space()->Capacity() : 0;
}
@@ -3809,25 +3826,17 @@ void Heap::FinalizeIncrementalMarkingAtomically(
}
void Heap::InvokeIncrementalMarkingPrologueCallbacks() {
GCCallbacksScope scope(this);
if (scope.CheckReenter()) {
AllowGarbageCollection allow_allocation;
TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE);
VMState<EXTERNAL> state(isolate_);
HandleScope handle_scope(isolate_);
CallGCPrologueCallbacks(kGCTypeIncrementalMarking, kNoGCCallbackFlags);
}
AllowGarbageCollection allow_allocation;
VMState<EXTERNAL> state(isolate_);
CallGCPrologueCallbacks(kGCTypeIncrementalMarking, kNoGCCallbackFlags,
GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE);
}
void Heap::InvokeIncrementalMarkingEpilogueCallbacks() {
GCCallbacksScope scope(this);
if (scope.CheckReenter()) {
AllowGarbageCollection allow_allocation;
TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE);
VMState<EXTERNAL> state(isolate_);
HandleScope handle_scope(isolate_);
CallGCEpilogueCallbacks(kGCTypeIncrementalMarking, kNoGCCallbackFlags);
}
AllowGarbageCollection allow_allocation;
VMState<EXTERNAL> state(isolate_);
CallGCEpilogueCallbacks(kGCTypeIncrementalMarking, kNoGCCallbackFlags,
GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE);
}
void Heap::NotifyObjectLayoutChange(
@@ -5104,7 +5113,7 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
GetFromRingBuffer(stats->last_few_messages);
}
size_t Heap::OldGenerationSizeOfObjects() {
size_t Heap::OldGenerationSizeOfObjects() const {
PagedSpaceIterator spaces(this);
size_t total = 0;
for (PagedSpace* space = spaces.Next(); space != nullptr;
@@ -5121,15 +5130,15 @@ size_t Heap::EmbedderSizeOfObjects() const {
return cpp_heap_ ? CppHeap::From(cpp_heap_)->used_size() : 0;
}
size_t Heap::GlobalSizeOfObjects() {
size_t Heap::GlobalSizeOfObjects() const {
return OldGenerationSizeOfObjects() + EmbedderSizeOfObjects();
}
uint64_t Heap::AllocatedExternalMemorySinceMarkCompact() {
uint64_t Heap::AllocatedExternalMemorySinceMarkCompact() const {
return external_memory_.AllocatedSinceMarkCompact();
}
bool Heap::AllocationLimitOvershotByLargeMargin() {
bool Heap::AllocationLimitOvershotByLargeMargin() const {
// This guards against too eager finalization in small heaps.
// The number is chosen based on v8.browsing_mobile on Nexus 7v2.
constexpr size_t kMarginForSmallHeaps = 32u * MB;
@@ -5836,6 +5845,13 @@ void Heap::StartTearDown() {
FreeMainThreadSharedLinearAllocationAreas();
}
void Heap::ForceSharedGCWithEmptyStackForTesting() {
// No mutex or atomics as this variable is always set from only a single
// thread before invoking a shared GC. The shared GC then resets the flag
// while the initiating thread is guaranteed to wait on a condition variable.
force_shared_gc_with_empty_stack_for_testing_ = true;
}
void Heap::TearDownWithSharedHeap() {
DCHECK_EQ(gc_state(), TEAR_DOWN);
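The prologue and epilogue callbacks dispatched by CallGCPrologueCallbacks/CallGCEpilogueCallbacks above are the ones embedders register on the isolate. A minimal sketch of such a registration, assuming an already initialized isolate; OnGCPrologue, OnGCEpilogue and RegisterGCObservers are illustrative names:

  #include <cstdio>
  #include <v8.h>

  // Invoked by Heap::CallGCPrologueCallbacks() with the GC type and flags.
  void OnGCPrologue(v8::Isolate* isolate, v8::GCType type,
                    v8::GCCallbackFlags flags, void* data) {
    std::printf("GC prologue, type=%d\n", static_cast<int>(type));
  }

  void OnGCEpilogue(v8::Isolate* isolate, v8::GCType type,
                    v8::GCCallbackFlags flags, void* data) {
    std::printf("GC epilogue, type=%d\n", static_cast<int>(type));
  }

  void RegisterGCObservers(v8::Isolate* isolate) {
    // With no callbacks registered, the reworked bottlenecks return before
    // emitting the HEAP_EXTERNAL_PROLOGUE/EPILOGUE tracer scopes; once
    // registered, callbacks run inside a single GCCallbacksScope per
    // bottleneck.
    isolate->AddGCPrologueCallback(OnGCPrologue, /*data=*/nullptr);
    isolate->AddGCEpilogueCallback(OnGCEpilogue, /*data=*/nullptr);
  }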

View File

@@ -248,9 +248,9 @@ class Heap {
class ExternalMemoryAccounting {
public:
int64_t total() { return total_.load(std::memory_order_relaxed); }
int64_t limit() { return limit_.load(std::memory_order_relaxed); }
int64_t low_since_mark_compact() {
int64_t total() const { return total_.load(std::memory_order_relaxed); }
int64_t limit() const { return limit_.load(std::memory_order_relaxed); }
int64_t low_since_mark_compact() const {
return low_since_mark_compact_.load(std::memory_order_relaxed);
}
@@ -269,7 +269,7 @@ class Heap {
return amount;
}
int64_t AllocatedSinceMarkCompact() {
int64_t AllocatedSinceMarkCompact() const {
int64_t total_bytes = total();
int64_t low_since_mark_compact_bytes = low_since_mark_compact();
@@ -502,7 +502,7 @@ class Heap {
inline Address* OldSpaceAllocationLimitAddress();
size_t NewSpaceSize();
size_t NewSpaceCapacity();
size_t NewSpaceCapacity() const;
// Move len non-weak tagged elements from src_slot to dst_slot of dst_object.
// The source and destination memory ranges can overlap.
@@ -660,6 +660,7 @@ class Heap {
}
bool IsGCWithStack() const;
V8_EXPORT_PRIVATE void ForceSharedGCWithEmptyStackForTesting();
// Performs GC after background allocation failure.
void CollectGarbageForBackground(LocalHeap* local_heap);
@@ -853,8 +854,8 @@ class Heap {
return shared_lo_allocation_space_;
}
inline PagedSpace* paged_space(int idx);
inline Space* space(int idx);
inline PagedSpace* paged_space(int idx) const;
inline Space* space(int idx) const;
// ===========================================================================
// Getters to other components. ==============================================
@@ -1284,7 +1285,7 @@ class Heap {
// ===========================================================================
// Returns the maximum amount of memory reserved for the heap.
V8_EXPORT_PRIVATE size_t MaxReserved();
V8_EXPORT_PRIVATE size_t MaxReserved() const;
size_t MaxSemiSpaceSize() { return max_semi_space_size_; }
size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
size_t MaxOldGenerationSize() { return max_old_generation_size(); }
@@ -1313,7 +1314,7 @@ class Heap {
size_t Capacity();
// Returns the capacity of the old generation.
V8_EXPORT_PRIVATE size_t OldGenerationCapacity();
V8_EXPORT_PRIVATE size_t OldGenerationCapacity() const;
// Returns the amount of memory currently held alive by the unmapper.
size_t CommittedMemoryOfUnmapper();
@@ -1431,18 +1432,18 @@ class Heap {
// Returns the size of objects residing in non-new spaces.
// Excludes external memory held by those objects.
V8_EXPORT_PRIVATE size_t OldGenerationSizeOfObjects();
V8_EXPORT_PRIVATE size_t OldGenerationSizeOfObjects() const;
// Returns the size of objects held by the EmbedderHeapTracer.
V8_EXPORT_PRIVATE size_t EmbedderSizeOfObjects() const;
// Returns the global size of objects (embedder + V8 non-new spaces).
V8_EXPORT_PRIVATE size_t GlobalSizeOfObjects();
V8_EXPORT_PRIVATE size_t GlobalSizeOfObjects() const;
// We allow incremental marking to overshoot the V8 and global allocation
// limit for performance reasons. If the overshoot is too large then we are
// more eager to finalize incremental marking.
bool AllocationLimitOvershotByLargeMargin();
bool AllocationLimitOvershotByLargeMargin() const;
// Return the maximum size objects can be before having to allocate them as
// large objects. This takes into account allocating in the code space for
@@ -1466,8 +1467,10 @@ class Heap {
void RemoveGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback,
void* data);
void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags,
GCTracer::Scope::ScopeId scope_id);
void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags,
GCTracer::Scope::ScopeId scope_id);
// ===========================================================================
// Allocation methods. =======================================================
@@ -1634,8 +1637,9 @@ class Heap {
// over all objects.
V8_EXPORT_PRIVATE void MakeHeapIterable();
V8_EXPORT_PRIVATE bool CanPromoteYoungAndExpandOldGeneration(size_t size);
V8_EXPORT_PRIVATE bool CanExpandOldGeneration(size_t size);
V8_EXPORT_PRIVATE bool CanPromoteYoungAndExpandOldGeneration(
size_t size) const;
V8_EXPORT_PRIVATE bool CanExpandOldGeneration(size_t size) const;
inline bool ShouldReduceMemory() const {
return (current_gc_flags_ & kReduceMemoryFootprintMask) != 0;
@@ -1757,7 +1761,7 @@ class Heap {
// Checks whether a global GC is necessary
GarbageCollector SelectGarbageCollector(AllocationSpace space,
GarbageCollectionReason gc_reason,
const char** reason);
const char** reason) const;
// Free all LABs in the heap.
void FreeLinearAllocationAreas();
@@ -1974,7 +1978,7 @@ class Heap {
size_t global_allocation_limit() const { return global_allocation_limit_; }
size_t max_old_generation_size() {
size_t max_old_generation_size() const {
return max_old_generation_size_.load(std::memory_order_relaxed);
}
@@ -2193,7 +2197,7 @@ class Heap {
std::atomic<HeapState> gc_state_{NOT_IN_GC};
// Returns the amount of external memory registered since last global gc.
V8_EXPORT_PRIVATE uint64_t AllocatedExternalMemorySinceMarkCompact();
V8_EXPORT_PRIVATE uint64_t AllocatedExternalMemorySinceMarkCompact() const;
// Starts marking when stress_marking_percentage_% of the marking start limit
// is reached.
@@ -2382,6 +2386,7 @@ class Heap {
bool force_oom_ = false;
bool force_gc_on_next_allocation_ = false;
bool delay_sweeper_tasks_for_testing_ = false;
bool force_shared_gc_with_empty_stack_for_testing_ = false;
UnorderedHeapObjectMap<HeapObject> retainer_;
UnorderedHeapObjectMap<Root> retaining_root_;
@@ -2638,12 +2643,12 @@ class V8_NODISCARD IgnoreLocalGCRequests {
// is done.
class V8_EXPORT_PRIVATE PagedSpaceIterator {
public:
explicit PagedSpaceIterator(Heap* heap)
explicit PagedSpaceIterator(const Heap* heap)
: heap_(heap), counter_(FIRST_GROWABLE_PAGED_SPACE) {}
PagedSpace* Next();
private:
Heap* heap_;
const Heap* const heap_;
int counter_;
};

View File

@@ -574,6 +574,7 @@
F(HEAP_EXTERNAL_EPILOGUE) \
F(HEAP_EXTERNAL_NEAR_HEAP_LIMIT) \
F(HEAP_EXTERNAL_PROLOGUE) \
F(HEAP_EXTERNAL_SECOND_PASS_CALLBACKS) \
F(HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES) \
F(HEAP_PROLOGUE) \
F(HEAP_PROLOGUE_SAFEPOINT) \

View File

@@ -2009,8 +2009,13 @@ class WorkerIsolateThread : public v8::base::Thread {
{
// Disable CSS for the shared heap and all clients.
DisableConservativeStackScanningScopeForTesting no_stack_scanning(
i_client->shared_heap_isolate()->heap());
// DisableConservativeStackScanningScopeForTesting no_stack_scanning(
// i_client->shared_heap_isolate()->heap());
Isolate* gc_isolate = v8_flags.shared_space
? i_client->shared_space_isolate()
: i_client->shared_heap_isolate();
gc_isolate->heap()->ForceSharedGCWithEmptyStackForTesting();
i_client->heap()->CollectGarbageShared(i_client->main_thread_local_heap(),
GarbageCollectionReason::kTesting);
}