[heap] Remove OptionalAlwaysAllocateScope

Using AlwaysAllocateScope on threads other than the isolate's main
thread may violate DCHECKs which assert that the scope is not enabled
there.

However, we were using this mechanism to make allocations performed
by the GC less likely to fail. This CL therefore switches to another
approach: allocations with AllocationOrigin::kGC are simply allowed
to proceed even when the old generation allocation limit has been
reached.

Bug: v8:13267, chromium:1401180
Change-Id: I3c6a34440af3ed87a8b50707006153416b3dbf42
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4111642
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#84882}
Author: Dominik Inführ <dinfuehr@chromium.org>
Date: 2022-12-15 15:53:35 +01:00
Committed by: V8 LUCI CQ
Parent: b411ad250c
Commit: a7e4e3d0ec
6 changed files with 13 additions and 52 deletions
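For orientation before the per-file hunks: the removed scope is replaced by an
origin check in Heap::ShouldExpandOldGenerationOnSlowAllocation. The condensed
sketch below is illustrative only and not part of the CL; the enum and the free
function are stand-ins for the real Heap members, and only the early-return
checks visible in the hunks below are reflected.

#include <cstddef>

// Stand-in for v8::internal::AllocationOrigin (illustrative).
enum class AllocationOrigin { kGeneratedCode, kRuntime, kGC };

// Condensed version of the new decision logic; the Heap state is passed in
// explicitly instead of being read from members.
bool ShouldExpandOldGenerationOnSlowAllocationSketch(
    bool always_allocate, size_t old_generation_space_available,
    AllocationOrigin origin, bool tearing_down) {
  if (always_allocate || old_generation_space_available > 0) return true;
  // The old generation allocation limit has been reached.
  // Allocations made by the GC itself (e.g. promotions) may still expand the
  // old generation; this replaces enabling an AlwaysAllocateScope on the
  // shared heap.
  if (origin == AllocationOrigin::kGC) return true;
  // Background threads may allocate without GC once isolate teardown started.
  if (tearing_down) return true;
  return false;
}

The real function performs further checks after these early returns; they are
omitted from the sketch.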


@@ -229,7 +229,8 @@ ConcurrentAllocator::AllocateFromSpaceFreeList(size_t min_size_in_bytes,
     }
   }
 
-  if (owning_heap()->ShouldExpandOldGenerationOnSlowAllocation(local_heap_) &&
+  if (owning_heap()->ShouldExpandOldGenerationOnSlowAllocation(local_heap_,
+                                                               origin) &&
       owning_heap()->CanExpandOldGenerationBackground(local_heap_,
                                                       space_->AreaSize())) {
     result = space_->TryExpandBackground(max_size_in_bytes);


@@ -508,15 +508,6 @@ AlwaysAllocateScope::~AlwaysAllocateScope() {
   heap_->always_allocate_scope_count_--;
 }
 
-OptionalAlwaysAllocateScope::OptionalAlwaysAllocateScope(Heap* heap)
-    : heap_(heap) {
-  if (heap_) heap_->always_allocate_scope_count_++;
-}
-
-OptionalAlwaysAllocateScope::~OptionalAlwaysAllocateScope() {
-  if (heap_) heap_->always_allocate_scope_count_--;
-}
-
 AlwaysAllocateScopeForTesting::AlwaysAllocateScopeForTesting(Heap* heap)
     : scope_(heap) {}
 


@@ -2550,12 +2550,6 @@ void Heap::MarkCompact() {
   PROFILE(isolate_, CodeMovingGCEvent());
   CodeSpaceMemoryModificationScope code_modification(this);
 
-  // Disable soft allocation limits in the shared heap, if one exists, as
-  // promotions into the shared heap should always succeed.
-  OptionalAlwaysAllocateScope always_allocate_shared_heap(
-      isolate()->has_shared_heap() ? isolate()->shared_heap_isolate()->heap()
-                                   : nullptr);
-
   UpdateOldGenerationAllocationCounter();
   uint64_t size_of_objects_before_gc = SizeOfObjects();
 
@@ -2594,11 +2588,6 @@ void Heap::MinorMarkCompact() {
   TRACE_GC(tracer(), GCTracer::Scope::MINOR_MC);
 
   AlwaysAllocateScope always_allocate(this);
-  // Disable soft allocation limits in the shared heap, if one exists, as
-  // promotions into the shared heap should always succeed.
-  OptionalAlwaysAllocateScope always_allocate_shared_heap(
-      isolate()->has_shared_heap() ? isolate()->shared_heap_isolate()->heap()
-                                   : nullptr);
 
   minor_mark_compact_collector_->Prepare();
   minor_mark_compact_collector_->CollectGarbage();
@@ -2648,12 +2637,6 @@ void Heap::Scavenge() {
   // trigger one during scavenge: scavenges allocation should always succeed.
   AlwaysAllocateScope scope(this);
 
-  // Disable soft allocation limits in the shared heap, if one exists, as
-  // promotions into the shared heap should always succeed.
-  OptionalAlwaysAllocateScope always_allocate_shared_heap(
-      isolate()->has_shared_heap() ? isolate()->shared_heap_isolate()->heap()
-                                   : nullptr);
-
   // Bump-pointer allocations done during scavenge are not real allocations.
   // Pause the inline allocation steps.
   PauseAllocationObserversScope pause_observers(this);
@@ -5196,10 +5179,14 @@ bool Heap::ShouldOptimizeForLoadTime() {
 // major GC. It happens when the old generation allocation limit is reached and
 // - either we need to optimize for memory usage,
 // - or the incremental marking is not in progress and we cannot start it.
-bool Heap::ShouldExpandOldGenerationOnSlowAllocation(LocalHeap* local_heap) {
+bool Heap::ShouldExpandOldGenerationOnSlowAllocation(LocalHeap* local_heap,
+                                                     AllocationOrigin origin) {
   if (always_allocate() || OldGenerationSpaceAvailable() > 0) return true;
   // We reached the old generation allocation limit.
 
+  // Allocations in the GC should always succeed if possible.
+  if (origin == AllocationOrigin::kGC) return true;
+
   // Background threads need to be allowed to allocate without GC after teardown
   // was initiated.
   if (gc_state() == TEAR_DOWN) return true;


@@ -2001,8 +2001,8 @@ class Heap {
   V8_EXPORT_PRIVATE bool CanExpandOldGenerationBackground(LocalHeap* local_heap,
                                                           size_t size);
 
-  bool ShouldExpandOldGenerationOnSlowAllocation(
-      LocalHeap* local_heap = nullptr);
+  bool ShouldExpandOldGenerationOnSlowAllocation(LocalHeap* local_heap,
+                                                 AllocationOrigin origin);
   bool IsRetryOfFailedAllocation(LocalHeap* local_heap);
   bool IsMainThreadParked(LocalHeap* local_heap);
   bool IsMajorMarkingComplete(LocalHeap* local_heap);
@@ -2451,7 +2451,6 @@ class Heap {
   friend class LocalHeap;
   friend class MarkingBarrier;
   friend class OldLargeObjectSpace;
-  friend class OptionalAlwaysAllocateScope;
   template <typename ConcreteVisitor, typename MarkingState>
   friend class MarkingVisitorBase;
   friend class MarkCompactCollector;
@@ -2553,24 +2552,6 @@ class V8_NODISCARD GCCallbacksScope final {
   Heap* const heap_;
 };
 
-// Like AlwaysAllocateScope if the heap argument to the constructor is
-// non-null. No-op otherwise.
-//
-// This class exists because AlwaysAllocateScope doesn't compose with
-// base::Optional, since supporting that composition requires making
-// base::Optional a friend class, defeating the purpose of hiding its
-// constructor.
-class V8_NODISCARD OptionalAlwaysAllocateScope {
- public:
-  inline ~OptionalAlwaysAllocateScope();
-
- private:
-  friend class Heap;
-
-  explicit inline OptionalAlwaysAllocateScope(Heap* heap);
-  Heap* heap_;
-};
-
 class V8_NODISCARD AlwaysAllocateScopeForTesting {
  public:
  explicit inline AlwaysAllocateScopeForTesting(Heap* heap);
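The comment deleted in the hunk above records why OptionalAlwaysAllocateScope
existed at all: a scope whose constructor is hidden behind a friend declaration
cannot be wrapped in an optional. A minimal, hypothetical illustration of that
limitation follows (not V8 code; the class name is made up and std::optional is
used in place of base::Optional):

#include <optional>

class Heap;

// Stand-in for AlwaysAllocateScope: construction is restricted to Heap.
class ScopeWithHiddenCtor {
 public:
  ~ScopeWithHiddenCtor() = default;

 private:
  friend class Heap;
  explicit ScopeWithHiddenCtor(Heap*) {}
};

void MaybeEnterScope(Heap* heap_or_null) {
  std::optional<ScopeWithHiddenCtor> scope;
  if (heap_or_null) {
    // scope.emplace(heap_or_null);  // Does not compile: std::optional is not
    //                               // a friend, so it cannot reach the
    //                               // private constructor.
  }
}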


@@ -149,7 +149,7 @@ AllocationResult OldLargeObjectSpace::AllocateRaw(int object_size,
   // Check if we want to force a GC before growing the old space further.
   // If so, fail the allocation.
   if (!heap()->CanExpandOldGeneration(object_size) ||
       !heap()->ShouldExpandOldGenerationOnSlowAllocation(
-          heap()->main_thread_local_heap())) {
+          heap()->main_thread_local_heap(), AllocationOrigin::kRuntime)) {
     return AllocationResult::Failure();
   }
@@ -185,7 +185,8 @@ AllocationResult OldLargeObjectSpace::AllocateRawBackground(
   // Check if we want to force a GC before growing the old space further.
   // If so, fail the allocation.
   if (!heap()->CanExpandOldGenerationBackground(local_heap, object_size) ||
-      !heap()->ShouldExpandOldGenerationOnSlowAllocation(local_heap)) {
+      !heap()->ShouldExpandOldGenerationOnSlowAllocation(
+          local_heap, AllocationOrigin::kRuntime)) {
     return AllocationResult::Failure();
   }
 


@@ -936,7 +936,7 @@ bool PagedSpaceBase::RawRefillLabMain(int size_in_bytes,
 
   if (identity() != NEW_SPACE &&
       heap()->ShouldExpandOldGenerationOnSlowAllocation(
-          heap()->main_thread_local_heap()) &&
+          heap()->main_thread_local_heap(), origin) &&
       heap()->CanExpandOldGeneration(AreaSize())) {
     if (TryExpand(size_in_bytes, origin)) {
       return true;