From a7e4e3d0ec3b354a2f6341545395243aa878509d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Dominik=20Inf=C3=BChr?=
Date: Thu, 15 Dec 2022 15:53:35 +0100
Subject: [PATCH] [heap] Remove OptionalAlwaysAllocateScope
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Simply using the AlwaysAllocateScope on threads other than the
isolate's main thread may violate DCHECKs asserting that this scope
isn't enabled. However, we were using this mechanism to make GC
allocations less likely to fail. This CL therefore switches to another
approach where we simply allow allocations that originate from the GC
to expand the old generation even when the soft allocation limit has
been reached.
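For illustration, a minimal standalone C++ sketch of the new policy.
FakeHeap, its fields, and ShouldExpandOldGeneration are simplified,
hypothetical stand-ins for the real Heap state and
Heap::ShouldExpandOldGenerationOnSlowAllocation, not actual V8 API:

    #include <cassert>

    // Simplified stand-ins for the real V8 types; only the decision
    // logic relevant to this CL is modeled here.
    enum class AllocationOrigin { kGeneratedCode, kRuntime, kGC };

    struct FakeHeap {
      bool always_allocate = false;  // an AlwaysAllocateScope is active
      long old_gen_available = 0;    // bytes left before the soft limit
      bool tear_down = false;        // gc_state() == TEAR_DOWN

      // Mirrors the shape of the patched decision: allocations
      // originating in the GC bypass the soft limit.
      bool ShouldExpandOldGeneration(AllocationOrigin origin) const {
        if (always_allocate || old_gen_available > 0) return true;
        // We reached the old generation allocation limit.
        // Allocations in the GC should always succeed if possible.
        if (origin == AllocationOrigin::kGC) return true;
        // Background threads may still allocate after teardown started.
        if (tear_down) return true;
        return false;  // the real method checks several more conditions
      }
    };

    int main() {
      FakeHeap heap;  // soft limit reached, no AlwaysAllocateScope active
      assert(!heap.ShouldExpandOldGeneration(AllocationOrigin::kRuntime));
      assert(heap.ShouldExpandOldGeneration(AllocationOrigin::kGC));
      return 0;
    }

GC-internal promotions into the shared heap, which previously relied on
OptionalAlwaysAllocateScope, now take the kGC early return instead.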
Bug: v8:13267, chromium:1401180
Change-Id: I3c6a34440af3ed87a8b50707006153416b3dbf42
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4111642
Commit-Queue: Dominik Inführ
Reviewed-by: Michael Lippautz
Cr-Commit-Position: refs/heads/main@{#84882}
---
 src/heap/concurrent-allocator.cc |  3 ++-
 src/heap/heap-inl.h              |  9 ---------
 src/heap/heap.cc                 | 23 +++++------------------
 src/heap/heap.h                  | 23 ++---------------------
 src/heap/large-spaces.cc         |  5 +++--
 src/heap/paged-spaces.cc         |  2 +-
 6 files changed, 13 insertions(+), 52 deletions(-)

diff --git a/src/heap/concurrent-allocator.cc b/src/heap/concurrent-allocator.cc
index 7d87b6efbb..7f63bc09bd 100644
--- a/src/heap/concurrent-allocator.cc
+++ b/src/heap/concurrent-allocator.cc
@@ -229,7 +229,8 @@ ConcurrentAllocator::AllocateFromSpaceFreeList(size_t min_size_in_bytes,
     }
   }

-  if (owning_heap()->ShouldExpandOldGenerationOnSlowAllocation(local_heap_) &&
+  if (owning_heap()->ShouldExpandOldGenerationOnSlowAllocation(local_heap_,
+                                                               origin) &&
       owning_heap()->CanExpandOldGenerationBackground(local_heap_,
                                                       space_->AreaSize())) {
     result = space_->TryExpandBackground(max_size_in_bytes);
diff --git a/src/heap/heap-inl.h b/src/heap/heap-inl.h
index 0bade3c641..b1d96b1273 100644
--- a/src/heap/heap-inl.h
+++ b/src/heap/heap-inl.h
@@ -508,15 +508,6 @@ AlwaysAllocateScope::~AlwaysAllocateScope() {
   heap_->always_allocate_scope_count_--;
 }

-OptionalAlwaysAllocateScope::OptionalAlwaysAllocateScope(Heap* heap)
-    : heap_(heap) {
-  if (heap_) heap_->always_allocate_scope_count_++;
-}
-
-OptionalAlwaysAllocateScope::~OptionalAlwaysAllocateScope() {
-  if (heap_) heap_->always_allocate_scope_count_--;
-}
-
 AlwaysAllocateScopeForTesting::AlwaysAllocateScopeForTesting(Heap* heap)
     : scope_(heap) {}

diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 51a90ddcab..585a17344c 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -2550,12 +2550,6 @@ void Heap::MarkCompact() {
   PROFILE(isolate_, CodeMovingGCEvent());
   CodeSpaceMemoryModificationScope code_modification(this);

-  // Disable soft allocation limits in the shared heap, if one exists, as
-  // promotions into the shared heap should always succeed.
-  OptionalAlwaysAllocateScope always_allocate_shared_heap(
-      isolate()->has_shared_heap() ? isolate()->shared_heap_isolate()->heap()
-                                   : nullptr);
-
   UpdateOldGenerationAllocationCounter();
   uint64_t size_of_objects_before_gc = SizeOfObjects();

@@ -2594,11 +2588,6 @@ void Heap::MinorMarkCompact() {
   TRACE_GC(tracer(), GCTracer::Scope::MINOR_MC);
   AlwaysAllocateScope always_allocate(this);

-  // Disable soft allocation limits in the shared heap, if one exists, as
-  // promotions into the shared heap should always succeed.
-  OptionalAlwaysAllocateScope always_allocate_shared_heap(
-      isolate()->has_shared_heap() ? isolate()->shared_heap_isolate()->heap()
-                                   : nullptr);

   minor_mark_compact_collector_->Prepare();
   minor_mark_compact_collector_->CollectGarbage();
@@ -2648,12 +2637,6 @@ void Heap::Scavenge() {
   // trigger one during scavenge: scavenges allocation should always succeed.
   AlwaysAllocateScope scope(this);

-  // Disable soft allocation limits in the shared heap, if one exists, as
-  // promotions into the shared heap should always succeed.
-  OptionalAlwaysAllocateScope always_allocate_shared_heap(
-      isolate()->has_shared_heap() ? isolate()->shared_heap_isolate()->heap()
-                                   : nullptr);
-
   // Bump-pointer allocations done during scavenge are not real allocations.
   // Pause the inline allocation steps.
   PauseAllocationObserversScope pause_observers(this);
@@ -5196,10 +5179,14 @@ bool Heap::ShouldOptimizeForLoadTime() {
 // major GC. It happens when the old generation allocation limit is reached and
 // - either we need to optimize for memory usage,
 // - or the incremental marking is not in progress and we cannot start it.
-bool Heap::ShouldExpandOldGenerationOnSlowAllocation(LocalHeap* local_heap) {
+bool Heap::ShouldExpandOldGenerationOnSlowAllocation(LocalHeap* local_heap,
+                                                     AllocationOrigin origin) {
   if (always_allocate() || OldGenerationSpaceAvailable() > 0) return true;
   // We reached the old generation allocation limit.

+  // Allocations in the GC should always succeed if possible.
+  if (origin == AllocationOrigin::kGC) return true;
+
   // Background threads need to be allowed to allocate without GC after teardown
   // was initiated.
   if (gc_state() == TEAR_DOWN) return true;
diff --git a/src/heap/heap.h b/src/heap/heap.h
index 3391feda3b..0c4734ecae 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -2001,8 +2001,8 @@ class Heap {
   V8_EXPORT_PRIVATE bool CanExpandOldGenerationBackground(LocalHeap* local_heap,
                                                           size_t size);

-  bool ShouldExpandOldGenerationOnSlowAllocation(
-      LocalHeap* local_heap = nullptr);
+  bool ShouldExpandOldGenerationOnSlowAllocation(LocalHeap* local_heap,
+                                                 AllocationOrigin origin);
   bool IsRetryOfFailedAllocation(LocalHeap* local_heap);
   bool IsMainThreadParked(LocalHeap* local_heap);
   bool IsMajorMarkingComplete(LocalHeap* local_heap);
@@ -2451,7 +2451,6 @@ class Heap {
   friend class LocalHeap;
   friend class MarkingBarrier;
   friend class OldLargeObjectSpace;
-  friend class OptionalAlwaysAllocateScope;
   template <typename ConcreteVisitor, typename MarkingState>
   friend class MarkingVisitorBase;
   friend class MarkCompactCollector;
@@ -2553,24 +2552,6 @@ class V8_NODISCARD GCCallbacksScope final {
   Heap* const heap_;
 };

-// Like AlwaysAllocateScope if the heap argument to the constructor is
-// non-null. No-op otherwise.
-//
-// This class exists because AlwaysAllocateScope doesn't compose with
-// base::Optional, since supporting that composition requires making
-// base::Optional a friend class, defeating the purpose of hiding its
-// constructor.
-class V8_NODISCARD OptionalAlwaysAllocateScope {
- public:
-  inline ~OptionalAlwaysAllocateScope();
-
- private:
-  friend class Heap;
-
-  explicit inline OptionalAlwaysAllocateScope(Heap* heap);
-  Heap* heap_;
-};
-
 class V8_NODISCARD AlwaysAllocateScopeForTesting {
  public:
   explicit inline AlwaysAllocateScopeForTesting(Heap* heap);
diff --git a/src/heap/large-spaces.cc b/src/heap/large-spaces.cc
index 3320acc096..07a5c46426 100644
--- a/src/heap/large-spaces.cc
+++ b/src/heap/large-spaces.cc
@@ -149,7 +149,7 @@ AllocationResult OldLargeObjectSpace::AllocateRaw(int object_size,
   // If so, fail the allocation.
   if (!heap()->CanExpandOldGeneration(object_size) ||
       !heap()->ShouldExpandOldGenerationOnSlowAllocation(
-          heap()->main_thread_local_heap())) {
+          heap()->main_thread_local_heap(), AllocationOrigin::kRuntime)) {
     return AllocationResult::Failure();
   }

@@ -185,7 +185,8 @@ AllocationResult OldLargeObjectSpace::AllocateRawBackground(
   // Check if we want to force a GC before growing the old space further.
   // If so, fail the allocation.
   if (!heap()->CanExpandOldGenerationBackground(local_heap, object_size) ||
-      !heap()->ShouldExpandOldGenerationOnSlowAllocation(local_heap)) {
+      !heap()->ShouldExpandOldGenerationOnSlowAllocation(
+          local_heap, AllocationOrigin::kRuntime)) {
     return AllocationResult::Failure();
   }

diff --git a/src/heap/paged-spaces.cc b/src/heap/paged-spaces.cc
index e85b504da8..0d7e8e7103 100644
--- a/src/heap/paged-spaces.cc
+++ b/src/heap/paged-spaces.cc
@@ -936,7 +936,7 @@ bool PagedSpaceBase::RawRefillLabMain(int size_in_bytes,

   if (identity() != NEW_SPACE &&
       heap()->ShouldExpandOldGenerationOnSlowAllocation(
-          heap()->main_thread_local_heap()) &&
+          heap()->main_thread_local_heap(), origin) &&
       heap()->CanExpandOldGeneration(AreaSize())) {
     if (TryExpand(size_in_bytes, origin)) {
       return true;