From 1f4e8c7c29faf4785b6cc764db6381b13c98873c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Dominik=20Inf=C3=BChr?=
Date: Tue, 14 Jul 2020 13:45:40 +0200
Subject: [PATCH] [heap] Background allocation supports sweeping
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Before actually failing to allocate, let the background thread help to
sweep all pages of that space.

As a drive-by also rename allocation functions to make background and
main thread allocation more similar.

Bug: v8:10315
Change-Id: I26d4b622de949d4943e35071cee1df8b3d2889c2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2297383
Reviewed-by: Ulan Degenbaev
Commit-Queue: Dominik Inführ
Cr-Commit-Position: refs/heads/master@{#68843}
---
 src/heap/concurrent-allocator.cc |  6 ++--
 src/heap/paged-spaces.cc         | 51 ++++++++++++++++++++------------
 src/heap/paged-spaces.h          | 13 ++++----
 3 files changed, 41 insertions(+), 29 deletions(-)

diff --git a/src/heap/concurrent-allocator.cc b/src/heap/concurrent-allocator.cc
index 2875eb671b..46f6682d5f 100644
--- a/src/heap/concurrent-allocator.cc
+++ b/src/heap/concurrent-allocator.cc
@@ -111,7 +111,7 @@ AllocationResult ConcurrentAllocator::AllocateInLabSlow(
 }
 
 bool ConcurrentAllocator::EnsureLab(AllocationOrigin origin) {
-  auto result = space_->SlowGetLinearAllocationAreaBackground(
+  auto result = space_->RawRefillLabBackground(
       local_heap_, kLabSize, kMaxLabSize, kWordAligned, origin);
 
   if (!result) return false;
@@ -135,8 +135,8 @@ bool ConcurrentAllocator::EnsureLab(AllocationOrigin origin) {
 
 AllocationResult ConcurrentAllocator::AllocateOutsideLab(
     int object_size, AllocationAlignment alignment, AllocationOrigin origin) {
-  auto result = space_->SlowGetLinearAllocationAreaBackground(
-      local_heap_, object_size, object_size, alignment, origin);
+  auto result = space_->RawRefillLabBackground(local_heap_, object_size,
+                                               object_size, alignment, origin);
   if (!result) return AllocationResult::Retry(OLD_SPACE);
 
   HeapObject object = HeapObject::FromAddress(result->first);
diff --git a/src/heap/paged-spaces.cc b/src/heap/paged-spaces.cc
index 7de78b89fc..13f266f247 100644
--- a/src/heap/paged-spaces.cc
+++ b/src/heap/paged-spaces.cc
@@ -512,8 +512,8 @@ std::unique_ptr<ObjectIterator> PagedSpace::GetObjectIterator(Heap* heap) {
       new PagedSpaceObjectIterator(heap, this));
 }
 
-bool PagedSpace::RefillLabFromFreeListMain(size_t size_in_bytes,
-                                           AllocationOrigin origin) {
+bool PagedSpace::TryAllocationFromFreeListMain(size_t size_in_bytes,
+                                               AllocationOrigin origin) {
   DCHECK(IsAligned(size_in_bytes, kTaggedSize));
   DCHECK_LE(top(), limit());
 #ifdef DEBUG
@@ -561,12 +561,9 @@ bool PagedSpace::RefillLabFromFreeListMain(size_t size_in_bytes,
   return true;
 }
 
-base::Optional<std::pair<Address, size_t>>
-PagedSpace::SlowGetLinearAllocationAreaBackground(LocalHeap* local_heap,
-                                                  size_t min_size_in_bytes,
-                                                  size_t max_size_in_bytes,
-                                                  AllocationAlignment alignment,
-                                                  AllocationOrigin origin) {
+base::Optional<std::pair<Address, size_t>> PagedSpace::RawRefillLabBackground(
+    LocalHeap* local_heap, size_t min_size_in_bytes, size_t max_size_in_bytes,
+    AllocationAlignment alignment, AllocationOrigin origin) {
   DCHECK(!is_local_space() && identity() == OLD_SPACE);
   DCHECK_EQ(origin, AllocationOrigin::kRuntime);
 
@@ -589,6 +586,8 @@ PagedSpace::SlowGetLinearAllocationAreaBackground(LocalHeap* local_heap,
         local_heap, min_size_in_bytes, max_size_in_bytes, alignment, origin);
     if (result) return result;
 
+    // Now contribute to sweeping from background thread and then try to
+    // reallocate.
     Sweeper::FreeSpaceMayContainInvalidatedSlots
         invalidated_slots_in_free_space =
             Sweeper::FreeSpaceMayContainInvalidatedSlots::kNo;
@@ -620,7 +619,19 @@
     if (result) return result;
   }
 
-  // TODO(dinfuehr): Complete sweeping here and try allocation again.
+  if (collector->sweeping_in_progress()) {
+    // Complete sweeping for this space.
+    collector->DrainSweepingWorklistForSpace(identity());
+
+    {
+      ParkedMutexGuard lock(local_heap, &allocation_mutex_);
+      RefillFreeList();
+    }
+
+    // Last try to acquire memory from free list.
+    return TryAllocationFromFreeListBackground(
+        local_heap, min_size_in_bytes, max_size_in_bytes, alignment, origin);
+  }
 
   return {};
 }
@@ -873,13 +884,13 @@ bool CompactionSpace::RefillLabMain(int size_in_bytes,
 }
 
 bool OffThreadSpace::RefillLabMain(int size_in_bytes, AllocationOrigin origin) {
-  if (RefillLabFromFreeListMain(size_in_bytes, origin)) return true;
+  if (TryAllocationFromFreeListMain(size_in_bytes, origin)) return true;
 
   if (heap()->CanExpandOldGenerationBackground(size_in_bytes) && Expand()) {
     DCHECK((CountTotalPages() > 1) ||
            (static_cast<size_t>(size_in_bytes) <= free_list_->Available()));
-    return RefillLabFromFreeListMain(static_cast<size_t>(size_in_bytes),
-                                     origin);
+    return TryAllocationFromFreeListMain(static_cast<size_t>(size_in_bytes),
+                                         origin);
   }
 
   return false;
@@ -893,7 +904,7 @@ bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
   DCHECK_GE(size_in_bytes, 0);
   const int kMaxPagesToSweep = 1;
 
-  if (RefillLabFromFreeListMain(size_in_bytes, origin)) return true;
+  if (TryAllocationFromFreeListMain(size_in_bytes, origin)) return true;
 
   MarkCompactCollector* collector = heap()->mark_compact_collector();
   // Sweeping is still in progress.
@@ -908,7 +919,8 @@ bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
     RefillFreeList();
 
     // Retry the free list allocation.
-    if (RefillLabFromFreeListMain(static_cast<size_t>(size_in_bytes), origin))
+    if (TryAllocationFromFreeListMain(static_cast<size_t>(size_in_bytes),
+                                      origin))
       return true;
 
     if (ContributeToSweepingMain(size_in_bytes, kMaxPagesToSweep, size_in_bytes,
@@ -923,7 +935,8 @@ bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
     Page* page = main_space->RemovePageSafe(size_in_bytes);
     if (page != nullptr) {
       AddPage(page);
-      if (RefillLabFromFreeListMain(static_cast<size_t>(size_in_bytes), origin))
+      if (TryAllocationFromFreeListMain(static_cast<size_t>(size_in_bytes),
+                                        origin))
         return true;
     }
   }
@@ -937,8 +950,8 @@ bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
       }
       DCHECK((CountTotalPages() > 1) ||
             (static_cast<size_t>(size_in_bytes) <= free_list_->Available()));
-      return RefillLabFromFreeListMain(static_cast<size_t>(size_in_bytes),
-                                       origin);
+      return TryAllocationFromFreeListMain(static_cast<size_t>(size_in_bytes),
+                                           origin);
     }
   }
 
@@ -953,7 +966,7 @@ bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
     RefillFreeList();
 
    // Last try to acquire memory from free list.
-    return RefillLabFromFreeListMain(size_in_bytes, origin);
+    return TryAllocationFromFreeListMain(size_in_bytes, origin);
   }
   return false;
 }
@@ -975,7 +988,7 @@ bool PagedSpace::ContributeToSweepingMain(int required_freed_bytes,
         invalidated_slots_in_free_space);
     RefillFreeList();
     if (max_freed >= size_in_bytes)
-      return RefillLabFromFreeListMain(size_in_bytes, origin);
+      return TryAllocationFromFreeListMain(size_in_bytes, origin);
   }
   return false;
 }
diff --git a/src/heap/paged-spaces.h b/src/heap/paged-spaces.h
index 40a74a5009..3caf3c7f7f 100644
--- a/src/heap/paged-spaces.h
+++ b/src/heap/paged-spaces.h
@@ -148,11 +148,10 @@ class V8_EXPORT_PRIVATE PagedSpace
   // Allocate the requested number of bytes in the space from a background
   // thread.
   V8_WARN_UNUSED_RESULT base::Optional<std::pair<Address, size_t>>
-  SlowGetLinearAllocationAreaBackground(LocalHeap* local_heap,
-                                        size_t min_size_in_bytes,
-                                        size_t max_size_in_bytes,
-                                        AllocationAlignment alignment,
-                                        AllocationOrigin origin);
+  RawRefillLabBackground(LocalHeap* local_heap, size_t min_size_in_bytes,
+                         size_t max_size_in_bytes,
+                         AllocationAlignment alignment,
+                         AllocationOrigin origin);
 
   size_t Free(Address start, size_t size_in_bytes, SpaceAccountingMode mode) {
     if (size_in_bytes == 0) return 0;
@@ -364,8 +363,8 @@ class V8_EXPORT_PRIVATE PagedSpace
   inline AllocationResult TryAllocateLinearlyAligned(
       int* size_in_bytes, AllocationAlignment alignment);
 
-  V8_WARN_UNUSED_RESULT bool RefillLabFromFreeListMain(size_t size_in_bytes,
-                                                       AllocationOrigin origin);
+  V8_WARN_UNUSED_RESULT bool TryAllocationFromFreeListMain(
+      size_t size_in_bytes, AllocationOrigin origin);
 
   V8_WARN_UNUSED_RESULT bool ContributeToSweepingMain(int required_freed_bytes,
                                                       int max_pages,
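
Illustrative sketch (not part of the patch, and not V8 code): the standalone
C++17 program below models the slow path this change gives
PagedSpace::RawRefillLabBackground: try the free list, contribute a bounded
amount of sweeping and retry, and, new with this patch, drain the remaining
sweeping work for the space, refill the free list under the allocation mutex,
and make one last free-list attempt before failing. All names below
(FakeSweeper, FakeSpace, SweepPages, ...) are hypothetical stand-ins, and
std::lock_guard only approximates ParkedMutexGuard, which additionally parks
the local heap while blocked.

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <mutex>
#include <optional>
#include <utility>

using Address = std::uintptr_t;

// Hypothetical sweeper: pages swept by any thread deposit free bytes into a
// pool that RefillFreeList() later moves onto the space's free list.
struct FakeSweeper {
  std::size_t unswept_pages = 4;
  std::size_t swept_bytes = 0;
  bool sweeping_in_progress() const { return unswept_pages > 0; }
  void SweepPages(std::size_t max_pages) {
    std::size_t n = std::min(max_pages, unswept_pages);
    unswept_pages -= n;
    swept_bytes += n * 1024;  // pretend each swept page frees 1 KiB
  }
  // Stand-in for DrainSweepingWorklistForSpace(): sweep everything left.
  void DrainAll() { SweepPages(unswept_pages); }
};

// Hypothetical paged space with just enough state for the control flow.
struct FakeSpace {
  FakeSweeper sweeper;
  std::size_t free_list_bytes = 0;
  std::mutex allocation_mutex;

  // Move swept memory onto the free list (stand-in for RefillFreeList()).
  void RefillFreeList() {
    free_list_bytes += sweeper.swept_bytes;
    sweeper.swept_bytes = 0;
  }

  std::optional<std::pair<Address, std::size_t>>
  TryAllocationFromFreeListBackground(std::size_t min_size,
                                      std::size_t max_size) {
    if (free_list_bytes < min_size) return std::nullopt;
    std::size_t got = std::min(free_list_bytes, max_size);
    free_list_bytes -= got;
    return std::make_pair(Address{0x1000}, got);  // fake base address
  }

  // Heavily simplified model of the background slow path after this patch.
  std::optional<std::pair<Address, std::size_t>> RawRefillLabBackground(
      std::size_t min_size, std::size_t max_size) {
    if (auto r = TryAllocationFromFreeListBackground(min_size, max_size))
      return r;

    if (sweeper.sweeping_in_progress()) {
      // Contribute a bounded amount of sweeping, refill, and retry.
      sweeper.SweepPages(/*max_pages=*/1);
      {
        std::lock_guard<std::mutex> lock(allocation_mutex);  // ~ ParkedMutexGuard
        RefillFreeList();
      }
      if (auto r = TryAllocationFromFreeListBackground(min_size, max_size))
        return r;
    }

    if (sweeper.sweeping_in_progress()) {
      // New with this patch: finish sweeping the whole space before failing.
      sweeper.DrainAll();
      {
        std::lock_guard<std::mutex> lock(allocation_mutex);
        RefillFreeList();
      }
      // Last try to acquire memory from the free list.
      return TryAllocationFromFreeListBackground(min_size, max_size);
    }
    return std::nullopt;
  }
};

int main() {
  FakeSpace space;
  auto result =
      space.RawRefillLabBackground(/*min_size=*/2048, /*max_size=*/4096);
  if (result) {
    std::cout << "allocated " << result->second << " bytes\n";
  } else {
    std::cout << "allocation failed\n";
  }
  return 0;
}

Completing sweeping before giving up mirrors the main-thread slow path, where
RawRefillLabMain already drains the sweeping worklist and makes a final
free-list attempt; the rename to TryAllocationFromFreeListMain and
TryAllocationFromFreeListBackground makes the two paths easier to compare.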