[heap] Stop allocation in StressConcurrentAllocatorTask on tear down
StressConcurrentAllocatorTask now checks whether tear down was started for
the isolate to avoid allocation failures. As a drive-by change, remove the
unused method ConcurrentAllocator::PerformCollectionAndAllocateAgain.

Bug: v8:10315
Change-Id: Iba329ebbd782e9f8f11e9b8ec644bf28ab9c80ab
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2423703
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70044}
parent a0afb01145
commit 5b7cb7f4eb
@@ -27,6 +27,9 @@ void StressConcurrentAllocatorTask::RunInternal() {
                        MemoryChunkLayout::ObjectStartOffsetInDataPage());
 
   for (int i = 0; i < kNumIterations; i++) {
+    // Isolate tear down started, stop allocation...
+    if (heap->gc_state() == Heap::TEAR_DOWN) return;
+
     Address address = local_heap.AllocateRawOrFail(
         kSmallObjectSize, AllocationType::kOld, AllocationOrigin::kRuntime,
         AllocationAlignment::kWordAligned);
@@ -62,27 +65,6 @@ void StressConcurrentAllocatorTask::Schedule(Isolate* isolate) {
                                                       kDelayInSeconds);
 }
 
-Address ConcurrentAllocator::PerformCollectionAndAllocateAgain(
-    int object_size, AllocationAlignment alignment, AllocationOrigin origin) {
-  Heap* heap = local_heap_->heap();
-  local_heap_->allocation_failed_ = true;
-
-  for (int i = 0; i < 3; i++) {
-    {
-      ParkedScope scope(local_heap_);
-      heap->RequestAndWaitForCollection();
-    }
-
-    AllocationResult result = AllocateRaw(object_size, alignment, origin);
-    if (!result.IsRetry()) {
-      local_heap_->allocation_failed_ = false;
-      return result.ToObjectChecked().address();
-    }
-  }
-
-  heap->FatalProcessOutOfMemory("ConcurrentAllocator: allocation failed");
-}
-
 void ConcurrentAllocator::FreeLinearAllocationArea() {
   lab_.CloseAndMakeIterable();
 }
@@ -63,9 +63,6 @@ class ConcurrentAllocator {
   V8_EXPORT_PRIVATE AllocationResult AllocateOutsideLab(
       int object_size, AllocationAlignment alignment, AllocationOrigin origin);
 
-  V8_EXPORT_PRIVATE Address PerformCollectionAndAllocateAgain(
-      int object_size, AllocationAlignment alignment, AllocationOrigin origin);
-
   LocalHeap* const local_heap_;
   PagedSpace* const space_;
   LocalAllocationBuffer lab_;
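For illustration, here is a minimal standalone sketch of the bail-out pattern this change introduces: a background allocation task polls the heap's tear-down state at the top of each iteration and returns before attempting further allocation. FakeHeap, FakeLocalHeap, and RunStressTask are hypothetical stand-ins invented only for this sketch; the real change uses Heap::gc_state(), Heap::TEAR_DOWN, and LocalHeap::AllocateRawOrFail exactly as shown in the diff above.

// Standalone C++ sketch, not V8 code: FakeHeap, FakeLocalHeap and
// RunStressTask are hypothetical stand-ins used only for illustration.
#include <atomic>
#include <cstdio>
#include <new>

enum class GCState { kNotInGC, kTearDown };

struct FakeHeap {
  // Stand-in for Heap::gc_state() reaching Heap::TEAR_DOWN.
  std::atomic<GCState> gc_state{GCState::kNotInGC};
};

struct FakeLocalHeap {
  explicit FakeLocalHeap(FakeHeap* heap) : heap_(heap) {}
  // Stand-in for LocalHeap::AllocateRawOrFail; here it simply takes memory
  // from the C++ heap instead of the V8 old space.
  void* AllocateRawOrFail(std::size_t size) { return ::operator new(size); }
  FakeHeap* heap_;
};

void RunStressTask(FakeHeap* heap, int iterations) {
  FakeLocalHeap local_heap(heap);
  for (int i = 0; i < iterations; i++) {
    // Isolate tear down started, stop allocation (the check the diff adds).
    if (heap->gc_state.load(std::memory_order_relaxed) == GCState::kTearDown)
      return;
    void* address = local_heap.AllocateRawOrFail(64);
    ::operator delete(address);
  }
}

int main() {
  FakeHeap heap;
  RunStressTask(&heap, 10);            // Allocates normally.
  heap.gc_state = GCState::kTearDown;  // Simulate isolate tear down.
  RunStressTask(&heap, 10);            // Returns without allocating.
  std::puts("done");
  return 0;
}

The relaxed atomic load here is only a placeholder for reading shared heap state from a worker thread; the real task reads Heap::gc_state() under V8's own synchronization rather than a raw std::atomic.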