[heap] Simplify AllocateRawAligned and AllocateRawUnaligned

AllocateRaw(Aligned|Unaligned) will now refill the LAB first and then
invoke AllocateFast(Aligned|Unaligned) again.

Bug: v8:10315
Change-Id: Ifa6e645b33a0c57a118de9cc15b0a38ad407a0c9
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2319991
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69074}
Author: Dominik Inführ <dinfuehr@chromium.org>
Date: 2020-07-27 11:00:21 +02:00 (committed by Commit Bot)
parent aba951101c
commit ccd0bf3f6b
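
The refactored control flow is the classic bump-pointer pattern: a fast path
that only bumps the top pointer, and a slow path that makes room in the linear
allocation buffer (LAB) and then re-runs the fast path. A minimal,
self-contained sketch of that pattern follows; all names here (BumpAllocator,
Lab, kLabSize, the EnsureAllocation signature) are illustrative, not V8's.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdlib>

// Linear allocation buffer (LAB): allocation bumps `top` toward `limit`.
struct Lab {
  uintptr_t top = 0;
  uintptr_t limit = 0;
};

class BumpAllocator {
 public:
  // Fast path: succeeds iff the current LAB has room for the request.
  void* AllocateFast(size_t size) {
    if (lab_.limit - lab_.top < size) return nullptr;
    void* result = reinterpret_cast<void*>(lab_.top);
    lab_.top += size;
    return result;
  }

  // Slow path, as in the simplified AllocateRaw(Aligned|Unaligned):
  // refill the LAB first, then invoke the fast path again, which is
  // now guaranteed to succeed.
  void* AllocateSlow(size_t size) {
    if (!EnsureAllocation(size)) return nullptr;  // ~ AllocationResult::Retry()
    void* result = AllocateFast(size);
    assert(result != nullptr);  // ~ DCHECK(!result.IsRetry())
    return result;
  }

 private:
  // Acquire a fresh LAB with at least `size` bytes. (This sketch leaks the
  // old block; a real heap would track and sweep it.)
  bool EnsureAllocation(size_t size) {
    size_t lab_size = size < kLabSize ? kLabSize : size;
    void* block = std::malloc(lab_size);
    if (block == nullptr) return false;
    lab_.top = reinterpret_cast<uintptr_t>(block);
    lab_.limit = lab_.top + lab_size;
    return true;
  }

  static constexpr size_t kLabSize = 32 * 1024;
  Lab lab_;
};

The invariant that makes the DCHECKs in the diff below sound: once
EnsureAllocation() succeeds, the fast path cannot fail, so the slow path no
longer needs its own copy of the bump-pointer logic.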

@@ -589,63 +589,26 @@ AllocationResult NewSpace::AllocateRawSlow(int size_in_bytes,
 
 AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes,
                                                 AllocationOrigin origin) {
-  Address top = allocation_info_.top();
-  if (allocation_info_.limit() < top + size_in_bytes) {
-    // See if we can create room.
-    if (!EnsureAllocation(size_in_bytes, kWordAligned)) {
-      return AllocationResult::Retry();
-    }
-    top = allocation_info_.top();
+  if (!EnsureAllocation(size_in_bytes, kWordAligned)) {
+    return AllocationResult::Retry();
   }
 
-  HeapObject obj = HeapObject::FromAddress(top);
-  allocation_info_.set_top(top + size_in_bytes);
-  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
-
-  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);
-
-  if (FLAG_trace_allocations_origins) {
-    UpdateAllocationOrigins(origin);
-  }
-
-  return obj;
+  AllocationResult result = AllocateFastUnaligned(size_in_bytes, origin);
+  DCHECK(!result.IsRetry());
+  return result;
 }
 
 AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                               AllocationAlignment alignment,
                                               AllocationOrigin origin) {
-  Address top = allocation_info_.top();
-  int filler_size = Heap::GetFillToAlign(top, alignment);
-  int aligned_size_in_bytes = size_in_bytes + filler_size;
-
-  if (allocation_info_.limit() - top <
-      static_cast<uintptr_t>(aligned_size_in_bytes)) {
-    // See if we can create room.
-    if (!EnsureAllocation(size_in_bytes, alignment)) {
-      return AllocationResult::Retry();
-    }
-
-    top = allocation_info_.top();
-    filler_size = Heap::GetFillToAlign(top, alignment);
-    aligned_size_in_bytes = size_in_bytes + filler_size;
+  if (!EnsureAllocation(size_in_bytes, alignment)) {
+    return AllocationResult::Retry();
   }
 
-  HeapObject obj = HeapObject::FromAddress(top);
-  allocation_info_.set_top(top + aligned_size_in_bytes);
-  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
-
-  if (filler_size > 0) {
-    obj = Heap::PrecedeWithFiller(ReadOnlyRoots(heap()), obj, filler_size);
-  }
-
-  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);
-
-  if (FLAG_trace_allocations_origins) {
-    UpdateAllocationOrigins(origin);
-  }
-
-  return obj;
+  AllocationResult result =
+      AllocateFastAligned(size_in_bytes, alignment, origin);
+  DCHECK(!result.IsRetry());
+  return result;
 }
 
 #ifdef VERIFY_HEAP
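
The AllocateFast(Aligned|Unaligned) helpers the new code delegates to are
outside this hunk; only their names and arguments appear in the diff. A
plausible reconstruction of AllocateFastUnaligned from the lines removed
above (a sketch, not the file's actual contents):

AllocationResult NewSpace::AllocateFastUnaligned(int size_in_bytes,
                                                 AllocationOrigin origin) {
  Address top = allocation_info_.top();
  // Fail if the LAB cannot fit the request; the slow path will call
  // EnsureAllocation() and retry.
  if (allocation_info_.limit() < top + size_in_bytes) {
    return AllocationResult::Retry();
  }

  HeapObject obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);

  if (FLAG_trace_allocations_origins) {
    UpdateAllocationOrigins(origin);
  }

  return obj;
}

AllocateFastAligned would presumably mirror the removed aligned path in the
same way: compute filler_size via Heap::GetFillToAlign(), bump top by the
aligned size, and prepend a filler object via Heap::PrecedeWithFiller() when
filler_size > 0.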