Mirror of https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator (synced 2024-11-05 04:10:06 +00:00)
Commit 14469a5eea, parent 85a2580529

The hunks below rework several switch statements in the VMA implementation so that the default: label (with its VMA_ASSERT(0)) comes last instead of falling through into a valid case, and they replace one switch in VmaDefragmentationContext_T::DefragmentPassEnd with a single-iteration do { ... } while (false) block.
@@ -11229,14 +11229,15 @@ VmaVirtualBlock_T::VmaVirtualBlock_T(const VmaVirtualBlockCreateInfo& createInfo
     const uint32_t algorithm = createInfo.flags & VMA_VIRTUAL_BLOCK_CREATE_ALGORITHM_MASK;
     switch (algorithm)
     {
-    default:
-        VMA_ASSERT(0);
     case 0:
         m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_TLSF)(VK_NULL_HANDLE, 1, true);
         break;
     case VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT:
         m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_Linear)(VK_NULL_HANDLE, 1, true);
         break;
+    default:
+        VMA_ASSERT(0);
+        m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_TLSF)(VK_NULL_HANDLE, 1, true);
     }

     m_Metadata->Init(createInfo.size);
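Not part of the diff: the algorithm value switched on above is taken directly from the flags the application passes when it creates a virtual block. A minimal usage sketch against the public VMA virtual-allocation API (sizes are arbitrary example values, error handling omitted):

// Sketch only: selecting the linear algorithm for a virtual block.
// VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT routes the switch above to VmaBlockMetadata_Linear;
// leaving flags at 0 selects the default TLSF metadata.
VmaVirtualBlockCreateInfo blockCreateInfo = {};
blockCreateInfo.size = 1024 * 1024; // 1 MiB of virtual address space (arbitrary example value)
blockCreateInfo.flags = VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT;

VmaVirtualBlock block = VK_NULL_HANDLE;
VkResult res = vmaCreateVirtualBlock(&blockCreateInfo, &block);

VmaVirtualAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.size = 4096; // arbitrary example value

VmaVirtualAllocation alloc = VK_NULL_HANDLE;
VkDeviceSize offset = 0;
res = vmaVirtualAllocate(block, &allocCreateInfo, &alloc, &offset);

vmaVirtualFree(block, alloc);
vmaDestroyVirtualBlock(block);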
@@ -11720,14 +11721,16 @@ void VmaDeviceMemoryBlock::Init(

     switch (algorithm)
     {
+    case 0:
+        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_TLSF)(hAllocator->GetAllocationCallbacks(),
+            bufferImageGranularity, false); // isVirtual
+        break;
     case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
         m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator->GetAllocationCallbacks(),
             bufferImageGranularity, false); // isVirtual
         break;
     default:
         VMA_ASSERT(0);
-        // Fall-through.
-    case 0:
         m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_TLSF)(hAllocator->GetAllocationCallbacks(),
             bufferImageGranularity, false); // isVirtual
     }
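Not part of the diff: the algorithm here is derived from the flags of the custom pool that owns the block. A minimal sketch of creating such a pool through the public API (memoryTypeIndex and allocator are assumed to exist already, e.g. the index found via vmaFindMemoryTypeIndexForBufferInfo; values are arbitrary):

// Sketch only: a custom pool whose blocks use the linear metadata
// (VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT); flags = 0 would keep the default TLSF path.
VmaPoolCreateInfo poolCreateInfo = {};
poolCreateInfo.memoryTypeIndex = memoryTypeIndex; // assumed found earlier
poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
poolCreateInfo.blockSize = 16ull * 1024 * 1024;   // 16 MiB blocks, arbitrary example value
poolCreateInfo.maxBlockCount = 2;                 // optional limit, arbitrary

VmaPool pool = VK_NULL_HANDLE;
VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
// ... allocate with VmaAllocationCreateInfo::pool = pool ...
vmaDestroyPool(allocator, pool);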
@@ -12973,20 +12976,18 @@ VmaDefragmentationContext_T::VmaDefragmentationContext_T(
     {
     case 0: // Default algorithm
         m_Algorithm = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT;
-    case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT:
-    {
-        m_AlgorithmState = vma_new_array(hAllocator, StateBalanced, m_BlockVectorCount);
-        break;
-    }
+    case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT:
+        m_AlgorithmState = vma_new_array(hAllocator, StateBalanced, m_BlockVectorCount);
+        break;
     case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT:
     {
         if (hAllocator->GetBufferImageGranularity() > 1)
         {
             m_AlgorithmState = vma_new_array(hAllocator, StateExtensive, m_BlockVectorCount);
         }
         break;
     }
     }
 }

 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
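Not part of the diff: m_Algorithm above is whatever the application put into VmaDefragmentationInfo::flags, with 0 mapping to the balanced algorithm. A minimal sketch of driving a defragmentation with an explicitly chosen algorithm (allocator is an existing VmaAllocator; the per-move data copy a real application must perform inside each pass is omitted):

// Sketch only: incremental defragmentation using the balanced algorithm.
VmaDefragmentationInfo defragInfo = {};
defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT; // 0 would also select balanced

VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
VkResult res = vmaBeginDefragmentation(allocator, &defragInfo, &defragCtx);

for (;;)
{
    VmaDefragmentationPassMoveInfo pass = {};
    res = vmaBeginDefragmentationPass(allocator, defragCtx, &pass);
    if (res == VK_SUCCESS)
        break; // nothing (more) to move

    // A real application copies the contents of each pass.pMoves[i] here,
    // or marks a move VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE, before ending the pass.

    res = vmaEndDefragmentationPass(allocator, defragCtx, &pass);
    if (res == VK_SUCCESS)
        break;
    // VK_INCOMPLETE: more passes are needed.
}

VmaDefragmentationStats stats = {};
vmaEndDefragmentation(allocator, defragCtx, &stats);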
@@ -13234,50 +13235,49 @@ VkResult VmaDefragmentationContext_T::DefragmentPassEnd(VmaDefragmentationPassMo
     // Move blocks with immovable allocations according to algorithm
     if (immovableBlocks.size() > 0)
     {
-        switch (m_Algorithm)
-        {
-        case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT:
-        {
-            if (m_AlgorithmState != VMA_NULL)
-            {
-                bool swapped = false;
-                // Move to the start of free blocks range
-                for (const FragmentedBlock& block : immovableBlocks)
-                {
-                    StateExtensive& state = reinterpret_cast<StateExtensive*>(m_AlgorithmState)[block.data];
-                    if (state.operation != StateExtensive::Operation::Cleanup)
-                    {
-                        VmaBlockVector* vector = m_pBlockVectors[block.data];
-                        VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
-
-                        for (size_t i = 0, count = vector->GetBlockCount() - m_ImmovableBlockCount; i < count; ++i)
-                        {
-                            if (vector->GetBlock(i) == block.block)
-                            {
-                                VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[vector->GetBlockCount() - ++m_ImmovableBlockCount]);
-                                if (state.firstFreeBlock != SIZE_MAX)
-                                {
-                                    if (i + 1 < state.firstFreeBlock)
-                                    {
-                                        if (state.firstFreeBlock > 1)
-                                            VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[--state.firstFreeBlock]);
-                                        else
-                                            --state.firstFreeBlock;
-                                    }
-                                }
-                                swapped = true;
-                                break;
-                            }
-                        }
-                    }
-                }
-                if (swapped)
-                    result = VK_INCOMPLETE;
-                break;
-            }
-        }
-        default:
-        {
-            // Move to the beginning
-            for (const FragmentedBlock& block : immovableBlocks)
-            {
+        do
+        {
+            if(m_Algorithm == VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT)
+            {
+                if (m_AlgorithmState != VMA_NULL)
+                {
+                    bool swapped = false;
+                    // Move to the start of free blocks range
+                    for (const FragmentedBlock& block : immovableBlocks)
+                    {
+                        StateExtensive& state = reinterpret_cast<StateExtensive*>(m_AlgorithmState)[block.data];
+                        if (state.operation != StateExtensive::Operation::Cleanup)
+                        {
+                            VmaBlockVector* vector = m_pBlockVectors[block.data];
+                            VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+
+                            for (size_t i = 0, count = vector->GetBlockCount() - m_ImmovableBlockCount; i < count; ++i)
+                            {
+                                if (vector->GetBlock(i) == block.block)
+                                {
+                                    VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[vector->GetBlockCount() - ++m_ImmovableBlockCount]);
+                                    if (state.firstFreeBlock != SIZE_MAX)
+                                    {
+                                        if (i + 1 < state.firstFreeBlock)
+                                        {
+                                            if (state.firstFreeBlock > 1)
+                                                VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[--state.firstFreeBlock]);
+                                            else
+                                                --state.firstFreeBlock;
+                                        }
+                                    }
+                                    swapped = true;
+                                    break;
+                                }
+                            }
+                        }
+                    }
+                    if (swapped)
+                        result = VK_INCOMPLETE;
+                    break;
+                }
+            }
+
+            // Move to the beginning
+            for (const FragmentedBlock& block : immovableBlocks)
+            {
@@ -13293,9 +13293,7 @@ VkResult VmaDefragmentationContext_T::DefragmentPassEnd(VmaDefragmentationPassMo
                     }
                 }
             }
-            break;
-        }
-        }
+        } while (false);
     }

     // Bulk-map destination blocks
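Not part of the diff: the two hunks above trade a switch whose default: relied on falling out of the EXTENSIVE case for a single-iteration do/while, so the extensive path can bail out with break and every other algorithm drops down to the common fallback. A self-contained sketch of that idiom with hypothetical names (not VMA code):

// Sketch only: do { ... } while (false) as a structured replacement for switch fall-through.
#include <cstdio>

enum class Algorithm { Fast, Balanced, Extensive };

static bool TrySpecialHandling() { return false; } // pretend the special path declined

static void Dispatch(Algorithm algorithm)
{
    do
    {
        if (algorithm == Algorithm::Extensive)
        {
            if (TrySpecialHandling())
                break;          // handled; skip the common fallback
        }
        std::puts("common fallback"); // every algorithm, including Extensive when the special path declined
    } while (false);
}

int main()
{
    Dispatch(Algorithm::Extensive);
}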
@@ -13313,14 +13311,15 @@ bool VmaDefragmentationContext_T::ComputeDefragmentation(VmaBlockVector& vector,
     {
     case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT:
         return ComputeDefragmentation_Fast(vector);
-    default:
-        VMA_ASSERT(0);
     case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT:
         return ComputeDefragmentation_Balanced(vector, index, true);
     case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT:
         return ComputeDefragmentation_Full(vector);
     case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT:
         return ComputeDefragmentation_Extensive(vector, index);
+    default:
+        VMA_ASSERT(0);
+        return ComputeDefragmentation_Balanced(vector, index, true);
     }
 }

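Not part of the diff: the recurring pattern in this commit is visible above. Previously default: VMA_ASSERT(0); sat in front of a valid case and fell through into it, which relies on implicit fall-through and does nothing in release builds where the assert compiles out; the new code puts default: last, keeps the assert, and names its fallback explicitly. A self-contained before/after sketch with hypothetical names (not VMA code):

// Sketch only.
#include <cassert>

enum class Mode { Fast, Balanced };

// Before: with the assert compiled out, an unknown value silently falls through
// into the Fast branch, and compilers can warn about the implicit fall-through.
int DispatchBefore(Mode mode)
{
    switch (mode)
    {
    default:
        assert(false);
    case Mode::Fast:
        return 1;
    case Mode::Balanced:
        return 2;
    }
}

// After: default comes last, still asserts in debug builds, and states its fallback.
int DispatchAfter(Mode mode)
{
    switch (mode)
    {
    case Mode::Fast:
        return 1;
    case Mode::Balanced:
        return 2;
    default:
        assert(false);
        return 2; // explicit fallback instead of implicit fall-through
    }
}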
@@ -13386,10 +13385,10 @@ bool VmaDefragmentationContext_T::ReallocWithinBlock(VmaBlockVector& vector, Vma
             continue;
         case CounterStatus::End:
             return true;
-        default:
-            VMA_ASSERT(0);
         case CounterStatus::Pass:
             break;
+        default:
+            VMA_ASSERT(0);
         }

         VkDeviceSize offset = moveData.move.srcAllocation->GetOffset();
@@ -13475,10 +13474,10 @@ bool VmaDefragmentationContext_T::ComputeDefragmentation_Fast(VmaBlockVector& ve
             continue;
         case CounterStatus::End:
             return true;
-        default:
-            VMA_ASSERT(0);
         case CounterStatus::Pass:
             break;
+        default:
+            VMA_ASSERT(0);
         }

         // Check all previous blocks for free space
@@ -13522,10 +13521,10 @@ bool VmaDefragmentationContext_T::ComputeDefragmentation_Balanced(VmaBlockVector
             continue;
         case CounterStatus::End:
             return true;
-        default:
-            VMA_ASSERT(0);
         case CounterStatus::Pass:
             break;
+        default:
+            VMA_ASSERT(0);
         }

         // Check all previous blocks for free space
@@ -13609,10 +13608,10 @@ bool VmaDefragmentationContext_T::ComputeDefragmentation_Full(VmaBlockVector& ve
             continue;
         case CounterStatus::End:
             return true;
-        default:
-            VMA_ASSERT(0);
         case CounterStatus::Pass:
             break;
+        default:
+            VMA_ASSERT(0);
         }

         // Check all previous blocks for free space
@@ -13700,10 +13699,10 @@ bool VmaDefragmentationContext_T::ComputeDefragmentation_Extensive(VmaBlockVecto
             continue;
         case CounterStatus::End:
             return true;
-        default:
-            VMA_ASSERT(0);
         case CounterStatus::Pass:
             break;
+        default:
+            VMA_ASSERT(0);
         }

         // Check all previous blocks for free space
@@ -13741,14 +13740,15 @@ bool VmaDefragmentationContext_T::ComputeDefragmentation_Extensive(VmaBlockVecto
                 case StateExtensive::Operation::FindFreeBlockBuffer:
                     vectorState.operation = StateExtensive::Operation::MoveBuffers;
                     break;
-                default:
-                    VMA_ASSERT(0);
                 case StateExtensive::Operation::FindFreeBlockTexture:
                     vectorState.operation = StateExtensive::Operation::MoveTextures;
                     break;
                 case StateExtensive::Operation::FindFreeBlockAll:
                     vectorState.operation = StateExtensive::Operation::MoveAll;
                     break;
+                default:
+                    VMA_ASSERT(0);
+                    vectorState.operation = StateExtensive::Operation::MoveTextures;
                 }
                 vectorState.firstFreeBlock = last;
                 // Nothing done, block found without reallocations, can perform another reallocs in same pass
@@ -13886,10 +13886,10 @@ bool VmaDefragmentationContext_T::MoveDataToFreeBlocks(VmaSuballocationType curr
             continue;
         case CounterStatus::End:
             return true;
-        default:
-            VMA_ASSERT(0);
         case CounterStatus::Pass:
             break;
+        default:
+            VMA_ASSERT(0);
         }

         // Move only single type of resources at once