23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1651 #ifndef VMA_RECORDING_ENABLED 1653 #define VMA_RECORDING_ENABLED 1 1655 #define VMA_RECORDING_ENABLED 0 1660 #define NOMINMAX // For windows.h 1664 #include <vulkan/vulkan.h> 1667 #if VMA_RECORDING_ENABLED 1668 #include <windows.h> 1671 #if !defined(VMA_DEDICATED_ALLOCATION) 1672 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1673 #define VMA_DEDICATED_ALLOCATION 1 1675 #define VMA_DEDICATED_ALLOCATION 0 1693 uint32_t memoryType,
1694 VkDeviceMemory memory,
1699 uint32_t memoryType,
1700 VkDeviceMemory memory,
1773 #if VMA_DEDICATED_ALLOCATION 1774 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1775 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1902 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1910 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1920 uint32_t memoryTypeIndex,
1921 VkMemoryPropertyFlags* pFlags);
1933 uint32_t frameIndex);
1966 #ifndef VMA_STATS_STRING_ENABLED 1967 #define VMA_STATS_STRING_ENABLED 1 1970 #if VMA_STATS_STRING_ENABLED 1977 char** ppStatsString,
1978 VkBool32 detailedMap);
1982 char* pStatsString);
1984 #endif // #if VMA_STATS_STRING_ENABLED 2217 uint32_t memoryTypeBits,
2219 uint32_t* pMemoryTypeIndex);
2235 const VkBufferCreateInfo* pBufferCreateInfo,
2237 uint32_t* pMemoryTypeIndex);
2253 const VkImageCreateInfo* pImageCreateInfo,
2255 uint32_t* pMemoryTypeIndex);
2427 size_t* pLostAllocationCount);
2526 const VkMemoryRequirements* pVkMemoryRequirements,
2552 const VkMemoryRequirements* pVkMemoryRequirements,
2554 size_t allocationCount,
2599 size_t allocationCount,
2625 VkDeviceSize newSize);
3005 size_t allocationCount,
3006 VkBool32* pAllocationsChanged,
3072 const VkBufferCreateInfo* pBufferCreateInfo,
3097 const VkImageCreateInfo* pImageCreateInfo,
3123 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3126 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3127 #define VMA_IMPLEMENTATION 3130 #ifdef VMA_IMPLEMENTATION 3131 #undef VMA_IMPLEMENTATION 3153 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3154 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3166 #if VMA_USE_STL_CONTAINERS 3167 #define VMA_USE_STL_VECTOR 1 3168 #define VMA_USE_STL_UNORDERED_MAP 1 3169 #define VMA_USE_STL_LIST 1 3172 #ifndef VMA_USE_STL_SHARED_MUTEX 3174 #if __cplusplus >= 201703L 3175 #define VMA_USE_STL_SHARED_MUTEX 1 3179 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 3180 #define VMA_USE_STL_SHARED_MUTEX 1 3182 #define VMA_USE_STL_SHARED_MUTEX 0 3190 #if VMA_USE_STL_VECTOR 3194 #if VMA_USE_STL_UNORDERED_MAP 3195 #include <unordered_map> 3198 #if VMA_USE_STL_LIST 3207 #include <algorithm> 3212 #define VMA_NULL nullptr 3215 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3217 void *aligned_alloc(
size_t alignment,
size_t size)
3220 if(alignment <
sizeof(
void*))
3222 alignment =
sizeof(
void*);
3225 return memalign(alignment, size);
3227 #elif defined(__APPLE__) || defined(__ANDROID__) 3229 void *aligned_alloc(
size_t alignment,
size_t size)
3232 if(alignment <
sizeof(
void*))
3234 alignment =
sizeof(
void*);
3238 if(posix_memalign(&pointer, alignment, size) == 0)
3252 #define VMA_ASSERT(expr) assert(expr) 3254 #define VMA_ASSERT(expr) 3260 #ifndef VMA_HEAVY_ASSERT 3262 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3264 #define VMA_HEAVY_ASSERT(expr) 3268 #ifndef VMA_ALIGN_OF 3269 #define VMA_ALIGN_OF(type) (__alignof(type)) 3272 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3274 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3276 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3280 #ifndef VMA_SYSTEM_FREE 3282 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3284 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3289 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3293 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3297 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3301 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3304 #ifndef VMA_DEBUG_LOG 3305 #define VMA_DEBUG_LOG(format, ...) 3315 #if VMA_STATS_STRING_ENABLED 3316 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
3318 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
3320 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
3322 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
3324 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
3326 snprintf(outStr, strLen,
"%p", ptr);
3334 void Lock() { m_Mutex.lock(); }
3335 void Unlock() { m_Mutex.unlock(); }
3339 #define VMA_MUTEX VmaMutex 3343 #ifndef VMA_RW_MUTEX 3344 #if VMA_USE_STL_SHARED_MUTEX 3346 #include <shared_mutex> 3350 void LockRead() { m_Mutex.lock_shared(); }
3351 void UnlockRead() { m_Mutex.unlock_shared(); }
3352 void LockWrite() { m_Mutex.lock(); }
3353 void UnlockWrite() { m_Mutex.unlock(); }
3355 std::shared_mutex m_Mutex;
3357 #define VMA_RW_MUTEX VmaRWMutex 3358 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 3364 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3365 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3366 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3367 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3368 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3372 #define VMA_RW_MUTEX VmaRWMutex 3378 void LockRead() { m_Mutex.Lock(); }
3379 void UnlockRead() { m_Mutex.Unlock(); }
3380 void LockWrite() { m_Mutex.Lock(); }
3381 void UnlockWrite() { m_Mutex.Unlock(); }
3385 #define VMA_RW_MUTEX VmaRWMutex 3386 #endif // #if VMA_USE_STL_SHARED_MUTEX 3387 #endif // #ifndef VMA_RW_MUTEX 3397 #ifndef VMA_ATOMIC_UINT32 3399 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3402 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3407 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3410 #ifndef VMA_DEBUG_ALIGNMENT 3415 #define VMA_DEBUG_ALIGNMENT (1) 3418 #ifndef VMA_DEBUG_MARGIN 3423 #define VMA_DEBUG_MARGIN (0) 3426 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3431 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3434 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3440 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3443 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3448 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3451 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3456 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3459 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3460 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3464 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3465 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3469 #ifndef VMA_CLASS_NO_COPY 3470 #define VMA_CLASS_NO_COPY(className) \ 3472 className(const className&) = delete; \ 3473 className& operator=(const className&) = delete; 3476 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3479 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3481 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3482 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3488 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3490 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3491 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    // Kernighan's method: each iteration clears the lowest set bit.
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1;
        ++count;
    }
    return count;
}
// Aligns given value up to the nearest multiple of align.
// E.g. VmaAlignUp(11, 8) == 16. Use unsigned integer types as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    // x / y * y == x - x % y for integers, so this equals (val+align-1)/align*align.
    const T bumped = val + align - 1;
    return bumped - bumped % align;
}
// Aligns given value down to the nearest multiple of align.
// E.g. VmaAlignDown(11, 8) == 8. Use unsigned integer types as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    // Identical to val / align * align for integer T.
    return val - val % align;
}
// Division with mathematical rounding to nearest integer.
template <typename T>
static inline T VmaRoundDiv(T dividend, T divisor)
{
    const T halfDivisor = divisor / (T)2;
    return (dividend + halfDivisor) / divisor;
}
// Returns true if given number is a power of two.
// T must be an unsigned integer or a signed integer with a non-negative value.
// NOTE: deliberately matches the classic bit trick, so 0 also yields true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T withLowestBitCleared = x & (x - 1);
    return withLowestBitCleared == 0;
}
3538 static inline uint32_t VmaNextPow2(uint32_t v)
3549 static inline uint64_t VmaNextPow2(uint64_t v)
3563 static inline uint32_t VmaPrevPow2(uint32_t v)
3573 static inline uint64_t VmaPrevPow2(uint64_t v)
3585 static inline bool VmaStrIsEmpty(
const char* pStr)
3587 return pStr == VMA_NULL || *pStr ==
'\0';
3590 #if VMA_STATS_STRING_ENABLED 3592 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3608 #endif // #if VMA_STATS_STRING_ENABLED 3612 template<
typename Iterator,
typename Compare>
3613 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
3615 Iterator centerValue = end; --centerValue;
3616 Iterator insertIndex = beg;
3617 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3619 if(cmp(*memTypeIndex, *centerValue))
3621 if(insertIndex != memTypeIndex)
3623 VMA_SWAP(*memTypeIndex, *insertIndex);
3628 if(insertIndex != centerValue)
3630 VMA_SWAP(*insertIndex, *centerValue);
3635 template<
typename Iterator,
typename Compare>
3636 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3640 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3641 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3642 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3646 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3648 #endif // #ifndef VMA_SORT 3657 static inline bool VmaBlocksOnSamePage(
3658 VkDeviceSize resourceAOffset,
3659 VkDeviceSize resourceASize,
3660 VkDeviceSize resourceBOffset,
3661 VkDeviceSize pageSize)
3663 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3664 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3665 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3666 VkDeviceSize resourceBStart = resourceBOffset;
3667 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3668 return resourceAEndPage == resourceBStartPage;
// Type of the resource occupying a suballocation, used to decide whether two
// neighboring suballocations conflict w.r.t. bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // Unoccupied region.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // Allocated without buffer/image usage info.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Image with unknown tiling.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3688 static inline bool VmaIsBufferImageGranularityConflict(
3689 VmaSuballocationType suballocType1,
3690 VmaSuballocationType suballocType2)
3692 if(suballocType1 > suballocType2)
3694 VMA_SWAP(suballocType1, suballocType2);
3697 switch(suballocType1)
3699 case VMA_SUBALLOCATION_TYPE_FREE:
3701 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3703 case VMA_SUBALLOCATION_TYPE_BUFFER:
3705 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3706 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3707 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3709 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3710 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3711 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3712 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3714 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3715 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3723 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3725 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3726 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3727 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3729 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3733 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3735 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3736 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3737 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3739 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3751 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
3753 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
3754 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
3755 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3756 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
3762 VMA_CLASS_NO_COPY(VmaMutexLock)
3764 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3765 m_pMutex(useMutex ? &mutex : VMA_NULL)
3766 {
if(m_pMutex) { m_pMutex->Lock(); } }
3768 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3770 VMA_MUTEX* m_pMutex;
3774 struct VmaMutexLockRead
3776 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3778 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3779 m_pMutex(useMutex ? &mutex : VMA_NULL)
3780 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3781 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3783 VMA_RW_MUTEX* m_pMutex;
3787 struct VmaMutexLockWrite
3789 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3791 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3792 m_pMutex(useMutex ? &mutex : VMA_NULL)
3793 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3794 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3796 VMA_RW_MUTEX* m_pMutex;
3799 #if VMA_DEBUG_GLOBAL_MUTEX 3800 static VMA_MUTEX gDebugGlobalMutex;
3801 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3803 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3807 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over sorted range [beg, end): returns an iterator to the first
element that is NOT less than key (std::lower_bound semantics), or end if all
elements compare less.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3842 template<
typename T>
3843 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
3845 for(uint32_t i = 0; i < count; ++i)
3847 const T iPtr = arr[i];
3848 if(iPtr == VMA_NULL)
3852 for(uint32_t j = i + 1; j < count; ++j)
3866 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3868 if((pAllocationCallbacks != VMA_NULL) &&
3869 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3871 return (*pAllocationCallbacks->pfnAllocation)(
3872 pAllocationCallbacks->pUserData,
3875 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3879 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3883 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3885 if((pAllocationCallbacks != VMA_NULL) &&
3886 (pAllocationCallbacks->pfnFree != VMA_NULL))
3888 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3892 VMA_SYSTEM_FREE(ptr);
3896 template<
typename T>
3897 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3899 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3902 template<
typename T>
3903 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3905 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3908 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3910 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3912 template<
typename T>
3913 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3916 VmaFree(pAllocationCallbacks, ptr);
3919 template<
typename T>
3920 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3924 for(
size_t i = count; i--; )
3928 VmaFree(pAllocationCallbacks, ptr);
3933 template<
typename T>
3934 class VmaStlAllocator
3937 const VkAllocationCallbacks*
const m_pCallbacks;
3938 typedef T value_type;
3940 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3941 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3943 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3944 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3946 template<
typename U>
3947 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3949 return m_pCallbacks == rhs.m_pCallbacks;
3951 template<
typename U>
3952 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3954 return m_pCallbacks != rhs.m_pCallbacks;
3957 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3960 #if VMA_USE_STL_VECTOR 3962 #define VmaVector std::vector 3964 template<
typename T,
typename allocatorT>
3965 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3967 vec.insert(vec.begin() + index, item);
3970 template<
typename T,
typename allocatorT>
3971 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3973 vec.erase(vec.begin() + index);
3976 #else // #if VMA_USE_STL_VECTOR 3981 template<
typename T,
typename AllocatorT>
3985 typedef T value_type;
3987 VmaVector(
const AllocatorT& allocator) :
3988 m_Allocator(allocator),
3995 VmaVector(
size_t count,
const AllocatorT& allocator) :
3996 m_Allocator(allocator),
3997 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
4003 VmaVector(
const VmaVector<T, AllocatorT>& src) :
4004 m_Allocator(src.m_Allocator),
4005 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4006 m_Count(src.m_Count),
4007 m_Capacity(src.m_Count)
4011 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
4017 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4020 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
4024 resize(rhs.m_Count);
4027 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
4033 bool empty()
const {
return m_Count == 0; }
4034 size_t size()
const {
return m_Count; }
4035 T* data() {
return m_pArray; }
4036 const T* data()
const {
return m_pArray; }
4038 T& operator[](
size_t index)
4040 VMA_HEAVY_ASSERT(index < m_Count);
4041 return m_pArray[index];
4043 const T& operator[](
size_t index)
const 4045 VMA_HEAVY_ASSERT(index < m_Count);
4046 return m_pArray[index];
4051 VMA_HEAVY_ASSERT(m_Count > 0);
4054 const T& front()
const 4056 VMA_HEAVY_ASSERT(m_Count > 0);
4061 VMA_HEAVY_ASSERT(m_Count > 0);
4062 return m_pArray[m_Count - 1];
4064 const T& back()
const 4066 VMA_HEAVY_ASSERT(m_Count > 0);
4067 return m_pArray[m_Count - 1];
4070 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4072 newCapacity = VMA_MAX(newCapacity, m_Count);
4074 if((newCapacity < m_Capacity) && !freeMemory)
4076 newCapacity = m_Capacity;
4079 if(newCapacity != m_Capacity)
4081 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4084 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4086 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4087 m_Capacity = newCapacity;
4088 m_pArray = newArray;
4092 void resize(
size_t newCount,
bool freeMemory =
false)
4094 size_t newCapacity = m_Capacity;
4095 if(newCount > m_Capacity)
4097 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4101 newCapacity = newCount;
4104 if(newCapacity != m_Capacity)
4106 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4107 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4108 if(elementsToCopy != 0)
4110 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4112 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4113 m_Capacity = newCapacity;
4114 m_pArray = newArray;
4120 void clear(
bool freeMemory =
false)
4122 resize(0, freeMemory);
4125 void insert(
size_t index,
const T& src)
4127 VMA_HEAVY_ASSERT(index <= m_Count);
4128 const size_t oldCount = size();
4129 resize(oldCount + 1);
4130 if(index < oldCount)
4132 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4134 m_pArray[index] = src;
4137 void remove(
size_t index)
4139 VMA_HEAVY_ASSERT(index < m_Count);
4140 const size_t oldCount = size();
4141 if(index < oldCount - 1)
4143 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4145 resize(oldCount - 1);
4148 void push_back(
const T& src)
4150 const size_t newIndex = size();
4151 resize(newIndex + 1);
4152 m_pArray[newIndex] = src;
4157 VMA_HEAVY_ASSERT(m_Count > 0);
4161 void push_front(
const T& src)
4168 VMA_HEAVY_ASSERT(m_Count > 0);
4172 typedef T* iterator;
4174 iterator begin() {
return m_pArray; }
4175 iterator end() {
return m_pArray + m_Count; }
4178 AllocatorT m_Allocator;
4184 template<
typename T,
typename allocatorT>
4185 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4187 vec.insert(index, item);
4190 template<
typename T,
typename allocatorT>
4191 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4196 #endif // #if VMA_USE_STL_VECTOR 4198 template<
typename CmpLess,
typename VectorT>
4199 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4201 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4203 vector.data() + vector.size(),
4205 CmpLess()) - vector.data();
4206 VmaVectorInsert(vector, indexToInsert, value);
4207 return indexToInsert;
4210 template<
typename CmpLess,
typename VectorT>
4211 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
4214 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
4219 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
4221 size_t indexToRemove = it - vector.begin();
4222 VmaVectorRemove(vector, indexToRemove);
4228 template<
typename CmpLess,
typename IterT,
typename KeyT>
4229 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
4232 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4233 beg, end, value, comparator);
4235 (!comparator(*it, value) && !comparator(value, *it)))
4250 template<
typename T>
4251 class VmaPoolAllocator
4253 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4255 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
4256 ~VmaPoolAllocator();
4264 uint32_t NextFreeIndex;
4272 uint32_t FirstFreeIndex;
4275 const VkAllocationCallbacks* m_pAllocationCallbacks;
4276 const uint32_t m_FirstBlockCapacity;
4277 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4279 ItemBlock& CreateNewBlock();
4282 template<
typename T>
4283 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
4284 m_pAllocationCallbacks(pAllocationCallbacks),
4285 m_FirstBlockCapacity(firstBlockCapacity),
4286 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4288 VMA_ASSERT(m_FirstBlockCapacity > 1);
4291 template<
typename T>
4292 VmaPoolAllocator<T>::~VmaPoolAllocator()
4297 template<
typename T>
4298 void VmaPoolAllocator<T>::Clear()
4300 for(
size_t i = m_ItemBlocks.size(); i--; )
4301 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4302 m_ItemBlocks.clear();
4305 template<
typename T>
4306 T* VmaPoolAllocator<T>::Alloc()
4308 for(
size_t i = m_ItemBlocks.size(); i--; )
4310 ItemBlock& block = m_ItemBlocks[i];
4312 if(block.FirstFreeIndex != UINT32_MAX)
4314 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4315 block.FirstFreeIndex = pItem->NextFreeIndex;
4316 return &pItem->Value;
4321 ItemBlock& newBlock = CreateNewBlock();
4322 Item*
const pItem = &newBlock.pItems[0];
4323 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4324 return &pItem->Value;
4327 template<
typename T>
4328 void VmaPoolAllocator<T>::Free(T* ptr)
4331 for(
size_t i = m_ItemBlocks.size(); i--; )
4333 ItemBlock& block = m_ItemBlocks[i];
4337 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4340 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4342 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
4343 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4344 block.FirstFreeIndex = index;
4348 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4351 template<
typename T>
4352 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4354 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4355 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4357 const ItemBlock newBlock = {
4358 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4362 m_ItemBlocks.push_back(newBlock);
4365 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4366 newBlock.pItems[i].NextFreeIndex = i + 1;
4367 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
4368 return m_ItemBlocks.back();
4374 #if VMA_USE_STL_LIST 4376 #define VmaList std::list 4378 #else // #if VMA_USE_STL_LIST 4380 template<
typename T>
4389 template<
typename T>
4392 VMA_CLASS_NO_COPY(VmaRawList)
4394 typedef VmaListItem<T> ItemType;
4396 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4400 size_t GetCount()
const {
return m_Count; }
4401 bool IsEmpty()
const {
return m_Count == 0; }
4403 ItemType* Front() {
return m_pFront; }
4404 const ItemType* Front()
const {
return m_pFront; }
4405 ItemType* Back() {
return m_pBack; }
4406 const ItemType* Back()
const {
return m_pBack; }
4408 ItemType* PushBack();
4409 ItemType* PushFront();
4410 ItemType* PushBack(
const T& value);
4411 ItemType* PushFront(
const T& value);
4416 ItemType* InsertBefore(ItemType* pItem);
4418 ItemType* InsertAfter(ItemType* pItem);
4420 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4421 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4423 void Remove(ItemType* pItem);
4426 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4427 VmaPoolAllocator<ItemType> m_ItemAllocator;
4433 template<
typename T>
4434 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4435 m_pAllocationCallbacks(pAllocationCallbacks),
4436 m_ItemAllocator(pAllocationCallbacks, 128),
4443 template<
typename T>
4444 VmaRawList<T>::~VmaRawList()
4450 template<
typename T>
4451 void VmaRawList<T>::Clear()
4453 if(IsEmpty() ==
false)
4455 ItemType* pItem = m_pBack;
4456 while(pItem != VMA_NULL)
4458 ItemType*
const pPrevItem = pItem->pPrev;
4459 m_ItemAllocator.Free(pItem);
4462 m_pFront = VMA_NULL;
4468 template<
typename T>
4469 VmaListItem<T>* VmaRawList<T>::PushBack()
4471 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4472 pNewItem->pNext = VMA_NULL;
4475 pNewItem->pPrev = VMA_NULL;
4476 m_pFront = pNewItem;
4482 pNewItem->pPrev = m_pBack;
4483 m_pBack->pNext = pNewItem;
4490 template<
typename T>
4491 VmaListItem<T>* VmaRawList<T>::PushFront()
4493 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4494 pNewItem->pPrev = VMA_NULL;
4497 pNewItem->pNext = VMA_NULL;
4498 m_pFront = pNewItem;
4504 pNewItem->pNext = m_pFront;
4505 m_pFront->pPrev = pNewItem;
4506 m_pFront = pNewItem;
4512 template<
typename T>
4513 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4515 ItemType*
const pNewItem = PushBack();
4516 pNewItem->Value = value;
4520 template<
typename T>
4521 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4523 ItemType*
const pNewItem = PushFront();
4524 pNewItem->Value = value;
4528 template<
typename T>
4529 void VmaRawList<T>::PopBack()
4531 VMA_HEAVY_ASSERT(m_Count > 0);
4532 ItemType*
const pBackItem = m_pBack;
4533 ItemType*
const pPrevItem = pBackItem->pPrev;
4534 if(pPrevItem != VMA_NULL)
4536 pPrevItem->pNext = VMA_NULL;
4538 m_pBack = pPrevItem;
4539 m_ItemAllocator.Free(pBackItem);
4543 template<
typename T>
4544 void VmaRawList<T>::PopFront()
4546 VMA_HEAVY_ASSERT(m_Count > 0);
4547 ItemType*
const pFrontItem = m_pFront;
4548 ItemType*
const pNextItem = pFrontItem->pNext;
4549 if(pNextItem != VMA_NULL)
4551 pNextItem->pPrev = VMA_NULL;
4553 m_pFront = pNextItem;
4554 m_ItemAllocator.Free(pFrontItem);
4558 template<
typename T>
4559 void VmaRawList<T>::Remove(ItemType* pItem)
4561 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4562 VMA_HEAVY_ASSERT(m_Count > 0);
4564 if(pItem->pPrev != VMA_NULL)
4566 pItem->pPrev->pNext = pItem->pNext;
4570 VMA_HEAVY_ASSERT(m_pFront == pItem);
4571 m_pFront = pItem->pNext;
4574 if(pItem->pNext != VMA_NULL)
4576 pItem->pNext->pPrev = pItem->pPrev;
4580 VMA_HEAVY_ASSERT(m_pBack == pItem);
4581 m_pBack = pItem->pPrev;
4584 m_ItemAllocator.Free(pItem);
4588 template<
typename T>
4589 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4591 if(pItem != VMA_NULL)
4593 ItemType*
const prevItem = pItem->pPrev;
4594 ItemType*
const newItem = m_ItemAllocator.Alloc();
4595 newItem->pPrev = prevItem;
4596 newItem->pNext = pItem;
4597 pItem->pPrev = newItem;
4598 if(prevItem != VMA_NULL)
4600 prevItem->pNext = newItem;
4604 VMA_HEAVY_ASSERT(m_pFront == pItem);
4614 template<
typename T>
4615 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4617 if(pItem != VMA_NULL)
4619 ItemType*
const nextItem = pItem->pNext;
4620 ItemType*
const newItem = m_ItemAllocator.Alloc();
4621 newItem->pNext = nextItem;
4622 newItem->pPrev = pItem;
4623 pItem->pNext = newItem;
4624 if(nextItem != VMA_NULL)
4626 nextItem->pPrev = newItem;
4630 VMA_HEAVY_ASSERT(m_pBack == pItem);
4640 template<
typename T>
4641 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4643 ItemType*
const newItem = InsertBefore(pItem);
4644 newItem->Value = value;
4648 template<
typename T>
4649 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4651 ItemType*
const newItem = InsertAfter(pItem);
4652 newItem->Value = value;
4656 template<
typename T,
typename AllocatorT>
4659 VMA_CLASS_NO_COPY(VmaList)
4670 T& operator*()
const 4672 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4673 return m_pItem->Value;
4675 T* operator->()
const 4677 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4678 return &m_pItem->Value;
4681 iterator& operator++()
4683 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4684 m_pItem = m_pItem->pNext;
4687 iterator& operator--()
4689 if(m_pItem != VMA_NULL)
4691 m_pItem = m_pItem->pPrev;
4695 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4696 m_pItem = m_pList->Back();
4701 iterator operator++(
int)
4703 iterator result = *
this;
4707 iterator operator--(
int)
4709 iterator result = *
this;
4714 bool operator==(
const iterator& rhs)
const 4716 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4717 return m_pItem == rhs.m_pItem;
4719 bool operator!=(
const iterator& rhs)
const 4721 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4722 return m_pItem != rhs.m_pItem;
4726 VmaRawList<T>* m_pList;
4727 VmaListItem<T>* m_pItem;
4729 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4735 friend class VmaList<T, AllocatorT>;
4738 class const_iterator
4747 const_iterator(
const iterator& src) :
4748 m_pList(src.m_pList),
4749 m_pItem(src.m_pItem)
4753 const T& operator*()
const 4755 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4756 return m_pItem->Value;
4758 const T* operator->()
const 4760 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4761 return &m_pItem->Value;
4764 const_iterator& operator++()
4766 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4767 m_pItem = m_pItem->pNext;
4770 const_iterator& operator--()
4772 if(m_pItem != VMA_NULL)
4774 m_pItem = m_pItem->pPrev;
4778 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4779 m_pItem = m_pList->Back();
4784 const_iterator operator++(
int)
4786 const_iterator result = *
this;
4790 const_iterator operator--(
int)
4792 const_iterator result = *
this;
4797 bool operator==(
const const_iterator& rhs)
const 4799 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4800 return m_pItem == rhs.m_pItem;
4802 bool operator!=(
const const_iterator& rhs)
const 4804 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4805 return m_pItem != rhs.m_pItem;
4809 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4815 const VmaRawList<T>* m_pList;
4816 const VmaListItem<T>* m_pItem;
4818 friend class VmaList<T, AllocatorT>;
4821 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4823 bool empty()
const {
return m_RawList.IsEmpty(); }
4824 size_t size()
const {
return m_RawList.GetCount(); }
4826 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4827 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4829 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4830 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4832 void clear() { m_RawList.Clear(); }
4833 void push_back(
const T& value) { m_RawList.PushBack(value); }
4834 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4835 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4838 VmaRawList<T> m_RawList;
4841 #endif // #if VMA_USE_STL_LIST 4849 #if VMA_USE_STL_UNORDERED_MAP 4851 #define VmaPair std::pair 4853 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4854 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4856 #else // #if VMA_USE_STL_UNORDERED_MAP 4858 template<
typename T1,
typename T2>
4864 VmaPair() : first(), second() { }
4865 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4871 template<
typename KeyT,
typename ValueT>
4875 typedef VmaPair<KeyT, ValueT> PairType;
4876 typedef PairType* iterator;
4878 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4880 iterator begin() {
return m_Vector.begin(); }
4881 iterator end() {
return m_Vector.end(); }
4883 void insert(
const PairType& pair);
4884 iterator find(
const KeyT& key);
4885 void erase(iterator it);
4888 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4891 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4893 template<
typename FirstT,
typename SecondT>
4894 struct VmaPairFirstLess
4896 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4898 return lhs.first < rhs.first;
4900 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4902 return lhs.first < rhsFirst;
4906 template<
typename KeyT,
typename ValueT>
4907 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4909 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4911 m_Vector.data() + m_Vector.size(),
4913 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4914 VmaVectorInsert(m_Vector, indexToInsert, pair);
4917 template<
typename KeyT,
typename ValueT>
4918 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4920 PairType* it = VmaBinaryFindFirstNotLess(
4922 m_Vector.data() + m_Vector.size(),
4924 VmaPairFirstLess<KeyT, ValueT>());
4925 if((it != m_Vector.end()) && (it->first == key))
4931 return m_Vector.end();
4935 template<
typename KeyT,
typename ValueT>
4936 void VmaMap<KeyT, ValueT>::erase(iterator it)
4938 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4941 #endif // #if VMA_USE_STL_UNORDERED_MAP 4947 class VmaDeviceMemoryBlock;
4949 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
// VmaAllocation_T — internal representation of one allocation: either a
// sub-range of a VmaDeviceMemoryBlock (ALLOCATION_TYPE_BLOCK) or its own
// dedicated VkDeviceMemory (ALLOCATION_TYPE_DEDICATED).
// NOTE(review): this chunk is a mangled extraction — upstream line numbers are
// fused into the code and several lines (braces, some members such as m_Size /
// m_MapCount / m_Flags declarations, the union wrapper) are missing. Code is
// kept byte-identical below; confirm against the upstream header.
4951 struct VmaAllocation_T
// High bit of m_MapCount marks a persistently mapped allocation.
4954 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4958 FLAG_USER_DATA_STRING = 0x01,
4962 enum ALLOCATION_TYPE
4964 ALLOCATION_TYPE_NONE,
4965 ALLOCATION_TYPE_BLOCK,
4966 ALLOCATION_TYPE_DEDICATED,
// Second-stage initializer: resets the object to "no allocation" state.
4974 void Ctor(uint32_t currentFrameIndex,
bool userDataString)
4978 m_pUserData = VMA_NULL;
4979 m_LastUseFrameIndex = currentFrameIndex;
4980 m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
4981 m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
4983 m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
4985 #if VMA_STATS_STRING_ENABLED 4986 m_CreationFrameIndex = currentFrameIndex;
4987 m_BufferImageUsage = 0;
// Destructor-time sanity checks: allocation must be unmapped and user data cleared.
4993 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4996 VMA_ASSERT(m_pUserData == VMA_NULL);
// Binds this allocation to a sub-range [offset, offset+size) of `block`.
4999 void InitBlockAllocation(
5000 VmaDeviceMemoryBlock* block,
5001 VkDeviceSize offset,
5002 VkDeviceSize alignment,
5004 VmaSuballocationType suballocationType,
5008 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5009 VMA_ASSERT(block != VMA_NULL);
5010 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5011 m_Alignment = alignment;
5013 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5014 m_SuballocationType = (uint8_t)suballocationType;
5015 m_BlockAllocation.m_Block = block;
5016 m_BlockAllocation.m_Offset = offset;
5017 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Initializes a "lost" placeholder block allocation (null block, zero offset);
// requires m_LastUseFrameIndex already set to VMA_FRAME_INDEX_LOST.
5022 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5023 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
5024 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5025 m_BlockAllocation.m_Block = VMA_NULL;
5026 m_BlockAllocation.m_Offset = 0;
5027 m_BlockAllocation.m_CanBecomeLost =
true;
5030 void ChangeBlockAllocation(
5032 VmaDeviceMemoryBlock* block,
5033 VkDeviceSize offset);
5035 void ChangeSize(VkDeviceSize newSize);
5036 void ChangeOffset(VkDeviceSize newOffset);
// Binds this allocation to its own dedicated VkDeviceMemory.
5039 void InitDedicatedAllocation(
5040 uint32_t memoryTypeIndex,
5041 VkDeviceMemory hMemory,
5042 VmaSuballocationType suballocationType,
5046 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5047 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
5048 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5051 m_SuballocationType = (uint8_t)suballocationType;
5052 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5053 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
5054 m_DedicatedAllocation.m_hMemory = hMemory;
5055 m_DedicatedAllocation.m_pMappedData = pMappedData;
// --- Accessors ---
5058 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5059 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5060 VkDeviceSize GetSize()
const {
return m_Size; }
5061 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5062 void* GetUserData()
const {
return m_pUserData; }
5063 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5064 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Valid only for ALLOCATION_TYPE_BLOCK (asserted).
5066 VmaDeviceMemoryBlock* GetBlock()
const 5068 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5069 return m_BlockAllocation.m_Block;
5071 VkDeviceSize GetOffset()
const;
5072 VkDeviceMemory GetMemory()
const;
5073 uint32_t GetMemoryTypeIndex()
const;
5074 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5075 void* GetMappedData()
const;
5076 bool CanBecomeLost()
const;
// Lost-allocation bookkeeping uses an atomic frame index (see
// CompareExchangeLastUseFrameIndex below).
5078 uint32_t GetLastUseFrameIndex()
const 5080 return m_LastUseFrameIndex.load();
5082 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5084 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5094 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Valid only for ALLOCATION_TYPE_DEDICATED (asserted).
5096 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5098 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
5109 void BlockAllocMap();
5110 void BlockAllocUnmap();
5111 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
// Stats-only metadata, compiled in only with VMA_STATS_STRING_ENABLED.
5114 #if VMA_STATS_STRING_ENABLED 5115 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5116 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5118 void InitBufferImageUsage(uint32_t bufferImageUsage)
5120 VMA_ASSERT(m_BufferImageUsage == 0);
5121 m_BufferImageUsage = bufferImageUsage;
5124 void PrintParameters(
class VmaJsonWriter& json)
const;
// --- Data members ---
5128 VkDeviceSize m_Alignment;
5129 VkDeviceSize m_Size;
5131 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5133 uint8_t m_SuballocationType;
// Per-type payload; presumably members of an anonymous union in the original
// (the union wrapper was lost in extraction) — confirm against upstream.
5140 struct BlockAllocation
5142 VmaDeviceMemoryBlock* m_Block;
5143 VkDeviceSize m_Offset;
5144 bool m_CanBecomeLost;
5148 struct DedicatedAllocation
5150 uint32_t m_MemoryTypeIndex;
5151 VkDeviceMemory m_hMemory;
5152 void* m_pMappedData;
5158 BlockAllocation m_BlockAllocation;
5160 DedicatedAllocation m_DedicatedAllocation;
5163 #if VMA_STATS_STRING_ENABLED 5164 uint32_t m_CreationFrameIndex;
5165 uint32_t m_BufferImageUsage;
5175 struct VmaSuballocation
5177 VkDeviceSize offset;
5180 VmaSuballocationType type;
5184 struct VmaSuballocationOffsetLess
5186 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5188 return lhs.offset < rhs.offset;
5191 struct VmaSuballocationOffsetGreater
5193 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5195 return lhs.offset > rhs.offset;
5199 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
5202 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5204 enum class VmaAllocationRequestType
5226 struct VmaAllocationRequest
5228 VkDeviceSize offset;
5229 VkDeviceSize sumFreeSize;
5230 VkDeviceSize sumItemSize;
5231 VmaSuballocationList::iterator item;
5232 size_t itemsToMakeLostCount;
5234 VmaAllocationRequestType type;
5236 VkDeviceSize CalcCost()
const 5238 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Abstract base for per-block bookkeeping: tracks used/free ranges inside one
// VkDeviceMemory block. Concrete strategies: _Generic, _Linear, _Buddy (below).
// NOTE(review): mangled extraction — upstream line numbers fused into the
// code; some lines (constructor, the `virtual void Alloc(` header around 5296)
// are missing. Code kept byte-identical; confirm against upstream.
5246 class VmaBlockMetadata
5250 virtual ~VmaBlockMetadata() { }
5251 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Validates all data structures inside this object. If not valid, returns false.
5254 virtual bool Validate()
const = 0;
5255 VkDeviceSize GetSize()
const {
return m_Size; }
5256 virtual size_t GetAllocationCount()
const = 0;
5257 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5258 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5260 virtual bool IsEmpty()
const = 0;
5262 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5264 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5266 #if VMA_STATS_STRING_ENABLED 5267 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Tries to find a place for suballocation with given parameters inside this
// block. Returns true on success; the resulting placement is written to
// *pAllocationRequest.
5273 virtual bool CreateAllocationRequest(
5274 uint32_t currentFrameIndex,
5275 uint32_t frameInUseCount,
5276 VkDeviceSize bufferImageGranularity,
5277 VkDeviceSize allocSize,
5278 VkDeviceSize allocAlignment,
5280 VmaSuballocationType allocType,
5281 bool canMakeOtherLost,
5284 VmaAllocationRequest* pAllocationRequest) = 0;
5286 virtual bool MakeRequestedAllocationsLost(
5287 uint32_t currentFrameIndex,
5288 uint32_t frameInUseCount,
5289 VmaAllocationRequest* pAllocationRequest) = 0;
5291 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5293 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
// (Parameters of the pure-virtual Alloc(); its opening line was lost in
// extraction — confirm against upstream.)
5297 const VmaAllocationRequest& request,
5298 VmaSuballocationType type,
5299 VkDeviceSize allocSize,
5304 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
// Tries to resize an allocation in place; base implementation refuses.
5307 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5310 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
// JSON helpers shared by the concrete PrintDetailedMap implementations.
5312 #if VMA_STATS_STRING_ENABLED 5313 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5314 VkDeviceSize unusedBytes,
5315 size_t allocationCount,
5316 size_t unusedRangeCount)
const;
5317 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5318 VkDeviceSize offset,
5320 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5321 VkDeviceSize offset,
5322 VkDeviceSize size)
const;
5323 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5327 VkDeviceSize m_Size;
5328 const VkAllocationCallbacks* m_pAllocationCallbacks;
// VMA_VALIDATE: assert-and-fail helper used by the Validate() implementations.
// VmaBlockMetadata_Generic — default free-list strategy: all suballocations
// kept in a list sorted by offset, plus a by-size index of free ranges.
// NOTE(review): mangled extraction; code kept byte-identical — confirm
// against upstream.
5331 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5332 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5336 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5338 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5341 virtual ~VmaBlockMetadata_Generic();
5342 virtual void Init(VkDeviceSize size);
5344 virtual bool Validate()
const;
// Used entries = all suballocations minus the free ones.
5345 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5346 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5347 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5348 virtual bool IsEmpty()
const;
5350 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5351 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5353 #if VMA_STATS_STRING_ENABLED 5354 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5357 virtual bool CreateAllocationRequest(
5358 uint32_t currentFrameIndex,
5359 uint32_t frameInUseCount,
5360 VkDeviceSize bufferImageGranularity,
5361 VkDeviceSize allocSize,
5362 VkDeviceSize allocAlignment,
5364 VmaSuballocationType allocType,
5365 bool canMakeOtherLost,
5367 VmaAllocationRequest* pAllocationRequest);
5369 virtual bool MakeRequestedAllocationsLost(
5370 uint32_t currentFrameIndex,
5371 uint32_t frameInUseCount,
5372 VmaAllocationRequest* pAllocationRequest);
5374 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5376 virtual VkResult CheckCorruption(
const void* pBlockData);
// (Alloc() parameters; the opening line was lost in extraction.)
5379 const VmaAllocationRequest& request,
5380 VmaSuballocationType type,
5381 VkDeviceSize allocSize,
5385 virtual void FreeAtOffset(VkDeviceSize offset);
// This strategy supports in-place resize (overrides the refusing base impl).
5387 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5392 bool IsBufferImageGranularityConflictPossible(
5393 VkDeviceSize bufferImageGranularity,
5394 VmaSuballocationType& inOutPrevSuballocType)
const;
5397 friend class VmaDefragmentationAlgorithm_Generic;
5398 friend class VmaDefragmentationAlgorithm_Fast;
// --- Data members ---
5400 uint32_t m_FreeCount;
5401 VkDeviceSize m_SumFreeSize;
5402 VmaSuballocationList m_Suballocations;
// Free ranges indexed by size for best-fit searches.
5405 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5407 bool ValidateFreeSuballocationList()
const;
// Checks if requested suballocation with given parameters can be placed in
// given suballocItem; outputs offset/cost via the out-parameters.
5411 bool CheckAllocation(
5412 uint32_t currentFrameIndex,
5413 uint32_t frameInUseCount,
5414 VkDeviceSize bufferImageGranularity,
5415 VkDeviceSize allocSize,
5416 VkDeviceSize allocAlignment,
5417 VmaSuballocationType allocType,
5418 VmaSuballocationList::const_iterator suballocItem,
5419 bool canMakeOtherLost,
5420 VkDeviceSize* pOffset,
5421 size_t* itemsToMakeLostCount,
5422 VkDeviceSize* pSumFreeSize,
5423 VkDeviceSize* pSumItemSize)
const;
5425 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5429 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5432 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5435 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// VmaBlockMetadata_Linear — linear/ring-buffer/double-stack strategy: two
// suballocation vectors whose roles (1st/2nd) swap, with m_2ndVectorMode
// selecting ring-buffer or double-stack behavior.
// NOTE(review): mangled extraction; code kept byte-identical — confirm
// against upstream.
5516 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5518 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5521 virtual ~VmaBlockMetadata_Linear();
5522 virtual void Init(VkDeviceSize size);
5524 virtual bool Validate()
const;
5525 virtual size_t GetAllocationCount()
const;
5526 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5527 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5528 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5530 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5531 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5533 #if VMA_STATS_STRING_ENABLED 5534 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5537 virtual bool CreateAllocationRequest(
5538 uint32_t currentFrameIndex,
5539 uint32_t frameInUseCount,
5540 VkDeviceSize bufferImageGranularity,
5541 VkDeviceSize allocSize,
5542 VkDeviceSize allocAlignment,
5544 VmaSuballocationType allocType,
5545 bool canMakeOtherLost,
5547 VmaAllocationRequest* pAllocationRequest);
5549 virtual bool MakeRequestedAllocationsLost(
5550 uint32_t currentFrameIndex,
5551 uint32_t frameInUseCount,
5552 VmaAllocationRequest* pAllocationRequest);
5554 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5556 virtual VkResult CheckCorruption(
const void* pBlockData);
// (Alloc() parameters; the opening line was lost in extraction.)
5559 const VmaAllocationRequest& request,
5560 VmaSuballocationType type,
5561 VkDeviceSize allocSize,
5565 virtual void FreeAtOffset(VkDeviceSize offset);
5575 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// How the 2nd vector is being used relative to the 1st.
5577 enum SECOND_VECTOR_MODE
5579 SECOND_VECTOR_EMPTY,
5584 SECOND_VECTOR_RING_BUFFER,
5590 SECOND_VECTOR_DOUBLE_STACK,
5593 VkDeviceSize m_SumFreeSize;
5594 SuballocationVectorType m_Suballocations0, m_Suballocations1;
// Selects which of the two vectors currently plays the "1st" role.
5595 uint32_t m_1stVectorIndex;
5596 SECOND_VECTOR_MODE m_2ndVectorMode;
5598 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5599 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5600 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5601 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counts of null (freed) items kept in the vectors before compaction.
5604 size_t m_1stNullItemsBeginCount;
5606 size_t m_1stNullItemsMiddleCount;
5608 size_t m_2ndNullItemsCount;
5610 bool ShouldCompact1st()
const;
5611 void CleanupAfterFree();
5613 bool CreateAllocationRequest_LowerAddress(
5614 uint32_t currentFrameIndex,
5615 uint32_t frameInUseCount,
5616 VkDeviceSize bufferImageGranularity,
5617 VkDeviceSize allocSize,
5618 VkDeviceSize allocAlignment,
5619 VmaSuballocationType allocType,
5620 bool canMakeOtherLost,
5622 VmaAllocationRequest* pAllocationRequest);
5623 bool CreateAllocationRequest_UpperAddress(
5624 uint32_t currentFrameIndex,
5625 uint32_t frameInUseCount,
5626 VkDeviceSize bufferImageGranularity,
5627 VkDeviceSize allocSize,
5628 VkDeviceSize allocAlignment,
5629 VmaSuballocationType allocType,
5630 bool canMakeOtherLost,
5632 VmaAllocationRequest* pAllocationRequest);
// VmaBlockMetadata_Buddy — buddy-allocator strategy: binary tree of nodes whose
// sizes are powers of two, with per-level free lists.
// NOTE(review): mangled extraction; several lines (the Node struct body, free
// list struct) are missing. Code kept byte-identical — confirm against upstream.
5646 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5648 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5651 virtual ~VmaBlockMetadata_Buddy();
5652 virtual void Init(VkDeviceSize size);
5654 virtual bool Validate()
const;
5655 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
// Memory outside the power-of-two usable size counts as free for stats.
5656 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5657 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5658 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5660 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5661 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5663 #if VMA_STATS_STRING_ENABLED 5664 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5667 virtual bool CreateAllocationRequest(
5668 uint32_t currentFrameIndex,
5669 uint32_t frameInUseCount,
5670 VkDeviceSize bufferImageGranularity,
5671 VkDeviceSize allocSize,
5672 VkDeviceSize allocAlignment,
5674 VmaSuballocationType allocType,
5675 bool canMakeOtherLost,
5677 VmaAllocationRequest* pAllocationRequest);
5679 virtual bool MakeRequestedAllocationsLost(
5680 uint32_t currentFrameIndex,
5681 uint32_t frameInUseCount,
5682 VmaAllocationRequest* pAllocationRequest);
5684 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Corruption detection is not supported by this strategy.
5686 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
// (Alloc() parameters; the opening line was lost in extraction.)
5689 const VmaAllocationRequest& request,
5690 VmaSuballocationType type,
5691 VkDeviceSize allocSize,
5694 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5695 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5698 static const VkDeviceSize MIN_NODE_SIZE = 32;
5699 static const size_t MAX_LEVELS = 30;
// Accumulators used while validating the node tree.
5701 struct ValidationContext
5703 size_t calculatedAllocationCount;
5704 size_t calculatedFreeCount;
5705 VkDeviceSize calculatedSumFreeSize;
5707 ValidationContext() :
5708 calculatedAllocationCount(0),
5709 calculatedFreeCount(0),
5710 calculatedSumFreeSize(0) { }
5715 VkDeviceSize offset;
5745 VkDeviceSize m_UsableSize;
5746 uint32_t m_LevelCount;
// Doubly-linked free list per level (struct body lost in extraction).
5752 } m_FreeList[MAX_LEVELS];
5754 size_t m_AllocationCount;
5758 VkDeviceSize m_SumFreeSize;
5760 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5761 void DeleteNode(Node* node);
5762 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5763 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
// Each level halves the node size.
5764 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5766 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5767 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5771 void AddToFreeListFront(uint32_t level, Node* node);
5775 void RemoveFromFreeList(uint32_t level, Node* node);
5777 #if VMA_STATS_STRING_ENABLED 5778 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// VmaDeviceMemoryBlock — owns one VkDeviceMemory handle plus the metadata
// object that tracks suballocations inside it, and reference-counts mapping.
// NOTE(review): mangled extraction; code kept byte-identical — confirm
// against upstream.
5788 class VmaDeviceMemoryBlock
5790 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5792 VmaBlockMetadata* m_pMetadata;
// Destructor sanity checks: must be unmapped and the VkDeviceMemory released.
5796 ~VmaDeviceMemoryBlock()
5798 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5799 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// (Init parameters; `algorithm` selects the metadata strategy.)
5806 uint32_t newMemoryTypeIndex,
5807 VkDeviceMemory newMemory,
5808 VkDeviceSize newSize,
5810 uint32_t algorithm);
5814 VmaPool GetParentPool()
const {
return m_hParentPool; }
5815 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5816 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5817 uint32_t GetId()
const {
return m_Id; }
5818 void* GetMappedData()
const {
return m_pMappedData; }
5821 bool Validate()
const;
// Reference-counted map: `count` is added to m_MapCount.
5826 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Corruption-detection helpers: write/check magic values around an allocation.
5829 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5830 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5832 VkResult BindBufferMemory(
5836 VkResult BindImageMemory(
5843 uint32_t m_MemoryTypeIndex;
5845 VkDeviceMemory m_hMemory;
// Map ref-count and cached mapped pointer (protected by a mutex in upstream;
// the member declaration is not visible here — confirm).
5853 uint32_t m_MapCount;
5854 void* m_pMappedData;
// Strict-weak ordering of raw pointers, for pointer-keyed sorted containers.
// NOTE(review): the comparison body was dropped by the mangled extraction;
// restored as `lhs < rhs` — confirm against upstream.
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};
const 5865 struct VmaDefragmentationMove
5867 size_t srcBlockIndex;
5868 size_t dstBlockIndex;
5869 VkDeviceSize srcOffset;
5870 VkDeviceSize dstOffset;
5874 class VmaDefragmentationAlgorithm;
/* Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a
specific Vulkan memory type — the backing store of a custom pool or of one of
the allocator's default pools.
Synchronized internally with m_Mutex.
NOTE(review): mangled extraction — upstream line numbers fused into the code;
constructor header and Allocate()/Defragment() opening lines are missing.
Code kept byte-identical; confirm against upstream. */
5882 struct VmaBlockVector
5884 VMA_CLASS_NO_COPY(VmaBlockVector)
// (Constructor parameters.)
5889 uint32_t memoryTypeIndex,
5890 VkDeviceSize preferredBlockSize,
5891 size_t minBlockCount,
5892 size_t maxBlockCount,
5893 VkDeviceSize bufferImageGranularity,
5894 uint32_t frameInUseCount,
5896 bool explicitBlockSize,
5897 uint32_t algorithm);
5900 VkResult CreateMinBlocks();
// --- Accessors ---
5902 VmaPool GetParentPool()
const {
return m_hParentPool; }
5903 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5904 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5905 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5906 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5907 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5911 bool IsEmpty()
const {
return m_Blocks.empty(); }
5912 bool IsCorruptionDetectionEnabled()
const;
// (Allocate() parameters; the opening line was lost in extraction.)
5915 uint32_t currentFrameIndex,
5917 VkDeviceSize alignment,
5919 VmaSuballocationType suballocType,
5920 size_t allocationCount,
5929 #if VMA_STATS_STRING_ENABLED 5930 void PrintDetailedMap(
class VmaJsonWriter& json);
5933 void MakePoolAllocationsLost(
5934 uint32_t currentFrameIndex,
5935 size_t* pLostAllocationCount);
5936 VkResult CheckCorruption();
// (Defragment() parameters; separate CPU/GPU move budgets.)
5940 class VmaBlockVectorDefragmentationContext* pCtx,
5942 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5943 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5944 VkCommandBuffer commandBuffer);
5945 void DefragmentationEnd(
5946 class VmaBlockVectorDefragmentationContext* pCtx,
5952 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5953 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5954 size_t CalcAllocationCount()
const;
5955 bool IsBufferImageGranularityConflictPossible()
const;
5958 friend class VmaDefragmentationAlgorithm_Generic;
// --- Data members (configuration is immutable after construction) ---
5962 const uint32_t m_MemoryTypeIndex;
5963 const VkDeviceSize m_PreferredBlockSize;
5964 const size_t m_MinBlockCount;
5965 const size_t m_MaxBlockCount;
5966 const VkDeviceSize m_BufferImageGranularity;
5967 const uint32_t m_FrameInUseCount;
5968 const bool m_IsCustomPool;
5969 const bool m_ExplicitBlockSize;
5970 const uint32_t m_Algorithm;
5974 bool m_HasEmptyBlock;
// Guards m_Blocks and related mutable state.
5975 VMA_RW_MUTEX m_Mutex;
5977 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5978 uint32_t m_NextBlockId;
5980 VkDeviceSize CalcMaxBlockSize()
const;
5983 void Remove(VmaDeviceMemoryBlock* pBlock);
// Performs single step in sorting m_Blocks (keeps them sorted incrementally).
5987 void IncrementallySortBlocks();
5989 VkResult AllocatePage(
5990 uint32_t currentFrameIndex,
5992 VkDeviceSize alignment,
5994 VmaSuballocationType suballocType,
5998 VkResult AllocateFromBlock(
5999 VmaDeviceMemoryBlock* pBlock,
6000 uint32_t currentFrameIndex,
6002 VkDeviceSize alignment,
6005 VmaSuballocationType suballocType,
6009 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Defragmentation move execution: CPU memcpy path vs. GPU command-buffer path.
6012 void ApplyDefragmentationMovesCpu(
6013 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6014 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
6016 void ApplyDefragmentationMovesGpu(
6017 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6018 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6019 VkCommandBuffer commandBuffer);
// (Interior of class VmaPool_T — its class-header line was dropped by the
// mangled extraction. A custom pool: wraps one VmaBlockVector plus an id.)
6030 VMA_CLASS_NO_COPY(VmaPool_T)
6032 VmaBlockVector m_BlockVector;
// (Constructor parameter tail.)
6037 VkDeviceSize preferredBlockSize);
6040 uint32_t GetId()
const {
return m_Id; }
// Id may be assigned exactly once (asserted).
6041 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
// Abstract interface for a defragmentation algorithm over one VmaBlockVector:
// collect allocations (AddAllocation/AddAll), then compute moves (Defragment).
// NOTE(review): mangled extraction; code kept byte-identical — confirm
// against upstream.
6043 #if VMA_STATS_STRING_ENABLED 6058 class VmaDefragmentationAlgorithm
6060 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
6062 VmaDefragmentationAlgorithm(
6064 VmaBlockVector* pBlockVector,
6065 uint32_t currentFrameIndex) :
6066 m_hAllocator(hAllocator),
6067 m_pBlockVector(pBlockVector),
6068 m_CurrentFrameIndex(currentFrameIndex)
6071 virtual ~VmaDefragmentationAlgorithm()
6075 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
6076 virtual void AddAll() = 0;
// Appends planned moves to `moves`, bounded by byte/count budgets.
6078 virtual VkResult Defragment(
6079 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6080 VkDeviceSize maxBytesToMove,
6081 uint32_t maxAllocationsToMove) = 0;
6083 virtual VkDeviceSize GetBytesMoved()
const = 0;
6084 virtual uint32_t GetAllocationsMoved()
const = 0;
6088 VmaBlockVector*
const m_pBlockVector;
6089 const uint32_t m_CurrentFrameIndex;
// One tracked allocation plus the caller's "was it moved?" output flag.
6091 struct AllocationInfo
6094 VkBool32* m_pChanged;
6097 m_hAllocation(VK_NULL_HANDLE),
6098 m_pChanged(VMA_NULL)
6102 m_hAllocation(hAlloc),
6103 m_pChanged(pChanged)
// Generic defragmentation: tracks per-block allocation lists, sorts blocks and
// allocations, and moves allocations between blocks in rounds.
// NOTE(review): mangled extraction; code kept byte-identical — confirm
// against upstream.
6109 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6111 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6113 VmaDefragmentationAlgorithm_Generic(
6115 VmaBlockVector* pBlockVector,
6116 uint32_t currentFrameIndex,
6117 bool overlappingMoveSupported);
6118 virtual ~VmaDefragmentationAlgorithm_Generic();
6120 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6121 virtual void AddAll() { m_AllAllocations =
true; }
6123 virtual VkResult Defragment(
6124 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6125 VkDeviceSize maxBytesToMove,
6126 uint32_t maxAllocationsToMove);
6128 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6129 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6132 uint32_t m_AllocationCount;
6133 bool m_AllAllocations;
6135 VkDeviceSize m_BytesMoved;
6136 uint32_t m_AllocationsMoved;
// Comparators for sorting tracked allocations (largest-first strategies).
6138 struct AllocationInfoSizeGreater
6140 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6142 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6146 struct AllocationInfoOffsetGreater
6148 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6150 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
// Per-block bookkeeping: which allocations in this block are candidates to move.
6156 size_t m_OriginalBlockIndex;
6157 VmaDeviceMemoryBlock* m_pBlock;
6158 bool m_HasNonMovableAllocations;
6159 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6161 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6162 m_OriginalBlockIndex(SIZE_MAX),
6164 m_HasNonMovableAllocations(true),
6165 m_Allocations(pAllocationCallbacks)
// A block has non-movable allocations if it holds more allocations than were
// registered for defragmentation.
6169 void CalcHasNonMovableAllocations()
6171 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6172 const size_t defragmentAllocCount = m_Allocations.size();
6173 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6176 void SortAllocationsBySizeDescending()
6178 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6181 void SortAllocationsByOffsetDescending()
6183 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
// Orders BlockInfo pointers by their underlying block pointer, with a
// heterogeneous overload for searching by raw block pointer.
6187 struct BlockPointerLess
6189 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6191 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6193 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6195 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Preference order for move destinations: blocks with non-movable allocations
// first, then by free space.
6201 struct BlockInfoCompareMoveDestination
6203 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6205 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6209 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6213 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6221 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6222 BlockInfoVector m_Blocks;
6224 VkResult DefragmentRound(
6225 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6226 VkDeviceSize maxBytesToMove,
6227 uint32_t maxAllocationsToMove);
6229 size_t CalcBlocksWithNonMovableCount()
const;
6231 static bool MoveMakesSense(
6232 size_t dstBlockIndex, VkDeviceSize dstOffset,
6233 size_t srcBlockIndex, VkDeviceSize srcOffset);
6236 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6238 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6240 VmaDefragmentationAlgorithm_Fast(
6242 VmaBlockVector* pBlockVector,
6243 uint32_t currentFrameIndex,
6244 bool overlappingMoveSupported);
6245 virtual ~VmaDefragmentationAlgorithm_Fast();
6247 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6248 virtual void AddAll() { m_AllAllocations =
true; }
6250 virtual VkResult Defragment(
6251 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6252 VkDeviceSize maxBytesToMove,
6253 uint32_t maxAllocationsToMove);
6255 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6256 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6261 size_t origBlockIndex;
6264 class FreeSpaceDatabase
6270 s.blockInfoIndex = SIZE_MAX;
6271 for(
size_t i = 0; i < MAX_COUNT; ++i)
6273 m_FreeSpaces[i] = s;
6277 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6279 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6285 size_t bestIndex = SIZE_MAX;
6286 for(
size_t i = 0; i < MAX_COUNT; ++i)
6289 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6294 if(m_FreeSpaces[i].size < size &&
6295 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6301 if(bestIndex != SIZE_MAX)
6303 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6304 m_FreeSpaces[bestIndex].offset = offset;
6305 m_FreeSpaces[bestIndex].size = size;
6309 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6310 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6312 size_t bestIndex = SIZE_MAX;
6313 VkDeviceSize bestFreeSpaceAfter = 0;
6314 for(
size_t i = 0; i < MAX_COUNT; ++i)
6317 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6319 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6321 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6323 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6325 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6328 bestFreeSpaceAfter = freeSpaceAfter;
6334 if(bestIndex != SIZE_MAX)
6336 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6337 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6339 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6342 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6343 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6344 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6349 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6359 static const size_t MAX_COUNT = 4;
6363 size_t blockInfoIndex;
6364 VkDeviceSize offset;
6366 } m_FreeSpaces[MAX_COUNT];
6369 const bool m_OverlappingMoveSupported;
6371 uint32_t m_AllocationCount;
6372 bool m_AllAllocations;
6374 VkDeviceSize m_BytesMoved;
6375 uint32_t m_AllocationsMoved;
6377 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6379 void PreprocessMetadata();
6380 void PostprocessMetadata();
6381 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
6384 struct VmaBlockDefragmentationContext
6388 BLOCK_FLAG_USED = 0x00000001,
6393 VmaBlockDefragmentationContext() :
6395 hBuffer(VK_NULL_HANDLE)
6400 class VmaBlockVectorDefragmentationContext
6402 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6406 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6408 VmaBlockVectorDefragmentationContext(
6411 VmaBlockVector* pBlockVector,
6412 uint32_t currFrameIndex,
6414 ~VmaBlockVectorDefragmentationContext();
6416 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6417 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6418 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6420 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6421 void AddAll() { m_AllAllocations =
true; }
6423 void Begin(
bool overlappingMoveSupported);
6430 VmaBlockVector*
const m_pBlockVector;
6431 const uint32_t m_CurrFrameIndex;
6432 const uint32_t m_AlgorithmFlags;
6434 VmaDefragmentationAlgorithm* m_pAlgorithm;
6442 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6443 bool m_AllAllocations;
6446 struct VmaDefragmentationContext_T
6449 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6451 VmaDefragmentationContext_T(
6453 uint32_t currFrameIndex,
6456 ~VmaDefragmentationContext_T();
6458 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6459 void AddAllocations(
6460 uint32_t allocationCount,
6462 VkBool32* pAllocationsChanged);
6470 VkResult Defragment(
6471 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6472 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6477 const uint32_t m_CurrFrameIndex;
6478 const uint32_t m_Flags;
6481 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6483 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
6486 #if VMA_RECORDING_ENABLED 6493 void WriteConfiguration(
6494 const VkPhysicalDeviceProperties& devProps,
6495 const VkPhysicalDeviceMemoryProperties& memProps,
6496 bool dedicatedAllocationExtensionEnabled);
6499 void RecordCreateAllocator(uint32_t frameIndex);
6500 void RecordDestroyAllocator(uint32_t frameIndex);
6501 void RecordCreatePool(uint32_t frameIndex,
6504 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6505 void RecordAllocateMemory(uint32_t frameIndex,
6506 const VkMemoryRequirements& vkMemReq,
6509 void RecordAllocateMemoryPages(uint32_t frameIndex,
6510 const VkMemoryRequirements& vkMemReq,
6512 uint64_t allocationCount,
6514 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6515 const VkMemoryRequirements& vkMemReq,
6516 bool requiresDedicatedAllocation,
6517 bool prefersDedicatedAllocation,
6520 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6521 const VkMemoryRequirements& vkMemReq,
6522 bool requiresDedicatedAllocation,
6523 bool prefersDedicatedAllocation,
6526 void RecordFreeMemory(uint32_t frameIndex,
6528 void RecordFreeMemoryPages(uint32_t frameIndex,
6529 uint64_t allocationCount,
6531 void RecordResizeAllocation(
6532 uint32_t frameIndex,
6534 VkDeviceSize newSize);
6535 void RecordSetAllocationUserData(uint32_t frameIndex,
6537 const void* pUserData);
6538 void RecordCreateLostAllocation(uint32_t frameIndex,
6540 void RecordMapMemory(uint32_t frameIndex,
6542 void RecordUnmapMemory(uint32_t frameIndex,
6544 void RecordFlushAllocation(uint32_t frameIndex,
6545 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6546 void RecordInvalidateAllocation(uint32_t frameIndex,
6547 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6548 void RecordCreateBuffer(uint32_t frameIndex,
6549 const VkBufferCreateInfo& bufCreateInfo,
6552 void RecordCreateImage(uint32_t frameIndex,
6553 const VkImageCreateInfo& imageCreateInfo,
6556 void RecordDestroyBuffer(uint32_t frameIndex,
6558 void RecordDestroyImage(uint32_t frameIndex,
6560 void RecordTouchAllocation(uint32_t frameIndex,
6562 void RecordGetAllocationInfo(uint32_t frameIndex,
6564 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6566 void RecordDefragmentationBegin(uint32_t frameIndex,
6569 void RecordDefragmentationEnd(uint32_t frameIndex,
6579 class UserDataString
6583 const char* GetString()
const {
return m_Str; }
6593 VMA_MUTEX m_FileMutex;
6595 int64_t m_StartCounter;
6597 void GetBasicParams(CallParams& outParams);
6600 template<
typename T>
6601 void PrintPointerList(uint64_t count,
const T* pItems)
6605 fprintf(m_File,
"%p", pItems[0]);
6606 for(uint64_t i = 1; i < count; ++i)
6608 fprintf(m_File,
" %p", pItems[i]);
6613 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
6617 #endif // #if VMA_RECORDING_ENABLED 6622 class VmaAllocationObjectAllocator
6624 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
6626 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
6633 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
6637 struct VmaAllocator_T
6639 VMA_CLASS_NO_COPY(VmaAllocator_T)
6642 bool m_UseKhrDedicatedAllocation;
6644 bool m_AllocationCallbacksSpecified;
6645 VkAllocationCallbacks m_AllocationCallbacks;
6647 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
6650 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6651 VMA_MUTEX m_HeapSizeLimitMutex;
6653 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6654 VkPhysicalDeviceMemoryProperties m_MemProps;
6657 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
6660 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6661 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6662 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
6668 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6670 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6674 return m_VulkanFunctions;
6677 VkDeviceSize GetBufferImageGranularity()
const 6680 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6681 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6684 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6685 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6687 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6689 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6690 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6693 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6695 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6696 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6699 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6701 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6702 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6703 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6706 bool IsIntegratedGpu()
const 6708 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6711 #if VMA_RECORDING_ENABLED 6712 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6715 void GetBufferMemoryRequirements(
6717 VkMemoryRequirements& memReq,
6718 bool& requiresDedicatedAllocation,
6719 bool& prefersDedicatedAllocation)
const;
6720 void GetImageMemoryRequirements(
6722 VkMemoryRequirements& memReq,
6723 bool& requiresDedicatedAllocation,
6724 bool& prefersDedicatedAllocation)
const;
6727 VkResult AllocateMemory(
6728 const VkMemoryRequirements& vkMemReq,
6729 bool requiresDedicatedAllocation,
6730 bool prefersDedicatedAllocation,
6731 VkBuffer dedicatedBuffer,
6732 VkImage dedicatedImage,
6734 VmaSuballocationType suballocType,
6735 size_t allocationCount,
6740 size_t allocationCount,
6743 VkResult ResizeAllocation(
6745 VkDeviceSize newSize);
6747 void CalculateStats(
VmaStats* pStats);
6749 #if VMA_STATS_STRING_ENABLED 6750 void PrintDetailedMap(
class VmaJsonWriter& json);
6753 VkResult DefragmentationBegin(
6757 VkResult DefragmentationEnd(
6764 void DestroyPool(
VmaPool pool);
6767 void SetCurrentFrameIndex(uint32_t frameIndex);
6768 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6770 void MakePoolAllocationsLost(
6772 size_t* pLostAllocationCount);
6773 VkResult CheckPoolCorruption(
VmaPool hPool);
6774 VkResult CheckCorruption(uint32_t memoryTypeBits);
6778 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6779 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6784 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6785 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6787 void FlushOrInvalidateAllocation(
6789 VkDeviceSize offset, VkDeviceSize size,
6790 VMA_CACHE_OPERATION op);
6792 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6798 uint32_t GetGpuDefragmentationMemoryTypeBits();
6801 VkDeviceSize m_PreferredLargeHeapBlockSize;
6803 VkPhysicalDevice m_PhysicalDevice;
6804 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6805 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
6807 VMA_RW_MUTEX m_PoolsMutex;
6809 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6810 uint32_t m_NextPoolId;
6814 #if VMA_RECORDING_ENABLED 6815 VmaRecorder* m_pRecorder;
6820 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
6822 VkResult AllocateMemoryOfType(
6824 VkDeviceSize alignment,
6825 bool dedicatedAllocation,
6826 VkBuffer dedicatedBuffer,
6827 VkImage dedicatedImage,
6829 uint32_t memTypeIndex,
6830 VmaSuballocationType suballocType,
6831 size_t allocationCount,
6835 VkResult AllocateDedicatedMemoryPage(
6837 VmaSuballocationType suballocType,
6838 uint32_t memTypeIndex,
6839 const VkMemoryAllocateInfo& allocInfo,
6841 bool isUserDataString,
6846 VkResult AllocateDedicatedMemory(
6848 VmaSuballocationType suballocType,
6849 uint32_t memTypeIndex,
6851 bool isUserDataString,
6853 VkBuffer dedicatedBuffer,
6854 VkImage dedicatedImage,
6855 size_t allocationCount,
6865 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
6871 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6873 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6876 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6878 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
6881 template<
typename T>
6884 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6887 template<
typename T>
6888 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6890 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
6893 template<
typename T>
6894 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6899 VmaFree(hAllocator, ptr);
6903 template<
typename T>
6904 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6908 for(
size_t i = count; i--; )
6910 VmaFree(hAllocator, ptr);
6917 #if VMA_STATS_STRING_ENABLED 6919 class VmaStringBuilder
6922 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6923 size_t GetLength()
const {
return m_Data.size(); }
6924 const char* GetData()
const {
return m_Data.data(); }
6926 void Add(
char ch) { m_Data.push_back(ch); }
6927 void Add(
const char* pStr);
6928 void AddNewLine() { Add(
'\n'); }
6929 void AddNumber(uint32_t num);
6930 void AddNumber(uint64_t num);
6931 void AddPointer(
const void* ptr);
6934 VmaVector< char, VmaStlAllocator<char> > m_Data;
6937 void VmaStringBuilder::Add(
const char* pStr)
6939 const size_t strLen = strlen(pStr);
6942 const size_t oldCount = m_Data.size();
6943 m_Data.resize(oldCount + strLen);
6944 memcpy(m_Data.data() + oldCount, pStr, strLen);
6948 void VmaStringBuilder::AddNumber(uint32_t num)
6951 VmaUint32ToStr(buf,
sizeof(buf), num);
6955 void VmaStringBuilder::AddNumber(uint64_t num)
6958 VmaUint64ToStr(buf,
sizeof(buf), num);
6962 void VmaStringBuilder::AddPointer(
const void* ptr)
6965 VmaPtrToStr(buf,
sizeof(buf), ptr);
6969 #endif // #if VMA_STATS_STRING_ENABLED 6974 #if VMA_STATS_STRING_ENABLED 6978 VMA_CLASS_NO_COPY(VmaJsonWriter)
6980 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
6983 void BeginObject(
bool singleLine =
false);
6986 void BeginArray(
bool singleLine =
false);
6989 void WriteString(
const char* pStr);
6990 void BeginString(
const char* pStr = VMA_NULL);
6991 void ContinueString(
const char* pStr);
6992 void ContinueString(uint32_t n);
6993 void ContinueString(uint64_t n);
6994 void ContinueString_Pointer(
const void* ptr);
6995 void EndString(
const char* pStr = VMA_NULL);
6997 void WriteNumber(uint32_t n);
6998 void WriteNumber(uint64_t n);
6999 void WriteBool(
bool b);
7003 static const char*
const INDENT;
7005 enum COLLECTION_TYPE
7007 COLLECTION_TYPE_OBJECT,
7008 COLLECTION_TYPE_ARRAY,
7012 COLLECTION_TYPE type;
7013 uint32_t valueCount;
7014 bool singleLineMode;
7017 VmaStringBuilder& m_SB;
7018 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
7019 bool m_InsideString;
7021 void BeginValue(
bool isString);
7022 void WriteIndent(
bool oneLess =
false);
7025 const char*
const VmaJsonWriter::INDENT =
" ";
7027 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
7029 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
7030 m_InsideString(false)
7034 VmaJsonWriter::~VmaJsonWriter()
7036 VMA_ASSERT(!m_InsideString);
7037 VMA_ASSERT(m_Stack.empty());
7040 void VmaJsonWriter::BeginObject(
bool singleLine)
7042 VMA_ASSERT(!m_InsideString);
7048 item.type = COLLECTION_TYPE_OBJECT;
7049 item.valueCount = 0;
7050 item.singleLineMode = singleLine;
7051 m_Stack.push_back(item);
7054 void VmaJsonWriter::EndObject()
7056 VMA_ASSERT(!m_InsideString);
7061 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
7065 void VmaJsonWriter::BeginArray(
bool singleLine)
7067 VMA_ASSERT(!m_InsideString);
7073 item.type = COLLECTION_TYPE_ARRAY;
7074 item.valueCount = 0;
7075 item.singleLineMode = singleLine;
7076 m_Stack.push_back(item);
7079 void VmaJsonWriter::EndArray()
7081 VMA_ASSERT(!m_InsideString);
7086 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
7090 void VmaJsonWriter::WriteString(
const char* pStr)
7096 void VmaJsonWriter::BeginString(
const char* pStr)
7098 VMA_ASSERT(!m_InsideString);
7102 m_InsideString =
true;
7103 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7105 ContinueString(pStr);
7109 void VmaJsonWriter::ContinueString(
const char* pStr)
7111 VMA_ASSERT(m_InsideString);
7113 const size_t strLen = strlen(pStr);
7114 for(
size_t i = 0; i < strLen; ++i)
7147 VMA_ASSERT(0 &&
"Character not currently supported.");
7153 void VmaJsonWriter::ContinueString(uint32_t n)
7155 VMA_ASSERT(m_InsideString);
7159 void VmaJsonWriter::ContinueString(uint64_t n)
7161 VMA_ASSERT(m_InsideString);
7165 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7167 VMA_ASSERT(m_InsideString);
7168 m_SB.AddPointer(ptr);
7171 void VmaJsonWriter::EndString(
const char* pStr)
7173 VMA_ASSERT(m_InsideString);
7174 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7176 ContinueString(pStr);
7179 m_InsideString =
false;
7182 void VmaJsonWriter::WriteNumber(uint32_t n)
7184 VMA_ASSERT(!m_InsideString);
7189 void VmaJsonWriter::WriteNumber(uint64_t n)
7191 VMA_ASSERT(!m_InsideString);
7196 void VmaJsonWriter::WriteBool(
bool b)
7198 VMA_ASSERT(!m_InsideString);
7200 m_SB.Add(b ?
"true" :
"false");
7203 void VmaJsonWriter::WriteNull()
7205 VMA_ASSERT(!m_InsideString);
7210 void VmaJsonWriter::BeginValue(
bool isString)
7212 if(!m_Stack.empty())
7214 StackItem& currItem = m_Stack.back();
7215 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7216 currItem.valueCount % 2 == 0)
7218 VMA_ASSERT(isString);
7221 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7222 currItem.valueCount % 2 != 0)
7226 else if(currItem.valueCount > 0)
7235 ++currItem.valueCount;
7239 void VmaJsonWriter::WriteIndent(
bool oneLess)
7241 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7245 size_t count = m_Stack.size();
7246 if(count > 0 && oneLess)
7250 for(
size_t i = 0; i < count; ++i)
7257 #endif // #if VMA_STATS_STRING_ENABLED 7261 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7263 if(IsUserDataString())
7265 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7267 FreeUserDataString(hAllocator);
7269 if(pUserData != VMA_NULL)
7271 const char*
const newStrSrc = (
char*)pUserData;
7272 const size_t newStrLen = strlen(newStrSrc);
7273 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7274 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7275 m_pUserData = newStrDst;
7280 m_pUserData = pUserData;
7284 void VmaAllocation_T::ChangeBlockAllocation(
7286 VmaDeviceMemoryBlock* block,
7287 VkDeviceSize offset)
7289 VMA_ASSERT(block != VMA_NULL);
7290 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7293 if(block != m_BlockAllocation.m_Block)
7295 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7296 if(IsPersistentMap())
7298 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7299 block->Map(hAllocator, mapRefCount, VMA_NULL);
7302 m_BlockAllocation.m_Block = block;
7303 m_BlockAllocation.m_Offset = offset;
7306 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7308 VMA_ASSERT(newSize > 0);
7312 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7314 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7315 m_BlockAllocation.m_Offset = newOffset;
7318 VkDeviceSize VmaAllocation_T::GetOffset()
const 7322 case ALLOCATION_TYPE_BLOCK:
7323 return m_BlockAllocation.m_Offset;
7324 case ALLOCATION_TYPE_DEDICATED:
7332 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7336 case ALLOCATION_TYPE_BLOCK:
7337 return m_BlockAllocation.m_Block->GetDeviceMemory();
7338 case ALLOCATION_TYPE_DEDICATED:
7339 return m_DedicatedAllocation.m_hMemory;
7342 return VK_NULL_HANDLE;
7346 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7350 case ALLOCATION_TYPE_BLOCK:
7351 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7352 case ALLOCATION_TYPE_DEDICATED:
7353 return m_DedicatedAllocation.m_MemoryTypeIndex;
7360 void* VmaAllocation_T::GetMappedData()
const 7364 case ALLOCATION_TYPE_BLOCK:
7367 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7368 VMA_ASSERT(pBlockData != VMA_NULL);
7369 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7376 case ALLOCATION_TYPE_DEDICATED:
7377 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7378 return m_DedicatedAllocation.m_pMappedData;
7385 bool VmaAllocation_T::CanBecomeLost()
const 7389 case ALLOCATION_TYPE_BLOCK:
7390 return m_BlockAllocation.m_CanBecomeLost;
7391 case ALLOCATION_TYPE_DEDICATED:
7399 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7401 VMA_ASSERT(CanBecomeLost());
7407 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7410 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7415 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7421 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
7431 #if VMA_STATS_STRING_ENABLED 7434 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
7443 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7445 json.WriteString(
"Type");
7446 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7448 json.WriteString(
"Size");
7449 json.WriteNumber(m_Size);
7451 if(m_pUserData != VMA_NULL)
7453 json.WriteString(
"UserData");
7454 if(IsUserDataString())
7456 json.WriteString((
const char*)m_pUserData);
7461 json.ContinueString_Pointer(m_pUserData);
7466 json.WriteString(
"CreationFrameIndex");
7467 json.WriteNumber(m_CreationFrameIndex);
7469 json.WriteString(
"LastUseFrameIndex");
7470 json.WriteNumber(GetLastUseFrameIndex());
7472 if(m_BufferImageUsage != 0)
7474 json.WriteString(
"Usage");
7475 json.WriteNumber(m_BufferImageUsage);
7481 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7483 VMA_ASSERT(IsUserDataString());
7484 if(m_pUserData != VMA_NULL)
7486 char*
const oldStr = (
char*)m_pUserData;
7487 const size_t oldStrLen = strlen(oldStr);
7488 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7489 m_pUserData = VMA_NULL;
7493 void VmaAllocation_T::BlockAllocMap()
7495 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7497 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7503 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
7507 void VmaAllocation_T::BlockAllocUnmap()
7509 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7511 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7517 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
7521 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7523 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7527 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7529 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7530 *ppData = m_DedicatedAllocation.m_pMappedData;
7536 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7537 return VK_ERROR_MEMORY_MAP_FAILED;
7542 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7543 hAllocator->m_hDevice,
7544 m_DedicatedAllocation.m_hMemory,
7549 if(result == VK_SUCCESS)
7551 m_DedicatedAllocation.m_pMappedData = *ppData;
7558 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7560 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7562 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7567 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7568 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7569 hAllocator->m_hDevice,
7570 m_DedicatedAllocation.m_hMemory);
7575 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
7579 #if VMA_STATS_STRING_ENABLED 7581 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7585 json.WriteString(
"Blocks");
7588 json.WriteString(
"Allocations");
7591 json.WriteString(
"UnusedRanges");
7594 json.WriteString(
"UsedBytes");
7597 json.WriteString(
"UnusedBytes");
7602 json.WriteString(
"AllocationSize");
7603 json.BeginObject(
true);
7604 json.WriteString(
"Min");
7606 json.WriteString(
"Avg");
7608 json.WriteString(
"Max");
7615 json.WriteString(
"UnusedRangeSize");
7616 json.BeginObject(
true);
7617 json.WriteString(
"Min");
7619 json.WriteString(
"Avg");
7621 json.WriteString(
"Max");
7629 #endif // #if VMA_STATS_STRING_ENABLED 7631 struct VmaSuballocationItemSizeLess
7634 const VmaSuballocationList::iterator lhs,
7635 const VmaSuballocationList::iterator rhs)
const 7637 return lhs->size < rhs->size;
7640 const VmaSuballocationList::iterator lhs,
7641 VkDeviceSize rhsSize)
const 7643 return lhs->size < rhsSize;
7651 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7653 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
7657 #if VMA_STATS_STRING_ENABLED 7659 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7660 VkDeviceSize unusedBytes,
7661 size_t allocationCount,
7662 size_t unusedRangeCount)
const 7666 json.WriteString(
"TotalBytes");
7667 json.WriteNumber(GetSize());
7669 json.WriteString(
"UnusedBytes");
7670 json.WriteNumber(unusedBytes);
7672 json.WriteString(
"Allocations");
7673 json.WriteNumber((uint64_t)allocationCount);
7675 json.WriteString(
"UnusedRanges");
7676 json.WriteNumber((uint64_t)unusedRangeCount);
7678 json.WriteString(
"Suballocations");
7682 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7683 VkDeviceSize offset,
7686 json.BeginObject(
true);
7688 json.WriteString(
"Offset");
7689 json.WriteNumber(offset);
7691 hAllocation->PrintParameters(json);
7696 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7697 VkDeviceSize offset,
7698 VkDeviceSize size)
const 7700 json.BeginObject(
true);
7702 json.WriteString(
"Offset");
7703 json.WriteNumber(offset);
7705 json.WriteString(
"Type");
7706 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7708 json.WriteString(
"Size");
7709 json.WriteNumber(size);
7714 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7720 #endif // #if VMA_STATS_STRING_ENABLED 7725 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7726 VmaBlockMetadata(hAllocator),
7729 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7730 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7734 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
7738 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7740 VmaBlockMetadata::Init(size);
7743 m_SumFreeSize = size;
7745 VmaSuballocation suballoc = {};
7746 suballoc.offset = 0;
7747 suballoc.size = size;
7748 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7749 suballoc.hAllocation = VK_NULL_HANDLE;
7751 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7752 m_Suballocations.push_back(suballoc);
7753 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7755 m_FreeSuballocationsBySize.push_back(suballocItem);
7758 bool VmaBlockMetadata_Generic::Validate()
const 7760 VMA_VALIDATE(!m_Suballocations.empty());
7763 VkDeviceSize calculatedOffset = 0;
7765 uint32_t calculatedFreeCount = 0;
7767 VkDeviceSize calculatedSumFreeSize = 0;
7770 size_t freeSuballocationsToRegister = 0;
7772 bool prevFree =
false;
7774 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7775 suballocItem != m_Suballocations.cend();
7778 const VmaSuballocation& subAlloc = *suballocItem;
7781 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7783 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
7785 VMA_VALIDATE(!prevFree || !currFree);
7787 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7791 calculatedSumFreeSize += subAlloc.size;
7792 ++calculatedFreeCount;
7793 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7795 ++freeSuballocationsToRegister;
7799 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7803 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7804 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
7807 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7810 calculatedOffset += subAlloc.size;
7811 prevFree = currFree;
7816 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7818 VkDeviceSize lastSize = 0;
7819 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7821 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7824 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7826 VMA_VALIDATE(suballocItem->size >= lastSize);
7828 lastSize = suballocItem->size;
7832 VMA_VALIDATE(ValidateFreeSuballocationList());
7833 VMA_VALIDATE(calculatedOffset == GetSize());
7834 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7835 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
7840 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7842 if(!m_FreeSuballocationsBySize.empty())
7844 return m_FreeSuballocationsBySize.back()->size;
7852 bool VmaBlockMetadata_Generic::IsEmpty()
const 7854 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
7857 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7861 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7873 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7874 suballocItem != m_Suballocations.cend();
7877 const VmaSuballocation& suballoc = *suballocItem;
7878 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7891 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7893 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7895 inoutStats.
size += GetSize();
7902 #if VMA_STATS_STRING_ENABLED 7904 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7906 PrintDetailedMap_Begin(json,
7908 m_Suballocations.size() - (size_t)m_FreeCount,
7912 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7913 suballocItem != m_Suballocations.cend();
7914 ++suballocItem, ++i)
7916 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7918 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7922 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7926 PrintDetailedMap_End(json);
// Searches this block for a place to put a new allocation of allocSize /
// allocAlignment / allocType. On success fills *pAllocationRequest. Three
// search paths: best-fit via the size-sorted free list, min-offset linear
// scan, or (worst-fit) largest-first; optionally considers making existing
// allocations "lost" when canMakeOtherLost is set.
// NOTE(review): extraction-garbled block — line numbers fused into the text,
// several arguments/braces/returns missing (e.g. `strategy` parameter and
// the final `return false;`). Restore from the canonical source before use.
7929 #endif // #if VMA_STATS_STRING_ENABLED 7931 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7932 uint32_t currentFrameIndex,
7933 uint32_t frameInUseCount,
7934 VkDeviceSize bufferImageGranularity,
7935 VkDeviceSize allocSize,
7936 VkDeviceSize allocAlignment,
7938 VmaSuballocationType allocType,
7939 bool canMakeOtherLost,
7941 VmaAllocationRequest* pAllocationRequest)
7943 VMA_ASSERT(allocSize > 0);
7944 VMA_ASSERT(!upperAddress);
7945 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7946 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7947 VMA_HEAVY_ASSERT(Validate());
7949 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Early out: without the option to make other allocations lost, there must
// be enough total free space (including both debug margins).
7952 if(canMakeOtherLost ==
false &&
7953 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7959 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7960 if(freeSuballocCount > 0)
// Best-fit: binary search the size-sorted free list for the first free
// range that is large enough.
7965 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7966 m_FreeSuballocationsBySize.data(),
7967 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7968 allocSize + 2 * VMA_DEBUG_MARGIN,
7969 VmaSuballocationItemSizeLess());
7970 size_t index = it - m_FreeSuballocationsBySize.data();
7971 for(; index < freeSuballocCount; ++index)
7976 bufferImageGranularity,
7980 m_FreeSuballocationsBySize[index],
7982 &pAllocationRequest->offset,
7983 &pAllocationRequest->itemsToMakeLostCount,
7984 &pAllocationRequest->sumFreeSize,
7985 &pAllocationRequest->sumItemSize))
7987 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Min-offset strategy: scan suballocations in address order instead.
7992 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7994 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7995 it != m_Suballocations.end();
7998 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
8001 bufferImageGranularity,
8007 &pAllocationRequest->offset,
8008 &pAllocationRequest->itemsToMakeLostCount,
8009 &pAllocationRequest->sumFreeSize,
8010 &pAllocationRequest->sumItemSize))
8012 pAllocationRequest->item = it;
// Worst-fit: iterate the size-sorted free list from largest down.
8020 for(
size_t index = freeSuballocCount; index--; )
8025 bufferImageGranularity,
8029 m_FreeSuballocationsBySize[index],
8031 &pAllocationRequest->offset,
8032 &pAllocationRequest->itemsToMakeLostCount,
8033 &pAllocationRequest->sumFreeSize,
8034 &pAllocationRequest->sumItemSize))
8036 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Fallback: brute-force search allowing existing allocations to be made
// lost; keeps the candidate with the lowest CalcCost().
8043 if(canMakeOtherLost)
8048 VmaAllocationRequest tmpAllocRequest = {};
8049 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
8050 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
8051 suballocIt != m_Suballocations.end();
8054 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
8055 suballocIt->hAllocation->CanBecomeLost())
8060 bufferImageGranularity,
8066 &tmpAllocRequest.offset,
8067 &tmpAllocRequest.itemsToMakeLostCount,
8068 &tmpAllocRequest.sumFreeSize,
8069 &tmpAllocRequest.sumItemSize))
8073 *pAllocationRequest = tmpAllocRequest;
8074 pAllocationRequest->item = suballocIt;
8077 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
8079 *pAllocationRequest = tmpAllocRequest;
8080 pAllocationRequest->item = suballocIt;
8093 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
8094 uint32_t currentFrameIndex,
8095 uint32_t frameInUseCount,
8096 VmaAllocationRequest* pAllocationRequest)
8098 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
8100 while(pAllocationRequest->itemsToMakeLostCount > 0)
8102 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
8104 ++pAllocationRequest->item;
8106 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8107 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
8108 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8109 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8111 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
8112 --pAllocationRequest->itemsToMakeLostCount;
8120 VMA_HEAVY_ASSERT(Validate());
8121 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8122 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
8127 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8129 uint32_t lostAllocationCount = 0;
8130 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8131 it != m_Suballocations.end();
8134 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8135 it->hAllocation->CanBecomeLost() &&
8136 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8138 it = FreeSuballocation(it);
8139 ++lostAllocationCount;
8142 return lostAllocationCount;
8145 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8147 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8148 it != m_Suballocations.end();
8151 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
8153 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8155 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8156 return VK_ERROR_VALIDATION_FAILED_EXT;
8158 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8160 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8161 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously found allocation request: converts the chosen free
// suballocation into an occupied one and, if there is leftover space before
// or after the placed allocation, inserts new FREE suballocations for the
// padding and registers them in the size-sorted free list.
// NOTE(review): extraction-garbled block — line numbers fused into the code
// and several lines missing (e.g. the `hAllocation` parameter declaration
// and the paddingEnd/paddingBegin `if` guards). Restore canonical source.
8169 void VmaBlockMetadata_Generic::Alloc(
8170 const VmaAllocationRequest& request,
8171 VmaSuballocationType type,
8172 VkDeviceSize allocSize,
8175 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
8176 VMA_ASSERT(request.item != m_Suballocations.end());
8177 VmaSuballocation& suballoc = *request.item;
8179 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8181 VMA_ASSERT(request.offset >= suballoc.offset);
8182 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8183 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8184 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The chosen item leaves the free list and becomes the occupied entry.
8188 UnregisterFreeSuballocation(request.item);
8190 suballoc.offset = request.offset;
8191 suballoc.size = allocSize;
8192 suballoc.type = type;
8193 suballoc.hAllocation = hAllocation;
// Leftover space after the allocation becomes a new FREE suballocation.
8198 VmaSuballocation paddingSuballoc = {};
8199 paddingSuballoc.offset = request.offset + allocSize;
8200 paddingSuballoc.size = paddingEnd;
8201 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8202 VmaSuballocationList::iterator next = request.item;
8204 const VmaSuballocationList::iterator paddingEndItem =
8205 m_Suballocations.insert(next, paddingSuballoc);
8206 RegisterFreeSuballocation(paddingEndItem);
// Leftover space before the allocation likewise becomes FREE.
8212 VmaSuballocation paddingSuballoc = {};
8213 paddingSuballoc.offset = request.offset - paddingBegin;
8214 paddingSuballoc.size = paddingBegin;
8215 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8216 const VmaSuballocationList::iterator paddingBeginItem =
8217 m_Suballocations.insert(request.item, paddingSuballoc);
8218 RegisterFreeSuballocation(paddingBeginItem);
// Bookkeeping: one free range consumed, padding ranges added back.
8222 m_FreeCount = m_FreeCount - 1;
8223 if(paddingBegin > 0)
8231 m_SumFreeSize -= allocSize;
8234 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8236 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8237 suballocItem != m_Suballocations.end();
8240 VmaSuballocation& suballoc = *suballocItem;
8241 if(suballoc.hAllocation == allocation)
8243 FreeSuballocation(suballocItem);
8244 VMA_HEAVY_ASSERT(Validate());
8248 VMA_ASSERT(0 &&
"Not found!");
8251 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8253 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8254 suballocItem != m_Suballocations.end();
8257 VmaSuballocation& suballoc = *suballocItem;
8258 if(suballoc.offset == offset)
8260 FreeSuballocation(suballocItem);
8264 VMA_ASSERT(0 &&
"Not found!");
// Tries to resize an existing allocation in place to newSize. Shrinking
// grows/creates the following free range; growing succeeds only if the next
// suballocation is a free range large enough to absorb the difference.
// Returns false when in-place resize is impossible.
// NOTE(review): extraction-garbled block — line numbers fused into the code
// and several branch lines/returns are missing. Restore canonical source.
8267 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
8269 typedef VmaSuballocationList::iterator iter_type;
8270 for(iter_type suballocItem = m_Suballocations.begin();
8271 suballocItem != m_Suballocations.end();
8274 VmaSuballocation& suballoc = *suballocItem;
8275 if(suballoc.hAllocation == alloc)
8277 iter_type nextItem = suballocItem;
8281 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// Shrinking path: release sizeDiff bytes at the end of the allocation.
8284 if(newSize < alloc->GetSize())
8286 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8289 if(nextItem != m_Suballocations.end())
// If the next range is free, extend it backward to cover the released bytes.
8292 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8295 UnregisterFreeSuballocation(nextItem);
8296 nextItem->offset -= sizeDiff;
8297 nextItem->size += sizeDiff;
8298 RegisterFreeSuballocation(nextItem);
// Otherwise insert a brand-new free range after the allocation.
8304 VmaSuballocation newFreeSuballoc;
8305 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8306 newFreeSuballoc.offset = suballoc.offset + newSize;
8307 newFreeSuballoc.size = sizeDiff;
8308 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8309 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8310 RegisterFreeSuballocation(newFreeSuballocIt);
// Allocation was last in the block: append the free range at the end.
8319 VmaSuballocation newFreeSuballoc;
8320 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8321 newFreeSuballoc.offset = suballoc.offset + newSize;
8322 newFreeSuballoc.size = sizeDiff;
8323 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8324 m_Suballocations.push_back(newFreeSuballoc);
8326 iter_type newFreeSuballocIt = m_Suballocations.end();
8327 RegisterFreeSuballocation(--newFreeSuballocIt);
8332 suballoc.size = newSize;
8333 m_SumFreeSize += sizeDiff;
// Growing path: take sizeDiff bytes from the following free range.
8338 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8341 if(nextItem != m_Suballocations.end())
8344 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Next free range too small (including debug margin) — cannot grow in place.
8347 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
8353 if(nextItem->size > sizeDiff)
8356 UnregisterFreeSuballocation(nextItem);
8357 nextItem->offset += sizeDiff;
8358 nextItem->size -= sizeDiff;
8359 RegisterFreeSuballocation(nextItem);
// Next free range consumed entirely — remove it.
8365 UnregisterFreeSuballocation(nextItem);
8366 m_Suballocations.erase(nextItem);
8382 suballoc.size = newSize;
8383 m_SumFreeSize -= sizeDiff;
8390 VMA_ASSERT(0 &&
"Not found!");
8394 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8396 VkDeviceSize lastSize = 0;
8397 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8399 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8401 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8402 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8403 VMA_VALIDATE(it->size >= lastSize);
8404 lastSize = it->size;
// Core placement test: determines whether an allocation of allocSize /
// allocAlignment / allocType can be placed starting at suballocItem,
// honoring VMA_DEBUG_MARGIN and bufferImageGranularity. Writes the chosen
// offset to *pOffset; when canMakeOtherLost is set it may span multiple
// suballocations and counts how many existing allocations would have to be
// made lost (*itemsToMakeLostCount) plus the free/used byte sums used for
// cost comparison. Two largely parallel paths: with and without
// canMakeOtherLost.
// NOTE(review): extraction-garbled block — line numbers fused into the code
// and many lines (braces, `return false`/`return true` statements) are
// missing. Restore from the canonical source before compiling.
8409 bool VmaBlockMetadata_Generic::CheckAllocation(
8410 uint32_t currentFrameIndex,
8411 uint32_t frameInUseCount,
8412 VkDeviceSize bufferImageGranularity,
8413 VkDeviceSize allocSize,
8414 VkDeviceSize allocAlignment,
8415 VmaSuballocationType allocType,
8416 VmaSuballocationList::const_iterator suballocItem,
8417 bool canMakeOtherLost,
8418 VkDeviceSize* pOffset,
8419 size_t* itemsToMakeLostCount,
8420 VkDeviceSize* pSumFreeSize,
8421 VkDeviceSize* pSumItemSize)
const 8423 VMA_ASSERT(allocSize > 0);
8424 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8425 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8426 VMA_ASSERT(pOffset != VMA_NULL);
8428 *itemsToMakeLostCount = 0;
// Path 1: placement may consume occupied suballocations by making them lost.
8432 if(canMakeOtherLost)
8434 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8436 *pSumFreeSize = suballocItem->size;
// An occupied starting item only works if its allocation can become lost
// and is older than the frame-in-use window.
8440 if(suballocItem->hAllocation->CanBecomeLost() &&
8441 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8443 ++*itemsToMakeLostCount;
8444 *pSumItemSize = suballocItem->size;
8453 if(GetSize() - suballocItem->offset < allocSize)
// Candidate offset starts at the item, then margin and alignment are applied.
8459 *pOffset = suballocItem->offset;
8462 if(VMA_DEBUG_MARGIN > 0)
8464 *pOffset += VMA_DEBUG_MARGIN;
8468 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// bufferImageGranularity: if a previous suballocation of conflicting type
// shares the same "page", bump the offset up to a granularity boundary.
8472 if(bufferImageGranularity > 1)
8474 bool bufferImageGranularityConflict =
false;
8475 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8476 while(prevSuballocItem != m_Suballocations.cbegin())
8479 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8480 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8482 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8484 bufferImageGranularityConflict =
true;
8492 if(bufferImageGranularityConflict)
8494 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
8500 if(*pOffset >= suballocItem->offset + suballocItem->size)
8506 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8509 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8511 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8513 if(suballocItem->offset + totalSize > GetSize())
// Walk forward over as many suballocations as needed to cover totalSize,
// accumulating free bytes and lost-candidate bytes.
8520 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8521 if(totalSize > suballocItem->size)
8523 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8524 while(remainingSize > 0)
8527 if(lastSuballocItem == m_Suballocations.cend())
8531 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8533 *pSumFreeSize += lastSuballocItem->size;
8537 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8538 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8539 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8541 ++*itemsToMakeLostCount;
8542 *pSumItemSize += lastSuballocItem->size;
8549 remainingSize = (lastSuballocItem->size < remainingSize) ?
8550 remainingSize - lastSuballocItem->size : 0;
// Conflicting-type suballocations after the region on the same page must
// also become lost for this placement to be legal.
8556 if(bufferImageGranularity > 1)
8558 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8560 while(nextSuballocItem != m_Suballocations.cend())
8562 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8563 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8565 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8567 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8568 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8569 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8571 ++*itemsToMakeLostCount;
// Path 2: simple case — the starting item must itself be a free range
// large enough after margin/alignment adjustments.
8590 const VmaSuballocation& suballoc = *suballocItem;
8591 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8593 *pSumFreeSize = suballoc.size;
8596 if(suballoc.size < allocSize)
8602 *pOffset = suballoc.offset;
8605 if(VMA_DEBUG_MARGIN > 0)
8607 *pOffset += VMA_DEBUG_MARGIN;
8611 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
8615 if(bufferImageGranularity > 1)
8617 bool bufferImageGranularityConflict =
false;
8618 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8619 while(prevSuballocItem != m_Suballocations.cbegin())
8622 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8623 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8625 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8627 bufferImageGranularityConflict =
true;
8635 if(bufferImageGranularityConflict)
8637 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8642 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8645 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// The free range must fit paddingBegin + allocSize + end margin.
8648 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A conflicting-type neighbor after the region on the same page makes this
// placement illegal in the non-lost path.
8655 if(bufferImageGranularity > 1)
8657 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8659 while(nextSuballocItem != m_Suballocations.cend())
8661 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8662 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8664 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8683 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8685 VMA_ASSERT(item != m_Suballocations.end());
8686 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8688 VmaSuballocationList::iterator nextItem = item;
8690 VMA_ASSERT(nextItem != m_Suballocations.end());
8691 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8693 item->size += nextItem->size;
8695 m_Suballocations.erase(nextItem);
8698 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8701 VmaSuballocation& suballoc = *suballocItem;
8702 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8703 suballoc.hAllocation = VK_NULL_HANDLE;
8707 m_SumFreeSize += suballoc.size;
8710 bool mergeWithNext =
false;
8711 bool mergeWithPrev =
false;
8713 VmaSuballocationList::iterator nextItem = suballocItem;
8715 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8717 mergeWithNext =
true;
8720 VmaSuballocationList::iterator prevItem = suballocItem;
8721 if(suballocItem != m_Suballocations.begin())
8724 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8726 mergeWithPrev =
true;
8732 UnregisterFreeSuballocation(nextItem);
8733 MergeFreeWithNext(suballocItem);
8738 UnregisterFreeSuballocation(prevItem);
8739 MergeFreeWithNext(prevItem);
8740 RegisterFreeSuballocation(prevItem);
8745 RegisterFreeSuballocation(suballocItem);
8746 return suballocItem;
8750 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8752 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8753 VMA_ASSERT(item->size > 0);
8757 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8759 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8761 if(m_FreeSuballocationsBySize.empty())
8763 m_FreeSuballocationsBySize.push_back(item);
8767 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
8775 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8777 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8778 VMA_ASSERT(item->size > 0);
8782 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8784 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8786 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8787 m_FreeSuballocationsBySize.data(),
8788 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8790 VmaSuballocationItemSizeLess());
8791 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8792 index < m_FreeSuballocationsBySize.size();
8795 if(m_FreeSuballocationsBySize[index] == item)
8797 VmaVectorRemove(m_FreeSuballocationsBySize, index);
8800 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8802 VMA_ASSERT(0 &&
"Not found.");
8808 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8809 VkDeviceSize bufferImageGranularity,
8810 VmaSuballocationType& inOutPrevSuballocType)
const 8812 if(bufferImageGranularity == 1 || IsEmpty())
8817 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8818 bool typeConflictFound =
false;
8819 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8820 it != m_Suballocations.cend();
8823 const VmaSuballocationType suballocType = it->type;
8824 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8826 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8827 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8829 typeConflictFound =
true;
8831 inOutPrevSuballocType = suballocType;
8835 return typeConflictFound || minAlignment >= bufferImageGranularity;
8841 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8842 VmaBlockMetadata(hAllocator),
8844 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8845 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8846 m_1stVectorIndex(0),
8847 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8848 m_1stNullItemsBeginCount(0),
8849 m_1stNullItemsMiddleCount(0),
8850 m_2ndNullItemsCount(0)
8854 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
8858 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8860 VmaBlockMetadata::Init(size);
8861 m_SumFreeSize = size;
// Consistency check of the linear metadata: null-item counters match the
// vectors, offsets are monotonically increasing with debug margins, each
// suballocation agrees with its VmaAllocation handle, and m_SumFreeSize
// equals block size minus the sum of used bytes. Order of checks follows
// memory order: 2nd vector (ring-buffer mode) -> 1st vector -> 2nd vector
// (double-stack mode, iterated from the top down).
// NOTE(review): extraction-garbled block — line numbers fused into the code
// and several VMA_VALIDATE lines, braces and `return true;` are missing.
// Restore from the canonical source before compiling.
8864 bool VmaBlockMetadata_Linear::Validate()
const 8866 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8867 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8869 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8870 VMA_VALIDATE(!suballocations1st.empty() ||
8871 suballocations2nd.empty() ||
8872 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8874 if(!suballocations1st.empty())
// First non-null item of 1st vector must exist; last item must be non-null.
8877 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8879 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8881 if(!suballocations2nd.empty())
8884 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
8887 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8888 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8890 VkDeviceSize sumUsedSize = 0;
8891 const size_t suballoc1stCount = suballocations1st.size();
8892 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the low part of the block.
8894 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8896 const size_t suballoc2ndCount = suballocations2nd.size();
8897 size_t nullItem2ndCount = 0;
8898 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8900 const VmaSuballocation& suballoc = suballocations2nd[i];
8901 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8903 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8904 VMA_VALIDATE(suballoc.offset >= offset);
8908 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8909 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8910 sumUsedSize += suballoc.size;
8917 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8920 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must all be free placeholders.
8923 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8925 const VmaSuballocation& suballoc = suballocations1st[i];
8926 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8927 suballoc.hAllocation == VK_NULL_HANDLE);
8930 size_t nullItem1stCount = m_1stNullItemsBeginCount;
8932 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8934 const VmaSuballocation& suballoc = suballocations1st[i];
8935 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8937 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8938 VMA_VALIDATE(suballoc.offset >= offset);
8939 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8943 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8944 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8945 sumUsedSize += suballoc.size;
8952 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8954 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows downward from the end of the block,
// so iterate it from the top (highest index = lowest offset end).
8956 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8958 const size_t suballoc2ndCount = suballocations2nd.size();
8959 size_t nullItem2ndCount = 0;
8960 for(
size_t i = suballoc2ndCount; i--; )
8962 const VmaSuballocation& suballoc = suballocations2nd[i];
8963 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8965 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8966 VMA_VALIDATE(suballoc.offset >= offset);
8970 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8971 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8972 sumUsedSize += suballoc.size;
8979 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8982 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
8985 VMA_VALIDATE(offset <= GetSize());
8986 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
8991 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8993 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8994 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free region, depending on the
// 2nd-vector mode: with no 2nd vector it is the larger of the gap before the
// first allocation and the gap after the last one; in ring-buffer mode it is
// the gap between the end of the 2nd vector and the start of the 1st; in
// double-stack mode it is the gap between the top of the 1st stack and the
// bottom of the 2nd.
// NOTE(review): extraction-garbled block — line numbers fused into the code
// and several lines (empty-block early return, braces, default case) are
// missing. Restore from the canonical source before compiling.
8997 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8999 const VkDeviceSize size = GetSize();
9011 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9013 switch(m_2ndVectorMode)
9015 case SECOND_VECTOR_EMPTY:
9021 const size_t suballocations1stCount = suballocations1st.size();
9022 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
9023 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9024 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
// Max of: space before the first allocation, space after the last one.
9026 firstSuballoc.offset,
9027 size - (lastSuballoc.offset + lastSuballoc.size));
9031 case SECOND_VECTOR_RING_BUFFER:
9036 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9037 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
9038 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
9039 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
9043 case SECOND_VECTOR_DOUBLE_STACK:
9048 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9049 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9050 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9051 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with per-block statistics (allocation counts, used/unused
// byte totals, min/max range sizes) by walking all allocations in address
// order. Three phases: 2nd vector when in ring-buffer mode (low offsets),
// then the 1st vector, then the 2nd vector when in double-stack mode
// (high offsets, iterated from the back).
// NOTE(review): extraction-garbled block — line numbers fused into the code
// and the actual stat-accumulation statements are largely missing. Restore
// from the canonical source before compiling.
9061 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9063 const VkDeviceSize size = GetSize();
9064 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9065 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9066 const size_t suballoc1stCount = suballocations1st.size();
9067 const size_t suballoc2ndCount = suballocations2nd.size();
9078 VkDeviceSize lastOffset = 0;
9080 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9082 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9083 size_t nextAlloc2ndIndex = 0;
9084 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed placeholder) items to find the next real allocation.
9087 while(nextAlloc2ndIndex < suballoc2ndCount &&
9088 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9090 ++nextAlloc2ndIndex;
9094 if(nextAlloc2ndIndex < suballoc2ndCount)
9096 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9099 if(lastOffset < suballoc.offset)
9102 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9116 lastOffset = suballoc.offset + suballoc.size;
9117 ++nextAlloc2ndIndex;
9123 if(lastOffset < freeSpace2ndTo1stEnd)
9125 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9133 lastOffset = freeSpace2ndTo1stEnd;
9138 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9139 const VkDeviceSize freeSpace1stTo2ndEnd =
9140 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9141 while(lastOffset < freeSpace1stTo2ndEnd)
9144 while(nextAlloc1stIndex < suballoc1stCount &&
9145 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9147 ++nextAlloc1stIndex;
9151 if(nextAlloc1stIndex < suballoc1stCount)
9153 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9156 if(lastOffset < suballoc.offset)
9159 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9173 lastOffset = suballoc.offset + suballoc.size;
9174 ++nextAlloc1stIndex;
9180 if(lastOffset < freeSpace1stTo2ndEnd)
9182 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9190 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack mode: 2nd vector occupies the high end; iterate backward.
9194 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9196 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9197 while(lastOffset < size)
9200 while(nextAlloc2ndIndex != SIZE_MAX &&
9201 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9203 --nextAlloc2ndIndex;
9207 if(nextAlloc2ndIndex != SIZE_MAX)
9209 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9212 if(lastOffset < suballoc.offset)
9215 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9229 lastOffset = suballoc.offset + suballoc.size;
9230 --nextAlloc2ndIndex;
9236 if(lastOffset < size)
9238 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's statistics into inoutStats using the same
// three-phase address-order walk as CalcAllocationStatInfo: 2nd vector in
// ring-buffer mode, then the 1st vector, then the 2nd vector in
// double-stack mode.
// NOTE(review): extraction-garbled block — line numbers fused into the code
// and the accumulation statements themselves are largely missing. Restore
// from the canonical source before compiling.
9254 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9256 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9257 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9258 const VkDeviceSize size = GetSize();
9259 const size_t suballoc1stCount = suballocations1st.size();
9260 const size_t suballoc2ndCount = suballocations2nd.size();
9262 inoutStats.
size += size;
9264 VkDeviceSize lastOffset = 0;
9266 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9268 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): starting the 2nd-vector index at m_1stNullItemsBeginCount
// looks suspicious (sibling functions start at 0) — verify against the
// canonical source.
9269 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9270 while(lastOffset < freeSpace2ndTo1stEnd)
9273 while(nextAlloc2ndIndex < suballoc2ndCount &&
9274 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9276 ++nextAlloc2ndIndex;
9280 if(nextAlloc2ndIndex < suballoc2ndCount)
9282 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9285 if(lastOffset < suballoc.offset)
9288 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9299 lastOffset = suballoc.offset + suballoc.size;
9300 ++nextAlloc2ndIndex;
9305 if(lastOffset < freeSpace2ndTo1stEnd)
9308 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9315 lastOffset = freeSpace2ndTo1stEnd;
9320 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9321 const VkDeviceSize freeSpace1stTo2ndEnd =
9322 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9323 while(lastOffset < freeSpace1stTo2ndEnd)
9326 while(nextAlloc1stIndex < suballoc1stCount &&
9327 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9329 ++nextAlloc1stIndex;
9333 if(nextAlloc1stIndex < suballoc1stCount)
9335 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9338 if(lastOffset < suballoc.offset)
9341 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9352 lastOffset = suballoc.offset + suballoc.size;
9353 ++nextAlloc1stIndex;
9358 if(lastOffset < freeSpace1stTo2ndEnd)
9361 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9368 lastOffset = freeSpace1stTo2ndEnd;
9372 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9374 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9375 while(lastOffset < size)
9378 while(nextAlloc2ndIndex != SIZE_MAX &&
9379 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9381 --nextAlloc2ndIndex;
9385 if(nextAlloc2ndIndex != SIZE_MAX)
9387 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9390 if(lastOffset < suballoc.offset)
9393 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9404 lastOffset = suballoc.offset + suballoc.size;
9405 --nextAlloc2ndIndex;
9410 if(lastOffset < size)
9413 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Emits a detailed JSON map of this linear block's layout.
// NOTE(review): this excerpt appears line-sampled; braces and some interior
// statements are not visible here, so comments describe only the visible logic.
// The function makes two passes over the suballocation vectors:
//   pass 1 — count allocations/unused ranges and total used bytes;
//   pass 2 — replay the same traversal, emitting JSON entries via the
//            PrintDetailedMap_* helpers.
9426 #if VMA_STATS_STRING_ENABLED 9427 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9429 const VkDeviceSize size = GetSize();
9430 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9431 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9432 const size_t suballoc1stCount = suballocations1st.size();
9433 const size_t suballoc2ndCount = suballocations2nd.size();
// Pass-1 accumulators.
9437 size_t unusedRangeCount = 0;
9438 VkDeviceSize usedBytes = 0;
9440 VkDeviceSize lastOffset = 0;
9442 size_t alloc2ndCount = 0;
// Ring-buffer mode: 2nd vector occupies the space before the 1st vector's
// first live item, so walk it first up to that boundary.
9443 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9445 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9446 size_t nextAlloc2ndIndex = 0;
9447 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed/lost) items.
9450 while(nextAlloc2ndIndex < suballoc2ndCount &&
9451 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9453 ++nextAlloc2ndIndex;
9457 if(nextAlloc2ndIndex < suballoc2ndCount)
9459 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
9462 if(lastOffset < suballoc.offset)
9471 usedBytes += suballoc.size;
9474 lastOffset = suballoc.offset + suballoc.size;
9475 ++nextAlloc2ndIndex;
// Trailing free space up to the 2nd->1st boundary.
9480 if(lastOffset < freeSpace2ndTo1stEnd)
9487 lastOffset = freeSpace2ndTo1stEnd;
// Walk the 1st vector (skipping its leading null items).
9492 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9493 size_t alloc1stCount = 0;
// In double-stack mode the 1st vector's free space ends where the 2nd
// (upper) stack begins; otherwise it extends to the block end.
9494 const VkDeviceSize freeSpace1stTo2ndEnd =
9495 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9496 while(lastOffset < freeSpace1stTo2ndEnd)
9499 while(nextAlloc1stIndex < suballoc1stCount &&
9500 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9502 ++nextAlloc1stIndex;
9506 if(nextAlloc1stIndex < suballoc1stCount)
9508 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9511 if(lastOffset < suballoc.offset)
9520 usedBytes += suballoc.size;
9523 lastOffset = suballoc.offset + suballoc.size;
9524 ++nextAlloc1stIndex;
9529 if(lastOffset < size)
9536 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack mode: the 2nd vector grows downward from the block end,
// so iterate it back-to-front (indices decreasing toward SIZE_MAX).
9540 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9542 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9543 while(lastOffset < size)
9546 while(nextAlloc2ndIndex != SIZE_MAX &&
9547 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9549 --nextAlloc2ndIndex;
9553 if(nextAlloc2ndIndex != SIZE_MAX)
9555 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9558 if(lastOffset < suballoc.offset)
9567 usedBytes += suballoc.size;
9570 lastOffset = suballoc.offset + suballoc.size;
9571 --nextAlloc2ndIndex;
9576 if(lastOffset < size)
// Pass 1 done: open the JSON object with the computed totals.
9588 const VkDeviceSize unusedBytes = size - usedBytes;
9589 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// Pass 2: identical traversal order, but emitting JSON entries.
9594 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9596 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9597 size_t nextAlloc2ndIndex = 0;
9598 while(lastOffset < freeSpace2ndTo1stEnd)
9601 while(nextAlloc2ndIndex < suballoc2ndCount &&
9602 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9604 ++nextAlloc2ndIndex;
9608 if(nextAlloc2ndIndex < suballoc2ndCount)
9610 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9613 if(lastOffset < suballoc.offset)
9616 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9617 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9622 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9625 lastOffset = suballoc.offset + suballoc.size;
9626 ++nextAlloc2ndIndex;
9631 if(lastOffset < freeSpace2ndTo1stEnd)
9634 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9635 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9639 lastOffset = freeSpace2ndTo1stEnd;
9644 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9645 while(lastOffset < freeSpace1stTo2ndEnd)
9648 while(nextAlloc1stIndex < suballoc1stCount &&
9649 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9651 ++nextAlloc1stIndex;
9655 if(nextAlloc1stIndex < suballoc1stCount)
9657 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9660 if(lastOffset < suballoc.offset)
9663 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9664 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9669 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9672 lastOffset = suballoc.offset + suballoc.size;
9673 ++nextAlloc1stIndex;
9678 if(lastOffset < freeSpace1stTo2ndEnd)
9681 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9682 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9686 lastOffset = freeSpace1stTo2ndEnd;
9690 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9692 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9693 while(lastOffset < size)
9696 while(nextAlloc2ndIndex != SIZE_MAX &&
9697 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9699 --nextAlloc2ndIndex;
9703 if(nextAlloc2ndIndex != SIZE_MAX)
9705 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9708 if(lastOffset < suballoc.offset)
9711 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9712 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9717 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9720 lastOffset = suballoc.offset + suballoc.size;
9721 --nextAlloc2ndIndex;
9726 if(lastOffset < size)
9729 const VkDeviceSize unusedRangeSize = size - lastOffset;
9730 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
// Close the JSON object.
9739 PrintDetailedMap_End(json);
// Entry point for allocation requests in the linear algorithm.
// Validates basic preconditions, then dispatches to the upper-address
// (double-stack top) or lower-address path based on `upperAddress`
// (parameter declared on a line not visible in this excerpt).
9741 #endif // #if VMA_STATS_STRING_ENABLED 9743 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9744 uint32_t currentFrameIndex,
9745 uint32_t frameInUseCount,
9746 VkDeviceSize bufferImageGranularity,
9747 VkDeviceSize allocSize,
9748 VkDeviceSize allocAlignment,
9750 VmaSuballocationType allocType,
9751 bool canMakeOtherLost,
9753 VmaAllocationRequest* pAllocationRequest)
// Preconditions: non-zero size, a concrete suballocation type, and a
// valid output pointer.
9755 VMA_ASSERT(allocSize > 0);
9756 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9757 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9758 VMA_HEAVY_ASSERT(Validate());
// Forward all parameters unchanged to the chosen strategy.
9759 return upperAddress ?
9760 CreateAllocationRequest_UpperAddress(
9761 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9762 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
9763 CreateAllocationRequest_LowerAddress(
9764 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9765 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block (double-stack mode):
// the 2nd suballocation vector grows downward from the block end.
// On success fills *pAllocationRequest with an UpperAddress request.
// NOTE(review): excerpt is line-sampled; failure `return false` paths and
// some braces are not visible here.
9768 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9769 uint32_t currentFrameIndex,
9770 uint32_t frameInUseCount,
9771 VkDeviceSize bufferImageGranularity,
9772 VkDeviceSize allocSize,
9773 VkDeviceSize allocAlignment,
9774 VmaSuballocationType allocType,
9775 bool canMakeOtherLost,
9777 VmaAllocationRequest* pAllocationRequest)
9779 const VkDeviceSize size = GetSize();
9780 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9781 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocations are incompatible with ring-buffer usage.
9783 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9785 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
// Cannot possibly fit.
9790 if(allocSize > size)
// Start from the block end (or just below the lowest 2nd-stack item).
9794 VkDeviceSize resultBaseOffset = size - allocSize;
9795 if(!suballocations2nd.empty())
9797 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9798 resultBaseOffset = lastSuballoc.offset - allocSize;
// Underflow guard: allocation would not fit below the last 2nd item.
9799 if(allocSize > lastSuballoc.offset)
9806 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve the debug margin below the allocation (aligning downward).
9809 if(VMA_DEBUG_MARGIN > 0)
9811 if(resultOffset < VMA_DEBUG_MARGIN)
9815 resultOffset -= VMA_DEBUG_MARGIN;
// Apply the requested alignment (downward, since we grow from the top).
9819 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Check bufferImageGranularity conflicts against 2nd-stack neighbors above.
9823 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9825 bool bufferImageGranularityConflict =
false;
9826 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9828 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9829 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9831 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9833 bufferImageGranularityConflict =
true;
// On conflict, push further down to a granularity boundary.
9841 if(bufferImageGranularityConflict)
9843 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// There must still be room above the end of the 1st (bottom) stack.
9848 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9849 suballocations1st.back().offset + suballocations1st.back().size :
9851 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also verify granularity against 1st-stack items below the new offset.
9855 if(bufferImageGranularity > 1)
9857 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9859 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9860 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9862 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request. Upper-address requests never make others lost.
9876 pAllocationRequest->offset = resultOffset;
9877 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9878 pAllocationRequest->sumItemSize = 0;
9880 pAllocationRequest->itemsToMakeLostCount = 0;
9881 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place an allocation at the lower address side of the block.
// Two sub-cases:
//  1) Append at the end of the 1st vector (empty or double-stack 2nd mode).
//  2) Append at the end of the 2nd vector, turning the block into a ring
//     buffer — optionally making existing 1st-vector allocations "lost"
//     (canMakeOtherLost) to wrap around.
// NOTE(review): excerpt is line-sampled; `return false`/`return true`
// statements and some braces are not visible here.
9888 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
9889 uint32_t currentFrameIndex,
9890 uint32_t frameInUseCount,
9891 VkDeviceSize bufferImageGranularity,
9892 VkDeviceSize allocSize,
9893 VkDeviceSize allocAlignment,
9894 VmaSuballocationType allocType,
9895 bool canMakeOtherLost,
9897 VmaAllocationRequest* pAllocationRequest)
9899 const VkDeviceSize size = GetSize();
9900 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9901 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Case 1: try to allocate at the end of the 1st vector.
9903 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9907 VkDeviceSize resultBaseOffset = 0;
9908 if(!suballocations1st.empty())
9910 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9911 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9915 VkDeviceSize resultOffset = resultBaseOffset;
// Leave the debug margin before the allocation.
9918 if(VMA_DEBUG_MARGIN > 0)
9920 resultOffset += VMA_DEBUG_MARGIN;
9924 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity check against the previous (1st-vector) neighbors.
9928 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9930 bool bufferImageGranularityConflict =
false;
9931 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9933 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9934 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9936 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9938 bufferImageGranularityConflict =
true;
9946 if(bufferImageGranularityConflict)
9948 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends where the (downward-growing) 2nd stack begins, or at
// the block end when there is no 2nd stack.
9952 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9953 suballocations2nd.back().offset : size;
9956 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Granularity check against 2nd-stack items above the candidate range.
9960 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9962 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9964 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9965 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9967 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: request appended at end of 1st vector.
9981 pAllocationRequest->offset = resultOffset;
9982 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9983 pAllocationRequest->sumItemSize = 0;
9985 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
9986 pAllocationRequest->itemsToMakeLostCount = 0;
// Case 2: wrap around — allocate at the end of the 2nd vector
// (ring-buffer mode), below the first live 1st-vector item.
9993 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9995 VMA_ASSERT(!suballocations1st.empty());
9997 VkDeviceSize resultBaseOffset = 0;
9998 if(!suballocations2nd.empty())
10000 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10001 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10005 VkDeviceSize resultOffset = resultBaseOffset;
10008 if(VMA_DEBUG_MARGIN > 0)
10010 resultOffset += VMA_DEBUG_MARGIN;
10014 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity check against previous 2nd-vector neighbors.
10018 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10020 bool bufferImageGranularityConflict =
false;
10021 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
10023 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
10024 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10026 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10028 bufferImageGranularityConflict =
true;
10036 if(bufferImageGranularityConflict)
10038 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10042 pAllocationRequest->itemsToMakeLostCount = 0;
10043 pAllocationRequest->sumItemSize = 0;
10044 size_t index1st = m_1stNullItemsBeginCount;
// Optionally consume overlapping 1st-vector allocations by marking them
// as candidates to become "lost" (if old enough per frameInUseCount).
10046 if(canMakeOtherLost)
10048 while(index1st < suballocations1st.size() &&
10049 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10052 const VmaSuballocation& suballoc = suballocations1st[index1st];
10053 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10059 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10060 if(suballoc.hAllocation->CanBecomeLost() &&
10061 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10063 ++pAllocationRequest->itemsToMakeLostCount;
10064 pAllocationRequest->sumItemSize += suballoc.size;
// Items sharing a granularity page with the new allocation must also
// be lost (or the request fails).
10076 if(bufferImageGranularity > 1)
10078 while(index1st < suballocations1st.size())
10080 const VmaSuballocation& suballoc = suballocations1st[index1st];
10081 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10083 if(suballoc.hAllocation != VK_NULL_HANDLE)
10086 if(suballoc.hAllocation->CanBecomeLost() &&
10087 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10089 ++pAllocationRequest->itemsToMakeLostCount;
10090 pAllocationRequest->sumItemSize += suballoc.size;
// Special case not supported: consumed the whole 1st vector yet still
// overflowing the block end.
10108 if(index1st == suballocations1st.size() &&
10109 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10112 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// Fits either in the trailing space or before the next surviving 1st item.
10117 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10118 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
// Final granularity check against remaining 1st-vector items.
10122 if(bufferImageGranularity > 1)
10124 for(
size_t nextSuballocIndex = index1st;
10125 nextSuballocIndex < suballocations1st.size();
10126 nextSuballocIndex++)
10128 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10129 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10131 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: request appended at end of 2nd vector (ring-buffer wrap).
10145 pAllocationRequest->offset = resultOffset;
10146 pAllocationRequest->sumFreeSize =
10147 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10149 - pAllocationRequest->sumItemSize;
10150 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Actually marks as "lost" the allocations that a previously created
// allocation request said must be sacrificed (itemsToMakeLostCount).
// Walks the 1st vector from its first live item, continuing into the 2nd
// vector in ring-buffer mode, until enough items have been made lost.
10159 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10160 uint32_t currentFrameIndex,
10161 uint32_t frameInUseCount,
10162 VmaAllocationRequest* pAllocationRequest)
// Nothing to do — trivially successful.
10164 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Losing items is only meaningful outside double-stack mode.
10169 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
;
10172 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10173 size_t index = m_1stNullItemsBeginCount;
10174 size_t madeLostCount = 0;
10175 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// Exhausted the 1st vector: switch to the 2nd in ring-buffer mode.
10177 if(index == suballocations->size())
10181 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10183 suballocations = &AccessSuballocations2nd();
10187 VMA_ASSERT(!suballocations->empty());
10189 VmaSuballocation& suballoc = (*suballocations)[index];
10190 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10192 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10193 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10194 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the item into a free (null) suballocation and update
// bookkeeping for whichever vector it lives in.
10196 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10197 suballoc.hAllocation = VK_NULL_HANDLE;
10198 m_SumFreeSize += suballoc.size;
10199 if(suballocations == &AccessSuballocations1st())
10201 ++m_1stNullItemsMiddleCount;
10205 ++m_2ndNullItemsCount;
// Compact/normalize the vectors after freeing.
10217 CleanupAfterFree();
// Makes lost every allocation in this block that can become lost and is
// older than frameInUseCount frames. Returns the number of allocations lost.
10223 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10225 uint32_t lostAllocationCount = 0;
// Sweep the 1st vector, starting past its leading null items.
10227 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10228 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10230 VmaSuballocation& suballoc = suballocations1st[i];
10231 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10232 suballoc.hAllocation->CanBecomeLost() &&
10233 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10235 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10236 suballoc.hAllocation = VK_NULL_HANDLE;
10237 ++m_1stNullItemsMiddleCount;
10238 m_SumFreeSize += suballoc.size;
10239 ++lostAllocationCount;
// Sweep the whole 2nd vector.
10243 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10244 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10246 VmaSuballocation& suballoc = suballocations2nd[i];
10247 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10248 suballoc.hAllocation->CanBecomeLost() &&
10249 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10251 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10252 suballoc.hAllocation = VK_NULL_HANDLE;
10253 ++m_2ndNullItemsCount;
10254 m_SumFreeSize += suballoc.size;
10255 ++lostAllocationCount;
// Normalize vectors only if something was actually freed.
10259 if(lostAllocationCount)
10261 CleanupAfterFree();
10264 return lostAllocationCount;
// Verifies the magic-value guard bytes written around every live
// suballocation (before offset - VMA_DEBUG_MARGIN and after offset + size).
// Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard.
10267 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
// Check 1st vector items (skipping leading null entries).
10269 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10270 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10272 const VmaSuballocation& suballoc = suballocations1st[i];
10273 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10275 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10277 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10278 return VK_ERROR_VALIDATION_FAILED_EXT;
10280 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10282 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10283 return VK_ERROR_VALIDATION_FAILED_EXT;
// Check all 2nd vector items the same way.
10288 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10289 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10291 const VmaSuballocation& suballoc = suballocations2nd[i];
10292 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10294 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10296 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10297 return VK_ERROR_VALIDATION_FAILED_EXT;
10299 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10301 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10302 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously validated allocation request: appends the new
// suballocation to the appropriate vector per the request type and updates
// m_2ndVectorMode / m_SumFreeSize accordingly.
10310 void VmaBlockMetadata_Linear::Alloc(
10311 const VmaAllocationRequest& request,
10312 VmaSuballocationType type,
10313 VkDeviceSize allocSize,
10316 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10318 switch(request.type)
// Top-of-block allocation: push onto 2nd vector, enter double-stack mode.
10320 case VmaAllocationRequestType::UpperAddress:
10322 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10323 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10324 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10325 suballocations2nd.push_back(newSuballoc);
10326 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Append after the last 1st-vector item.
10329 case VmaAllocationRequestType::EndOf1st:
10331 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// New item must come after all existing 1st items and fit in the block.
10333 VMA_ASSERT(suballocations1st.empty() ||
10334 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10336 VMA_ASSERT(request.offset + allocSize <= GetSize());
10338 suballocations1st.push_back(newSuballoc);
// Append to the 2nd vector (ring-buffer wrap-around).
10341 case VmaAllocationRequestType::EndOf2nd:
10343 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// Must land strictly below the first live 1st-vector item.
10345 VMA_ASSERT(!suballocations1st.empty() &&
10346 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10347 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10349 switch(m_2ndVectorMode)
// First wrap-around switches the block into ring-buffer mode.
10351 case SECOND_VECTOR_EMPTY:
10353 VMA_ASSERT(suballocations2nd.empty());
10354 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10356 case SECOND_VECTOR_RING_BUFFER:
10358 VMA_ASSERT(!suballocations2nd.empty());
10360 case SECOND_VECTOR_DOUBLE_STACK:
10361 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10367 suballocations2nd.push_back(newSuballoc);
10371 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
// Account for the consumed space.
10374 m_SumFreeSize -= newSuballoc.size;
// Frees an allocation by delegating to FreeAtOffset with its stored offset.
10377 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10379 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at the given offset. Fast paths handle the
// common linear-allocator cases (front of 1st vector, back of 2nd or 1st
// vector); otherwise a binary search over the sorted vectors locates the
// item, which is then marked null. Asserts if the offset is not found.
10382 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10384 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10385 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10387 if(!suballocations1st.empty())
// Fast path: freeing the oldest (first live) 1st-vector item just
// advances the null-items-begin counter.
10390 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10391 if(firstSuballoc.offset == offset)
10393 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10394 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10395 m_SumFreeSize += firstSuballoc.size;
10396 ++m_1stNullItemsBeginCount;
10397 CleanupAfterFree();
// Fast path: freeing the newest item at the back of the 2nd vector.
10403 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10404 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10406 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10407 if(lastSuballoc.offset == offset)
10409 m_SumFreeSize += lastSuballoc.size;
10410 suballocations2nd.pop_back();
10411 CleanupAfterFree();
// Fast path: freeing the newest item at the back of the 1st vector.
10416 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10418 VmaSuballocation& lastSuballoc = suballocations1st.back();
10419 if(lastSuballoc.offset == offset)
10421 m_SumFreeSize += lastSuballoc.size;
10422 suballocations1st.pop_back();
10423 CleanupAfterFree();
// Slow path: binary search the 1st vector (sorted ascending by offset).
10430 VmaSuballocation refSuballoc;
10431 refSuballoc.offset = offset;
10433 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
10434 suballocations1st.begin() + m_1stNullItemsBeginCount,
10435 suballocations1st.end(),
10437 if(it != suballocations1st.end())
10439 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10440 it->hAllocation = VK_NULL_HANDLE;
10441 ++m_1stNullItemsMiddleCount;
10442 m_SumFreeSize += it->size;
10443 CleanupAfterFree();
// Slow path: binary search the 2nd vector. Its sort order depends on
// mode: ascending offsets in ring-buffer mode, descending in double-stack.
10448 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
10451 VmaSuballocation refSuballoc;
10452 refSuballoc.offset = offset;
10454 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10455 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
10456 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
10457 if(it != suballocations2nd.end())
10459 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10460 it->hAllocation = VK_NULL_HANDLE;
10461 ++m_2ndNullItemsCount;
10462 m_SumFreeSize += it->size;
10463 CleanupAfterFree();
// Offset matched nothing — caller passed a bogus allocation.
10468 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic: compact the 1st vector when it is large (>32 items) and null
// (freed) items make up a significant fraction of it — specifically when
// nullItems * 2 >= liveItems * 3, i.e. nulls outnumber lives by 3:2.
10471 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10473 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10474 const size_t suballocCount = AccessSuballocations1st().size();
10475 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Normalizes internal state after any free: trims null items from the edges
// of both vectors, optionally compacts the 1st vector, and — when the 1st
// vector becomes empty in ring-buffer mode — promotes the 2nd vector to be
// the new 1st vector by flipping m_1stVectorIndex.
10478 void VmaBlockMetadata_Linear::CleanupAfterFree()
10480 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10481 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Whole block free (condition line not visible here): reset everything.
10485 suballocations1st.clear();
10486 suballocations2nd.clear();
10487 m_1stNullItemsBeginCount = 0;
10488 m_1stNullItemsMiddleCount = 0;
10489 m_2ndNullItemsCount = 0;
10490 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10494 const size_t suballoc1stCount = suballocations1st.size();
10495 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10496 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Absorb null items at the front of the 1st vector into the begin count.
10499 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10500 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10502 ++m_1stNullItemsBeginCount;
10503 --m_1stNullItemsMiddleCount;
// Pop null items off the back of the 1st vector.
10507 while(m_1stNullItemsMiddleCount > 0 &&
10508 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10510 --m_1stNullItemsMiddleCount;
10511 suballocations1st.pop_back();
// Pop null items off the back of the 2nd vector.
10515 while(m_2ndNullItemsCount > 0 &&
10516 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10518 --m_2ndNullItemsCount;
10519 suballocations2nd.pop_back();
// Remove null items from the front of the 2nd vector.
10523 while(m_2ndNullItemsCount > 0 &&
10524 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10526 --m_2ndNullItemsCount;
10527 suballocations2nd.remove(0);
// Compact the 1st vector in place when the null-item ratio is too high.
10530 if(ShouldCompact1st())
10532 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10533 size_t srcIndex = m_1stNullItemsBeginCount;
10534 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10536 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10540 if(dstIndex != srcIndex)
10542 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10546 suballocations1st.resize(nonNullItemCount);
10547 m_1stNullItemsBeginCount = 0;
10548 m_1stNullItemsMiddleCount = 0;
// 2nd vector fully drained: leave ring-buffer / double-stack mode.
10552 if(suballocations2nd.empty())
10554 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector fully drained of live items.
10558 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10560 suballocations1st.clear();
10561 m_1stNullItemsBeginCount = 0;
// Ring-buffer mode with an empty 1st vector: swap roles — the 2nd vector
// becomes the new 1st (m_1stVectorIndex ^= 1) and its null-item counts
// are transferred to the 1st-vector bookkeeping.
10563 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10566 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10567 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10568 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10569 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10571 ++m_1stNullItemsBeginCount;
10572 --m_1stNullItemsMiddleCount;
10574 m_2ndNullItemsCount = 0;
10575 m_1stVectorIndex ^= 1;
10580 VMA_HEAVY_ASSERT(Validate());
// Constructor: initializes base-class state and zeroes the per-level
// free-list array. Tree construction is deferred to Init().
10587 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10588 VmaBlockMetadata(hAllocator),
10590 m_AllocationCount(0),
10594 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively frees the whole buddy tree starting at the root.
10597 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10599 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of the given size.
// Usable size is rounded down to a power of two (the remainder is
// "unusable"); level count grows while nodes stay >= MIN_NODE_SIZE.
// Creates a single free root node covering the whole usable range.
10602 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10604 VmaBlockMetadata::Init(size);
10606 m_UsableSize = VmaPrevPow2(size);
10607 m_SumFreeSize = m_UsableSize;
10611 while(m_LevelCount < MAX_LEVELS &&
10612 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
10617 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10618 rootNode->offset = 0;
10619 rootNode->type = Node::TYPE_FREE;
10620 rootNode->parent = VMA_NULL;
10621 rootNode->buddy = VMA_NULL;
// Root lives at level 0 and is immediately available for allocation.
10624 AddToFreeListFront(0, rootNode);
// Debug validation: recursively checks the node tree, then cross-checks the
// aggregate counters and verifies every per-level free list is a well-formed
// doubly linked list of free nodes. Levels beyond m_LevelCount must be empty.
10627 bool VmaBlockMetadata_Buddy::Validate()
const 10630 ValidationContext ctx;
10631 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10633 VMA_VALIDATE(
false &&
"ValidateNode failed.");
// Tree-walk results must agree with the cached counters.
10635 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10636 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
10639 for(uint32_t level = 0; level < m_LevelCount; ++level)
// List head has no predecessor.
10641 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10642 m_FreeList[level].front->free.prev == VMA_NULL);
10644 for(Node* node = m_FreeList[level].front;
10646 node = node->free.next)
10648 VMA_VALIDATE(node->type == Node::TYPE_FREE);
// Tail must match the cached back pointer; interior links must be
// mutually consistent.
10650 if(node->free.next == VMA_NULL)
10652 VMA_VALIDATE(m_FreeList[level].back == node);
10656 VMA_VALIDATE(node->free.next->free.prev == node);
// Unused levels must have empty free lists.
10662 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10664 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Largest free contiguous range = node size of the shallowest level whose
// free list is non-empty (levels are ordered largest-node first).
10670 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10672 for(uint32_t level = 0; level < m_LevelCount; ++level)
10674 if(m_FreeList[level].front != VMA_NULL)
10676 return LevelToNodeSize(level);
// Fills outInfo with allocation statistics by walking the node tree.
// The tail of the block beyond m_UsableSize (GetUnusableSize()) is
// accounted for separately when non-zero.
10682 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10684 const VkDeviceSize unusableSize = GetUnusableSize();
10695 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10697 if(unusableSize > 0)
// Accumulates this block's totals into pool-level statistics. Unusable
// trailing space (size - usable size) is counted as unused.
10706 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10708 const VkDeviceSize unusableSize = GetUnusableSize();
10710 inoutStats.
size += GetSize();
10711 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10716 if(unusableSize > 0)
// Emits a detailed JSON map for the buddy block: header stats, a recursive
// dump of the node tree, and the unusable trailing range (if any).
10723 #if VMA_STATS_STRING_ENABLED 10725 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10729 CalcAllocationStatInfo(stat);
10731 PrintDetailedMap_Begin(
10737 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10739 const VkDeviceSize unusableSize = GetUnusableSize();
10740 if(unusableSize > 0)
10742 PrintDetailedMap_UnusedRange(json,
10747 PrintDetailedMap_End(json);
// Buddy-algorithm allocation request. Upper-address allocation and
// "make other lost" are unsupported here. The requested size is rounded up
// per bufferImageGranularity for image-like types, mapped to a tree level,
// then the free lists from that level upward (larger nodes) are scanned for
// a suitably aligned free node. The found level is stashed in customData
// for Alloc() to split down to the target level.
10750 #endif // #if VMA_STATS_STRING_ENABLED 10752 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10753 uint32_t currentFrameIndex,
10754 uint32_t frameInUseCount,
10755 VkDeviceSize bufferImageGranularity,
10756 VkDeviceSize allocSize,
10757 VkDeviceSize allocAlignment,
10759 VmaSuballocationType allocType,
10760 bool canMakeOtherLost,
10762 VmaAllocationRequest* pAllocationRequest)
10764 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservatively pad size/alignment for types whose granularity class is
// unknown or image-optimal, to avoid bufferImageGranularity conflicts.
10768 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10769 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10770 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10772 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10773 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10776 if(allocSize > m_UsableSize)
10781 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
// Scan from targetLevel upward toward level 0 (larger nodes).
10782 for(uint32_t level = targetLevel + 1; level--; )
10784 for(Node* freeNode = m_FreeList[level].front;
10785 freeNode != VMA_NULL;
10786 freeNode = freeNode->free.next)
// Buddy node offsets are naturally power-of-two aligned; just check
// the caller's alignment requirement.
10788 if(freeNode->offset % allocAlignment == 0)
10790 pAllocationRequest->type = VmaAllocationRequestType::Normal;
10791 pAllocationRequest->offset = freeNode->offset;
10792 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10793 pAllocationRequest->sumItemSize = 0;
10794 pAllocationRequest->itemsToMakeLostCount = 0;
10795 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm never marks items to make lost, so this succeeds
// exactly when the request carries no items to lose.
10804 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10805 uint32_t currentFrameIndex,
10806 uint32_t frameInUseCount,
10807 VmaAllocationRequest* pAllocationRequest)
10813 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost allocations are not supported by the buddy algorithm; body is not
// visible in this excerpt (presumably returns 0 — TODO confirm).
10816 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation: locates the free node chosen by
// CreateAllocationRequest (level in request.customData, matched by offset),
// splits it repeatedly into buddy pairs until reaching the target level,
// then converts the final node to TYPE_ALLOCATION.
10825 void VmaBlockMetadata_Buddy::Alloc(
10826 const VmaAllocationRequest& request,
10827 VmaSuballocationType type,
10828 VkDeviceSize allocSize,
10831 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
10833 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10834 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the requested node in that level's free list by offset.
10836 Node* currNode = m_FreeList[currLevel].front;
10837 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10838 while(currNode->offset != request.offset)
10840 currNode = currNode->free.next;
10841 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split oversized nodes down to the target level.
10845 while(currLevel < targetLevel)
// The node being split stops being free.
10849 RemoveFromFreeList(currLevel, currNode);
10851 const uint32_t childrenLevel = currLevel + 1;
// Create the buddy pair covering the two halves of currNode.
10854 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10855 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10857 leftChild->offset = currNode->offset;
10858 leftChild->type = Node::TYPE_FREE;
10859 leftChild->parent = currNode;
10860 leftChild->buddy = rightChild;
10862 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10863 rightChild->type = Node::TYPE_FREE;
10864 rightChild->parent = currNode;
10865 rightChild->buddy = leftChild;
// Parent becomes a split node referencing its left child.
10868 currNode->type = Node::TYPE_SPLIT;
10869 currNode->split.leftChild = leftChild;
// Push right then left so the left child ends up at the list front.
10872 AddToFreeListFront(childrenLevel, rightChild);
10873 AddToFreeListFront(childrenLevel, leftChild);
10878 currNode = m_FreeList[currLevel].front;
// We must have arrived at a free node of exactly the target level.
10887 VMA_ASSERT(currLevel == targetLevel &&
10888 currNode != VMA_NULL &&
10889 currNode->type == Node::TYPE_FREE);
10890 RemoveFromFreeList(currLevel, currNode);
10893 currNode->type = Node::TYPE_ALLOCATION;
10894 currNode->allocation.alloc = hAllocation;
10896 ++m_AllocationCount;
10898 m_SumFreeSize -= allocSize;
// Recursively deletes a subtree: for a split node, delete the right child
// (reached via the left child's buddy pointer) then the left child, then
// free the node itself.
10901 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10903 if(node->type == Node::TYPE_SPLIT)
10905 DeleteNode(node->split.leftChild->buddy);
10906 DeleteNode(node->split.leftChild);
10909 vma_delete(GetAllocationCallbacks(), node);
// Recursive node validation: checks parent/buddy invariants, accumulates
// free/allocation statistics into ctx, and for split nodes verifies child
// offsets before recursing into both halves at the next level.
10912 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
// Structural invariants: only the root (parent == NULL) lacks a buddy,
// and buddy links are mutual.
const 10914 VMA_VALIDATE(level < m_LevelCount);
10915 VMA_VALIDATE(curr->parent == parent);
10916 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10917 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10920 case Node::TYPE_FREE:
// Whole node contributes to free space.
10922 ctx.calculatedSumFreeSize += levelNodeSize;
10923 ++ctx.calculatedFreeCount;
10925 case Node::TYPE_ALLOCATION:
10926 ++ctx.calculatedAllocationCount;
// Internal fragmentation (node size minus allocation size) counts as free.
10927 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10928 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10930 case Node::TYPE_SPLIT:
10932 const uint32_t childrenLevel = level + 1;
10933 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10934 const Node*
const leftChild = curr->split.leftChild;
10935 VMA_VALIDATE(leftChild != VMA_NULL);
// Left child starts where the parent starts.
10936 VMA_VALIDATE(leftChild->offset == curr->offset);
10937 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10939 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10941 const Node*
const rightChild = leftChild->buddy;
// Right child starts exactly one child-node-size after the parent.
10942 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10943 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10945 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest buddy-tree level whose node size
// still fits it: halves the level node size while the allocation fits.
// NOTE(review): extraction dropped trailing lines (presumably ++level and the
// return statement) — confirm against the full source; code kept byte-identical.
10956 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10959 uint32_t level = 0;
10960 VkDeviceSize currLevelNodeSize = m_UsableSize;
10961 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
// Descend while the next-smaller node still fits the allocation and a deeper level exists.
10962 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10965 currLevelNodeSize = nextLevelNodeSize;
10966 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at 'offset': walks the buddy tree from the root down
// to the allocated leaf, marks it free, then merges free buddies upward.
// NOTE(review): extraction dropped some lines (braces, and apparently the
// --level updates in both loops); code kept byte-identical.
10971 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10974 Node* node = m_Root;
10975 VkDeviceSize nodeOffset = 0;
10976 uint32_t level = 0;
10977 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend through split nodes, choosing the child whose range contains 'offset'.
10978 while(node->type == Node::TYPE_SPLIT)
10980 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10981 if(offset < nodeOffset + nextLevelSize)
10983 node = node->split.leftChild;
10987 node = node->split.leftChild->buddy;
10988 nodeOffset += nextLevelSize;
10991 levelNodeSize = nextLevelSize;
10994 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
10995 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
10998 --m_AllocationCount;
10999 m_SumFreeSize += alloc->GetSize();
11001 node->type = Node::TYPE_FREE;
// Merge with the buddy while it is also free, replacing the pair with the parent.
11004 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
11006 RemoveFromFreeList(level, node->buddy);
11007 Node*
const parent = node->parent;
11009 vma_delete(GetAllocationCallbacks(), node->buddy);
11010 vma_delete(GetAllocationCallbacks(), node);
11011 parent->type = Node::TYPE_FREE;
// Finally put the (possibly merged) free node on its level's free list.
11019 AddToFreeListFront(level, node);
// Recursively accumulates statistics (allocation/unused-range counts and sizes)
// for the subtree rooted at 'node' into 'outInfo'.
// NOTE(review): extraction dropped several lines (the switch(...) header and the
// actual stat-field updates); code kept byte-identical.
11022 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 11026 case Node::TYPE_FREE:
11032 case Node::TYPE_ALLOCATION:
11034 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Slack between the node size and the allocation counts as an unused range.
11040 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11041 if(unusedRangeSize > 0)
11050 case Node::TYPE_SPLIT:
11052 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11053 const Node*
const leftChild = node->split.leftChild;
11054 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11055 const Node*
const rightChild = leftChild->buddy;
11056 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a free node onto the front of the doubly-linked free list for 'level'.
11064 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11066 VMA_ASSERT(node->type == Node::TYPE_FREE);
11069 Node*
const frontNode = m_FreeList[level].front;
11070 if(frontNode == VMA_NULL)
// Empty list: the node becomes both front and back.
11072 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11073 node->free.prev = node->free.next = VMA_NULL;
11074 m_FreeList[level].front = m_FreeList[level].back = node;
// Non-empty list: link the node in front of the current head.
11078 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11079 node->free.prev = VMA_NULL;
11080 node->free.next = frontNode;
11081 frontNode->free.prev = node;
11082 m_FreeList[level].front = node;
// Unlinks 'node' from the doubly-linked free list for 'level',
// updating the list's front/back pointers when the node is at either end.
11086 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11088 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Fix the forward link (or the list head if node is first).
11091 if(node->free.prev == VMA_NULL)
11093 VMA_ASSERT(m_FreeList[level].front == node);
11094 m_FreeList[level].front = node->free.next;
11098 Node*
const prevFreeNode = node->free.prev;
11099 VMA_ASSERT(prevFreeNode->free.next == node);
11100 prevFreeNode->free.next = node->free.next;
// Fix the backward link (or the list tail if node is last).
11104 if(node->free.next == VMA_NULL)
11106 VMA_ASSERT(m_FreeList[level].back == node);
11107 m_FreeList[level].back = node->free.prev;
11111 Node*
const nextFreeNode = node->free.next;
11112 VMA_ASSERT(nextFreeNode->free.prev == node);
11113 nextFreeNode->free.prev = node->free.prev;
// Recursively emits JSON describing the subtree rooted at 'node':
// unused ranges for free space, allocation entries for leaves, and both
// children for split nodes. Compiled only when stats strings are enabled.
// NOTE(review): extraction dropped the switch(...) header and break lines.
#if VMA_STATS_STRING_ENABLED 11118 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 11122 case Node::TYPE_FREE:
11123 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11125 case Node::TYPE_ALLOCATION:
11127 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11128 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report trailing slack inside the node as an unused range.
11129 if(allocSize < levelNodeSize)
11131 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11135 case Node::TYPE_SPLIT:
11137 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11138 const Node*
const leftChild = node->split.leftChild;
11139 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11140 const Node*
const rightChild = leftChild->buddy;
11141 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: initializes all members to empty/sentinel values.
// Real initialization happens later in Init().
#endif // #if VMA_STATS_STRING_ENABLED 11154 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11155 m_pMetadata(VMA_NULL),
11156 m_MemoryTypeIndex(UINT32_MAX),
11158 m_hMemory(VK_NULL_HANDLE),
11160 m_pMappedData(VMA_NULL)
// Takes ownership of an already-allocated VkDeviceMemory and creates the
// metadata object matching the requested sub-allocation algorithm
// (linear, buddy, or the default generic algorithm).
// NOTE(review): the switch/case lines selecting the algorithm were dropped
// by extraction; code kept byte-identical.
11164 void VmaDeviceMemoryBlock::Init(
11167 uint32_t newMemoryTypeIndex,
11168 VkDeviceMemory newMemory,
11169 VkDeviceSize newSize,
11171 uint32_t algorithm)
// Init must only be called once, on a block that holds no memory yet.
11173 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11175 m_hParentPool = hParentPool;
11176 m_MemoryTypeIndex = newMemoryTypeIndex;
11178 m_hMemory = newMemory;
11183 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11186 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11192 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11194 m_pMetadata->Init(newSize);
// Releases the block's VkDeviceMemory back to the allocator and deletes the
// metadata. The block must be empty — all sub-allocations freed beforehand.
11197 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11201 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11203 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11204 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11205 m_hMemory = VK_NULL_HANDLE;
11207 vma_delete(allocator, m_pMetadata);
11208 m_pMetadata = VMA_NULL;
// Sanity-checks the block (valid memory handle, non-zero size) and then
// delegates to the metadata's own validation.
11211 bool VmaDeviceMemoryBlock::Validate()
const 11213 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11214 (m_pMetadata->GetSize() != 0));
11216 return m_pMetadata->Validate();
// Maps the block, asks the metadata to verify corruption-detection magic
// values in the mapped data, then unmaps. Returns the metadata's result,
// or the Map() error if mapping failed.
11219 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11221 void* pData =
nullptr;
11222 VkResult res = Map(hAllocator, 1, &pData);
11223 if(res != VK_SUCCESS)
11228 res = m_pMetadata->CheckCorruption(pData);
// Balance the Map() above.
11230 Unmap(hAllocator, 1);
// Reference-counted map of the whole block. If already mapped, just bumps
// m_MapCount and returns the cached pointer; otherwise calls vkMapMemory.
// 'count' lets one call account for multiple logical map requests.
// Thread-safe via m_Mutex (when the allocator uses mutexes).
11235 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
11242 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11243 if(m_MapCount != 0)
// Already mapped: reuse the existing mapping.
11245 m_MapCount += count;
11246 VMA_ASSERT(m_pMappedData != VMA_NULL);
11247 if(ppData != VMA_NULL)
11249 *ppData = m_pMappedData;
// First map: call into Vulkan (some argument lines dropped by extraction).
11255 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11256 hAllocator->m_hDevice,
11262 if(result == VK_SUCCESS)
11264 if(ppData != VMA_NULL)
11266 *ppData = m_pMappedData;
11268 m_MapCount = count;
// Reference-counted unmap: decrements m_MapCount by 'count' and only calls
// vkUnmapMemory when the count reaches zero. Asserts on unbalanced unmaps.
11274 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11281 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11282 if(m_MapCount >= count)
11284 m_MapCount -= count;
11285 if(m_MapCount == 0)
// Last reference gone: drop the cached pointer and unmap for real.
11287 m_pMappedData = VMA_NULL;
11288 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11293 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Writes corruption-detection magic values into the margins immediately
// before and after an allocation. Requires the debug margin/corruption
// detection configuration to be active.
11297 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11299 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11300 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11303 VkResult res = Map(hAllocator, 1, &pData);
11304 if(res != VK_SUCCESS)
// Magic before the allocation (in the leading margin) and right after its end.
11309 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11310 VmaWriteMagicValue(pData, allocOffset + allocSize);
11312 Unmap(hAllocator, 1);
// Verifies the corruption-detection magic values around an allocation just
// before it is freed; asserts loudly if either margin was overwritten.
11317 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11319 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11320 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11323 VkResult res = Map(hAllocator, 1, &pData);
11324 if(res != VK_SUCCESS)
// Check the margin before the allocation, then the margin after it.
11329 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11331 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11333 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11335 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11338 Unmap(hAllocator, 1);
// Binds a buffer to this block's VkDeviceMemory at the allocation's offset.
// Holds m_Mutex because vkBindBufferMemory must not race with vkMapMemory
// on the same VkDeviceMemory from another thread.
11343 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11348 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11349 hAllocation->GetBlock() ==
this);
11351 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11352 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11353 hAllocator->m_hDevice,
11356 hAllocation->GetOffset());
// Binds an image to this block's VkDeviceMemory at the allocation's offset.
// Mirrors BindBufferMemory; same mutex rationale.
11359 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11364 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11365 hAllocation->GetBlock() ==
this);
11367 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11368 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11369 hAllocator->m_hDevice,
11372 hAllocation->GetOffset());
// NOTE(review): fragments of two static stat helpers — a zero-initialization of
// a VmaStatInfo (presumably VmaInitStatInfo) and the signature of
// VmaPostprocessCalcStatInfo; their surrounding lines were dropped by
// extraction. Kept byte-identical; confirm against the full source.
11377 memset(&outInfo, 0,
sizeof(outInfo));
11396 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the pool create-info to the internal
// VmaBlockVector. A zero blockSize means "use the allocator's preferred
// block size"; an explicit non-zero size marks the block size as fixed.
11404 VmaPool_T::VmaPool_T(
11407 VkDeviceSize preferredBlockSize) :
11411 createInfo.memoryTypeIndex,
11412 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11413 createInfo.minBlockCount,
11414 createInfo.maxBlockCount,
11416 createInfo.frameInUseCount,
// explicitBlockSize flag: true only when the caller provided a block size.
11418 createInfo.blockSize != 0,
// VmaPool_T destructor (body dropped by extraction), followed by the
// VmaBlockVector constructor: a plain member-initializer list copying the
// configuration and creating an empty, allocator-aware block vector.
11424 VmaPool_T::~VmaPool_T()
11428 #if VMA_STATS_STRING_ENABLED 11430 #endif // #if VMA_STATS_STRING_ENABLED 11432 VmaBlockVector::VmaBlockVector(
11435 uint32_t memoryTypeIndex,
11436 VkDeviceSize preferredBlockSize,
11437 size_t minBlockCount,
11438 size_t maxBlockCount,
11439 VkDeviceSize bufferImageGranularity,
11440 uint32_t frameInUseCount,
11442 bool explicitBlockSize,
11443 uint32_t algorithm) :
11444 m_hAllocator(hAllocator),
11445 m_hParentPool(hParentPool),
11446 m_MemoryTypeIndex(memoryTypeIndex),
11447 m_PreferredBlockSize(preferredBlockSize),
11448 m_MinBlockCount(minBlockCount),
11449 m_MaxBlockCount(maxBlockCount),
11450 m_BufferImageGranularity(bufferImageGranularity),
11451 m_FrameInUseCount(frameInUseCount),
11452 m_IsCustomPool(isCustomPool),
11453 m_ExplicitBlockSize(explicitBlockSize),
11454 m_Algorithm(algorithm),
11455 m_HasEmptyBlock(false),
// The blocks vector uses the allocator's own allocation callbacks.
11456 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
// Destructor: destroys and deletes every remaining memory block,
// iterating backwards so indices stay valid.
11461 VmaBlockVector::~VmaBlockVector()
11463 for(
size_t i = m_Blocks.size(); i--; )
11465 m_Blocks[i]->Destroy(m_hAllocator);
11466 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size,
// stopping at (and presumably returning) the first failure.
11470 VkResult VmaBlockVector::CreateMinBlocks()
11472 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11474 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11475 if(res != VK_SUCCESS)
// Aggregates pool statistics from every block's metadata into *pStats,
// under a read lock (multiple readers allowed).
11483 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11485 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11487 const size_t blockCount = m_Blocks.size();
11496 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11498 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11499 VMA_ASSERT(pBlock);
11500 VMA_HEAVY_ASSERT(pBlock->Validate());
11501 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption detection is possible only when the debug options are compiled in
// AND this memory type is both host-visible and host-coherent (so the margins
// can be written/read through a mapping without explicit flushes).
11505 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11507 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11508 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11509 (VMA_DEBUG_MARGIN > 0) &&
11511 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retry attempts when allocating by making other allocations lost.
11514 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates 'allocationCount' pages under a single write lock by calling
// AllocatePage repeatedly. On any failure, already-made allocations are
// freed and the output array is zeroed, so the operation is all-or-nothing.
11516 VkResult VmaBlockVector::Allocate(
11517 uint32_t currentFrameIndex,
11519 VkDeviceSize alignment,
11521 VmaSuballocationType suballocType,
11522 size_t allocationCount,
11526 VkResult res = VK_SUCCESS;
11528 if(IsCorruptionDetectionEnabled())
// Round size and alignment up so magic-value margins stay aligned.
11530 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11531 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11535 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11536 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11538 res = AllocatePage(
11544 pAllocations + allocIndex);
11545 if(res != VK_SUCCESS)
// Roll back: free everything allocated so far and clear the output array.
11552 if(res != VK_SUCCESS)
11555 while(allocIndex--)
11557 Free(pAllocations[allocIndex]);
11559 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single page (one VmaAllocation) from this block vector.
// Strategy, as visible below: (1) try existing blocks (last block first,
// then forward or backward scan depending on strategy); (2) create a new
// block, shrinking the preferred size up to NEW_BLOCK_SIZE_SHIFT_MAX times
// on allocation failure; (3) optionally make other (lost-able) allocations
// lost and retry up to VMA_ALLOCATION_TRY_COUNT times.
// NOTE(review): extraction dropped many interior lines (argument lists,
// braces, several conditions); code kept byte-identical.
11565 VkResult VmaBlockVector::AllocatePage(
11566 uint32_t currentFrameIndex,
11568 VkDeviceSize alignment,
11570 VmaSuballocationType suballocType,
11577 const bool canCreateNewBlock =
11579 (m_Blocks.size() < m_MaxBlockCount);
11586 canMakeOtherLost =
false;
// Upper-address allocation is only supported by some algorithms.
11590 if(isUpperAddress &&
11593 return VK_ERROR_FEATURE_NOT_PRESENT;
11607 return VK_ERROR_FEATURE_NOT_PRESENT;
// An allocation (plus both debug margins) larger than a whole block can never fit.
11611 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11613 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11621 if(!canMakeOtherLost || canCreateNewBlock)
// 1a. Fast path: try the most recently added block first.
11630 if(!m_Blocks.empty())
11632 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11633 VMA_ASSERT(pCurrBlock);
11634 VkResult res = AllocateFromBlock(
11644 if(res == VK_SUCCESS)
11646 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 1b. Forward scan over existing blocks (one strategy).
11656 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11658 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11659 VMA_ASSERT(pCurrBlock);
11660 VkResult res = AllocateFromBlock(
11670 if(res == VK_SUCCESS)
11672 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 1c. Backward scan over existing blocks (alternative strategy).
11680 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11682 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11683 VMA_ASSERT(pCurrBlock);
11684 VkResult res = AllocateFromBlock(
11694 if(res == VK_SUCCESS)
11696 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. Create a new block if allowed.
11704 if(canCreateNewBlock)
11707 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11708 uint32_t newBlockSizeShift = 0;
11709 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
11711 if(!m_ExplicitBlockSize)
// Heuristic: start with a smaller block when no large block exists yet.
11714 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11715 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11717 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11718 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11720 newBlockSize = smallerNewBlockSize;
11721 ++newBlockSizeShift;
11730 size_t newBlockIndex = 0;
11731 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On OOM, retry with progressively halved block sizes (unless size is fixed).
11733 if(!m_ExplicitBlockSize)
11735 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11737 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11738 if(smallerNewBlockSize >= size)
11740 newBlockSize = smallerNewBlockSize;
11741 ++newBlockSizeShift;
11742 res = CreateBlock(newBlockSize, &newBlockIndex);
11751 if(res == VK_SUCCESS)
11753 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11754 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11756 res = AllocateFromBlock(
11766 if(res == VK_SUCCESS)
11768 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11774 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Last resort: make other allocations lost to free up space.
11781 if(canMakeOtherLost)
11783 uint32_t tryIndex = 0;
11784 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11786 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11787 VmaAllocationRequest bestRequest = {};
11788 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Search all blocks for the cheapest request (fewest bytes/allocations lost).
11794 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11796 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11797 VMA_ASSERT(pCurrBlock);
11798 VmaAllocationRequest currRequest = {};
11799 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11802 m_BufferImageGranularity,
11811 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11812 if(pBestRequestBlock == VMA_NULL ||
11813 currRequestCost < bestRequestCost)
11815 pBestRequestBlock = pCurrBlock;
11816 bestRequest = currRequest;
11817 bestRequestCost = currRequestCost;
// Cost 0 means nothing has to be lost — cannot do better.
11819 if(bestRequestCost == 0)
// Backward-scan variant of the same search (alternative strategy).
11830 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11832 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11833 VMA_ASSERT(pCurrBlock);
11834 VmaAllocationRequest currRequest = {};
11835 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11838 m_BufferImageGranularity,
11847 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11848 if(pBestRequestBlock == VMA_NULL ||
11849 currRequestCost < bestRequestCost ||
11852 pBestRequestBlock = pCurrBlock;
11853 bestRequest = currRequest;
11854 bestRequestCost = currRequestCost;
11856 if(bestRequestCost == 0 ||
11866 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations require the block to be mapped up front.
11870 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11871 if(res != VK_SUCCESS)
// Making the required allocations lost can fail if frame constraints changed.
11877 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11883 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11885 m_HasEmptyBlock =
false;
// Commit: construct the VmaAllocation and record it in the block's metadata.
11888 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
11889 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
11890 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
11891 (*pAllocation)->InitBlockAllocation(
11893 bestRequest.offset,
11899 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11900 VMA_DEBUG_LOG(
" Returned from existing block");
11901 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11902 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11904 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11906 if(IsCorruptionDetectionEnabled())
11908 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11909 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Retry budget exhausted: too much churn in lost allocations.
11924 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11926 return VK_ERROR_TOO_MANY_OBJECTS;
11930 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation back into its block. Under the write lock it
// validates corruption margins, unmaps persistent mappings, frees the
// sub-allocation, and manages the "at most one empty block" policy.
// The actual VkDeviceMemory destruction happens outside the lock.
11933 void VmaBlockVector::Free(
11936 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
11940 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11942 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11944 if(IsCorruptionDetectionEnabled())
11946 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11947 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Balance the Map() done when the allocation was created persistently mapped.
11950 if(hAllocation->IsPersistentMap())
11952 pBlock->Unmap(m_hAllocator, 1);
11955 pBlock->m_pMetadata->Free(hAllocation);
11956 VMA_HEAVY_ASSERT(pBlock->Validate());
11958 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
// Keep at most one empty block alive (above the minimum block count).
11961 if(pBlock->m_pMetadata->IsEmpty())
11964 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11966 pBlockToDelete = pBlock;
11972 m_HasEmptyBlock =
true;
// If an empty block already exists, the last block may now be redundant.
11977 else if(m_HasEmptyBlock)
11979 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11980 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11982 pBlockToDelete = pLastBlock;
11983 m_Blocks.pop_back();
11984 m_HasEmptyBlock =
false;
11988 IncrementallySortBlocks();
// Destroy the surplus block outside the mutex (it can be slow).
11993 if(pBlockToDelete != VMA_NULL)
11995 VMA_DEBUG_LOG(
" Deleted empty allocation");
11996 pBlockToDelete->Destroy(m_hAllocator);
11997 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, scanning backwards and
// stopping early once the preferred block size is reached (no block can
// usefully exceed it for the shrink heuristic in AllocatePage).
12001 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 12003 VkDeviceSize result = 0;
12004 for(
size_t i = m_Blocks.size(); i--; )
12006 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
12007 if(result >= m_PreferredBlockSize)
// Removes the given block from the vector (linear search by pointer).
12015 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
12017 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12019 if(m_Blocks[blockIndex] == pBlock)
12021 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass keeping blocks roughly ordered by ascending free
// space, so allocation scans hit fuller blocks first. Called after each
// free; repeated calls converge to fully sorted.
12028 void VmaBlockVector::IncrementallySortBlocks()
12033 for(
size_t i = 1; i < m_Blocks.size(); ++i)
12035 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
12037 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts to suballocate from one specific block: asks the metadata for a
// request (without making anything lost), maps if persistently mapped,
// constructs the VmaAllocation, and applies debug fill / magic values.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when the block has no suitable space.
// NOTE(review): extraction dropped some argument-list and brace lines.
12044 VkResult VmaBlockVector::AllocateFromBlock(
12045 VmaDeviceMemoryBlock* pBlock,
12046 uint32_t currentFrameIndex,
12048 VkDeviceSize alignment,
12051 VmaSuballocationType suballocType,
12060 VmaAllocationRequest currRequest = {};
12061 if(pBlock->m_pMetadata->CreateAllocationRequest(
12064 m_BufferImageGranularity,
// This path never makes other allocations lost.
12074 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
12078 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12079 if(res != VK_SUCCESS)
// The block is about to receive an allocation, so it is no longer empty.
12086 if(pBlock->m_pMetadata->IsEmpty())
12088 m_HasEmptyBlock =
false;
12091 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12092 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12093 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12094 (*pAllocation)->InitBlockAllocation(
12096 currRequest.offset,
12102 VMA_HEAVY_ASSERT(pBlock->Validate());
12103 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12104 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12106 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12108 if(IsCorruptionDetectionEnabled())
12110 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12111 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
12115 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of 'blockSize', wraps it in a
// VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally reports the
// new block's index through *pNewBlockIndex.
12118 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12120 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12121 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12122 allocInfo.allocationSize = blockSize;
12123 VkDeviceMemory mem = VK_NULL_HANDLE;
12124 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12133 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12139 allocInfo.allocationSize,
12143 m_Blocks.push_back(pBlock);
12144 if(pNewBlockIndex != VMA_NULL)
12146 *pNewBlockIndex = m_Blocks.size() - 1;
// Performs defragmentation moves on the CPU via memcpy: maps every block
// involved in a move, invalidates source ranges / flushes destination
// ranges for non-coherent memory, copies the data, and finally unmaps the
// blocks this function mapped itself.
12152 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12153 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12154 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12156 const size_t blockCount = m_Blocks.size();
12157 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12161 BLOCK_FLAG_USED = 0x00000001,
// Set when this function mapped the block itself (must unmap at the end).
12162 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12170 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12171 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12172 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Pass 1: mark every block that participates in at least one move.
12175 const size_t moveCount = moves.size();
12176 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12178 const VmaDefragmentationMove& move = moves[moveIndex];
12179 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12180 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12183 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: ensure every used block is mapped (reuse existing mappings).
12186 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12188 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12189 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12190 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12192 currBlockInfo.pMappedData = pBlock->GetMappedData();
12194 if(currBlockInfo.pMappedData == VMA_NULL)
12196 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12197 if(pDefragCtx->res == VK_SUCCESS)
12199 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Pass 3: perform the copies, with cache maintenance for non-coherent memory.
12206 if(pDefragCtx->res == VK_SUCCESS)
12208 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12209 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12211 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12213 const VmaDefragmentationMove& move = moves[moveIndex];
12215 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12216 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12218 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range (aligned to nonCoherentAtomSize) before reading.
12223 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12224 memRange.memory = pSrcBlock->GetDeviceMemory();
12225 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12226 memRange.size = VMA_MIN(
12227 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12228 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12229 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12234 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12235 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12236 static_cast<size_t>(move.size));
12238 if(IsCorruptionDetectionEnabled())
12240 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12241 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range after writing (non-coherent memory).
12247 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12248 memRange.memory = pDstBlock->GetDeviceMemory();
12249 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12250 memRange.size = VMA_MIN(
12251 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12252 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12253 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Pass 4: unmap only the blocks that this function mapped (reverse order).
12260 for(
size_t blockIndex = blockCount; blockIndex--; )
12262 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12263 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12265 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12266 pBlock->Unmap(m_hAllocator, 1);
// Performs defragmentation moves on the GPU: creates a temporary VkBuffer
// bound to each participating block's memory and records vkCmdCopyBuffer
// commands into the provided command buffer. Completion is asynchronous,
// so the context result is set to VK_NOT_READY when copies were recorded.
12271 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12272 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12273 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12274 VkCommandBuffer commandBuffer)
12276 const size_t blockCount = m_Blocks.size();
12278 pDefragCtx->blockContexts.resize(blockCount);
12279 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Pass 1: mark blocks that participate in at least one move.
12282 const size_t moveCount = moves.size();
12283 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12285 const VmaDefragmentationMove& move = moves[moveIndex];
12286 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12287 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12290 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: create and bind a buffer covering each used block's whole memory.
12294 VkBufferCreateInfo bufCreateInfo;
12295 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
12297 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12299 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12300 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12301 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12303 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12304 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12305 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12306 if(pDefragCtx->res == VK_SUCCESS)
12308 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12309 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Pass 3: record one buffer-to-buffer copy per move.
12316 if(pDefragCtx->res == VK_SUCCESS)
12318 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12320 const VmaDefragmentationMove& move = moves[moveIndex];
12322 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12323 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12325 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12327 VkBufferCopy region = {
12331 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12332 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
// Copies were only recorded, not executed — signal "pending" to the caller.
12337 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12339 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): fragment of an empty-block cleanup routine whose signature was
// dropped by extraction (likely VmaBlockVector::FreeEmptyBlocks — confirm
// against the full source). Scans blocks backwards, destroys empty blocks
// above m_MinBlockCount (reporting freed bytes to the defragmentation stats),
// and tracks whether a kept block remains empty via m_HasEmptyBlock.
12345 m_HasEmptyBlock =
false;
12346 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12348 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12349 if(pBlock->m_pMetadata->IsEmpty())
12351 if(m_Blocks.size() > m_MinBlockCount)
12353 if(pDefragmentationStats != VMA_NULL)
12356 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12359 VmaVectorRemove(m_Blocks, blockIndex);
12360 pBlock->Destroy(m_hAllocator);
12361 vma_delete(m_hAllocator, pBlock);
// Block is empty but must be kept to honor the minimum block count.
12365 m_HasEmptyBlock =
true;
// Serializes this block vector as a JSON object: configuration fields
// (custom pools and default vectors emit slightly different key sets —
// the branch selecting between them was dropped by extraction) followed
// by a "Blocks" object with each block's detailed map, keyed by block id.
// Takes the read lock for the duration.
#if VMA_STATS_STRING_ENABLED 12373 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12375 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12377 json.BeginObject();
12381 json.WriteString(
"MemoryTypeIndex");
12382 json.WriteNumber(m_MemoryTypeIndex);
12384 json.WriteString(
"BlockSize");
12385 json.WriteNumber(m_PreferredBlockSize);
12387 json.WriteString(
"BlockCount");
12388 json.BeginObject(
true);
// Min/Max are only emitted when they constrain anything.
12389 if(m_MinBlockCount > 0)
12391 json.WriteString(
"Min");
12392 json.WriteNumber((uint64_t)m_MinBlockCount);
12394 if(m_MaxBlockCount < SIZE_MAX)
12396 json.WriteString(
"Max");
12397 json.WriteNumber((uint64_t)m_MaxBlockCount);
12399 json.WriteString(
"Cur");
12400 json.WriteNumber((uint64_t)m_Blocks.size());
12403 if(m_FrameInUseCount > 0)
12405 json.WriteString(
"FrameInUseCount");
12406 json.WriteNumber(m_FrameInUseCount);
12409 if(m_Algorithm != 0)
12411 json.WriteString(
"Algorithm");
12412 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
12417 json.WriteString(
"PreferredBlockSize");
12418 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps, keyed by each block's numeric id.
12421 json.WriteString(
"Blocks");
12422 json.BeginObject();
12423 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12425 json.BeginString();
12426 json.ContinueString(m_Blocks[i]->GetId());
12429 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation pass over this block vector. Chooses between the
// CPU strategy (memmove through mapped memory) and the GPU strategy
// (vkCmdCopyBuffer recorded into commandBuffer), asks the algorithm for a set
// of moves, applies them, and decrements the caller's remaining
// bytes/allocations budgets (passed by reference).
// NOTE(review): several condition lines are elided by extraction below — e.g.
// the host-visible/host-coherent requirement completing canDefragmentOnCpu.
12436 #endif // #if VMA_STATS_STRING_ENABLED 12438 void VmaBlockVector::Defragment(
12439 class VmaBlockVectorDefragmentationContext* pCtx,
12441 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12442 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12443 VkCommandBuffer commandBuffer)
12445 pCtx->res = VK_SUCCESS;
12447 const VkMemoryPropertyFlags memPropFlags =
12448 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12449 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12450 const bool isHostCoherent = (memPropFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;
// GPU path requires remaining budget, corruption detection off, and this
// memory type enabled in the GPU-defragmentation memory-type mask.
12452 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
12454 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12455 !IsCorruptionDetectionEnabled() &&
12456 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
12459 if(canDefragmentOnCpu || canDefragmentOnGpu)
12461 bool defragmentOnGpu;
// If exactly one strategy is possible, take it; otherwise prefer GPU for
// device-local memory or on integrated GPUs.
12463 if(canDefragmentOnGpu != canDefragmentOnCpu)
12465 defragmentOnGpu = canDefragmentOnGpu;
12470 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12471 m_hAllocator->IsIntegratedGpu();
// GPU buffer copies must not overlap; the CPU path tolerates overlap.
12474 bool overlappingMoveSupported = !defragmentOnGpu;
12476 if(m_hAllocator->m_UseMutex)
12478 m_Mutex.LockWrite();
12479 pCtx->mutexLocked =
true;
12482 pCtx->Begin(overlappingMoveSupported);
// Select the budget matching the chosen strategy, then compute the moves.
12486 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12487 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12488 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12489 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()))
12490 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Charge whatever was actually moved against the caller's remaining budget.
12493 if(pStats != VMA_NULL)
12495 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12496 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12499 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12500 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12501 if(defragmentOnGpu)
12503 maxGpuBytesToMove -= bytesMoved;
12504 maxGpuAllocationsToMove -= allocationsMoved;
12508 maxCpuBytesToMove -= bytesMoved;
12509 maxCpuAllocationsToMove -= allocationsMoved;
// Apply the computed moves with the strategy chosen above.
12513 if(pCtx->res >= VK_SUCCESS)
12515 if(defragmentOnGpu)
12517 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12521 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finishes a defragmentation pass started by Defragment(): destroys the
// temporary VkBuffers created for GPU copies, frees blocks emptied by the
// moves (on success), and releases the write lock taken in Defragment().
// NOTE(review): the stats parameter line (pStats) appears elided by extraction.
12527 void VmaBlockVector::DefragmentationEnd(
12528 class VmaBlockVectorDefragmentationContext* pCtx,
12532 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12534 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12535 if(blockCtx.hBuffer)
12537 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12538 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12542 if(pCtx->res >= VK_SUCCESS)
12544 FreeEmptyBlocks(pStats);
// Unlock only if Defragment() actually locked (m_UseMutex was true).
12547 if(pCtx->mutexLocked)
12549 VMA_ASSERT(m_hAllocator->m_UseMutex);
12550 m_Mutex.UnlockWrite();
12554 size_t VmaBlockVector::CalcAllocationCount()
const 12557 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12559 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
12564 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12566 if(m_BufferImageGranularity == 1)
12570 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12571 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12573 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
12574 VMA_ASSERT(m_Algorithm == 0);
12575 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12576 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
12584 void VmaBlockVector::MakePoolAllocationsLost(
12585 uint32_t currentFrameIndex,
12586 size_t* pLostAllocationCount)
12588 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12589 size_t lostAllocationCount = 0;
12590 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12592 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12593 VMA_ASSERT(pBlock);
12594 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
12596 if(pLostAllocationCount != VMA_NULL)
12598 *pLostAllocationCount = lostAllocationCount;
12602 VkResult VmaBlockVector::CheckCorruption()
12604 if(!IsCorruptionDetectionEnabled())
12606 return VK_ERROR_FEATURE_NOT_PRESENT;
12609 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12610 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12612 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12613 VMA_ASSERT(pBlock);
12614 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12615 if(res != VK_SUCCESS)
12623 void VmaBlockVector::AddStats(
VmaStats* pStats)
12625 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12626 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12628 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12630 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12632 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12633 VMA_ASSERT(pBlock);
12634 VMA_HEAVY_ASSERT(pBlock->Validate());
12636 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12637 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12638 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12639 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: snapshots every block of the source block vector into a
// BlockInfo (remembering its original index), then sorts the BlockInfos by
// block pointer so AddAllocation() can binary-search them.
// NOTE(review): some initializer lines (e.g. the hAllocator parameter and a
// m_BytesMoved(0) initializer) appear elided by extraction — confirm against
// the full file.
12646 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12648 VmaBlockVector* pBlockVector,
12649 uint32_t currentFrameIndex,
12650 bool overlappingMoveSupported) :
12651 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12652 m_AllocationCount(0),
12653 m_AllAllocations(false),
12655 m_AllocationsMoved(0),
12656 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12659 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12660 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12662 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12663 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12664 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12665 m_Blocks.push_back(pBlockInfo);
// Sort by underlying block pointer to enable VmaBinaryFindFirstNotLess lookup.
12669 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
12672 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12674 for(
size_t i = m_Blocks.size(); i--; )
12676 vma_delete(m_hAllocator, m_Blocks[i]);
12680 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12683 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12685 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12686 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12687 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12689 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12690 (*it)->m_Allocations.push_back(allocInfo);
12697 ++m_AllocationCount;
// One round of the generic defragmentation algorithm: repeatedly takes the
// last allocation of the last non-empty block and tries to re-place it in an
// earlier block (or earlier offset), recording a VmaDefragmentationMove and
// updating both blocks' metadata. Stops when the byte/allocation budget is
// exhausted or no more sensible moves exist.
// NOTE(review): many lines (loop boundaries, early returns, closing braces)
// are elided by extraction — do not treat this listing as complete.
12701 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12702 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12703 VkDeviceSize maxBytesToMove,
12704 uint32_t maxAllocationsToMove)
12706 if(m_Blocks.empty())
12719 size_t srcBlockMinIndex = 0;
// Start from the last block / its last allocation and walk backwards.
12732 size_t srcBlockIndex = m_Blocks.size() - 1;
12733 size_t srcAllocIndex = SIZE_MAX;
12739 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12741 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12744 if(srcBlockIndex == srcBlockMinIndex)
12751 srcAllocIndex = SIZE_MAX;
12756 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12760 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12761 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12763 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12764 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12765 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12766 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front, up to and including the source block.
12768 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12771 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12772 VmaAllocationRequest dstAllocRequest;
12773 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12774 m_CurrentFrameIndex,
12775 m_pBlockVector->GetFrameInUseCount(),
12776 m_pBlockVector->GetBufferImageGranularity(),
12783 &dstAllocRequest) &&
// MoveMakesSense() call appears elided here — the request is only accepted
// when the destination is an earlier block or an earlier offset.
12785 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12787 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect the caller's remaining move budget before committing.
12790 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12791 (m_BytesMoved + size > maxBytesToMove))
12796 VmaDefragmentationMove move;
12797 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12798 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12799 move.srcOffset = srcOffset;
12800 move.dstOffset = dstAllocRequest.offset;
12802 moves.push_back(move);
// Commit the move in metadata: alloc at destination, free at source, and
// repoint the allocation to its new block/offset.
12804 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12808 allocInfo.m_hAllocation);
12809 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12811 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12813 if(allocInfo.m_pChanged != VMA_NULL)
12815 *allocInfo.m_pChanged = VK_TRUE;
12818 ++m_AllocationsMoved;
12819 m_BytesMoved += size;
12821 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous allocation / previous block for the next attempt.
12829 if(srcAllocIndex > 0)
12835 if(srcBlockIndex > 0)
12838 srcAllocIndex = SIZE_MAX;
12848 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12851 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12853 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: when m_AllAllocations is set, gathers
// every non-free suballocation of every block as a candidate; sorts each
// block's candidates by descending offset and the blocks by "move
// destination" preference; then runs DefragmentRound() up to roundCount
// times, accumulating moves into `moves`.
12861 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12862 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12863 VkDeviceSize maxBytesToMove,
12864 uint32_t maxAllocationsToMove)
// Nothing to do when no allocations were registered and AddAll was not used.
12866 if(!m_AllAllocations && m_AllocationCount == 0)
12871 const size_t blockCount = m_Blocks.size();
12872 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12874 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
12876 if(m_AllAllocations)
// AddAll mode: enumerate the block's suballocation list directly.
12878 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12879 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12880 it != pMetadata->m_Suballocations.end();
12883 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12885 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12886 pBlockInfo->m_Allocations.push_back(allocInfo);
12891 pBlockInfo->CalcHasNonMovableAllocations();
// Descending offsets so moves within a block always shift data downwards.
12895 pBlockInfo->SortAllocationsByOffsetDescending();
12901 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Two rounds: a second pass can exploit space freed by the first.
12904 const uint32_t roundCount = 2;
12907 VkResult result = VK_SUCCESS;
12908 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12910 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
12916 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12917 size_t dstBlockIndex, VkDeviceSize dstOffset,
12918 size_t srcBlockIndex, VkDeviceSize srcOffset)
12920 if(dstBlockIndex < srcBlockIndex)
12924 if(dstBlockIndex > srcBlockIndex)
12928 if(dstOffset < srcOffset)
// Constructor of the fast (linear-compaction) algorithm. Requires debug
// margins to be disabled — the fast path rewrites metadata wholesale and
// cannot preserve margins. The destructor below is empty.
// NOTE(review): the hAllocator parameter line and a m_BytesMoved(0)
// initializer appear elided by extraction — confirm against the full file.
12938 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12940 VmaBlockVector* pBlockVector,
12941 uint32_t currentFrameIndex,
12942 bool overlappingMoveSupported) :
12943 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12944 m_OverlappingMoveSupported(overlappingMoveSupported),
12945 m_AllocationCount(0),
12946 m_AllAllocations(false),
12948 m_AllocationsMoved(0),
12949 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
// Fast algorithm is incompatible with VMA_DEBUG_MARGIN.
12951 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
12955 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast defragmentation: compacts all allocations towards the front of the
// block sequence in a single sweep. Preconditions: either AddAll() was used
// or every allocation was registered. Metadata is stripped of free
// suballocations first (PreprocessMetadata) and rebuilt afterwards
// (PostprocessMetadata). A FreeSpaceDatabase tracks gaps left behind so later
// allocations can back-fill them.
// NOTE(review): many lines (early return, `end` flag declaration, struct
// member lines of VmaDefragmentationMove initializers, closing braces) are
// elided by extraction — this listing is not complete.
12959 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12960 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12961 VkDeviceSize maxBytesToMove,
12962 uint32_t maxAllocationsToMove)
12964 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12966 const size_t blockCount = m_pBlockVector->GetBlockCount();
12967 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
12972 PreprocessMetadata();
// Sort block indices so that blocks with less free space (more data) come
// first — they act as preferred destinations.
12976 m_BlockInfos.resize(blockCount);
12977 for(
size_t i = 0; i < blockCount; ++i)
12979 m_BlockInfos[i].origBlockIndex = i;
12982 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12983 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12984 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
12989 FreeSpaceDatabase freeSpaceDb;
// Destination cursor: current destination block and write offset within it.
12991 size_t dstBlockInfoIndex = 0;
12992 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12993 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12994 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12995 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12996 VkDeviceSize dstOffset = 0;
12999 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
13001 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
13002 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
13003 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
13004 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
13005 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
13007 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
13008 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
13009 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Stop the whole sweep once the caller's move budget is exhausted.
13010 if(m_AllocationsMoved == maxAllocationsToMove ||
13011 m_BytesMoved + srcAllocSize > maxBytesToMove)
13016 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// Case 1: a previously recorded gap can hold this allocation.
13019 size_t freeSpaceInfoIndex;
13020 VkDeviceSize dstAllocOffset;
13021 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
13022 freeSpaceInfoIndex, dstAllocOffset))
13024 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
13025 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
13026 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Case 1a: gap is in the same block — move within the block.
13029 if(freeSpaceInfoIndex == srcBlockInfoIndex)
13031 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13035 VmaSuballocation suballoc = *srcSuballocIt;
13036 suballoc.offset = dstAllocOffset;
13037 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
13038 m_BytesMoved += srcAllocSize;
13039 ++m_AllocationsMoved;
13041 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13043 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13044 srcSuballocIt = nextSuballocIt;
13046 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13048 VmaDefragmentationMove move = {
13049 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13050 srcAllocOffset, dstAllocOffset,
13052 moves.push_back(move);
// Case 1b: gap is in an earlier block — move across blocks.
13059 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13061 VmaSuballocation suballoc = *srcSuballocIt;
13062 suballoc.offset = dstAllocOffset;
13063 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13064 m_BytesMoved += srcAllocSize;
13065 ++m_AllocationsMoved;
13067 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13069 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13070 srcSuballocIt = nextSuballocIt;
13072 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13074 VmaDefragmentationMove move = {
13075 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13076 srcAllocOffset, dstAllocOffset,
13078 moves.push_back(move);
// Case 2: append at the destination cursor, aligned for this allocation.
13083 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// If it does not fit in the current destination block, register the tail as
// free space and advance the cursor to the next block.
13086 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13087 dstAllocOffset + srcAllocSize > dstBlockSize)
13090 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13092 ++dstBlockInfoIndex;
13093 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13094 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13095 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13096 dstBlockSize = pDstMetadata->GetSize();
13098 dstAllocOffset = 0;
// Case 2a: destination caught up with the source block.
13102 if(dstBlockInfoIndex == srcBlockInfoIndex)
13104 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13106 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
13108 bool skipOver = overlap;
13109 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: only do an overlapping memmove when the shift distance is at
// least ~1/64 of the allocation size; tiny shifts are not worth it.
13113 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
13118 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13120 dstOffset = srcAllocOffset + srcAllocSize;
// In-block move (possibly overlapping): just update the offset in place.
13126 srcSuballocIt->offset = dstAllocOffset;
13127 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
13128 dstOffset = dstAllocOffset + srcAllocSize;
13129 m_BytesMoved += srcAllocSize;
13130 ++m_AllocationsMoved;
13132 VmaDefragmentationMove move = {
13133 srcOrigBlockIndex, dstOrigBlockIndex,
13134 srcAllocOffset, dstAllocOffset,
13136 moves.push_back(move);
// Case 2b: destination is strictly before the source block — migrate the
// suballocation into the destination block's list.
13144 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13145 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13147 VmaSuballocation suballoc = *srcSuballocIt;
13148 suballoc.offset = dstAllocOffset;
13149 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13150 dstOffset = dstAllocOffset + srcAllocSize;
13151 m_BytesMoved += srcAllocSize;
13152 ++m_AllocationsMoved;
13154 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13156 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13157 srcSuballocIt = nextSuballocIt;
13159 pDstMetadata->m_Suballocations.push_back(suballoc);
13161 VmaDefragmentationMove move = {
13162 srcOrigBlockIndex, dstOrigBlockIndex,
13163 srcAllocOffset, dstAllocOffset,
13165 moves.push_back(move);
13171 m_BlockInfos.clear();
// Rebuild free-suballocation bookkeeping invalidated by the sweep.
13173 PostprocessMetadata();
// Strips FREE suballocations out of every block's metadata so the fast sweep
// sees only real allocations; resets the free counters/size index, which
// PostprocessMetadata() rebuilds afterwards.
// NOTE(review): the else branch advancing the iterator (++it) appears elided
// by extraction — the loop is the classic erase-or-advance idiom.
13178 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13180 const size_t blockCount = m_pBlockVector->GetBlockCount();
13181 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13183 VmaBlockMetadata_Generic*
const pMetadata =
13184 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
// Temporarily treat the whole block as free; real sums restored later.
13185 pMetadata->m_FreeCount = 0;
13186 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13187 pMetadata->m_FreeSuballocationsBySize.clear();
13188 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13189 it != pMetadata->m_Suballocations.end(); )
13191 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
// Save the successor before erasing the current node.
13193 VmaSuballocationList::iterator nextIt = it;
13195 pMetadata->m_Suballocations.erase(it);
// Rebuilds each block's free-space bookkeeping after the fast sweep:
// reinserts FREE suballocations into every gap between (and after) the
// remaining allocations, restores m_FreeCount/m_SumFreeSize, and re-sorts
// m_FreeSuballocationsBySize.
13206 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13208 const size_t blockCount = m_pBlockVector->GetBlockCount();
13209 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13211 VmaBlockMetadata_Generic*
const pMetadata =
13212 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13213 const VkDeviceSize blockSize = pMetadata->GetSize();
// Entirely empty block: one FREE suballocation spanning the whole block.
13216 if(pMetadata->m_Suballocations.empty())
13218 pMetadata->m_FreeCount = 1;
13220 VmaSuballocation suballoc = {
13224 VMA_SUBALLOCATION_TYPE_FREE };
13225 pMetadata->m_Suballocations.push_back(suballoc);
13226 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Non-empty block: walk the (offset-sorted) allocations, filling each gap
// before an allocation with a FREE suballocation.
13231 VkDeviceSize offset = 0;
13232 VmaSuballocationList::iterator it;
13233 for(it = pMetadata->m_Suballocations.begin();
13234 it != pMetadata->m_Suballocations.end();
// PreprocessMetadata() removed all FREE entries, so only real allocations
// in non-decreasing offset order are expected here.
13237 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13238 VMA_ASSERT(it->offset >= offset);
13241 if(it->offset > offset)
13243 ++pMetadata->m_FreeCount;
13244 const VkDeviceSize freeSize = it->offset - offset;
13245 VmaSuballocation suballoc = {
13249 VMA_SUBALLOCATION_TYPE_FREE };
13250 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13251 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13253 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13257 pMetadata->m_SumFreeSize -= it->size;
13258 offset = it->offset + it->size;
// Trailing gap after the last allocation, if any.
13262 if(offset < blockSize)
13264 ++pMetadata->m_FreeCount;
13265 const VkDeviceSize freeSize = blockSize - offset;
13266 VmaSuballocation suballoc = {
13270 VMA_SUBALLOCATION_TYPE_FREE };
13271 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13272 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): this uses `>` while the preceding-gap case above uses `>=`
// against VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER — confirm whether the
// asymmetry is intentional upstream.
13273 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13275 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
13280 pMetadata->m_FreeSuballocationsBySize.begin(),
13281 pMetadata->m_FreeSuballocationsBySize.end(),
13282 VmaSuballocationItemSizeLess());
13285 VMA_HEAVY_ASSERT(pMetadata->Validate());
13289 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13292 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13293 while(it != pMetadata->m_Suballocations.end())
13295 if(it->offset < suballoc.offset)
13300 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context: stores the target block vector,
// the frame index, algorithm flags, and the list of explicitly registered
// allocations. The concrete algorithm object is created lazily in Begin().
// NOTE(review): the hAllocator/hCustomPool parameter lines and the
// res(VK_SUCCESS) initializer appear elided by extraction.
13306 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13309 VmaBlockVector* pBlockVector,
13310 uint32_t currFrameIndex,
13311 uint32_t algorithmFlags) :
13313 mutexLocked(false),
13314 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13315 m_hAllocator(hAllocator),
13316 m_hCustomPool(hCustomPool),
13317 m_pBlockVector(pBlockVector),
13318 m_CurrFrameIndex(currFrameIndex),
13319 m_AlgorithmFlags(algorithmFlags),
13320 m_pAlgorithm(VMA_NULL),
13321 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13322 m_AllAllocations(false)
13326 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13328 vma_delete(m_hAllocator, m_pAlgorithm);
13331 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13333 AllocInfo info = { hAlloc, pChanged };
13334 m_Allocations.push_back(info);
// Creates the concrete defragmentation algorithm and feeds it either all
// allocations (AddAll mode) or the explicitly registered ones. The fast
// algorithm is chosen only when its preconditions hold (no debug margin, no
// bufferImageGranularity conflicts possible).
// NOTE(review): additional selection conditions (lines 13341-13353, e.g.
// involving m_AlgorithmFlags / allAllocations) appear elided by extraction.
13337 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13339 const bool allAllocations = m_AllAllocations ||
13340 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13352 if(VMA_DEBUG_MARGIN == 0 &&
13354 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13356 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13357 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
// Fallback: generic algorithm handles margins and granularity conflicts.
13361 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13362 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13367 m_pAlgorithm->AddAll();
13371 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13373 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context spanning all pools. Default-pool
// contexts live in the fixed-size m_DefaultPoolContexts array (zeroed here,
// one slot per memory type); custom-pool contexts go into the growable
// m_CustomPoolContexts vector.
// NOTE(review): the hAllocator/flags/pStats parameter lines and their
// initializers appear elided by extraction.
13381 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13383 uint32_t currFrameIndex,
13386 m_hAllocator(hAllocator),
13387 m_CurrFrameIndex(currFrameIndex),
13390 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// All default-pool slots start empty; AddAllocations() fills them lazily.
13392 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
13395 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13397 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13399 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13400 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13401 vma_delete(m_hAllocator, pBlockVectorCtx);
13403 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13405 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13406 if(pBlockVectorCtx)
13408 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13409 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers poolCount custom pools for whole-pool defragmentation. For each
// pool using the default (generic) algorithm, reuses an existing context for
// that pool or creates a new one, then marks it to process all allocations.
// NOTE(review): some constructor-argument lines (e.g. hCustomPool and frame
// index) appear elided by extraction.
13414 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13416 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13418 VmaPool pool = pPools[poolIndex];
// Pools with a custom algorithm (e.g. linear) are not defragmented.
13421 if(pool->m_BlockVector.GetAlgorithm() == 0)
13423 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Linear search for an already-registered context for this pool.
13425 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13427 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13429 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13434 if(!pBlockVectorDefragCtx)
13436 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13439 &pool->m_BlockVector,
13442 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Whole-pool registration: every allocation in the pool is a candidate.
13445 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Dedicated and lost
// allocations are skipped. Each eligible allocation is routed to the context
// of its owning custom pool (created on demand) or to the default context of
// its memory type; pAllocationsChanged[i], when provided, will receive
// VK_TRUE if allocation i is moved.
// NOTE(review): the pAllocations parameter line and some constructor-argument
// lines appear elided by extraction.
13450 void VmaDefragmentationContext_T::AddAllocations(
13451 uint32_t allocationCount,
13453 VkBool32* pAllocationsChanged)
13456 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13459 VMA_ASSERT(hAlloc);
// Only block-based, not-lost allocations can be defragmented.
13461 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13463 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13465 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13467 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation belongs to a custom pool.
13469 if(hAllocPool != VK_NULL_HANDLE)
// Pools with a custom algorithm (e.g. linear) are not defragmented.
13472 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13474 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13476 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13478 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13482 if(!pBlockVectorDefragCtx)
13484 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13487 &hAllocPool->m_BlockVector,
13490 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to a default pool — one context per memory type.
13497 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13498 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13499 if(!pBlockVectorDefragCtx)
13501 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13504 m_hAllocator->m_pBlockVectors[memTypeIndex],
13507 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13511 if(pBlockVectorDefragCtx)
13513 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13514 &pAllocationsChanged[allocIndex] : VMA_NULL;
13515 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs defragmentation on all registered block-vector contexts, first the
// default per-memory-type contexts, then the custom-pool contexts, passing
// the CPU/GPU byte and allocation-count budgets through to each.
// Returns the first non-success result encountered (loops stop early via
// the `res >= VK_SUCCESS` condition).
// NOTE(review): interior lines are elided in this extraction.
13521 VkResult VmaDefragmentationContext_T::Defragment(
13522 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13523 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// Without a command buffer no GPU-side copies are possible: zero the GPU budget.
13531 if(commandBuffer == VK_NULL_HANDLE)
13533 maxGpuBytesToMove = 0;
13534 maxGpuAllocationsToMove = 0;
13537 VkResult res = VK_SUCCESS;
// Pass 1: default pools, one context per memory type (may be null).
13540 for(uint32_t memTypeIndex = 0;
13541 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13544 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13545 if(pBlockVectorCtx)
13547 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13548 pBlockVectorCtx->GetBlockVector()->Defragment(
13551 maxCpuBytesToMove, maxCpuAllocationsToMove,
13552 maxGpuBytesToMove, maxGpuAllocationsToMove,
13554 if(pBlockVectorCtx->res != VK_SUCCESS)
13556 res = pBlockVectorCtx->res;
// Pass 2: custom-pool contexts (every entry is expected to be non-null).
13562 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13563 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13566 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13567 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13568 pBlockVectorCtx->GetBlockVector()->Defragment(
13571 maxCpuBytesToMove, maxCpuAllocationsToMove,
13572 maxGpuBytesToMove, maxGpuAllocationsToMove,
13574 if(pBlockVectorCtx->res != VK_SUCCESS)
13576 res = pBlockVectorCtx->res;
// VmaRecorder: writes a CSV trace of allocator calls to a file (Windows-only,
// compiled only when VMA_RECORDING_ENABLED). The constructor leaves the start
// counter at INT64_MAX; the lines below it belong to an Init-style routine
// whose signature is elided in this extraction — it captures the settings
// flags, samples QueryPerformanceFrequency/Counter for timestamps, opens the
// output file with fopen_s, and writes the two-line CSV header
// ("Vulkan Memory Allocator,Calls recording" + format version "1,5").
13586 #if VMA_RECORDING_ENABLED 13588 VmaRecorder::VmaRecorder() :
13593 m_StartCounter(INT64_MAX)
13599 m_UseMutex = useMutex;
13600 m_Flags = settings.
flags;
// High-resolution timer base for the relative timestamps written per record.
13602 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13603 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13606 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
// File could not be opened — recording cannot proceed.
13609 return VK_ERROR_INITIALIZATION_FAILED;
13613 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13614 fprintf(m_File,
"%s\n",
"1,5");
// Destructor: closes the recording file if one was opened.
// NOTE(review): the fclose call is elided in this extraction.
13619 VmaRecorder::~VmaRecorder()
13621 if(m_File != VMA_NULL)
// Appends a "vmaCreateAllocator" CSV record (thread id, timestamp, frame index)
// under the optional file mutex.
13627 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13629 CallParams callParams;
13630 GetBasicParams(callParams);
13632 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13633 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends a "vmaDestroyAllocator" CSV record under the optional file mutex.
13637 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13639 CallParams callParams;
13640 GetBasicParams(callParams);
13642 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13643 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Body of RecordCreatePool (signature elided in this extraction): appends a
// "vmaCreatePool" CSV record with the pool create-info fields and pool handle.
13649 CallParams callParams;
13650 GetBasicParams(callParams);
13652 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13653 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDestroyPool" CSV record with the pool handle.
13664 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13666 CallParams callParams;
13667 GetBasicParams(callParams);
13669 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13670 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaAllocateMemory" CSV record: memory requirements, create-info
// fields, resulting allocation handle, and the (escaped) user-data string.
// NOTE(review): several fprintf argument lines are elided in this extraction.
13675 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13676 const VkMemoryRequirements& vkMemReq,
13680 CallParams callParams;
13681 GetBasicParams(callParams);
13683 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13684 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13685 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13687 vkMemReq.alignment,
13688 vkMemReq.memoryTypeBits,
13696 userDataStr.GetString());
// Appends a "vmaAllocateMemoryPages" CSV record; the variable-length list of
// allocation handles is written via PrintPointerList, then the user-data string.
13700 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13701 const VkMemoryRequirements& vkMemReq,
13703 uint64_t allocationCount,
13706 CallParams callParams;
13707 GetBasicParams(callParams);
13709 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13710 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13711 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13713 vkMemReq.alignment,
13714 vkMemReq.memoryTypeBits,
13721 PrintPointerList(allocationCount, pAllocations);
13722 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Appends a "vmaAllocateMemoryForBuffer" CSV record, including the
// requires/prefers-dedicated flags encoded as 0/1.
13726 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13727 const VkMemoryRequirements& vkMemReq,
13728 bool requiresDedicatedAllocation,
13729 bool prefersDedicatedAllocation,
13733 CallParams callParams;
13734 GetBasicParams(callParams);
13736 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13737 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13738 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13740 vkMemReq.alignment,
13741 vkMemReq.memoryTypeBits,
13742 requiresDedicatedAllocation ? 1 : 0,
13743 prefersDedicatedAllocation ? 1 : 0,
13751 userDataStr.GetString());
// Appends a "vmaAllocateMemoryForImage" CSV record, mirroring the buffer
// variant above.
13755 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13756 const VkMemoryRequirements& vkMemReq,
13757 bool requiresDedicatedAllocation,
13758 bool prefersDedicatedAllocation,
13762 CallParams callParams;
13763 GetBasicParams(callParams);
13765 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13766 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13767 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13769 vkMemReq.alignment,
13770 vkMemReq.memoryTypeBits,
13771 requiresDedicatedAllocation ? 1 : 0,
13772 prefersDedicatedAllocation ? 1 : 0,
13780 userDataStr.GetString());
// Appends a "vmaFreeMemory" CSV record with the allocation handle.
13784 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13787 CallParams callParams;
13788 GetBasicParams(callParams);
13790 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13791 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaFreeMemoryPages" CSV record: prefix line, then the handle list
// via PrintPointerList, then a terminating newline.
13796 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13797 uint64_t allocationCount,
13800 CallParams callParams;
13801 GetBasicParams(callParams);
13803 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13804 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13805 PrintPointerList(allocationCount, pAllocations);
13806 fprintf(m_File,
"\n");
// Appends a "vmaResizeAllocation" CSV record with the allocation handle and
// the requested new size.
13810 void VmaRecorder::RecordResizeAllocation(
13811 uint32_t frameIndex,
13813 VkDeviceSize newSize)
13815 CallParams callParams;
13816 GetBasicParams(callParams);
13818 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13819 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13820 allocation, newSize);
// Appends a "vmaSetAllocationUserData" CSV record; the user data is rendered
// through UserDataString (string or pointer form).
13824 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13826 const void* pUserData)
13828 CallParams callParams;
13829 GetBasicParams(callParams);
13831 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13832 UserDataString userDataStr(
13835 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13837 userDataStr.GetString());
// Appends a "vmaCreateLostAllocation" CSV record.
13841 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13844 CallParams callParams;
13845 GetBasicParams(callParams);
13847 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13848 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaMapMemory" CSV record.
13853 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13856 CallParams callParams;
13857 GetBasicParams(callParams);
13859 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13860 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaUnmapMemory" CSV record.
13865 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13868 CallParams callParams;
13869 GetBasicParams(callParams);
13871 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13872 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaFlushAllocation" CSV record with allocation, offset and size.
13877 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13878 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13880 CallParams callParams;
13881 GetBasicParams(callParams);
13883 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13884 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaInvalidateAllocation" CSV record with allocation, offset, size.
13891 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13892 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13894 CallParams callParams;
13895 GetBasicParams(callParams);
13897 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13898 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaCreateBuffer" CSV record: buffer create-info fields, the
// allocation create-info fields, pool handle, and user-data string.
13905 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13906 const VkBufferCreateInfo& bufCreateInfo,
13910 CallParams callParams;
13911 GetBasicParams(callParams);
13913 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13914 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13915 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13916 bufCreateInfo.flags,
13917 bufCreateInfo.size,
13918 bufCreateInfo.usage,
13919 bufCreateInfo.sharingMode,
13920 allocCreateInfo.
flags,
13921 allocCreateInfo.
usage,
13925 allocCreateInfo.
pool,
13927 userDataStr.GetString());
// Appends a "vmaCreateImage" CSV record: all image create-info fields
// (type, format, extent, mips, layers, samples, tiling, usage, sharing,
// initial layout), the allocation create-info fields, pool, and user data.
13931 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13932 const VkImageCreateInfo& imageCreateInfo,
13936 CallParams callParams;
13937 GetBasicParams(callParams);
13939 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13940 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13941 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13942 imageCreateInfo.flags,
13943 imageCreateInfo.imageType,
13944 imageCreateInfo.format,
13945 imageCreateInfo.extent.width,
13946 imageCreateInfo.extent.height,
13947 imageCreateInfo.extent.depth,
13948 imageCreateInfo.mipLevels,
13949 imageCreateInfo.arrayLayers,
13950 imageCreateInfo.samples,
13951 imageCreateInfo.tiling,
13952 imageCreateInfo.usage,
13953 imageCreateInfo.sharingMode,
13954 imageCreateInfo.initialLayout,
13955 allocCreateInfo.
flags,
13956 allocCreateInfo.
usage,
13960 allocCreateInfo.
pool,
13962 userDataStr.GetString());
// Appends a "vmaDestroyBuffer" CSV record.
13966 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13969 CallParams callParams;
13970 GetBasicParams(callParams);
13972 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13973 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDestroyImage" CSV record.
13978 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13981 CallParams callParams;
13982 GetBasicParams(callParams);
13984 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13985 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaTouchAllocation" CSV record.
13990 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13993 CallParams callParams;
13994 GetBasicParams(callParams);
13996 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13997 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaGetAllocationInfo" CSV record.
14002 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
14005 CallParams callParams;
14006 GetBasicParams(callParams);
14008 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14009 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaMakePoolAllocationsLost" CSV record.
14014 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
14017 CallParams callParams;
14018 GetBasicParams(callParams);
14020 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14021 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDefragmentationBegin" CSV record: prefix with flags, then a
// list section (elided here, separated by a bare comma write), then the move
// budgets and context/stats pointers.
14026 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
14030 CallParams callParams;
14031 GetBasicParams(callParams);
14033 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14034 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
14037 fprintf(m_File,
",");
14039 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Appends a "vmaDefragmentationEnd" CSV record with the context handle.
14049 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
14052 CallParams callParams;
14053 GetBasicParams(callParams);
14055 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14056 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of the UserDataString constructor (signature elided): when user
// data is present it is rendered either as the string it points to or, via
// sprintf_s, as a "%p" pointer representation in m_PtrStr.
// NOTE(review): the branch selecting string vs. pointer form is elided;
// presumably it checks VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT — confirm.
14063 if(pUserData != VMA_NULL)
14067 m_Str = (
const char*)pUserData;
14071 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" ... "Config,End" section of the recording:
// physical-device identity and limits, every memory heap (size/flags) and
// memory type (heapIndex/propertyFlags), whether VK_KHR_dedicated_allocation
// is enabled, and the values of the VMA_DEBUG_* / size macros the library
// was compiled with. No locking here — called during recorder initialization.
14081 void VmaRecorder::WriteConfiguration(
14082 const VkPhysicalDeviceProperties& devProps,
14083 const VkPhysicalDeviceMemoryProperties& memProps,
14084 bool dedicatedAllocationExtensionEnabled)
14086 fprintf(m_File,
"Config,Begin\n");
// Device identity.
14088 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14089 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14090 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14091 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14092 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14093 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits relevant to allocation behavior.
14095 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14096 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14097 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps.
14099 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14100 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
14102 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
14103 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory types.
14105 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14106 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
14108 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
14109 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
14112 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time configuration macros baked into this build.
14114 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14115 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14116 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14117 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14118 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14119 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14120 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14121 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14122 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14124 fprintf(m_File,
"Config,End\n");
// Fills the common per-record fields: calling thread id and elapsed seconds
// since recorder start, derived from QueryPerformanceCounter relative to
// m_StartCounter and scaled by m_Freq.
14127 void VmaRecorder::GetBasicParams(CallParams& outParams)
14129 outParams.threadId = GetCurrentThreadId();
14131 LARGE_INTEGER counter;
14132 QueryPerformanceCounter(&counter);
14133 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Writes a space-separated list of allocation handles (as %p) to the file:
// first item without a separator, remaining items prefixed with a space.
// NOTE(review): the count > 0 guard is elided in this extraction.
14136 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
14140 fprintf(m_File,
"%p", pItems[0]);
14141 for(uint64_t i = 1; i < count; ++i)
14143 fprintf(m_File,
" %p", pItems[i]);
// Flushes the recording file (body elided in this extraction).
14148 void VmaRecorder::Flush()
// Pool allocator for VmaAllocation_T objects (1024 per pool block).
// The lines after the constructor belong to Allocate() (signature elided):
// it hands out an object from the pool under the mutex.
14156 #endif // #if VMA_RECORDING_ENABLED 14161 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14162 m_Allocator(pAllocationCallbacks, 1024)
14168 VmaMutexLock mutexLock(m_Mutex);
14169 return m_Allocator.Alloc();
// Returns an allocation object to the pool, under the mutex.
14172 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14174 VmaMutexLock mutexLock(m_Mutex);
14175 m_Allocator.Free(hAlloc);
// VmaAllocator_T constructor (signature elided in this extraction; this is
// the member-initializer list onward). Responsibilities visible here:
//  - copy/adopt allocation callbacks and create-info handles;
//  - zero the callback/property/blob members and the per-type block-vector
//    and dedicated-allocation arrays;
//  - query physical-device properties and memory properties;
//  - sanity-check alignment-related values are powers of two;
//  - apply optional per-heap size limits (clamping reported heap sizes);
//  - create one VmaBlockVector and one dedicated-allocation vector per
//    memory type, with a heap-size-derived preferred block size;
//  - optionally create and initialize the VmaRecorder (only when
//    VMA_RECORDING_ENABLED; otherwise requesting recording fails with
//    VK_ERROR_FEATURE_NOT_PRESENT).
14184 m_hDevice(pCreateInfo->device),
14185 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
14186 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14187 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14188 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14189 m_PreferredLargeHeapBlockSize(0),
14190 m_PhysicalDevice(pCreateInfo->physicalDevice),
14191 m_CurrentFrameIndex(0),
14192 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
14193 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14196 ,m_pRecorder(VMA_NULL)
// Corruption detection requires the margin to hold whole uint32_t marker words.
14199 if(VMA_DEBUG_DETECT_CORRUPTION)
14202 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
14207 #if !(VMA_DEDICATED_ALLOCATION) 14210 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14214 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14215 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14216 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14218 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14219 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no per-heap size limit.
14221 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14223 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
14234 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14235 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14237 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14238 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14239 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14240 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply user-supplied heap size limits, also clamping the reported heap size.
14247 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14249 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14250 if(limit != VK_WHOLE_SIZE)
14252 m_HeapSizeLimit[heapIndex] = limit;
14253 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14255 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One block vector + one dedicated-allocation list per memory type.
14261 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14263 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14265 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14269 preferredBlockSize,
14272 GetBufferImageGranularity(),
14279 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14286 VkResult res = VK_SUCCESS;
// Optional call recording (Windows-only feature).
14291 #if VMA_RECORDING_ENABLED 14292 m_pRecorder = vma_new(
this, VmaRecorder)();
14294 if(res != VK_SUCCESS)
14298 m_pRecorder->WriteConfiguration(
14299 m_PhysicalDeviceProperties,
14301 m_UseKhrDedicatedAllocation);
14302 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14304 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14305 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records and destroys the recorder (if recording was enabled),
// verifies all custom pools were destroyed and no dedicated allocations were
// leaked, then deletes the per-memory-type dedicated-allocation lists and
// block vectors in reverse order.
14312 VmaAllocator_T::~VmaAllocator_T()
14314 #if VMA_RECORDING_ENABLED 14315 if(m_pRecorder != VMA_NULL)
14317 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14318 vma_delete(
this, m_pRecorder);
// All custom pools must be destroyed by the user before the allocator.
14322 VMA_ASSERT(m_Pools.empty());
14324 for(
size_t i = GetMemoryTypeCount(); i--; )
14326 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14328 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14331 vma_delete(
this, m_pDedicatedAllocations[i]);
14332 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions. With static linking (VMA_STATIC_VULKAN_FUNCTIONS)
// the global Vulkan entry points are used directly, and the *2KHR functions are
// fetched via vkGetDeviceProcAddr when the dedicated-allocation extension is
// enabled. Any pointer the user supplies in pVulkanFunctions overrides the
// static default (VMA_COPY_IF_NOT_NULL). Finally every required pointer is
// asserted non-null so misconfiguration fails fast.
14336 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14338 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14339 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14340 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14341 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14342 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14343 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14344 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14345 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14346 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14347 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14348 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14349 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14350 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14351 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14352 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14353 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14354 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14355 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// Extension entry points must be queried at runtime — they have no static stubs.
14356 #if VMA_DEDICATED_ALLOCATION 14357 if(m_UseKhrDedicatedAllocation)
14359 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14360 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14361 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14362 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
14364 #endif // #if VMA_DEDICATED_ALLOCATION 14365 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14367 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14368 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14370 if(pVulkanFunctions != VMA_NULL)
14372 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14373 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14374 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14375 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14376 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14377 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14378 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14379 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14380 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14381 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14382 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14383 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14384 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14385 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14386 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14387 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14388 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14389 #if VMA_DEDICATED_ALLOCATION 14390 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14391 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Fail fast if any required entry point ended up null.
14395 #undef VMA_COPY_IF_NOT_NULL 14399 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14400 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14401 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14402 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14403 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14404 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14405 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14406 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14407 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14408 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14409 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14410 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14411 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14412 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14413 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14414 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14415 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14416 #if VMA_DEDICATED_ALLOCATION 14417 if(m_UseKhrDedicatedAllocation)
14419 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14420 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// Chooses the preferred VkDeviceMemory block size for a memory type:
// heapSize/8 for "small" heaps (<= VMA_SMALL_HEAP_MAX_SIZE), otherwise the
// configured large-heap block size.
14425 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14427 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14428 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14429 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14430 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates allocationCount allocations from one specific memory type.
// Strategy visible here: decide whether dedicated memory is preferred
// (always-dedicated debug mode, explicit request, or size > half the
// preferred block size); if so — and no custom pool is involved — go straight
// to AllocateDedicatedMemory. Otherwise try the type's block vector first and
// fall back to dedicated memory if that fails.
// NOTE(review): several branch bodies and argument lines are elided.
14433 VkResult VmaAllocator_T::AllocateMemoryOfType(
14435 VkDeviceSize alignment,
14436 bool dedicatedAllocation,
14437 VkBuffer dedicatedBuffer,
14438 VkImage dedicatedImage,
14440 uint32_t memTypeIndex,
14441 VmaSuballocationType suballocType,
14442 size_t allocationCount,
14445 VMA_ASSERT(pAllocations != VMA_NULL);
14446 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Mapping requested on a non-HOST_VISIBLE type is impossible (elided handling).
14452 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14457 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14458 VMA_ASSERT(blockVector);
14460 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
14461 bool preferDedicatedMemory =
14462 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14463 dedicatedAllocation ||
// Heuristic: very large requests get their own VkDeviceMemory.
14465 size > preferredBlockSize / 2;
14467 if(preferDedicatedMemory &&
14469 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids creating new VkDeviceMemory (condition elided).
14478 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14482 return AllocateDedicatedMemory(
// Primary path: suballocate from the block vector.
14497 VkResult res = blockVector->Allocate(
14498 m_CurrentFrameIndex.load(),
14505 if(res == VK_SUCCESS)
// Block allocation failed and new device memory is forbidden (elided check).
14513 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: dedicated allocation.
14517 res = AllocateDedicatedMemory(
14523 finalCreateInfo.pUserData,
14528 if(res == VK_SUCCESS)
14531 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14537 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates allocationCount dedicated VkDeviceMemory blocks of the given size.
// When VK_KHR_dedicated_allocation is in use, chains a
// VkMemoryDedicatedAllocateInfoKHR naming the dedicated buffer or image.
// Allocates page by page; on success registers all results in the sorted
// per-type dedicated-allocation list under its write lock. On partial failure
// it unwinds: frees already-created pages, clears their user data, returns
// the allocation objects to the pool, and zeroes the output array.
14544 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14546 VmaSuballocationType suballocType,
14547 uint32_t memTypeIndex,
14549 bool isUserDataString,
14551 VkBuffer dedicatedBuffer,
14552 VkImage dedicatedImage,
14553 size_t allocationCount,
14556 VMA_ASSERT(allocationCount > 0 && pAllocations);
14558 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14559 allocInfo.memoryTypeIndex = memTypeIndex;
14560 allocInfo.allocationSize = size;
14562 #if VMA_DEDICATED_ALLOCATION 14563 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14564 if(m_UseKhrDedicatedAllocation)
// At most one of buffer/image may be named in the dedicated info.
14566 if(dedicatedBuffer != VK_NULL_HANDLE)
14568 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14569 dedicatedAllocInfo.buffer = dedicatedBuffer;
14570 allocInfo.pNext = &dedicatedAllocInfo;
14572 else if(dedicatedImage != VK_NULL_HANDLE)
14574 dedicatedAllocInfo.image = dedicatedImage;
14575 allocInfo.pNext = &dedicatedAllocInfo;
14578 #endif // #if VMA_DEDICATED_ALLOCATION 14581 VkResult res = VK_SUCCESS;
14582 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14584 res = AllocateDedicatedMemoryPage(
14592 pAllocations + allocIndex);
14593 if(res != VK_SUCCESS)
14599 if(res == VK_SUCCESS)
// Success: register every new allocation in the sorted dedicated list.
14603 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14604 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14605 VMA_ASSERT(pDedicatedAllocations);
14606 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14608 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14612 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: roll back pages created so far, in reverse order.
14617 while(allocIndex--)
14620 VkDeviceMemory hMemory = currAlloc->GetMemory();
14632 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14634 currAlloc->SetUserData(
this, VMA_NULL);
14636 m_AllocationObjectAllocator.Free(currAlloc);
14639 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory page: vkAllocateMemory, an
// optional persistent vkMapMemory (mapping branch elided here; on map failure
// the memory is freed again), then constructs the VmaAllocation object from
// the pool, initializes it as a dedicated allocation, attaches user data, and
// optionally fills the memory with the debug "created" pattern.
14645 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14647 VmaSuballocationType suballocType,
14648 uint32_t memTypeIndex,
14649 const VkMemoryAllocateInfo& allocInfo,
14651 bool isUserDataString,
14655 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14656 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14659 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
14663 void* pMappedData = VMA_NULL;
// Persistent mapping requested (condition elided).
14666 res = (*m_VulkanFunctions.vkMapMemory)(
14675 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Mapping failed: release the just-allocated memory before returning.
14676 FreeVulkanMemory(memTypeIndex, size, hMemory);
14681 *pAllocation = m_AllocationObjectAllocator.Allocate();
14682 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
14683 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14684 (*pAllocation)->SetUserData(
this, pUserData);
14685 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14687 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled, uses vkGetBufferMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR to also report whether a dedicated
// allocation is required/preferred; otherwise falls back to the core query
// and reports both flags as false.
14693 void VmaAllocator_T::GetBufferMemoryRequirements(
14695 VkMemoryRequirements& memReq,
14696 bool& requiresDedicatedAllocation,
14697 bool& prefersDedicatedAllocation)
const 14699 #if VMA_DEDICATED_ALLOCATION 14700 if(m_UseKhrDedicatedAllocation)
14702 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14703 memReqInfo.buffer = hBuffer;
14705 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14707 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14708 memReq2.pNext = &memDedicatedReq;
14710 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14712 memReq = memReq2.memoryRequirements;
14713 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14714 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core query, no dedicated-allocation information available.
14717 #endif // #if VMA_DEDICATED_ALLOCATION 14719 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14720 requiresDedicatedAllocation =
false;
14721 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR + VkMemoryDedicatedRequirementsKHR when
// the extension is enabled, otherwise the core query with both dedicated
// flags reported as false.
14725 void VmaAllocator_T::GetImageMemoryRequirements(
14727 VkMemoryRequirements& memReq,
14728 bool& requiresDedicatedAllocation,
14729 bool& prefersDedicatedAllocation)
const 14731 #if VMA_DEDICATED_ALLOCATION 14732 if(m_UseKhrDedicatedAllocation)
14734 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14735 memReqInfo.image = hImage;
14737 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14739 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14740 memReq2.pNext = &memDedicatedReq;
14742 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14744 memReq = memReq2.memoryRequirements;
14745 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14746 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core query, no dedicated-allocation information available.
14749 #endif // #if VMA_DEDICATED_ALLOCATION 14751 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14752 requiresDedicatedAllocation =
false;
14753 prefersDedicatedAllocation =
false;
// VmaAllocator_T::AllocateMemory — top-level allocation entry point:
// validates the request, dispatches either to a custom pool's block vector or
// to AllocateMemoryOfType over candidate memory types.
// NOTE(review): this region is damaged by extraction. The embedded numbers
// ("14757" etc.) are residue of the original file's line numbers, and several
// source lines (braces, the createInfo parameter line, flag checks, some call
// arguments, the vmaFindMemoryTypeIndex calls) are missing. Code is left
// byte-identical; comments only describe what the visible fragments show.
14757 VkResult VmaAllocator_T::AllocateMemory(
14758 const VkMemoryRequirements& vkMemReq,
14759 bool requiresDedicatedAllocation,
14760 bool prefersDedicatedAllocation,
14761 VkBuffer dedicatedBuffer,
14762 VkImage dedicatedImage,
// (a const VmaAllocationCreateInfo& createInfo parameter is referenced below
// but its declaration line is missing here — TODO confirm against upstream)
14764 VmaSuballocationType suballocType,
14765 size_t allocationCount,
// Zero out the output array so partially failed allocations are null.
14768 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Vulkan guarantees alignment is a power of 2; assert it anyway.
14770 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
14772 if(vkMemReq.size == 0)
14774 return VK_ERROR_VALIDATION_FAILED_EXT;
// Validation of mutually exclusive creation flags (the if-conditions that
// guard these asserts were dropped by extraction).
14779 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14780 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14785 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14786 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14788 if(requiresDedicatedAllocation)
14792 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14793 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14795 if(createInfo.
pool != VK_NULL_HANDLE)
14797 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14798 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14801 if((createInfo.
pool != VK_NULL_HANDLE) &&
14804 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14805 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: honor the memory type's minimum alignment and delegate
// to the pool's block vector.
14808 if(createInfo.
pool != VK_NULL_HANDLE)
14810 const VkDeviceSize alignmentForPool = VMA_MAX(
14811 vkMemReq.alignment,
14812 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()))
14813 return createInfo.
pool->m_BlockVector.Allocate(
14814 m_CurrentFrameIndex.load(),
// Default-pool path: iterate candidate memory types from vkMemReq.memoryTypeBits,
// trying the best type first, then excluding it and retrying on failure.
14825 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14826 uint32_t memTypeIndex = UINT32_MAX;
// (the call that computes `res`/memTypeIndex — presumably a find-memory-type
// helper — is missing here; TODO confirm)
14828 if(res == VK_SUCCESS)
14830 VkDeviceSize alignmentForMemType = VMA_MAX(
14831 vkMemReq.alignment,
14832 GetMemoryTypeMinAlignment(memTypeIndex));
14834 res = AllocateMemoryOfType(
14836 alignmentForMemType,
14837 requiresDedicatedAllocation || prefersDedicatedAllocation,
14846 if(res == VK_SUCCESS)
// Remove the failed memory type from the candidate set and retry.
14856 memoryTypeBits &= ~(1u << memTypeIndex);
14859 if(res == VK_SUCCESS)
14861 alignmentForMemType = VMA_MAX(
14862 vkMemReq.alignment,
14863 GetMemoryTypeMinAlignment(memTypeIndex));
14865 res = AllocateMemoryOfType(
14867 alignmentForMemType,
14868 requiresDedicatedAllocation || prefersDedicatedAllocation,
14877 if(res == VK_SUCCESS)
// No candidate memory type could satisfy the request.
14887 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// VmaAllocator_T::FreeMemory — frees a batch of allocations in reverse order,
// returning each either to its owning block vector (default or custom pool)
// or via FreeDedicatedMemory, then destroying the allocation object.
// NOTE(review): extraction damage — embedded numbers are original line-number
// residue; braces, `break` statements and the pAllocations parameter line are
// missing. Code left byte-identical; comments only.
14898 void VmaAllocator_T::FreeMemory(
14899 size_t allocationCount,
14902 VMA_ASSERT(pAllocations);
// Iterate backwards over the batch.
14904 for(
size_t allocIndex = allocationCount; allocIndex--; )
14908 if(allocation != VK_NULL_HANDLE)
// TouchAllocation returns whether the allocation is still valid (not lost);
// only then is the underlying memory actually released.
14910 if(TouchAllocation(allocation))
14912 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
// Overwrite freed memory with the "destroyed" debug pattern.
14914 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14917 switch(allocation->GetType())
14919 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14921 VmaBlockVector* pBlockVector = VMA_NULL;
14922 VmaPool hPool = allocation->GetBlock()->GetParentPool();
14923 if(hPool != VK_NULL_HANDLE)
// Allocation came from a custom pool.
14925 pBlockVector = &hPool->m_BlockVector;
// Otherwise it came from the default per-memory-type block vector.
14929 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14930 pBlockVector = m_pBlockVectors[memTypeIndex];
14932 pBlockVector->Free(allocation);
14935 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14936 FreeDedicatedMemory(allocation);
// Destroy the VmaAllocation_T object itself regardless of type.
14943 allocation->SetUserData(
this, VMA_NULL);
14944 allocation->Dtor();
14945 m_AllocationObjectAllocator.Free(allocation);
14950 VkResult VmaAllocator_T::ResizeAllocation(
14952 VkDeviceSize newSize)
14954 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14956 return VK_ERROR_VALIDATION_FAILED_EXT;
14958 if(newSize == alloc->GetSize())
14963 switch(alloc->GetType())
14965 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14966 return VK_ERROR_FEATURE_NOT_PRESENT;
14967 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14968 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
14970 alloc->ChangeSize(newSize);
14971 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
14976 return VK_ERROR_OUT_OF_POOL_MEMORY;
14980 return VK_ERROR_VALIDATION_FAILED_EXT;
14984 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
14987 InitStatInfo(pStats->
total);
14988 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14990 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14994 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14996 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14997 VMA_ASSERT(pBlockVector);
14998 pBlockVector->AddStats(pStats);
15003 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15004 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15006 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
15011 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15013 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
15014 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15015 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15016 VMA_ASSERT(pDedicatedAllocVector);
15017 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
15020 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
15021 VmaAddStatInfo(pStats->
total, allocationStatInfo);
15022 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
15023 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
15028 VmaPostprocessCalcStatInfo(pStats->
total);
15029 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
15030 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
15031 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
15032 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, the PCI vendor ID of AMD — presumably used for
// vendor-specific heuristics elsewhere in the file (TODO confirm usage).
15035 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// VmaAllocator_T::DefragmentationBegin / DefragmentationEnd — create a
// defragmentation context, register allocations, run Defragment(), and tear
// the context down when finished.
// NOTE(review): extraction damage — parameter lists, several call arguments
// and braces are missing; embedded numbers are original line-number residue.
// Code left byte-identical; comments only.
15037 VkResult VmaAllocator_T::DefragmentationBegin(
// Allocate the context object from the allocator's own heap.
15047 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
15048 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
15051 (*pContext)->AddAllocations(
15054 VkResult res = (*pContext)->Defragment(
// VK_NOT_READY means defragmentation continues asynchronously and the
// context must stay alive until DefragmentationEnd; anything else ends it now.
15059 if(res != VK_NOT_READY)
15061 vma_delete(
this, *pContext);
15062 *pContext = VMA_NULL;
15068 VkResult VmaAllocator_T::DefragmentationEnd(
15071 vma_delete(
this, context);
// Body of VmaAllocator_T::GetAllocationInfo (its signature line was lost to
// extraction — presumably (VmaAllocation hAllocation, VmaAllocationInfo*
// pAllocationInfo); TODO confirm). Fills pAllocationInfo, handling
// can-become-lost allocations with a compare-exchange retry loop on the
// last-use frame index.
// NOTE(review): embedded numbers are original line-number residue; braces,
// else branches and some field assignments are missing. Code left
// byte-identical; comments only.
15077 if(hAllocation->CanBecomeLost())
15083 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15084 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost allocation: report zeroed/neutral info.
15087 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15091 pAllocationInfo->
offset = 0;
15092 pAllocationInfo->
size = hAllocation->GetSize();
15094 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report real parameters.
15097 else if(localLastUseFrameIndex == localCurrFrameIndex)
15099 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15100 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15101 pAllocationInfo->
offset = hAllocation->GetOffset();
15102 pAllocationInfo->
size = hAllocation->GetSize();
15104 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to bump the last-use frame index atomically and retry.
15109 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15111 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: under VMA_STATS_STRING_ENABLED the last-use frame index is
// still updated (for statistics), then the info struct is filled directly.
15118 #if VMA_STATS_STRING_ENABLED 15119 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15120 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15123 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15124 if(localLastUseFrameIndex == localCurrFrameIndex)
15130 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15132 localLastUseFrameIndex = localCurrFrameIndex;
15138 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15139 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15140 pAllocationInfo->
offset = hAllocation->GetOffset();
15141 pAllocationInfo->
size = hAllocation->GetSize();
15142 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15143 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// VmaAllocator_T::TouchAllocation — returns whether the allocation is still
// valid (not lost) and marks it as used in the current frame. Mirrors the
// frame-index compare-exchange logic of GetAllocationInfo without filling an
// info struct.
// NOTE(review): extraction damage — the return statements and braces are
// missing; embedded numbers are original line-number residue. Code left
// byte-identical; comments only.
15147 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
15150 if(hAllocation->CanBecomeLost())
15152 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15153 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: (a `return false;` presumably followed here — TODO confirm).
15156 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Already touched this frame: (presumably `return true;`).
15160 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Otherwise bump the last-use frame index atomically and retry.
15166 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15168 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: with stats enabled, still record the touch for statistics.
15175 #if VMA_STATS_STRING_ENABLED 15176 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15177 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15180 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15181 if(localLastUseFrameIndex == localCurrFrameIndex)
15187 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15189 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (its signature line was lost to
// extraction — presumably (const VmaPoolCreateInfo* pCreateInfo, VmaPool*
// pPool); TODO confirm). Validates the create info, constructs the pool,
// pre-creates its minimum block count, and registers it in m_Pools.
// NOTE(review): embedded numbers are original line-number residue; the
// newCreateInfo setup and some braces/returns are missing. Code left
// byte-identical; comments only.
15201 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
// Invalid create info rejected here (the guarding condition was lost).
15211 return VK_ERROR_INITIALIZATION_FAILED;
15214 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15216 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Eagerly create the pool's minimum number of blocks; roll back on failure.
15218 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15219 if(res != VK_SUCCESS)
15221 vma_delete(
this, *pPool);
// Register the new pool under the pools lock, assigning it a unique id.
15228 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15229 (*pPool)->SetId(m_NextPoolId++);
15230 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// VmaAllocator_T::DestroyPool — unregisters the pool from m_Pools (under the
// write lock) and destroys it. The trailing line belongs to GetPoolStats,
// whose signature was lost to extraction.
// NOTE(review): embedded numbers are original line-number residue; braces are
// missing. Code left byte-identical; comments only.
15236 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15240 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15241 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15242 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15245 vma_delete(
this, pool);
// Orphaned body line of VmaAllocator_T::GetPoolStats — presumably
// (VmaPool pool, VmaPoolStats* pPoolStats); TODO confirm.
15250 pool->m_BlockVector.GetPoolStats(pPoolStats);
15253 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15255 m_CurrentFrameIndex.store(frameIndex);
15258 void VmaAllocator_T::MakePoolAllocationsLost(
15260 size_t* pLostAllocationCount)
15262 hPool->m_BlockVector.MakePoolAllocationsLost(
15263 m_CurrentFrameIndex.load(),
15264 pLostAllocationCount);
15267 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15269 return hPool->m_BlockVector.CheckCorruption();
15272 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15274 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
15277 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15279 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15281 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15282 VMA_ASSERT(pBlockVector);
15283 VkResult localRes = pBlockVector->CheckCorruption();
15286 case VK_ERROR_FEATURE_NOT_PRESENT:
15289 finalRes = VK_SUCCESS;
15299 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15300 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15302 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15304 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15307 case VK_ERROR_FEATURE_NOT_PRESENT:
15310 finalRes = VK_SUCCESS;
15322 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15324 *pAllocation = m_AllocationObjectAllocator.Allocate();
15325 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15326 (*pAllocation)->InitLost();
15329 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15331 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
15334 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15336 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15337 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15339 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15340 if(res == VK_SUCCESS)
15342 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
15347 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
15352 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15355 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15357 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
15363 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15365 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15367 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15370 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15372 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15373 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15375 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15376 m_HeapSizeLimit[heapIndex] += size;
15380 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15382 if(hAllocation->CanBecomeLost())
15384 return VK_ERROR_MEMORY_MAP_FAILED;
15387 switch(hAllocation->GetType())
15389 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15391 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15392 char *pBytes = VMA_NULL;
15393 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15394 if(res == VK_SUCCESS)
15396 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15397 hAllocation->BlockAllocMap();
15401 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15402 return hAllocation->DedicatedAllocMap(
this, ppData);
15405 return VK_ERROR_MEMORY_MAP_FAILED;
15411 switch(hAllocation->GetType())
15413 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15415 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15416 hAllocation->BlockAllocUnmap();
15417 pBlock->Unmap(
this, 1);
15420 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15421 hAllocation->DedicatedAllocUnmap(
this);
15428 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
15430 VkResult res = VK_SUCCESS;
15431 switch(hAllocation->GetType())
15433 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15434 res = GetVulkanFunctions().vkBindBufferMemory(
15437 hAllocation->GetMemory(),
15440 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15442 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15443 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15444 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
15453 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
15455 VkResult res = VK_SUCCESS;
15456 switch(hAllocation->GetType())
15458 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15459 res = GetVulkanFunctions().vkBindImageMemory(
15462 hAllocation->GetMemory(),
15465 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15467 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15468 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15469 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// VmaAllocator_T::FlushOrInvalidateAllocation — for non-coherent memory
// types, builds a VkMappedMemoryRange aligned to nonCoherentAtomSize (as the
// Vulkan spec requires) and calls vkFlushMappedMemoryRanges or
// vkInvalidateMappedMemoryRanges depending on `op`.
// NOTE(review): extraction damage — the hAllocation parameter line, the
// `switch(op)` statement, default branch and braces are missing; embedded
// numbers are original line-number residue. Code left byte-identical;
// comments only.
15478 void VmaAllocator_T::FlushOrInvalidateAllocation(
15480 VkDeviceSize offset, VkDeviceSize size,
15481 VMA_CACHE_OPERATION op)
15483 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
// Coherent memory needs no explicit flush/invalidate; skip entirely.
15484 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15486 const VkDeviceSize allocationSize = hAllocation->GetSize();
15487 VMA_ASSERT(offset <= allocationSize);
15489 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15491 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15492 memRange.memory = hAllocation->GetMemory();
15494 switch(hAllocation->GetType())
// Dedicated allocation: offsets are relative to the whole VkDeviceMemory.
15496 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15497 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15498 if(size == VK_WHOLE_SIZE)
15500 memRange.size = allocationSize - memRange.offset;
15504 VMA_ASSERT(offset + size <= allocationSize);
// Clamp the aligned size so it never exceeds the allocation.
15505 memRange.size = VMA_MIN(
15506 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15507 allocationSize - memRange.offset);
// Block allocation: compute the range within the allocation first, then
// translate by the allocation's offset inside the block and clamp to the
// block size.
15511 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15514 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15515 if(size == VK_WHOLE_SIZE)
15517 size = allocationSize - offset;
15521 VMA_ASSERT(offset + size <= allocationSize);
15523 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
15526 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
// Block suballocation offsets are aligned to nonCoherentAtomSize by design.
15527 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15528 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15529 memRange.offset += allocationOffset;
15530 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch on op (the `switch(op)` line itself was lost to extraction).
15541 case VMA_CACHE_FLUSH:
15542 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15544 case VMA_CACHE_INVALIDATE:
15545 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
15554 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15556 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15558 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15560 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15561 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15562 VMA_ASSERT(pDedicatedAllocations);
15563 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15564 VMA_ASSERT(success);
15567 VkDeviceMemory hMemory = allocation->GetMemory();
15579 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15581 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
15584 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const 15586 VkBufferCreateInfo dummyBufCreateInfo;
15587 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
15589 uint32_t memoryTypeBits = 0;
15592 VkBuffer buf = VMA_NULL;
15593 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
15594 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
15595 if(res == VK_SUCCESS)
15598 VkMemoryRequirements memReq;
15599 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
15600 memoryTypeBits = memReq.memoryTypeBits;
15603 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
15606 return memoryTypeBits;
15609 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15611 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15612 !hAllocation->CanBecomeLost() &&
15613 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15615 void* pData = VMA_NULL;
15616 VkResult res = Map(hAllocation, &pData);
15617 if(res == VK_SUCCESS)
15619 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
15620 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15621 Unmap(hAllocation);
15625 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
15630 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
15632 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
15633 if(memoryTypeBits == UINT32_MAX)
15635 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
15636 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
15638 return memoryTypeBits;
// VmaAllocator_T::PrintDetailedMap — writes the JSON sections
// "DedicatedAllocations", "DefaultPools" and "Pools" into the given
// VmaJsonWriter, opening each object lazily only if there is content.
// NOTE(review): extraction damage — json.EndString/EndObject calls, the
// hAlloc declaration and several braces are missing; embedded numbers are
// original line-number residue. Code left byte-identical; comments only.
15641 #if VMA_STATS_STRING_ENABLED 15643 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// Section 1: dedicated allocations, grouped per memory type.
15645 bool dedicatedAllocationsStarted =
false;
15646 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15648 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15649 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15650 VMA_ASSERT(pDedicatedAllocVector);
15651 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object lazily, on first non-empty type.
15653 if(dedicatedAllocationsStarted ==
false)
15655 dedicatedAllocationsStarted =
true;
15656 json.WriteString(
"DedicatedAllocations");
15657 json.BeginObject();
15660 json.BeginString(
"Type ");
15661 json.ContinueString(memTypeIndex);
15666 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15668 json.BeginObject(
true);
15670 hAlloc->PrintParameters(json);
// (closing EndObject calls for this section were lost to extraction)
15677 if(dedicatedAllocationsStarted)
// Section 2: default per-memory-type block vectors.
15683 bool allocationsStarted =
false;
15684 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15686 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
// Open the "DefaultPools" object lazily, on first non-empty vector.
15688 if(allocationsStarted ==
false)
15690 allocationsStarted =
true;
15691 json.WriteString(
"DefaultPools");
15692 json.BeginObject();
15695 json.BeginString(
"Type ");
15696 json.ContinueString(memTypeIndex);
15699 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15702 if(allocationsStarted)
// Section 3: custom pools, keyed by pool id, under the pools lock.
15710 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15711 const size_t poolCount = m_Pools.size();
15714 json.WriteString(
"Pools");
15715 json.BeginObject();
15716 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15718 json.BeginString();
15719 json.ContinueString(m_Pools[poolIndex]->GetId());
15722 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Public C API entry points: vmaCreateAllocator, vmaDestroyAllocator,
// vmaGetPhysicalDeviceProperties, vmaGetMemoryProperties,
// vmaGetMemoryTypeProperties, vmaSetCurrentFrameIndex, vmaCalculateStats.
// NOTE(review): extraction damage — the function signature lines (declared
// earlier in this header around lines 1902-1933) are missing here; embedded
// numbers are original line-number residue. Code left byte-identical;
// comments only.
15729 #endif // #if VMA_STATS_STRING_ENABLED 15738 VMA_ASSERT(pCreateInfo && pAllocator);
// vmaCreateAllocator body: allocation of *pAllocator precedes this fragment;
// Init performs the actual setup.
15739 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15741 return (*pAllocator)->Init(pCreateInfo);
// vmaDestroyAllocator body: copy the callbacks out first because the
// allocator object (which owns them) is being destroyed.
15747 if(allocator != VK_NULL_HANDLE)
15749 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15750 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15751 vma_delete(&allocationCallbacks, allocator);
// vmaGetPhysicalDeviceProperties: expose the cached device properties.
15757 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15759 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15760 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties: expose the cached memory properties.
15765 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15767 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15768 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties: bounds-checked lookup of one type's flags.
15773 uint32_t memoryTypeIndex,
15774 VkMemoryPropertyFlags* pFlags)
15776 VMA_ASSERT(allocator && pFlags);
15777 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15778 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex: VMA_FRAME_INDEX_LOST is reserved and rejected.
15783 uint32_t frameIndex)
15785 VMA_ASSERT(allocator);
15786 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15788 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15790 allocator->SetCurrentFrameIndex(frameIndex);
// vmaCalculateStats: thin wrapper over VmaAllocator_T::CalculateStats.
15797 VMA_ASSERT(allocator && pStats);
15798 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15799 allocator->CalculateStats(pStats);
// vmaBuildStatsString / vmaFreeStatsString — serialize allocator statistics
// (totals, per heap, per type, optionally the detailed map) to a JSON string
// allocated with the allocator's callbacks, and the matching deallocator.
// NOTE(review): extraction damage — function signature lines, json.EndString/
// EndArray/EndObject calls and the `VmaStats stats;` declaration are missing;
// embedded numbers are original line-number residue. Code left byte-identical;
// comments only.
15802 #if VMA_STATS_STRING_ENABLED 15806 char** ppStatsString,
15807 VkBool32 detailedMap)
15809 VMA_ASSERT(allocator && ppStatsString);
15810 VMA_DEBUG_GLOBAL_MUTEX_LOCK
// Build the JSON document into a string builder first.
15812 VmaStringBuilder sb(allocator);
15814 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15815 json.BeginObject();
15818 allocator->CalculateStats(&stats);
15820 json.WriteString(
"Total");
15821 VmaPrintStatInfo(json, stats.
total);
// Per-heap section: size, flags, stats, and nested per-type subsections.
15823 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15825 json.BeginString(
"Heap ");
15826 json.ContinueString(heapIndex);
15828 json.BeginObject();
15830 json.WriteString(
"Size");
15831 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15833 json.WriteString(
"Flags");
15834 json.BeginArray(
true);
15835 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15837 json.WriteString(
"DEVICE_LOCAL");
15843 json.WriteString(
"Stats");
15844 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Memory types belonging to this heap.
15847 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15849 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15851 json.BeginString(
"Type ");
15852 json.ContinueString(typeIndex);
15855 json.BeginObject();
15857 json.WriteString(
"Flags");
15858 json.BeginArray(
true);
// Emit one string per set VkMemoryPropertyFlag bit.
15859 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15860 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15862 json.WriteString(
"DEVICE_LOCAL");
15864 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15866 json.WriteString(
"HOST_VISIBLE");
15868 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15870 json.WriteString(
"HOST_COHERENT");
15872 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15874 json.WriteString(
"HOST_CACHED");
15876 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15878 json.WriteString(
"LAZILY_ALLOCATED");
15884 json.WriteString(
"Stats");
15885 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional full map of pools/blocks/allocations.
15894 if(detailedMap == VK_TRUE)
15896 allocator->PrintDetailedMap(json);
// Copy the builder's contents into a NUL-terminated heap string owned by the
// caller; it must be released with vmaFreeStatsString.
15902 const size_t len = sb.GetLength();
15903 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15906 memcpy(pChars, sb.GetData(), len);
15908 pChars[len] =
'\0';
15909 *ppStatsString = pChars;
// vmaFreeStatsString body (signature lost): frees a string produced above.
15914 char* pStatsString)
15916 if(pStatsString != VMA_NULL)
15918 VMA_ASSERT(allocator);
15919 size_t len = strlen(pStatsString);
15920 vma_delete_array(allocator, pStatsString, len + 1);
15924 #endif // #if VMA_STATS_STRING_ENABLED 15931 uint32_t memoryTypeBits,
15933 uint32_t* pMemoryTypeIndex)
15935 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15936 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15937 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15944 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15945 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15948 switch(pAllocationCreateInfo->
usage)
15953 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15955 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15959 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15962 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15963 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15965 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15969 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15970 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
15976 *pMemoryTypeIndex = UINT32_MAX;
15977 uint32_t minCost = UINT32_MAX;
15978 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15979 memTypeIndex < allocator->GetMemoryTypeCount();
15980 ++memTypeIndex, memTypeBit <<= 1)
15983 if((memTypeBit & memoryTypeBits) != 0)
15985 const VkMemoryPropertyFlags currFlags =
15986 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
15988 if((requiredFlags & ~currFlags) == 0)
15991 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
15993 if(currCost < minCost)
15995 *pMemoryTypeIndex = memTypeIndex;
16000 minCost = currCost;
16005 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
16010 const VkBufferCreateInfo* pBufferCreateInfo,
16012 uint32_t* pMemoryTypeIndex)
16014 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16015 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
16016 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16017 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16019 const VkDevice hDev = allocator->m_hDevice;
16020 VkBuffer hBuffer = VK_NULL_HANDLE;
16021 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
16022 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
16023 if(res == VK_SUCCESS)
16025 VkMemoryRequirements memReq = {};
16026 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
16027 hDev, hBuffer, &memReq);
16031 memReq.memoryTypeBits,
16032 pAllocationCreateInfo,
16035 allocator->GetVulkanFunctions().vkDestroyBuffer(
16036 hDev, hBuffer, allocator->GetAllocationCallbacks());
16043 const VkImageCreateInfo* pImageCreateInfo,
16045 uint32_t* pMemoryTypeIndex)
16047 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16048 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
16049 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16050 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16052 const VkDevice hDev = allocator->m_hDevice;
16053 VkImage hImage = VK_NULL_HANDLE;
16054 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
16055 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
16056 if(res == VK_SUCCESS)
16058 VkMemoryRequirements memReq = {};
16059 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
16060 hDev, hImage, &memReq);
16064 memReq.memoryTypeBits,
16065 pAllocationCreateInfo,
16068 allocator->GetVulkanFunctions().vkDestroyImage(
16069 hDev, hImage, allocator->GetAllocationCallbacks());
16079 VMA_ASSERT(allocator && pCreateInfo && pPool);
16081 VMA_DEBUG_LOG(
"vmaCreatePool");
16083 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16085 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
16087 #if VMA_RECORDING_ENABLED 16088 if(allocator->GetRecorder() != VMA_NULL)
16090 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
16101 VMA_ASSERT(allocator);
16103 if(pool == VK_NULL_HANDLE)
16108 VMA_DEBUG_LOG(
"vmaDestroyPool");
16110 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16112 #if VMA_RECORDING_ENABLED 16113 if(allocator->GetRecorder() != VMA_NULL)
16115 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
16119 allocator->DestroyPool(pool);
16127 VMA_ASSERT(allocator && pool && pPoolStats);
16129 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16131 allocator->GetPoolStats(pool, pPoolStats);
16137 size_t* pLostAllocationCount)
16139 VMA_ASSERT(allocator && pool);
16141 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16143 #if VMA_RECORDING_ENABLED 16144 if(allocator->GetRecorder() != VMA_NULL)
16146 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
16150 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
16155 VMA_ASSERT(allocator && pool);
16157 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16159 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
16161 return allocator->CheckPoolCorruption(pool);
16166 const VkMemoryRequirements* pVkMemoryRequirements,
16171 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16173 VMA_DEBUG_LOG(
"vmaAllocateMemory");
16175 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16177 VkResult result = allocator->AllocateMemory(
16178 *pVkMemoryRequirements,
16184 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16188 #if VMA_RECORDING_ENABLED 16189 if(allocator->GetRecorder() != VMA_NULL)
16191 allocator->GetRecorder()->RecordAllocateMemory(
16192 allocator->GetCurrentFrameIndex(),
16193 *pVkMemoryRequirements,
16199 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16201 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16209 const VkMemoryRequirements* pVkMemoryRequirements,
16211 size_t allocationCount,
16215 if(allocationCount == 0)
16220 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16222 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
16224 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16226 VkResult result = allocator->AllocateMemory(
16227 *pVkMemoryRequirements,
16233 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16237 #if VMA_RECORDING_ENABLED 16238 if(allocator->GetRecorder() != VMA_NULL)
16240 allocator->GetRecorder()->RecordAllocateMemoryPages(
16241 allocator->GetCurrentFrameIndex(),
16242 *pVkMemoryRequirements,
16244 (uint64_t)allocationCount,
16249 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16251 for(
size_t i = 0; i < allocationCount; ++i)
16253 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16267 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16269 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16271 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16273 VkMemoryRequirements vkMemReq = {};
16274 bool requiresDedicatedAllocation =
false;
16275 bool prefersDedicatedAllocation =
false;
16276 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16277 requiresDedicatedAllocation,
16278 prefersDedicatedAllocation);
16280 VkResult result = allocator->AllocateMemory(
16282 requiresDedicatedAllocation,
16283 prefersDedicatedAllocation,
16287 VMA_SUBALLOCATION_TYPE_BUFFER,
16291 #if VMA_RECORDING_ENABLED 16292 if(allocator->GetRecorder() != VMA_NULL)
16294 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16295 allocator->GetCurrentFrameIndex(),
16297 requiresDedicatedAllocation,
16298 prefersDedicatedAllocation,
16304 if(pAllocationInfo && result == VK_SUCCESS)
16306 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16319 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16321 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16323 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16325 VkMemoryRequirements vkMemReq = {};
16326 bool requiresDedicatedAllocation =
false;
16327 bool prefersDedicatedAllocation =
false;
16328 allocator->GetImageMemoryRequirements(image, vkMemReq,
16329 requiresDedicatedAllocation, prefersDedicatedAllocation);
16331 VkResult result = allocator->AllocateMemory(
16333 requiresDedicatedAllocation,
16334 prefersDedicatedAllocation,
16338 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16342 #if VMA_RECORDING_ENABLED 16343 if(allocator->GetRecorder() != VMA_NULL)
16345 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16346 allocator->GetCurrentFrameIndex(),
16348 requiresDedicatedAllocation,
16349 prefersDedicatedAllocation,
16355 if(pAllocationInfo && result == VK_SUCCESS)
16357 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16367 VMA_ASSERT(allocator);
16369 if(allocation == VK_NULL_HANDLE)
16374 VMA_DEBUG_LOG(
"vmaFreeMemory");
16376 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16378 #if VMA_RECORDING_ENABLED 16379 if(allocator->GetRecorder() != VMA_NULL)
16381 allocator->GetRecorder()->RecordFreeMemory(
16382 allocator->GetCurrentFrameIndex(),
16387 allocator->FreeMemory(
16394 size_t allocationCount,
16397 if(allocationCount == 0)
16402 VMA_ASSERT(allocator);
16404 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16406 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16408 #if VMA_RECORDING_ENABLED 16409 if(allocator->GetRecorder() != VMA_NULL)
16411 allocator->GetRecorder()->RecordFreeMemoryPages(
16412 allocator->GetCurrentFrameIndex(),
16413 (uint64_t)allocationCount,
16418 allocator->FreeMemory(allocationCount, pAllocations);
16424 VkDeviceSize newSize)
16426 VMA_ASSERT(allocator && allocation);
16428 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16430 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16432 #if VMA_RECORDING_ENABLED 16433 if(allocator->GetRecorder() != VMA_NULL)
16435 allocator->GetRecorder()->RecordResizeAllocation(
16436 allocator->GetCurrentFrameIndex(),
16442 return allocator->ResizeAllocation(allocation, newSize);
16450 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16452 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16454 #if VMA_RECORDING_ENABLED 16455 if(allocator->GetRecorder() != VMA_NULL)
16457 allocator->GetRecorder()->RecordGetAllocationInfo(
16458 allocator->GetCurrentFrameIndex(),
16463 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16470 VMA_ASSERT(allocator && allocation);
16472 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16474 #if VMA_RECORDING_ENABLED 16475 if(allocator->GetRecorder() != VMA_NULL)
16477 allocator->GetRecorder()->RecordTouchAllocation(
16478 allocator->GetCurrentFrameIndex(),
16483 return allocator->TouchAllocation(allocation);
16491 VMA_ASSERT(allocator && allocation);
16493 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16495 allocation->SetUserData(allocator, pUserData);
16497 #if VMA_RECORDING_ENABLED 16498 if(allocator->GetRecorder() != VMA_NULL)
16500 allocator->GetRecorder()->RecordSetAllocationUserData(
16501 allocator->GetCurrentFrameIndex(),
16512 VMA_ASSERT(allocator && pAllocation);
16514 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16516 allocator->CreateLostAllocation(pAllocation);
16518 #if VMA_RECORDING_ENABLED 16519 if(allocator->GetRecorder() != VMA_NULL)
16521 allocator->GetRecorder()->RecordCreateLostAllocation(
16522 allocator->GetCurrentFrameIndex(),
16533 VMA_ASSERT(allocator && allocation && ppData);
16535 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16537 VkResult res = allocator->Map(allocation, ppData);
16539 #if VMA_RECORDING_ENABLED 16540 if(allocator->GetRecorder() != VMA_NULL)
16542 allocator->GetRecorder()->RecordMapMemory(
16543 allocator->GetCurrentFrameIndex(),
16555 VMA_ASSERT(allocator && allocation);
16557 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16559 #if VMA_RECORDING_ENABLED 16560 if(allocator->GetRecorder() != VMA_NULL)
16562 allocator->GetRecorder()->RecordUnmapMemory(
16563 allocator->GetCurrentFrameIndex(),
16568 allocator->Unmap(allocation);
16573 VMA_ASSERT(allocator && allocation);
16575 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16577 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16579 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16581 #if VMA_RECORDING_ENABLED 16582 if(allocator->GetRecorder() != VMA_NULL)
16584 allocator->GetRecorder()->RecordFlushAllocation(
16585 allocator->GetCurrentFrameIndex(),
16586 allocation, offset, size);
16593 VMA_ASSERT(allocator && allocation);
16595 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16597 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16599 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16601 #if VMA_RECORDING_ENABLED 16602 if(allocator->GetRecorder() != VMA_NULL)
16604 allocator->GetRecorder()->RecordInvalidateAllocation(
16605 allocator->GetCurrentFrameIndex(),
16606 allocation, offset, size);
16613 VMA_ASSERT(allocator);
16615 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16617 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16619 return allocator->CheckCorruption(memoryTypeBits);
16625 size_t allocationCount,
16626 VkBool32* pAllocationsChanged,
16636 if(pDefragmentationInfo != VMA_NULL)
16650 if(res == VK_NOT_READY)
16663 VMA_ASSERT(allocator && pInfo && pContext);
16674 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16676 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16678 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16680 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16682 #if VMA_RECORDING_ENABLED 16683 if(allocator->GetRecorder() != VMA_NULL)
16685 allocator->GetRecorder()->RecordDefragmentationBegin(
16686 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16697 VMA_ASSERT(allocator);
16699 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16701 if(context != VK_NULL_HANDLE)
16703 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16705 #if VMA_RECORDING_ENABLED 16706 if(allocator->GetRecorder() != VMA_NULL)
16708 allocator->GetRecorder()->RecordDefragmentationEnd(
16709 allocator->GetCurrentFrameIndex(), context);
16713 return allocator->DefragmentationEnd(context);
16726 VMA_ASSERT(allocator && allocation && buffer);
16728 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16730 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16732 return allocator->BindBufferMemory(allocation, buffer);
16740 VMA_ASSERT(allocator && allocation && image);
16742 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16744 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16746 return allocator->BindImageMemory(allocation, image);
16751 const VkBufferCreateInfo* pBufferCreateInfo,
16757 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16759 if(pBufferCreateInfo->size == 0)
16761 return VK_ERROR_VALIDATION_FAILED_EXT;
16764 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16766 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16768 *pBuffer = VK_NULL_HANDLE;
16769 *pAllocation = VK_NULL_HANDLE;
16772 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16773 allocator->m_hDevice,
16775 allocator->GetAllocationCallbacks(),
16780 VkMemoryRequirements vkMemReq = {};
16781 bool requiresDedicatedAllocation =
false;
16782 bool prefersDedicatedAllocation =
false;
16783 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16784 requiresDedicatedAllocation, prefersDedicatedAllocation);
16788 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16790 VMA_ASSERT(vkMemReq.alignment %
16791 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16793 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16795 VMA_ASSERT(vkMemReq.alignment %
16796 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16798 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16800 VMA_ASSERT(vkMemReq.alignment %
16801 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16805 res = allocator->AllocateMemory(
16807 requiresDedicatedAllocation,
16808 prefersDedicatedAllocation,
16811 *pAllocationCreateInfo,
16812 VMA_SUBALLOCATION_TYPE_BUFFER,
16816 #if VMA_RECORDING_ENABLED 16817 if(allocator->GetRecorder() != VMA_NULL)
16819 allocator->GetRecorder()->RecordCreateBuffer(
16820 allocator->GetCurrentFrameIndex(),
16821 *pBufferCreateInfo,
16822 *pAllocationCreateInfo,
16832 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16837 #if VMA_STATS_STRING_ENABLED 16838 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16840 if(pAllocationInfo != VMA_NULL)
16842 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16847 allocator->FreeMemory(
16850 *pAllocation = VK_NULL_HANDLE;
16851 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16852 *pBuffer = VK_NULL_HANDLE;
16855 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16856 *pBuffer = VK_NULL_HANDLE;
16867 VMA_ASSERT(allocator);
16869 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16874 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16876 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16878 #if VMA_RECORDING_ENABLED 16879 if(allocator->GetRecorder() != VMA_NULL)
16881 allocator->GetRecorder()->RecordDestroyBuffer(
16882 allocator->GetCurrentFrameIndex(),
16887 if(buffer != VK_NULL_HANDLE)
16889 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16892 if(allocation != VK_NULL_HANDLE)
16894 allocator->FreeMemory(
16902 const VkImageCreateInfo* pImageCreateInfo,
16908 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16910 if(pImageCreateInfo->extent.width == 0 ||
16911 pImageCreateInfo->extent.height == 0 ||
16912 pImageCreateInfo->extent.depth == 0 ||
16913 pImageCreateInfo->mipLevels == 0 ||
16914 pImageCreateInfo->arrayLayers == 0)
16916 return VK_ERROR_VALIDATION_FAILED_EXT;
16919 VMA_DEBUG_LOG(
"vmaCreateImage");
16921 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16923 *pImage = VK_NULL_HANDLE;
16924 *pAllocation = VK_NULL_HANDLE;
16927 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16928 allocator->m_hDevice,
16930 allocator->GetAllocationCallbacks(),
16934 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16935 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16936 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16939 VkMemoryRequirements vkMemReq = {};
16940 bool requiresDedicatedAllocation =
false;
16941 bool prefersDedicatedAllocation =
false;
16942 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16943 requiresDedicatedAllocation, prefersDedicatedAllocation);
16945 res = allocator->AllocateMemory(
16947 requiresDedicatedAllocation,
16948 prefersDedicatedAllocation,
16951 *pAllocationCreateInfo,
16956 #if VMA_RECORDING_ENABLED 16957 if(allocator->GetRecorder() != VMA_NULL)
16959 allocator->GetRecorder()->RecordCreateImage(
16960 allocator->GetCurrentFrameIndex(),
16962 *pAllocationCreateInfo,
16972 res = allocator->BindImageMemory(*pAllocation, *pImage);
16977 #if VMA_STATS_STRING_ENABLED 16978 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
16980 if(pAllocationInfo != VMA_NULL)
16982 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16987 allocator->FreeMemory(
16990 *pAllocation = VK_NULL_HANDLE;
16991 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16992 *pImage = VK_NULL_HANDLE;
16995 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16996 *pImage = VK_NULL_HANDLE;
17007 VMA_ASSERT(allocator);
17009 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
17014 VMA_DEBUG_LOG(
"vmaDestroyImage");
17016 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17018 #if VMA_RECORDING_ENABLED 17019 if(allocator->GetRecorder() != VMA_NULL)
17021 allocator->GetRecorder()->RecordDestroyImage(
17022 allocator->GetCurrentFrameIndex(),
17027 if(image != VK_NULL_HANDLE)
17029 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
17031 if(allocation != VK_NULL_HANDLE)
17033 allocator->FreeMemory(
17039 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1756
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2056
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1802
-
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2855
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1814
+
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2867
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
Definition: vk_mem_alloc.h:1776
-
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2375
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1756
+
Definition: vk_mem_alloc.h:1788
+
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2387
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1768
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:2006
-
Definition: vk_mem_alloc.h:2110
-
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2808
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1748
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2475
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1799
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2891
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2264
-
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1643
+
Definition: vk_mem_alloc.h:2018
+
Definition: vk_mem_alloc.h:2122
+
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2820
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1760
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2487
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1811
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2903
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2276
+
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1655
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2356
-
Definition: vk_mem_alloc.h:2081
-
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2811
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1737
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2163
-
Definition: vk_mem_alloc.h:2033
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1811
-
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2292
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2368
+
Definition: vk_mem_alloc.h:2093
+
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2823
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1749
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2175
+
Definition: vk_mem_alloc.h:2045
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1823
+
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2304
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1865
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1796
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1877
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1808
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2037
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2049
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1937
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1753
-
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2845
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1936
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2895
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1949
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1765
+
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2857
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1948
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2907
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1828
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1946
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2903
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2147
-
Definition: vk_mem_alloc.h:2105
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2886
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1754
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1679
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1840
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1958
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2915
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2159
+
Definition: vk_mem_alloc.h:2117
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2898
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1766
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1691
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1805
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1817
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2306
-
Definition: vk_mem_alloc.h:2300
-
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1760
-
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1872
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2485
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2318
+
Definition: vk_mem_alloc.h:2312
+
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1772
+
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1884
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2497
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1749
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1761
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
-
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1774
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2184
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2326
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2362
+
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1786
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2196
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2338
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2374
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1735
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2309
+
Definition: vk_mem_alloc.h:1747
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2321
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2860
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:1984
+
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2872
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:1996
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2820
+
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2832
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2881
+
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2893
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2899
-
Definition: vk_mem_alloc.h:2023
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2171
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1752
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2911
+
Definition: vk_mem_alloc.h:2035
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2183
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1764
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1942
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1685
-
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2799
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1954
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1697
+
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2811
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
Definition: vk_mem_alloc.h:2797
-
Definition: vk_mem_alloc.h:2131
-
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2826
+
Definition: vk_mem_alloc.h:2809
+
Definition: vk_mem_alloc.h:2143
+
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2838
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1706
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1718
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1778
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1711
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2901
+
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1790
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1723
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2913
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2158
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2372
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2170
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2384
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1745
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1925
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2321
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1698
-
Definition: vk_mem_alloc.h:2296
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1757
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1937
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2333
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1710
+
Definition: vk_mem_alloc.h:2308
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2088
+
Definition: vk_mem_alloc.h:2100
Represents Opaque object that represents started defragmentation process.
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1938
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1702
-
Definition: vk_mem_alloc.h:2121
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2312
-
Definition: vk_mem_alloc.h:2032
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1751
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1950
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1714
+
Definition: vk_mem_alloc.h:2133
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2324
+
Definition: vk_mem_alloc.h:2044
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1763
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2153
-
Definition: vk_mem_alloc.h:2144
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2165
+
Definition: vk_mem_alloc.h:2156
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1928
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1747
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2334
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1814
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2365
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2142
-
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2850
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2177
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1940
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1759
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2346
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1826
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2377
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2154
+
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2862
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2189
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1853
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1944
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2068
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1937
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1865
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1956
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2080
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1949
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1758
-
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1784
-
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2796
-
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2874
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1700
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1757
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1770
+
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1796
+
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2808
+
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2886
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1712
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1769
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2348
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1750
-
Definition: vk_mem_alloc.h:2099
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2360
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1762
+
Definition: vk_mem_alloc.h:2111
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1792
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2499
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1808
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1937
+
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1804
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2511
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1820
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1949
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1934
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1946
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2353
-
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2805
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2365
+
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2817
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
-
Definition: vk_mem_alloc.h:2114
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2480
-
Definition: vk_mem_alloc.h:2128
-
Definition: vk_mem_alloc.h:2140
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2897
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1743
+
Definition: vk_mem_alloc.h:2126
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2492
+
Definition: vk_mem_alloc.h:2140
+
Definition: vk_mem_alloc.h:2152
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2909
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1755
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1932
-
Definition: vk_mem_alloc.h:1989
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2302
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1944
+
Definition: vk_mem_alloc.h:2001
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2314
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1781
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1930
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1755
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1759
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2055
-
Definition: vk_mem_alloc.h:2135
-
Definition: vk_mem_alloc.h:2016
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2494
+
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1793
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1942
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1767
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1771
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2067
+
Definition: vk_mem_alloc.h:2147
+
Definition: vk_mem_alloc.h:2028
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2506
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1733
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1745
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1746
-
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2281
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1758
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2293
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2461
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2473
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2125
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2246
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1938
+
Definition: vk_mem_alloc.h:2137
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2258
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1950
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
-
Definition: vk_mem_alloc.h:2094
-
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1768
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1945
+
Definition: vk_mem_alloc.h:2106
+
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1780
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1957
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2359
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1938
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2371
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1950
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
-
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2865
+
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2877
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2466
-
uint32_t poolCount
Number of pools in pPools array.
Definition: vk_mem_alloc.h:2829
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2478
+
uint32_t poolCount
Number of pools in pPools array.
Definition: vk_mem_alloc.h:2841