#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifndef VMA_RECORDING_ENABLED
    #ifdef _WIN32
        #define VMA_RECORDING_ENABLED 1
    #else
        #define VMA_RECORDING_ENABLED 0
    #endif
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif

#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
#if VMA_DEDICATED_ALLOCATION
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

/** \brief Given Memory Type Index, returns Property Flags of this memory type.
*/
void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

/** \brief Sets index of the current frame.
*/
void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);

#define VMA_STATS_STRING_ENABLED 1

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as string in JSON format.
/** @param[out] ppStatsString Must be freed using vmaFreeStatsString() function.
*/
void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED

/**
\brief Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
*/
VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

/**
\brief Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
*/
VkResult vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

/**
\brief Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
*/
VkResult vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

/** \brief Marks all allocations in given pool as lost if they are not used in current frame
or VmaPoolCreateInfo::frameInUseCount frames back from now.
*/
void vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

/** \brief General purpose memory allocation.
*/
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/** \brief General purpose memory allocation for multiple allocation objects at once.
*/
VkResult vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo);

/** \brief Frees memory and destroys multiple allocations.
*/
void vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations);

/** \brief Tries to resize an allocation in place, if there is enough free memory after it.
*/
VkResult vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);

/** \brief Compacts memory by moving allocations.
*/
VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Function similar to vmaCreateBuffer().
VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
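/*
Illustrative usage sketch (not part of the library itself; `allocator` and the
create-info values below are assumptions for the example): vmaCreateBuffer
creates the VkBuffer, allocates suitable memory for it, and binds the two
together in one call.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf;
    VmaAllocation alloc;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
*/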
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdlib>
#include <cstring>

#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
#define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

// Define this macro to 1 to make the library use STL containers instead of its own implementation.
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler support for std::shared_mutex: Visual Studio 2015 Update 2 and later.
    #if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918
        #define VMA_USE_STL_SHARED_MUTEX 1
    #endif
#endif

#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <cassert>
#include <algorithm>
#include <mutex>
#include <atomic>

#ifndef VMA_NULL
    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    #define VMA_NULL nullptr
#endif

#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__)
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef _DEBUG
        #define VMA_ASSERT(expr) assert(expr)
    #else
        #define VMA_ASSERT(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif

// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif // #if VMA_STATS_STRING_ENABLED

#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32)
        // Use SRWLOCK from WinAPI.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX

#ifndef VMA_ATOMIC_UINT32
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define this macro to 1 to automatically fill new allocations and destroyed
    // allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define this macro to 1 together with non-zero VMA_DEBUG_MARGIN to enable
    // writing a magic value to the margin before and after every allocation and
    // validating it, so that memory corruptions (out-of-bounds writes) are detected.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Set this to 1 for debugging purposes only, to enable a single mutex protecting
    // all entry calls to the library. Can be useful for debugging multithreading issues.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Set this to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// Decimal 2139416166, float NaN.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
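/*
Sanity sketch (illustrative only): VmaCountBitsSet is the classic SWAR popcount,
so for example:

    VmaCountBitsSet(0x00000000u); // == 0
    VmaCountBitsSet(0x0000000Fu); // == 4
    VmaCountBitsSet(0xFFFFFFFFu); // == 32
*/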
// Aligns given value up to nearest multiply of align value.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns given value down to nearest multiply of align value.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
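/*
Worked examples (illustrative only):

    VmaAlignUp<uint32_t>(11, 8);   // == 16
    VmaAlignDown<uint32_t>(11, 8); // == 8
    VmaRoundDiv<uint32_t>(7, 2);   // == 4 - rounds to nearest instead of truncating

Note that VmaAlignUp/VmaAlignDown use division rather than bit masking, so they
work for any positive alignment, not only powers of two.
*/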
// Returns true if given number is a power of 2. For 0 returns true.
// T must be an unsigned integer type, or a signed one but always nonnegative.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}

// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
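/*
Worked examples (illustrative only):

    VmaNextPow2(17u); // == 32 - smallest power of 2 >= 17
    VmaNextPow2(32u); // == 32
    VmaPrevPow2(17u); // == 16 - largest power of 2 <= 17

The buddy block metadata below relies on helpers like VmaPrevPow2 to derive its
usable size and per-level node sizes.
*/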
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#ifndef VMA_SORT
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}
#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT

/*
Returns true if two memory blocks occupy very close parts of the same memory
"page", in the meaning of the Vulkan bufferImageGranularity requirement.
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
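/*
Worked example (illustrative only), with pageSize = 4096: a resource A occupying
[0, 4000) ends on page 0, while a resource B starting at offset 4096 starts on
page 1, so

    VmaBlocksOnSamePage(0, 4000, 4096, 4096); // == false

With B at offset 4000 instead, both land on page 0 and the function returns
true - which is exactly when the bufferImageGranularity conflict check below
becomes relevant.
*/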
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};

/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one of them is
a buffer or a linear image and the other one is an optimal image. If the type
is unknown, it behaves conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
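/*
Example (illustrative only): a buffer followed by a linear image on the same
granularity page is fine, while a buffer followed by an optimal-tiling image
conflicts and must be pushed to the next page:

    VmaIsBufferImageGranularityConflict(
        VMA_SUBALLOCATION_TYPE_BUFFER, VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR);  // == false
    VmaIsBufferImageGranularityConflict(
        VMA_SUBALLOCATION_TYPE_BUFFER, VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL); // == true
*/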
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
    return true;
}
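/*
How these are used (sketch, assuming VMA_DEBUG_MARGIN > 0): each allocation is
surrounded by margins of VMA_DEBUG_MARGIN bytes filled with the magic value, so
out-of-bounds writes can be detected later:

    // On allocation, given mapped block memory pData:
    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    // On corruption check - false means something scribbled over a margin:
    VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
*/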
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to the first element that is greater
or equal to (key), according to comparison (cmp).

Cmp should return true if the first argument is less than the second argument.

Returned value is the found element, if present in the collection, or the place
where a new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
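/*
Usage sketch (illustrative only), on an array sorted ascending:

    VkDeviceSize sizes[] = { 16, 32, 64, 256 };
    // Yields a pointer to 64 - the first element not less than 48:
    const VkDeviceSize* it = VmaBinaryFindFirstNotLess(
        sizes, sizes + 4, (VkDeviceSize)48,
        [](VkDeviceSize lhs, VkDeviceSize rhs) { return lhs < rhs; });

This is the building block for the sorted-vector helpers and VmaMap below.
*/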
// Returns true if all pointers in the array are not-null and unique.
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}

////////////////////////////////////////////////////////////////////////////////
// Memory allocation

static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}

template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}
#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
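/*
Usage sketch (illustrative only): vma_new/vma_delete pair placement-new with
VmaMalloc/VmaFree, so internal host allocations can be routed through the
application's VkAllocationCallbacks (which may be null - the system allocator
is used then). `pAllocationCallbacks` is assumed to be in scope:

    uint32_t* pNum = vma_new(pAllocationCallbacks, uint32_t)(42);
    vma_delete(pAllocationCallbacks, pNum);
*/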
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with a subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};

template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}
#endif // #if VMA_USE_STL_VECTOR

template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}

template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaVectorFindSorted(const IterT& beg, const IterT& end, const KeyT& value)
{
    CmpLess comparator;
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, comparator);
    if(it == end ||
        (!comparator(*it, value) && !comparator(value, *it)))
    {
        return it;
    }
    return end;
}
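/*
Usage sketch (illustrative only): together these helpers treat a VmaVector as a
flat sorted set with O(log n) lookup and O(n) insertion/removal:

    struct IntLess { bool operator()(int a, int b) const { return a < b; } };

    VmaStlAllocator<int> alloc(pAllocationCallbacks); // assumed callbacks
    VmaVector< int, VmaStlAllocator<int> > v(alloc);
    VmaVectorInsertSorted<IntLess>(v, 7);
    VmaVectorInsertSorted<IntLess>(v, 3); // v == { 3, 7 }
    VmaVectorRemoveSorted<IntLess>(v, 7); // returns true - found and removed
*/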
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. The number of elements that can be allocated is not bounded because
the allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    void Clear();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}

template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}

template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
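/*
How this works (sketch): each ItemBlock holds m_ItemsPerBlock items, and the
free items of a block form a singly linked list threaded through the union's
NextFreeIndex field, with block.FirstFreeIndex as its head. Alloc() therefore
pops in O(1); Free() pushes in O(1) after locating the owning block. T should
be POD-like - Alloc() does not run constructors. Illustrative usage:

    VmaPoolAllocator<uint64_t> pool(pAllocationCallbacks, 128); // assumed callbacks
    uint64_t* p = pool.Alloc(); // pops the head of a block's free list
    pool.Free(p);               // pushes it back on
*/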
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
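/*
Usage sketch (illustrative only): VmaRawList is a doubly linked list whose nodes
come from the pool allocator above, so pushes, pops and removals avoid the
system heap once blocks are warm:

    VmaRawList<int> list(pAllocationCallbacks); // assumed callbacks
    VmaListItem<int>* mid = list.PushBack(1);
    list.PushBack(2);
    list.InsertBefore(mid, 0); // list is now 0, 1, 2
    list.Remove(mid);          // O(1) unlink; the node returns to the pool
*/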
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };
    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};
#endif // #if VMA_USE_STL_LIST

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with a subset of the interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}
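/*
Design note (sketch): in this branch VmaMap is a flat map - a VmaVector of
VmaPair kept sorted by key via VmaBinaryFindFirstNotLess - so find() is
O(log n) while insert()/erase() are O(n), a reasonable trade-off for small,
read-mostly maps. Illustrative usage:

    VmaStlAllocator< VmaPair<uint32_t, uint32_t> > alloc(pAllocationCallbacks);
    VmaMap<uint32_t, uint32_t> map(alloc);
    map.insert(VmaPair<uint32_t, uint32_t>(1u, 100u));
    VmaPair<uint32_t, uint32_t>* it = map.find(1u); // != map.end()
*/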
#endif // #if VMA_USE_STL_UNORDERED_MAP

////////////////////////////////////////////////////////////////////////////////

class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };

struct VmaAllocation_T
{
    VMA_CLASS_NO_COPY(VmaAllocation_T)
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment(1),
        m_Size(0),
        m_pUserData(VMA_NULL),
        m_LastUseFrameIndex(currentFrameIndex),
        m_Type((uint8_t)ALLOCATION_TYPE_NONE),
        m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
        m_MapCount(0),
        m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
    {
#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }
    void InitBlockAllocation(
        VmaPool hPool,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_hPool = hPool;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeSize(VkDeviceSize newSize);
    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const;
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif
private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaPool m_hPool; // Null if belongs to general memory.
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        uint32_t m_MemoryTypeIndex;
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        // Allocation out of VmaDeviceMemoryBlock.
        BlockAllocation m_BlockAllocation;
        // Allocation for an object that has its own private VkDeviceMemory.
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif

    void FreeUserDataString(VmaAllocator hAllocator);
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned
as allocated memory block or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparator for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request must already be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        bool upperAddress,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

    // Tries to resize (grow or shrink) space for given allocation, in place.
    virtual bool ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize) { return false; }

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};

#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)

class VmaBlockMetadata_Generic :
public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        bool upperAddress,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    virtual bool ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize);

    ////////////////////////////////////////////////////////////////////////////////
    // For defragmentation

    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
/*
Linear allocation algorithm. Keeps suballocations in one or two vectors used as
a stack, double stack or ring buffer - see SECOND_VECTOR_MODE below.
*/
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        bool upperAddress,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    /*
    There are two suballocation vectors, used in ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st, but they
        all have smaller offset.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are upper side of double stack.
        They all have offsets higher than those in 1st vector.
        Top of this stack means smaller offsets, but higher indices in this vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();
};
/*
- GetSize() is the original size of allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as separate, unused range, not available for allocations.

Node at level 0 has size = m_UsableSize.
Each next level contains nodes with size 2 times smaller than current level.
m_LevelCount is the maximum number of levels to use in the current object.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        bool upperAddress,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;

    struct {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // Alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    // node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
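/*
Worked example (illustrative only): for a 64 MiB block, m_UsableSize is 64 MiB
(already a power of two); level 0 is a single 64 MiB node and each level halves
the node size, so LevelToNodeSize(3) == 8 MiB. A 5 MiB request is served from a
level-3 node; the 3 MiB difference is internal fragmentation accepted in
exchange for O(log n) alloc/free and trivial merging of buddy nodes.
*/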
/*
Represents a single block of device memory (VkDeviceMemory) with all the
data about its regions (aka suballocations, VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    VkResult CheckCorruption(VmaAllocator hAllocator);

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkBuffer hBuffer);
    VkResult BindImageMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkImage hImage);

private:
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it's not used by multiple threads
    simultaneously, e.g. vkMapMemory, vkBindBufferMemory.
    Also protects m_MapCount, m_pMappedData.
    Allocations, deallocations, any change in m_pMetadata is protected by
    parent's VmaBlockVector::m_Mutex.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
};

class VmaDefragmentationAlgorithm;
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a
specific Vulkan memory type.

Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool isCustomPool,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    void GetPoolStats(VmaPoolStats* pStats);

    bool IsEmpty() const { return m_Blocks.empty(); }
    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        VmaPool hCurrentPool,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void Free(VmaAllocation hAllocation);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats);
    ////////////////////////////////////////////////////////////////////////////////
    // To be used only while the m_Mutex is locked. Used during defragmentation.

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_IsCustomPool;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;

    /* There can be at most one allocation that is completely empty - a
    hysteresis to avoid the pessimistic case of alternating creation and
    destruction of a VkDeviceMemory. */
    bool m_HasEmptyBlock;
    VMA_RW_MUTEX m_Mutex;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        VmaPool hCurrentPool,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // To be used only without CAN_MAKE_OTHER_LOST flag.
    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        VmaPool hCurrentPool,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);
};
5919 VMA_CLASS_NO_COPY(VmaPool_T)
5921 VmaBlockVector m_BlockVector;
5926 VkDeviceSize preferredBlockSize);
5929 uint32_t GetId()
const {
return m_Id; }
5930 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
#if VMA_STATS_STRING_ENABLED
    //void PrintDetailedMap(class VmaStringBuilder& sb);
#endif

private:
    uint32_t m_Id;
};

class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
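/*
Illustrative sketch (not part of the library): the contract a concrete
algorithm fulfills. A do-nothing subclass would look like this; the two real
implementations follow below.

    class SketchNullDefragAlgorithm : public VmaDefragmentationAlgorithm
    {
    public:
        SketchNullDefragAlgorithm(VmaAllocator a, VmaBlockVector* v, uint32_t frame) :
            VmaDefragmentationAlgorithm(a, v, frame) { }
        virtual void AddAllocation(VmaAllocation, VkBool32*) { }
        virtual void AddAll() { }
        virtual VkResult Defragment(
            VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >&,
            VkDeviceSize, uint32_t) { return VK_SUCCESS; } // reports no moves
        virtual VkDeviceSize GetBytesMoved() const { return 0; }
        virtual uint32_t GetAllocationsMoved() const { return 0; }
    };
*/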
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure for remaining empty space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
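/*
Illustrative sketch (not part of the library): how the fast algorithm drives
FreeSpaceDatabase while sweeping allocations front to back. Offsets and sizes
are made up for the example.

    FreeSpaceDatabase freeDb;
    freeDb.Register(0, 256, 1024);     // block 0 has a 1024-byte hole at offset 256

    size_t dstBlock; VkDeviceSize dstOffset;
    if(freeDb.Fetch(64, 512, dstBlock, dstOffset))
    {
        // dstBlock == 0, dstOffset == 256 (already 64-aligned); the remaining
        // 512 bytes of the hole stay registered for the next Fetch().
    }
*/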
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    uint32_t flags;
    VkBuffer hBuffer;

    VmaBlockDefragmentationContext() :
        flags(0),
        hBuffer(VK_NULL_HANDLE)
    {
    }
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VkResult res;
    bool mutexLocked;
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex,
        uint32_t algorithmFlags);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

    void Begin(bool overlappingMoveSupported);

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_AlgorithmFlags;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    // Used between constructor and Begin.
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    /*
    Returns:
    - `VK_SUCCESS` if succeeded and object can be destroyed immediately.
    - `VK_NOT_READY` if succeeded but the object must remain alive until vmaDefragmentationEnd().
    - Negative value if error occurred and object can be destroyed immediately.
    */
    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats);

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    VmaDefragmentationStats* const m_pStats;
    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
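/*
Illustrative sketch (not part of this file's code): how this context is
typically used through the public API declared earlier in this header.
`allocations` is assumed to be an existing std::vector<VmaAllocation>; error
handling is omitted.

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = (uint32_t)allocations.size();
    defragInfo.pAllocations = allocations.data();
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext defragCtx;
    vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
    // ... if VK_NOT_READY was returned, submit the recorded command buffer ...
    vmaDefragmentationEnd(allocator, defragCtx);
*/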
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    VmaRecorder();
    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        bool dedicatedAllocationExtensionEnabled);
    ~VmaRecorder();

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordResizeAllocation(
        uint32_t frameIndex,
        VmaAllocation allocation,
        VkDeviceSize newSize);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }

    private:
        char m_PtrStr[17];
        const char* m_Str;
    };

    bool m_UseMutex;
    VmaRecordFlags m_Flags;
    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    int64_t m_Freq;
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
    void Flush();
};
#endif // #if VMA_RECORDING_ENABLED

// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    bool m_UseMutex;
    bool m_UseKhrDedicatedAllocation;
    VkDevice m_hDevice;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;

    // Number of bytes free out of limit, or VK_WHOLE_SIZE if no limit for that heap.
    VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
    VMA_MUTEX m_HeapSizeLimitMutex;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(
        VmaDefragmentationContext context);

    void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
    bool TouchAllocation(VmaAllocation hAllocation);

    VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    void DestroyPool(VmaPool pool);
    void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    void CreateLostAllocation(VmaAllocation* pAllocation);

    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult Map(VmaAllocation hAllocation, void** ppData);
    void Unmap(VmaAllocation hAllocation);

    VkResult BindBufferMemory(VmaAllocation hAllocation, VkBuffer hBuffer);
    VkResult BindImageMemory(VmaAllocation hAllocation, VkImage hImage);

    void FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

    void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void FreeDedicatedMemory(VmaAllocation allocation);
};

////////////////////////////////////////////////////////////////////////////////
// Memory allocation #2 after VmaAllocator_T definition

static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
////////////////////////////////////////////////////////////////////////////////
// VmaStringBuilder

#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    VmaUint32ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    VmaUint64ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}
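/*
Illustrative sketch (not part of the library): VmaStringBuilder accumulates
into a growable char vector with no terminating zero, so consumers pair
GetData() with GetLength(). Assuming `allocator` is an existing VmaAllocator:

    VmaStringBuilder sb(allocator);
    sb.Add("Heap count: ");
    sb.AddNumber(allocator->GetMemoryHeapCount());
    sb.AddNewLine();
    fwrite(sb.GetData(), 1, sb.GetLength(), stdout);
*/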
#endif // #if VMA_STATS_STRING_ENABLED

////////////////////////////////////////////////////////////////////////////////
// VmaJsonWriter

#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}
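/*
Illustrative sketch (not part of the library): the writer enforces key/value
alternation with the assertion in BeginValue(). This produces
{ "Name": "block", "Size": 1024 }:

    VmaStringBuilder sb(allocator);   // `allocator` is an existing VmaAllocator
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject(true);       // single-line object
        json.WriteString("Name");     // key (must be a string)
        json.WriteString("block");    // value
        json.WriteString("Size");
        json.WriteNumber(1024u);
        json.EndObject();
    }   // destructor asserts the stack is balanced
*/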
#endif // #if VMA_STATS_STRING_ENABLED

////////////////////////////////////////////////////////////////////////////////

void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            const char* const newStrSrc = (char*)pUserData;
            const size_t newStrLen = strlen(newStrSrc);
            char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
            memcpy(newStrDst, newStrSrc, newStrLen + 1);
            m_pUserData = newStrDst;
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
            ++mapRefCount;
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
{
    VMA_ASSERT(newSize > 0);
    m_Size = newSize;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_MemoryTypeIndex;
    default:
        VMA_ASSERT(0);
        return UINT32_MAX;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
        break;
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

VmaPool VmaAllocation_T::GetPool() const
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    return m_BlockAllocation.m_hPool;
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you're doing :)
    */
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough
                // to mark it as LOST. Calling code just needs to unregister this allocation in
                // the owning VmaDeviceMemoryBlock.
                return true;
            }
        }
    }
}

#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    if(m_pUserData != VMA_NULL)
    {
        char* const oldStr = (char*)m_pUserData;
        const size_t oldStrLen = strlen(oldStr);
        vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
        m_pUserData = VMA_NULL;
    }
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED

struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata

VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED

////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Generic

VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}
bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in
    // m_FreeSuballocationsBySize, calculated from traversing their list.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations - previous allocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
    // match expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}

void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;

    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED

bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    // There is not enough total free space in this block to fulfill the request:
    // early return false.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.

        pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
        pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;

        VmaAllocationRequest tmpAllocRequest = {};
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    tmpAllocRequest.item = suballocIt;

                    if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
                        strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                    }
                }
            }
        }

        if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
        {
            return true;
        }
    }

    return false;
}
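/*
Worked example (illustrative, hypothetical values): how the best-fit search
narrows candidates. Suppose the registered free sizes, ascending, are
{64, 256, 1024} and allocSize + 2*VMA_DEBUG_MARGIN == 200. VmaBinaryFindFirstNotLess
behaves like std::lower_bound (<algorithm> is already included above):

    VkDeviceSize sizes[] = {64, 256, 1024};
    const VkDeviceSize* first = std::lower_bound(sizes, sizes + 3, (VkDeviceSize)200);
    // *first == 256: the smallest free range that can hold the request; the
    // loop above only walks toward larger ranges if CheckAllocation() rejects
    // it (e.g. because of alignment or bufferImageGranularity).
*/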
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}

uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}

VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
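/*
Illustrative sketch (not part of the library): when VMA_DEBUG_MARGIN > 0 and
corruption detection is enabled, every used range is bracketed by magic
values, so the layout inside a block looks like:

    | ... | MAGIC | allocation payload | MAGIC | ... |
          ^ offset - VMA_DEBUG_MARGIN          ^ offset + size

CheckCorruption() above simply revalidates both brackets for every non-free
suballocation against the mapped block data.
*/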
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    bool upperAddress,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert a new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert a new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
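/*
Worked example (illustrative, hypothetical numbers): splitting a free range on
allocation. Take a free suballocation at offset 0, size 1000, and a request
placed at offset 128 with allocSize 256:

    paddingBegin = 128 - 0 = 128          // becomes a new free item before
    paddingEnd   = 1000 - 128 - 256 = 616 // becomes a new free item after

    m_FreeCount: -1 (consumed) +1 (begin) +1 (end) => net +1
    m_SumFreeSize -= 256                  // only the allocated bytes leave the pool
*/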
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize)
{
    typedef VmaSuballocationList::iterator iter_type;
    for(iter_type suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == alloc)
        {
            iter_type nextItem = suballocItem;
            ++nextItem;

            // Should have been ensured on a higher level.
            VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);

            // Shrinking.
            if(newSize < alloc->GetSize())
            {
                const VkDeviceSize sizeDiff = suballoc.size - newSize;

                // There is next item and it is free: grow it backward.
                if(nextItem != m_Suballocations.end() &&
                    nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    UnregisterFreeSuballocation(nextItem);
                    nextItem->offset -= sizeDiff;
                    nextItem->size += sizeDiff;
                    RegisterFreeSuballocation(nextItem);
                }
                // Otherwise create a new free item after the current one.
                else
                {
                    VmaSuballocation newFreeSuballoc;
                    newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
                    newFreeSuballoc.offset = suballoc.offset + newSize;
                    newFreeSuballoc.size = sizeDiff;
                    newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                    if(nextItem != m_Suballocations.end())
                    {
                        iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
                        RegisterFreeSuballocation(newFreeSuballocIt);
                    }
                    else
                    {
                        m_Suballocations.push_back(newFreeSuballoc);
                        iter_type newFreeSuballocIt = m_Suballocations.end();
                        RegisterFreeSuballocation(--newFreeSuballocIt);
                    }
                    ++m_FreeCount;
                }

                suballoc.size = newSize;
                m_SumFreeSize += sizeDiff;
            }
            // Growing.
            else
            {
                const VkDeviceSize sizeDiff = newSize - suballoc.size;

                // There must be a next item and it must be free.
                if(nextItem == m_Suballocations.end() ||
                    nextItem->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    return false;
                }

                // There is not enough free space, including margin.
                if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
                {
                    return false;
                }

                // There is more free space than required: move and shrink the next item.
                if(nextItem->size > sizeDiff)
                {
                    UnregisterFreeSuballocation(nextItem);
                    nextItem->offset += sizeDiff;
                    nextItem->size -= sizeDiff;
                    RegisterFreeSuballocation(nextItem);
                }
                // There is exactly the amount of free space required: remove the next free item.
                else
                {
                    UnregisterFreeSuballocation(nextItem);
                    m_Suballocations.erase(nextItem);
                    --m_FreeCount;
                }

                suballoc.size = newSize;
                m_SumFreeSize -= sizeDiff;
            }

            // We cannot call Validate() here because the alloc object is updated to the
            // new size outside of this call.
            return true;
        }
    }
    VMA_ASSERT(0 && "Not found!");
    return false;
}

bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have the final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. pOffset is already filled.
    return true;
}
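/*
Illustrative sketch (not part of the library): VmaBlocksOnSamePage() treats
bufferImageGranularity as a page size, and two resources conflict only when a
linear and a non-linear resource share such a page. For example, with
bufferImageGranularity = 1024:

    resourceA: offset 0,    size 512   -> ends on page 0
    resourceB: offset 768,  size 256   -> starts on page 0  => same page, types checked
    resourceC: offset 1024, size 256   -> starts on page 1  => no conflict with A
*/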
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}

VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}

void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }

    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}

void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item->size,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }

    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
    {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
            {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Linear

VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}
bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty())
    {
        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should be just pop_back-ed.
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty())
    {
        // Null item at the end should be just pop_back-ed.
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i)
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree)
        {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else
        {
            ++nullItem1stCount;
        }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; )
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}
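// The final two checks tie the incremental bookkeeping to ground truth: every
// byte is either inside a used suballocation or counted in m_SumFreeSize. For
// example, in a 1024-byte block holding used ranges [0,256) and [512,640),
// sumUsedSize = 256 + 128 = 384, so Validate() requires
// m_SumFreeSize == 1024 - 384 == 640.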
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}
VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();

    // Gaps inside the vectors left by freed allocations are not considered,
    // because a linear allocator cannot reuse them; only space available for
    // new allocations counts.
    if(IsEmpty())
    {
        return size;
    }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        // Available space is after end of 1st, as well as before beginning of 1st.
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }

    case SECOND_VECTOR_RING_BUFFER:
        // Available space is only between end of 2nd and beginning of 1st.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }

    case SECOND_VECTOR_DOUBLE_STACK:
        // Available space is only between end of 1st and top of 2nd.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }

    default:
        VMA_ASSERT(0);
        return 0;
    }
}
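// Example, double-stack mode in a 1000-byte block: 1st ends at offset 300
// (last offset 200 + size 100) and the 2nd stack's top entry starts at 800,
// so the largest range available for a new allocation is 800 - 300 = 500.
// Holes left by freed items inside either vector are deliberately ignored:
// the linear algorithm never reuses them until cleanup reclaims the ends.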
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                // Free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                // The allocation itself.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                // Trailing free space up to the beginning of 1st vector.
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }
            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    outInfo.unusedBytes = size - outInfo.usedBytes;
}
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                ++inoutStats.allocationCount;
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }
            ++inoutStats.allocationCount;
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                ++inoutStats.allocationCount;
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: calculate overall statistics for PrintDetailedMap_Begin.

    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;
    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    ++unusedRangeCount;
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
            {
                ++unusedRangeCount;
            }
            ++alloc1stCount;
            usedBytes += suballoc.size;
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < size)
            {
                ++unusedRangeCount;
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    ++unusedRangeCount;
                }
                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: print allocations and unused ranges in offset order.
    lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED

bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(upperAddress)
    {
        if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
        {
            VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
            return false;
        }
        // Try to allocate before 2nd.back(), or end of block if 2nd is empty.
        if(allocSize > size)
        {
            return false;
        }
        VkDeviceSize resultBaseOffset = size - allocSize;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset - allocSize;
            if(allocSize > lastSuballoc.offset)
            {
                return false;
            }
        }

        // Start from offset equal to end of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the end.
        if(VMA_DEBUG_MARGIN > 0)
        {
            if(resultOffset < VMA_DEBUG_MARGIN)
            {
                return false;
            }
            resultOffset -= VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignDown(resultOffset, allocAlignment);

        // Check next suballocations from 2nd for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
            {
                const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
            }
        }

        // There is enough free space.
        const VkDeviceSize endOf1st = !suballocations1st.empty() ?
            suballocations1st.back().offset + suballocations1st.back().size : 0;
        if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
        {
            // Check previous suballocations for BufferImageGranularity conflicts.
            if(bufferImageGranularity > 1)
            {
                for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
                {
                    const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                    if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
            pAllocationRequest->sumItemSize = 0;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }

        return false;
    }
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        // Try to allocate at the end of 1st vector.

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
        {
            // Check next suballocations in 2nd for BufferImageGranularity conflicts.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
                {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on previous page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }
    // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
    // beginning of 1st vector.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost)
        {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
            {
                // Next colliding allocation at the greater or equal offset found.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    // No problem, walk over it.
                }
                else
                {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else
                    {
                        return false;
                    }
                }
                ++index1st;
            }

            // Check the rest of the current page for BufferImageGranularity conflicts.
            if(bufferImageGranularity > 1)
            {
                while(index1st < suballocations1st.size())
                {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
                    {
                        if(suballoc.hAllocation != VK_NULL_HANDLE)
                        {
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else
                            {
                                return false;
                            }
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                    ++index1st;
                }
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            if(bufferImageGranularity > 1)
            {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++)
                {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            return true;
        }
    }

    return false;
}
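// Upper-address requests align the offset *down*: a 100-byte request with
// alignment 64 in a 1000-byte empty block starts from resultBaseOffset =
// 1000 - 100 = 900, then VmaAlignDown(900, 64) = 896, so the allocation
// occupies [896, 996) and the few bytes above it stay unused. Lower-address
// requests do the mirror image with VmaAlignUp.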
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0)
    {
        return true;
    }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    size_t index1st = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
    {
        VMA_ASSERT(index1st < suballocations1st.size());
        VmaSuballocation& suballoc = suballocations1st[index1st];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                ++m_1stNullItemsMiddleCount;
                ++madeLostCount;
            }
            else
            {
                return false;
            }
        }
        ++index1st;
    }

    CleanupAfterFree();
    return true;
}
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount)
    {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    bool upperAddress,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    if(upperAddress)
    {
        VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
            "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
        SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
        suballocations2nd.push_back(newSuballoc);
        m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
    }
    else
    {
        SuballocationVectorType& suballocations1st = AccessSuballocations1st();

        // First allocation.
        if(suballocations1st.empty())
        {
            suballocations1st.push_back(newSuballoc);
        }
        else
        {
            // New allocation at the end of 1st vector.
            if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
            {
                // Check if it fits before the end of the block.
                VMA_ASSERT(request.offset + allocSize <= GetSize());
                suballocations1st.push_back(newSuballoc);
            }
            // New allocation at the end of 2nd vector.
            else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
            {
                SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

                switch(m_2ndVectorMode)
                {
                case SECOND_VECTOR_EMPTY:
                    // First allocation from second part ring buffer.
                    VMA_ASSERT(suballocations2nd.empty());
                    m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                    break;
                case SECOND_VECTOR_RING_BUFFER:
                    // 2-part ring buffer is already started.
                    VMA_ASSERT(!suballocations2nd.empty());
                    break;
                case SECOND_VECTOR_DOUBLE_STACK:
                    VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                    break;
                default:
                    VMA_ASSERT(0);
                }

                suballocations2nd.push_back(newSuballoc);
            }
            else
            {
                VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
            }
        }
    }

    m_SumFreeSize -= newSuballoc.size;
}
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}

void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: mark it as the next empty item at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stay uninitialized.
        SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc);
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
            VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
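// The compaction heuristic fires once the vector holds more than 32 entries
// and null items outnumber live ones by a factor of at least 1.5. For
// example, with 100 entries of which 60 are null: 60 * 2 = 120 >=
// (100 - 60) * 3 = 120, so the next CleanupAfterFree() rewrites the vector
// without the holes.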
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        if(ShouldCompact1st())
        {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex)
                {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
        {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
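// The m_1stVectorIndex ^= 1 at the end is what makes the ring buffer cheap:
// when the 1st vector drains completely, the roles of m_Suballocations0 and
// m_Suballocations1 are flipped in O(1) instead of copying elements, and the
// former 2nd (wrapped-around) vector becomes the new 1st.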
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
    {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}
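// The buddy allocator only manages the largest power-of-2 prefix of the
// block: for a 200 MiB block, VmaPrevPow2 gives m_UsableSize = 128 MiB and
// the remaining 72 MiB are reported as "unusable". Level n manages nodes of
// size m_UsableSize >> n, so (assuming the constants keep their values of
// MIN_NODE_SIZE = 32 and MAX_LEVELS = 30) a 128 MiB block gets
// log2(128 MiB / 32) + 1 = 23 levels.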
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists beyond m_LevelCount are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}
VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL)
        {
            return LevelToNodeSize(level);
        }
    }
    return 0;
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;
    outInfo.allocationCount = outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = outInfo.unusedBytes = 0;
    outInfo.allocationSizeMax = outInfo.unusedRangeSizeMax = 0;
    outInfo.allocationSizeMin = outInfo.unusedRangeSizeMin = UINT64_MAX;

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0)
    {
        // Account the tail beyond m_UsableSize as one more unused range.
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
    }
}

void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        // The unusable tail counts as one more unused range, but it is not
        // available for allocations, so unusedRangeSizeMax is left unchanged.
        ++inoutStats.unusedRangeCount;
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    // Gather statistics for the header, then print nodes recursively.
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED

bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity: pessimistically align both
    // size and alignment of mixed/unknown resources up to the granularity.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}

bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    /*
    Lost allocations are not supported in the buddy allocator at the moment.
    Support might be added in the future.
    */
    return pAllocationRequest->itemsToMakeLostCount == 0;
}
uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    // Lost allocations are not supported in the buddy allocator.
    return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    bool upperAddress,
    VmaAllocation hAllocation)
{
    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down the tree, splitting free nodes until targetLevel is reached.
    while(currLevel < targetLevel)
    {
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
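// Example: a 40-byte request in a fresh 256-byte usable block. Assuming
// MIN_NODE_SIZE is small enough, AllocSizeToLevel(40) picks level 2 (node
// size 64). The root [0,256) splits into [0,128) + [128,256), then [0,128)
// splits into [0,64) + [64,128); [0,64) becomes the allocation node while
// [64,128) (level 2) and [128,256) (level 1) remain on the free lists.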
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);

    switch(curr->type)
    {
    case Node::TYPE_FREE:
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    // Find the deepest level whose node size still fits allocSize.
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
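// E.g. with m_UsableSize = 1024: a 1000-byte request stays at level 0
// (1000 > 512), a 300-byte request lands at level 1 (512 >= 300 > 256), and a
// 100-byte request at level 3 (128 >= 100 > 64), so each allocation wastes
// less than half of its node.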
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Walk the tree from the root, following the child that contains offset.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes with their buddies, going up as far as possible.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
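// Freeing is where "buddy" pays off: if [64,128) is freed and its buddy
// [0,64) is already free, both nodes are deleted and the parent [0,128)
// becomes a single free node; the merge repeats upward until the buddy at
// some level is still in use, so free space coalesces in O(log n) steps.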
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        // Count the whole node as one unused range (unusedRangeCount,
        // unusedBytes, unusedRangeSizeMin/Max).
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            // Count the allocation (allocationCount, usedBytes, allocationSizeMin/Max).
            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                // The node's tail beyond the allocation is another unused range.
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED

VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}

void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // Hitting this assert means there is a memory leak: VmaAllocation objects
    // in this block were not freed before the block was destroyed.
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}
bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        // Already mapped: just bump the reference count.
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
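// Mapping is reference-counted per block, so nested vmaMapMemory calls on
// allocations that share one VkDeviceMemory are safe and vkMapMemory is
// issued only for the first user. Illustrative call pattern (allocA and
// allocB assumed to live in the same block):
//
//   void* p1; vmaMapMemory(allocator, allocA, &p1); // maps the whole block
//   void* p2; vmaMapMemory(allocator, allocB, &p2); // same block: count = 2
//   vmaUnmapMemory(allocator, allocB);              // count = 1, still mapped
//   vmaUnmapMemory(allocator, allocA);              // count = 0, vkUnmapMemory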
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkBuffer hBuffer)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    // This lock is important so that vkBind* and vkMap* are never called
    // simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
        hAllocator->m_hDevice,
        hBuffer,
        m_hMemory,
        hAllocation->GetOffset());
}

VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkImage hImage)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->GetVulkanFunctions().vkBindImageMemory(
        hAllocator->m_hDevice,
        hImage,
        m_hMemory,
        hAllocation->GetOffset());
}
static void VmaInitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    // Averages can only be computed once all counts are final.
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        true, // isCustomPool
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0)
{
}

VmaPool_T::~VmaPool_T()
{
}
#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED

VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool isCustomPool,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_IsCustomPool(isCustomPool),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}

VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}

static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
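// Corruption detection only works where the CPU can read the margins back:
// the memory type must be HOST_VISIBLE and HOST_COHERENT, VMA_DEBUG_MARGIN
// must be non-zero, and VMA_DEBUG_DETECT_CORRUPTION enabled. Typical setup
// before including the implementation (the margin value is the user's
// choice):
//
//   #define VMA_DEBUG_MARGIN 16
//   #define VMA_DEBUG_DETECT_CORRUPTION 1
//   #define VMA_IMPLEMENTATION
//   #include "vk_mem_alloc.h"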
VkResult VmaBlockVector::Allocate(
    VmaPool hCurrentPool,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                hCurrentPool,
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
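// Multi-page allocation is all-or-nothing: if page k out of n fails, pages
// 0..k-1 are freed and the output array is zeroed, so callers of the public
// multi-allocation entry point (vmaAllocateMemoryPages in this header) never
// see a partially filled result.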
VkResult VmaBlockVector::AllocatePage(
    VmaPool hCurrentPool,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // With the linear algorithm, canMakeOtherLost is available only when the
    // pool is used as a ring buffer, which requires maxBlockCount = 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within a single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    // Under certain conditions this whole section can be skipped for optimization, so
    // we move on directly to trying to allocate with canMakeOtherLost flag.
    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(pCurrBlock, hCurrentPool, currentFrameIndex,
                    size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(pCurrBlock, hCurrentPool, currentFrameIndex,
                        size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", (uint32_t)blockIndex);
                        return VK_SUCCESS;
                    }
                }
            }
            else
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(pCurrBlock, hCurrentPool, currentFrameIndex,
                        size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", (uint32_t)blockIndex);
                        return VK_SUCCESS;
                    }
                }
            }
        }
        // 2. Try to create a new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = CreateBlock(newBlockSize, &newBlockIndex);
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(pBlock, hCurrentPool, currentFrameIndex,
                    size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block Size=%llu", newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }
    // 3. Try to allocate from existing blocks while making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order - prefer blocks with the smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex, m_FrameInUseCount,
                        m_BufferImageGranularity, size, alignment,
                        isUpperAddress, suballocType, canMakeOtherLost,
                        strategy, &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;
                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order - prefer blocks with the largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex, m_FrameInUseCount,
                        m_BufferImageGranularity, size, alignment,
                        isUpperAddress, suballocType, canMakeOtherLost,
                        strategy, &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;
                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex, m_FrameInUseCount, &bestRequest))
                {
                    // Allocate from this block: it no longer counts as empty.
                    if(pBestRequestBlock->m_pMetadata->IsEmpty())
                    {
                        m_HasEmptyBlock = false;
                    }
                    *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
                    (*pAllocation)->InitBlockAllocation(
                        hCurrentPool, pBestRequestBlock, bestRequest.offset,
                        alignment, size, suballocType, mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations in this block were touched meanwhile - next try.
            }
            else
            {
                // Could not find a place in any of the blocks.
                break;
            }
        }

        // Maximum number of tries exceeded - a very unlikely situation.
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
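// Worked example of the new-block sizing heuristic above (illustrative values):
// with the default preferredBlockSize = 256 MiB, no explicit block size, and a
// first allocation of 10 MiB, candidate sizes are halved up to 3 times while
// they still exceed the largest existing block and remain >= 2 * allocation
// size, so the first block is created with 256/8 = 32 MiB. If vkAllocateMemory
// fails, the size is halved again (as long as it stays >= 10 MiB) before
// giving up.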
void VmaBlockVector::Free(VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already have an empty block - keep at most one, so delete this one.
            if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            else
            {
                // This is the first empty block.
                m_HasEmptyBlock = true;
            }
        }
        // pBlock didn't become empty, but we already have another empty one -
        // find and free it (optional heuristic).
        else if(m_HasEmptyBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
                m_HasEmptyBlock = false;
            }
        }

        IncrementallySortBlocks();
    }

    // Destruction of an empty block is deferred until here, outside the mutex
    // lock, for performance reasons.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty allocation");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}
void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}
void VmaBlockVector::IncrementallySortBlocks()
{
    // Single bubble-sort pass, stopping at the first swap: keeps m_Blocks
    // sorted by ascending free space.
    for(size_t i = 1; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
        {
            VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
            return;
        }
    }
}
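// Design note (summary, not new behavior): a single pass that stops at the
// first swap is enough here because each Alloc()/Free() changes the free space
// of at most one block, so the vector is typically at most one element out of
// order. Illustrative example: free sizes [1, 4, 2, 8] need only the single
// swap 4<->2 to restore [1, 2, 4, 8].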
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    VmaPool hCurrentPool,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex, m_FrameInUseCount, m_BufferImageGranularity,
        size, alignment, isUpperAddress, suballocType,
        false, // canMakeOtherLost
        strategy, &currRequest))
    {
        // Allocate from this block.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        // We no longer have an empty block.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = false;
        }

        *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
        (*pAllocation)->InitBlockAllocation(
            hCurrentPool, pBlock, currRequest.offset,
            alignment, size, suballocType, mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;
    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Wrap it in a new block.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator, m_MemoryTypeIndex, mem,
        allocInfo.allocationSize, m_NextBlockId++, m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));
    // Go over all moves. Mark blocks that participate with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
    // Go over all blocks. Get a mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // Not originally mapped - map it now.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do the actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE THE ACTUAL DATA COPY HAPPENS.
            memcpy(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }
            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }
    // Go over all blocks in reverse order. Unmap those mapped just for
    // defragmentation - regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
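// Worked example of the flush/invalidate window arithmetic above (illustrative
// values): with nonCoherentAtomSize = 64, move.srcOffset = 200 and
// move.size = 100, memRange.offset = VmaAlignDown(200, 64) = 192 and
// memRange.size = VmaAlignUp(100 + (200 - 192), 64) = VmaAlignUp(108, 64) = 128,
// clamped to the end of the block - the smallest atom-aligned window that
// covers bytes [200, 300).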
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));
    // Go over all moves. Mark blocks that participate with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
    // Go over all blocks. Create and bind a buffer over the whole block where necessary.
    {
        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
            VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }
    // Go over all moves. Post data transfer commands to the command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // The copies are only recorded, not executed yet - signal that the
    // defragmentation is not finished until the command buffer completes.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    m_HasEmptyBlock = false;
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                m_HasEmptyBlock = true;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(m_IsCustomPool)
    {
        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}
#endif // #if VMA_STATS_STRING_ENABLED

void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;
    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
    const bool isHostCoherent = (memPropFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        (VMA_DEBUG_DETECT_CORRUPTION == 0 || !(isHostVisible && isHostCoherent));
    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            m_Mutex.LockWrite();
            pCtx->mutexLocked = true;
        }

        pCtx->Begin(overlappingMoveSupported);

        // Defragment.
        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
            VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
        pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, moves);
            }
        }
    }
}
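// Summary of the dispatch above: CPU defragmentation requires HOST_VISIBLE
// memory (memcpy through a mapping); GPU defragmentation records
// vkCmdCopyBuffer into the caller's command buffer. When both are possible,
// DEVICE_LOCAL memory or an integrated GPU tips the choice toward the GPU
// path, since reads through a mapping of device-local memory are typically
// very slow.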
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats)
{
    // Destroy buffers created for GPU defragmentation.
    for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
    {
        VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
        if(blockCtx.hBuffer)
        {
            (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
                m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
        }
    }

    if(pCtx->res >= VK_SUCCESS)
    {
        FreeEmptyBlocks(pStats);
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}
bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllAllocations(false),
    m_AllocationCount(0),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create a BlockInfo for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}
VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now, as we are inside VmaBlockVector::m_Mutex, we can make a final check
    // whether this allocation was lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move, walking blocks from last to first
        // (they are sorted from most "destination" to most "source") and
        // allocations within each block from last to first.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }
        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
        // 2. Try to find a new place for this allocation in a preceding or the current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET,
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached the limit on the number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }
                VmaDefragmentationMove move;
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    false, // upperAddress
                    allocInfo.m_hAllocation);
                pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);

                allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in m_Allocations for the next round.
        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else if(srcBlockIndex > 0)
        {
            --srcBlockIndex;
            srcAllocIndex = SIZE_MAX;
        }
        else
        {
            return VK_SUCCESS;
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks by the main criterion, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // Execute the defragmentation rounds (the main part).
    const uint32_t roundCount = 2;
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
    }

    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    // A move makes sense only if it transfers the allocation "to the left":
    // to an earlier block, or to a lower offset within the same block.
    return dstBlockIndex < srcBlockIndex ||
        (dstBlockIndex == srcBlockIndex && dstOffset < srcOffset);
}
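// Illustrative examples for MoveMakesSense(): block 2, offset 4096 -> block 0,
// offset 0 qualifies (earlier block); block 1 -> block 1 with dstOffset 0 and
// srcOffset 8192 qualifies (same block, lower offset); block 0 -> block 2
// never does.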
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}

VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most "destination" to most "source".
    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM.
    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;

    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
            // Try to place it in one of the free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
                VkDeviceSize freeSpaceBlockSize = pFreeSpaceMetadata->GetSize();
                (void)freeSpaceBlockSize;

                // Same block.
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to the next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But first register the remaining free space in dstBlock.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block.
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // Skip the move if it would shift the allocation left by
                        // less than 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;
                        VmaDefragmentationMove move = {
                            srcOrigBlockIndex, dstOrigBlockIndex,
                            srcAllocOffset, dstAllocOffset,
                            srcAllocSize };
                        moves.push_back(move);
                    }
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, dstOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
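// Summary of the fast algorithm above: blocks are sorted by ascending free
// space and compacted with two cursors - a destination cursor
// (dstBlockInfoIndex, dstOffset) that packs allocations tightly from the
// start, and a source cursor walking every suballocation. Gaps that must be
// skipped (the overlap heuristic, end-of-block remainders) are registered in
// FreeSpaceDatabase so that later, smaller allocations can still fill them.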
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        // Remove FREE suballocations; only used ones remain in the list.
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - the entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    // Find the first suballocation with offset >= suballoc.offset and insert before it.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end())
    {
        if(it->offset < suballoc.offset)
        {
            ++it;
        }
        else
        {
            break;
        }
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex,
    uint32_t algorithmFlags) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_AlgorithmFlags(algorithmFlags),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}
VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}
void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    // HERE IS THE CHOICE OF DEFRAGMENTATION ALGORITHM.
    // The fast algorithm is supported only when all of these hold:
    // - VMA_DEBUG_MARGIN is 0;
    // - all allocations in this block vector participate;
    // - there is no possibility of an image/buffer granularity conflict.
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible())
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
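// Design note on the choice above: the fast algorithm rewrites block metadata
// wholesale (see Preprocess/PostprocessMetadata), so it is only safe when
// every allocation participates and no debug margins or granularity conflicts
// need to be preserved; otherwise the generic, move-by-move algorithm is used.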
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}
VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with an algorithm other than the default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator, pool, &pool->m_BlockVector,
                    m_CurrFrameIndex, m_Flags);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // Dedicated allocations and lost allocations cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with an algorithm other than the default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator, hAllocPool, &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex, m_Flags);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to a default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VK_NULL_HANDLE, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex, m_Flags);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx, pStats,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx, pStats,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,5");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordResizeAllocation(
    uint32_t frameIndex,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, newSize);
    Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}
VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            sprintf_s(m_PtrStr, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    bool dedicatedAllocationExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}

void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}
void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED

// VmaAllocator_T

VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Margin must be a multiple of sizeof(uint32_t) because it is filled with a 32-bit magic value.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);

#if !(VMA_DEDICATED_ALLOCATION)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));

    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
    }
    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimit[heapIndex] = limit;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            memTypeIndex,
            preferredBlockSize,
            0, // minBlockCount
            SIZE_MAX, // maxBlockCount
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // isCustomPool
            false, // explicitBlockSize
            0); // algorithm
        // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here, because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}

VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_UseKhrDedicatedAllocation);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

    return res;
}
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = &vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = &vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = &vkCmdCopyBuffer;
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif // #if VMA_DEDICATED_ALLOCATION
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
#if VMA_DEDICATED_ALLOCATION
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
    }

#undef VMA_COPY_IF_NOT_NULL

    // If any of these asserts is hit, either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif
}
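/*
Example (a minimal sketch, not part of the library): when VMA_STATIC_VULKAN_FUNCTIONS
is 0, the application supplies its own entry points through the structure imported
above. If statically linked prototypes are available, it can simply forward them.
`physicalDevice` and `device` are assumed to exist in the application:

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
    vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
    vulkanFunctions.vkAllocateMemory = vkAllocateMemory;
    vulkanFunctions.vkFreeMemory = vkFreeMemory;
    // ...and so on for the remaining members of VmaVulkanFunctions.

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.pVulkanFunctions = &vulkanFunctions;
*/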
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
}
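// Worked example (informal): with the default VMA_SMALL_HEAP_MAX_SIZE of 1 GiB,
// a 256 MiB heap counts as "small" and gets 256 MiB / 8 = 32 MiB blocks, while an
// 8 GiB heap uses m_PreferredLargeHeapBlockSize, which defaults to
// VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 MiB) unless overridden via
// VmaAllocatorCreateInfo::preferredLargeHeapBlockSize.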
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, vkMemReq.size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristics: allocate dedicated memory if requested size is greater than half of preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        return AllocateDedicatedMemory(
            size,
            suballocType,
            memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer,
            dedicatedImage,
            allocationCount,
            pAllocations);
    }
    else
    {
        VkResult res = blockVector->Allocate(
            VK_NULL_HANDLE, // hCurrentPool
            m_CurrentFrameIndex.load(),
            size,
            alignment,
            finalCreateInfo,
            suballocType,
            allocationCount,
            pAllocations);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Block allocation failed. Try dedicated memory.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        res = AllocateDedicatedMemory(
            size,
            suballocType,
            memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer,
            dedicatedImage,
            allocationCount,
            pAllocations);
        if(res == VK_SUCCESS)
        {
            VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
            return VK_SUCCESS;
        }
        // Everything failed: return error code.
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation)
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size,
            suballocType,
            memTypeIndex,
            allocInfo,
            map,
            isUserDataString,
            pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register them in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            currAlloc->SetUserData(this, VMA_NULL);
            vma_delete(this, currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
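// Note on the decision path ending here: as implemented in AllocateMemoryOfType
// above, a request is promoted to dedicated memory when it exceeds half the
// preferred block size. For example, with the default 256 MiB large-heap block
// size, a 200 MiB request (> 128 MiB) lands in this function, while a 64 MiB
// request is sub-allocated from a shared block and only falls back here if the
// block vector fails (and VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT is not set).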
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}

void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));
        return createInfo.pool->m_BlockVector.Allocate(
            createInfo.pool,
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfo,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer,
                dedicatedImage,
                createInfo,
                memTypeIndex,
                suballocType,
                allocationCount,
                pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed. Try other compatible memory types.
            for(;;)
            {
                // Remove old memTypeIndex from list of possibilities.
                memoryTypeBits &= ~(1u << memTypeIndex);
                // Find alternative memTypeIndex.
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res == VK_SUCCESS)
                {
                    alignmentForMemType = VMA_MAX(
                        vkMemReq.alignment,
                        GetMemoryTypeMinAlignment(memTypeIndex));

                    res = AllocateMemoryOfType(
                        vkMemReq.size,
                        alignmentForMemType,
                        requiresDedicatedAllocation || prefersDedicatedAllocation,
                        dedicatedBuffer,
                        dedicatedImage,
                        createInfo,
                        memTypeIndex,
                        suballocType,
                        allocationCount,
                        pAllocations);
                    // Allocation from this alternative memory type succeeded.
                    if(res == VK_SUCCESS)
                    {
                        return res;
                    }
                    // else: try the next memory type in the next loop iteration.
                }
                else
                {
                    // No other matching memory type index could be found.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
        else
        {
            return res;
        }
    }
}
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            allocation->SetUserData(this, VMA_NULL);
            vma_delete(this, allocation);
        }
    }
}
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }

    switch(alloc->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
        {
            alloc->ChangeSize(newSize);
            VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
            return VK_SUCCESS;
        }
        else
        {
            return VK_ERROR_OUT_OF_POOL_MEMORY;
        }
    default:
        VMA_ASSERT(0);
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
static const uint32_t VMA_VENDOR_ID_AMD = 4098;

VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}

VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        // The allocation can become lost at any time, so use atomic compare-exchange in a loop.
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}

bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}

void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}

VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}

void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    VkResult res;
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
        {
            res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
            if(res == VK_SUCCESS)
            {
                m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
            }
        }
        else
        {
            res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    else
    {
        res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    }

    // Informative callback.
    if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
    }

    return res;
}

void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        m_HeapSizeLimit[heapIndex] += size;
    }
}
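// Example of the accounting above (informal): if VmaAllocatorCreateInfo::pHeapSizeLimit
// capped heap 0 at 512 MiB, a successful 128 MiB vkAllocateMemory reduces the remaining
// budget m_HeapSizeLimit[0] to 384 MiB, and freeing that block restores it to 512 MiB.
// A request larger than the remaining budget fails early with
// VK_ERROR_OUT_OF_DEVICE_MEMORY, without calling into Vulkan at all.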
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char *pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}

VkResult VmaAllocator_T::BindBufferMemory(VmaAllocation hAllocation, VkBuffer hBuffer)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = GetVulkanFunctions().vkBindBufferMemory(
            m_hDevice,
            hBuffer,
            hAllocation->GetMemory(),
            0); // memoryOffset
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindBufferMemory(this, hAllocation, hBuffer);
        break;
    }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(VmaAllocation hAllocation, VkImage hImage)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = GetVulkanFunctions().vkBindImageMemory(
            m_hDevice,
            hImage,
            hAllocation->GetMemory(),
            0); // memoryOffset
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindImageMemory(this, hAllocation, hImage);
        break;
    }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
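// Note on the mapping model used above: block allocations share one mapping per
// VkDeviceMemory block, reference-counted through pBlock->Map/Unmap, so nested
// map/unmap calls on allocations from the same block are legal and vkMapMemory
// is not invoked more than once for a block that is already mapped.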
void VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize allocationSize = hAllocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;

        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
        memRange.memory = hAllocation->GetMemory();

        switch(hAllocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                memRange.size = allocationSize - memRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
                    allocationSize - memRange.offset);
            }
            break;

        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            // 1. Still within this allocation.
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                size = allocationSize - offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
            }
            memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);

            // 2. Adjust to whole block.
            const VkDeviceSize allocationOffset = hAllocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
            memRange.offset += allocationOffset;
            memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
            break;
        }

        default:
            VMA_ASSERT(0);
        }

        switch(op)
        {
        case VMA_CACHE_FLUSH:
            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: just ignore this call.
}
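// Worked example of the rounding above (hypothetical numbers): with
// nonCoherentAtomSize = 64, offset = 100, size = 200:
//   memRange.offset = VmaAlignDown(100, 64) = 64
//   memRange.size   = VmaAlignUp(200 + (100 - 64), 64) = VmaAlignUp(236, 64) = 256
// The flushed/invalidated range [64, 320) covers the requested [100, 300) and
// satisfies the Vulkan requirement that both offset and size of a
// VkMappedMemoryRange are multiples of nonCoherentAtomSize, before the final
// clamp to the end of the allocation or block.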
void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}

void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();
            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }
            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}
#endif // #if VMA_STATS_STRING_ENABLED

// Public interface

VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}

void vmaDestroyAllocator(VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}
void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

void vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}
#if VMA_STATS_STRING_ENABLED

void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}
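/*
Typical use of the pair above (a sketch; `allocator` is assumed to be a valid
VmaAllocator created by the application):

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = detailed map
    // e.g. dump statsString to a file and inspect it as JSON...
    vmaFreeStatsString(allocator, statsString);
*/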
#endif // #if VMA_STATS_STRING_ENABLED

VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;

    if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0)
    {
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    default:
        break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Cost is the number of preferredFlags bits missing from this memory type.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
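/*
Example use of the search above (a sketch, not part of the library; `allocator`
is assumed to exist in the application):

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    uint32_t memTypeIndex = UINT32_MAX;
    // memoryTypeBits would normally come from vkGet*MemoryRequirements;
    // UINT32_MAX here means "any type is acceptable".
    VkResult res = vmaFindMemoryTypeIndex(allocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
    // On VK_SUCCESS, memTypeIndex is the cheapest type that has HOST_VISIBLE
    // (required) and, if available at no extra cost, DEVICE_LOCAL (preferred).
*/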
VkResult vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}

VkResult vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VkResult vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);
    VMA_DEBUG_LOG("vmaCreatePool");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}

void vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

void vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

void vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}
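/*
Example of creating and destroying a custom pool via the functions above (a sketch;
memTypeIndex is assumed to come from one of the vmaFindMemoryTypeIndex* helpers):

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 64ull * 1024 * 1024; // 64 MiB blocks
    poolCreateInfo.maxBlockCount = 2;

    VmaPool pool;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
    // ...allocate from it by setting VmaAllocationCreateInfo::pool = pool...
    vmaDestroyPool(allocator, pool);
*/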
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VkResult vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
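// Note on vmaAllocateMemoryPages: all pages share a single VkMemoryRequirements
// and a single VmaAllocationCreateInfo, which is why the loop above can fill
// pAllocationInfo as a parallel array, one entry per created allocation. If any
// page fails, AllocateMemory is expected to clean up the pages created so far,
// so the caller never receives a partially valid array.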
VkResult vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
void vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}

void vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}

VkResult vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordResizeAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation,
            newSize);
    }
#endif

    return allocator->ResizeAllocation(allocation, newSize);
}
void vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VkBool32 vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}

void vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

void vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VkResult vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

void vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}

void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo *pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented in terms of the Begin/End pair below.
    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // GPU defragmentation parameters and commandBuffer are deliberately left zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}

VkResult vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext *pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VkResult vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}
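/*
Example of driving the Begin/End pair above (a CPU-only sketch; the `allocations`
array and `allocCount` are assumed to exist in the application):

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = (uint32_t)allocCount;
    defragInfo.pAllocations = allocations;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext defragCtx;
    vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
    // Re-create buffers/images bound to moved allocations here if needed...
    vmaDefragmentationEnd(allocator, defragCtx);
*/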
VkResult vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, buffer);
}

VkResult vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, image);
}
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in alignment reported by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
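/*
Usage sketch (not part of the library source): the common path of creating a
buffer together with its memory in one call, then destroying both together.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf;
    VmaAllocation alloc;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
    if(res == VK_SUCCESS)
    {
        // ... use the buffer ...
        vmaDestroyBuffer(allocator, buf, alloc);
    }
*/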
void vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            res = allocator->BindImageMemory(*pAllocation, *pImage);
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
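/*
Usage sketch (not part of the library source): creating a sampled 2D image
with device-local memory in one call.

    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage img;
    VmaAllocation alloc;
    VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, nullptr);
    if(res == VK_SUCCESS)
    {
        // ... use the image ...
        vmaDestroyImage(allocator, img, alloc);
    }
*/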
void vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION
diff --git a/src/Tests.cpp b/src/Tests.cpp
index 8675c3f..058fedb 100644
--- a/src/Tests.cpp
+++ b/src/Tests.cpp
@@ -13,6 +13,10 @@ extern VkCommandBuffer g_hTemporaryCommandBuffer;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();
+#ifndef VMA_DEBUG_MARGIN
+ #define VMA_DEBUG_MARGIN 0
+#endif
+
enum CONFIG_TYPE {
CONFIG_TYPE_MINIMUM,
CONFIG_TYPE_SMALL,
@@ -1379,6 +1383,7 @@ void TestDefragmentationSimple()
Allocation that must be move to an overlapping place using memmove().
Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
*/
+ if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
{
AllocInfo allocInfo[2];
diff --git a/src/vk_mem_alloc.h b/src/vk_mem_alloc.h
index 8c663fd..c007128 100644
--- a/src/vk_mem_alloc.h
+++ b/src/vk_mem_alloc.h
@@ -2499,7 +2499,7 @@ VkResult vmaAllocateMemory(
@param pCreateInfo Creation parameters for each alloction.
@param allocationCount Number of allocations to make.
@param[out] pAllocations Pointer to array that will be filled with handles to created allocations.
-@param[out] pAlocationInfo Optional. Pointer to array that will be filled with parameters of created allocations.
+@param[out] pAllocationInfo Optional. Pointer to array that will be filled with parameters of created allocations.
You should free the memory using vmaFreeMemory() or vmaFreeMemoryPages().
@@ -2875,8 +2875,8 @@ typedef struct VmaDefragmentationStats {
@param allocator Allocator object.
@param pInfo Structure filled with parameters of defragmentation.
-@param pStats[out] Optional. Statistics of defragmentation. You can pass null if you are not interested in this information.
-@param pContext[out] Context object that must be passed to vmaDefragmentationEnd() to finish defragmentation.
+@param[out] pStats Optional. Statistics of defragmentation. You can pass null if you are not interested in this information.
+@param[out] pContext Context object that must be passed to vmaDefragmentationEnd() to finish defragmentation.
@return `VK_SUCCESS` and `*pContext == null` if defragmentation finished within this function call. `VK_NOT_READY` and `*pContext != null` if defragmentation has been started and you need to call vmaDefragmentationEnd() to finish it. Negative value in case of error.
Use this function instead of old, deprecated vmaDefragment().
@@ -6272,9 +6272,6 @@ private:
struct VmaBlockDefragmentationContext
{
-private:
- VMA_CLASS_NO_COPY(VmaBlockDefragmentationContext)
-public:
enum BLOCK_FLAG
{
BLOCK_FLAG_USED = 0x00000001,