#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifndef VMA_RECORDING_ENABLED
    #ifdef _WIN32
        #define VMA_RECORDING_ENABLED 1
    #else
        #define VMA_RECORDING_ENABLED 0
    #endif
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif

#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

VK_DEFINE_HANDLE(VmaAllocator)

/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);

/** \brief Pointers to some Vulkan functions - a subset used by the library.

Used in VmaAllocatorCreateInfo::pVulkanFunctions.
*/
typedef struct VmaVulkanFunctions {
    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    PFN_vkAllocateMemory vkAllocateMemory;
    PFN_vkFreeMemory vkFreeMemory;
    PFN_vkMapMemory vkMapMemory;
    PFN_vkUnmapMemory vkUnmapMemory;
    PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
    PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
    PFN_vkBindBufferMemory vkBindBufferMemory;
    PFN_vkBindImageMemory vkBindImageMemory;
    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    PFN_vkCreateBuffer vkCreateBuffer;
    PFN_vkDestroyBuffer vkDestroyBuffer;
    PFN_vkCreateImage vkCreateImage;
    PFN_vkDestroyImage vkDestroyImage;
    PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
#if VMA_DEDICATED_ALLOCATION
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
} VmaVulkanFunctions;

/** PhysicalDeviceProperties are fetched from physicalDevice by the allocator.
You can access it here, without fetching it again on your own.
*/
void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

/** PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator.
You can access it here, without fetching it again on your own.
*/
void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

/** \brief Given a memory type index, returns the property flags of this memory type.

This is just a convenience function; the same information can be obtained using
vmaGetMemoryProperties().
*/
void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

/** \brief Sets index of the current frame.
*/
void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);

#ifndef VMA_STATS_STRING_ENABLED
#define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as string in JSON format.
void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED

/**
\brief Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
*/
VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

/**
\brief Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
*/
VkResult vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

/**
\brief Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
*/
VkResult vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

/** \brief Marks all allocations in given pool as lost if they are not used in
current frame or VmaPoolCreateInfo::frameInUseCount back from now.
*/
void vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

/** \brief General purpose memory allocation.
*/
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/** \brief General purpose memory allocation for multiple allocation objects at once.
*/
VkResult vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo);

/** \brief Frees memory and destroys multiple allocations.
*/
void vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations);

/** \brief Tries to resize an allocation in place, if there is enough free memory after it.
*/
VkResult vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);

/** \brief Compacts memory by moving allocations.
*/
VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Function similar to vmaCreateBuffer().
VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdlib>
#include <cstring>

/*******************************************************************************
CONFIGURATION SECTION

Define some of these macros before each #include of this header or change them
here if you need other than default behavior depending on your environment.
*/

/*
Define this macro to 1 to make the library fetch pointers to Vulkan functions
internally, like:

    vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
*/
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
#define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

// Define this macro to 1 to make the library use STL containers instead of its own implementation.
#if VMA_USE_STL_CONTAINERS
   #define VMA_USE_STL_VECTOR 1
   #define VMA_USE_STL_UNORDERED_MAP 1
   #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif

#if VMA_USE_STL_VECTOR
   #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
   #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
   #include <list>
#endif

/*
Following headers are used in this CONFIGURATION section only, so feel free to
remove them if not needed.
*/
#include <cassert>   // for assert
#include <algorithm> // for min, max
#include <mutex>
#include <atomic>    // for std::atomic

#ifndef VMA_NULL
   // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
   #define VMA_NULL   nullptr
#endif

#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib> // for memalign()
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__)
#include <cstdlib> // for posix_memalign()
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
   #ifdef _DEBUG
       #define VMA_ASSERT(expr)         assert(expr)
   #else
       #define VMA_ASSERT(expr)
   #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
   #ifdef _DEBUG
       #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
   #else
       #define VMA_HEAVY_ASSERT(expr)
   #endif
#endif

#ifndef VMA_ALIGN_OF
   #define VMA_ALIGN_OF(type)       (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
   #if defined(_WIN32)
       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
   #else
       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (aligned_alloc((alignment), (size)))
   #endif
#endif

#ifndef VMA_SYSTEM_FREE
   #if defined(_WIN32)
       #define VMA_SYSTEM_FREE(ptr)   _aligned_free(ptr)
   #else
       #define VMA_SYSTEM_FREE(ptr)   free(ptr)
   #endif
#endif

#ifndef VMA_MIN
   #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
   #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
   #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
   #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
   #define VMA_DEBUG_LOG(format, ...)
#endif

// Conversion of numbers to strings, used only when VMA_STATS_STRING_ENABLED.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX

#ifndef VMA_ATOMIC_UINT32
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block. Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes.
    // Set nonzero for debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define this macro to 1 to automatically fill new allocations and destroyed
    // allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to
    // enable writing magic value to the margin before and after every allocation and
    // validating it, so that memory corruptions (out-of-bounds writes) are detected.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Set this to 1 for debugging purposes only, to enable single mutex protecting all
    // entry calls to the library. Can be useful for debugging multithreading issues.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
   // Maximum size of a memory heap in Vulkan to consider it "small".
   #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
   // Default size of a block allocated as single VkDeviceMemory from a "large" heap.
   #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN, little-endian binary 66 E6 84 7F.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

/*
END OF CONFIGURATION
*/

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >>  2) & 0x33333333) + (c & 0x33333333);
    c = ((c >>  4) + c) & 0x0F0F0F0F;
    c = ((c >>  8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
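/*
A minimal usage sketch (illustrative only, not part of the library).
`memoryTypeBits` is a hypothetical mask of the kind returned in
VkMemoryRequirements::memoryTypeBits:

    const uint32_t memoryTypeBits = 0x0000000Fu;                      // memory types 0..3 allowed
    const uint32_t allowedTypeCount = VmaCountBitsSet(memoryTypeBits); // == 4
*/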
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
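/*
Worked example (illustrative values only): with an alignment of 64, a
100-byte value is adjusted as

    VmaAlignUp<VkDeviceSize>(100, 64)   == 128
    VmaAlignDown<VkDeviceSize>(100, 64) == 64
    VmaRoundDiv<VkDeviceSize>(100, 64)  == 2   // rounds to nearest, not down

Note that because these helpers use integer division, they work for any
nonzero `align`, not only powers of two.
*/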
// Returns true if given number is a power of 2. T must be unsigned integer number
// or signed integer but always nonnegative. For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}

// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}

static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}

#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}
#endif // #if VMA_STATS_STRING_ENABLED

#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT

/*
Returns true if two memory blocks occupy very close addresses, so that they
could potentially end up on the same "page" of VkDeviceMemory.
Assumption: resourceA is placed entirely before resourceB, i.e.
resourceAOffset + resourceASize <= resourceBOffset.
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
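/*
Worked example (illustrative numbers, not from the library): with pageSize =
4096, a resource A spanning [0, 4000) ends on page 0, so:

    VmaBlocksOnSamePage(0, 4000, 4096, 4096) == false  // B starts on page 1
    VmaBlocksOnSamePage(0, 4000, 4050, 4096) == true   // B starts on A's last page
*/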
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and another one is optimal image. If type is unknown, behave
conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
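/*
Example (illustrative): a buffer placed next to an optimally-tiled image
conflicts and must be separated by bufferImageGranularity, while two buffers
never do:

    VmaIsBufferImageGranularityConflict(
        VMA_SUBALLOCATION_TYPE_BUFFER, VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL) == true
    VmaIsBufferImageGranularityConflict(
        VMA_SUBALLOCATION_TYPE_BUFFER, VMA_SUBALLOCATION_TYPE_BUFFER) == false
*/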
// Fills the VMA_DEBUG_MARGIN-byte region at (pData + offset) with the magic value.
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
}

// Returns false if any word of the margin was overwritten (memory corruption).
static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
    return true;
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection, or the place
where a new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
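/*
A sketch of intended use (hypothetical local array and comparator, not library
code):

    VkDeviceSize sizes[] = { 16, 32, 64, 256 };
    struct SizeLess { bool operator()(VkDeviceSize a, VkDeviceSize b) const { return a < b; } };
    // Points at 64 - the first element not less than 50:
    VkDeviceSize* it = VmaBinaryFindFirstNotLess(sizes, sizes + 4, (VkDeviceSize)50, SizeLess());
*/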
/*
Returns true if all pointers in the array are not-null and unique.
Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
T must be pointer type, e.g. VmaAllocation, VmaPool.
*/
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
////////////////////////////////////////////////////////////////////////////////
// Memory allocation

static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
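/*
Sketch of how these helpers compose (assuming a VkAllocationCallbacks* named
`pCallbacks`, possibly null - null falls back to VMA_SYSTEM_ALIGNED_MALLOC;
`Foo` is a hypothetical type):

    struct Foo { int x; };
    Foo* foo = vma_new(pCallbacks, Foo)();        // placement-new into VmaAllocate'd storage
    vma_delete(pCallbacks, foo);                  // ~Foo() + VmaFree

    Foo* arr = vma_new_array(pCallbacks, Foo, 8);
    vma_delete_array(pCallbacks, arr, 8);         // destroys elements in reverse order
*/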
// STL-compatible allocator that forwards all allocations to VmaMalloc/VmaFree
// via the given VkAllocationCallbacks.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
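/*
Sketch of intended use (assuming `pCallbacks` is a user-provided
VkAllocationCallbacks*, possibly null). This is how the library plugs its own
callbacks into the STL-style containers defined below:

    VmaStlAllocator<int> alloc(pCallbacks);
    VmaVector< int, VmaStlAllocator<int> > v(alloc);
    v.push_back(42);
*/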
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            // Fixed: pass the callbacks, not the allocator object itself.
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
4178 #endif // #if VMA_USE_STL_VECTOR 4180 template<
typename CmpLess,
typename VectorT>
4181 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4183 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4185 vector.data() + vector.size(),
4187 CmpLess()) - vector.data();
4188 VmaVectorInsert(vector, indexToInsert, value);
4189 return indexToInsert;
4192 template<
typename CmpLess,
typename VectorT>
4193 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
4196 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
4201 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
4203 size_t indexToRemove = it - vector.begin();
4204 VmaVectorRemove(vector, indexToRemove);
4210 template<
typename CmpLess,
typename IterT,
typename KeyT>
4211 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
4214 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4215 beg, end, value, comparator);
4217 (!comparator(*it, value) && !comparator(value, *it)))
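/*
Sketch of the sorted-vector idiom used throughout the implementation
(hypothetical element type, comparator, and `pCallbacks` pointer):

    struct IntLess { bool operator()(int a, int b) const { return a < b; } };
    VmaVector< int, VmaStlAllocator<int> > v(VmaStlAllocator<int>(pCallbacks));
    VmaVectorInsertSorted<IntLess>(v, 5);                 // keeps v sorted
    VmaVectorInsertSorted<IntLess>(v, 2);
    bool removed = VmaVectorRemoveSorted<IntLess>(v, 5);  // binary search + erase
*/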
////////////////////////////////////////////////////////////////////////////////
// class VmaPoolAllocator

/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    void Clear();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}

template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
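/*
Design note: each ItemBlock threads its unused Items into an intrusive
singly-linked free list through Item::NextFreeIndex, so most Alloc()/Free()
calls are O(1) within a block and touch no heap. A usage sketch (hypothetical
POD type `MyItem` and callbacks pointer `pCallbacks`):

    VmaPoolAllocator<MyItem> pool(pCallbacks, 32); // first block holds 32 items
    MyItem* item = pool.Alloc();
    pool.Free(item);    // returns the item to its block's free list
*/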
////////////////////////////////////////////////////////////////////////////////
// class VmaRawList, VmaList

#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
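/*
Sketch (hypothetical element type): VmaRawList exposes raw item pointers,
which the block metadata code below relies on for stable references:

    VmaRawList<int> list(pCallbacks);
    VmaListItem<int>* a = list.PushBack(1);
    VmaListItem<int>* b = list.InsertAfter(a, 2);  // a <-> b
    list.Remove(a);                                // b stays valid
*/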
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};
#endif // #if VMA_USE_STL_LIST

////////////////////////////////////////////////////////////////////////////////
// class VmaMap

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}
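/*
Sketch (hypothetical key/value types): VmaMap keeps its pairs sorted by key in
a VmaVector, so find() is a binary search rather than a hash lookup:

    VmaMap<uint32_t, float> map(VmaStlAllocator< VmaPair<uint32_t, float> >(pCallbacks));
    map.insert(VmaPair<uint32_t, float>(7, 1.5f));
    VmaPair<uint32_t, float>* it = map.find(7);
    if(it != map.end()) { map.erase(it); }
*/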
#endif // #if VMA_USE_STL_UNORDERED_MAP

////////////////////////////////////////////////////////////////////////////////

class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };

struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    // This struct cannot have constructor or destructor. It must be POD because it is
    // allocated using VmaPoolAllocator.
    void Ctor(uint32_t currentFrameIndex, bool userDataString)
    {
        m_Alignment = 1;
        m_Size = 0;
        m_pUserData = VMA_NULL;
        m_LastUseFrameIndex = currentFrameIndex;
        m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
        m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
        m_MapCount = 0;
        m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;

#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    void Dtor()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeSize(VkDeviceSize newSize);
    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const;
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.

    If hAllocation is already lost, assert - you should not call it then.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        uint32_t m_MemoryTypeIndex;
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        // Allocation out of VmaDeviceMemoryBlock.
        BlockAllocation m_BlockAllocation;
        // Allocation for an object that has its own private VkDeviceMemory.
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif

    void FreeUserDataString(VmaAllocator hAllocator);
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
allocated memory block or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparator for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
/*
Data structure used for bookkeeping of allocations and unused ranges inside a
single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    // Shouldn't modify blockCount.
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request must already be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

    // Tries to resize (grow or shrink) space for given allocation, in place.
    virtual bool ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize) { return false; }

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};
#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)

class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    virtual bool ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize);

    ////////////////////////////////////////////////////////////////////////////////
    // For defragmentation

    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    /*
    There are two suballocation vectors, used in ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st, but they
        all have smaller offset.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are upper side of double stack.
        They all have offsets higher than those in 1st vector.
        Top of this stack means smaller offsets, but higher indices in this vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
/*
- GetSize() is the original size of allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as separate, unused range, not available for allocations.

Node at level 0 has size = m_UsableSize.
Each next level contains nodes with size 2 times smaller than current level.
m_LevelCount is the maximum number of levels to use in the current object.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    struct {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // Alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    // node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
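/*
Worked example of the buddy sizing rules above (illustrative numbers): in a
block whose m_UsableSize is 256 MiB, level 0 is one 256 MiB node, level 1
holds 128 MiB nodes, and LevelToNodeSize(6) == 4 MiB. A 3 MiB request is
therefore served from a 4 MiB node at level 6; the 1 MiB of internal
fragmentation stays inside that node and is accounted in m_SumFreeSize, per
the member comment above.
*/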
/*
Represents a single block of device memory (`VkDeviceMemory`) with all the
data about its regions (aka suballocations, #VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    VkResult CheckCorruption(VmaAllocator hAllocator);

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        const VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkBuffer hBuffer);
    VkResult BindImageMemory(
        const VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkImage hImage);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it's not used by multiple threads simultaneously,
    e.g. vkMapMemory, vkBindBufferMemory.
    Also protects m_MapCount, m_pMappedData.
    Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
};

class VmaDefragmentationAlgorithm;
// Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific Vulkan memory type.
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool isCustomPool,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaPool GetParentPool() const { return m_hParentPool; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsEmpty() const { return m_Blocks.empty(); }
    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void Free(VmaAllocation hAllocation);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats);

    // To be used only while the m_Mutex is locked. Used during defragmentation.
    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_IsCustomPool;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    bool m_HasEmptyBlock;
    VMA_RW_MUTEX m_Mutex;
    // Incrementally sorted by sum of free size, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    // Saves result to pDefragCtx->res.
    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    // Saves result to pDefragCtx->res.
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);
};
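// VmaBlockVector owns all VmaDeviceMemoryBlock objects of one memory type (for
// the default pools) or of one custom pool. Allocate() first tries existing
// blocks, then creates a new block via CreateBlock() up to m_MaxBlockCount;
// m_Blocks is kept incrementally sorted by available free size (see
// IncrementallySortBlocks) so the search visits the most promising blocks
// without a full sort on every allocation.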
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo,
        VkDeviceSize preferredBlockSize);
    ~VmaPool_T();

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
#if VMA_STATS_STRING_ENABLED
    //void PrintDetailedMap(class VmaStringBuilder& sb);
#endif

private:
    uint32_t m_Id;
};

class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
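// VmaDefragmentationAlgorithm is the strategy interface: a concrete algorithm
// receives the allocations to consider via AddAllocation()/AddAll() and emits
// a list of VmaDefragmentationMove from Defragment(), bounded by
// maxBytesToMove and maxAllocationsToMove. Two implementations follow:
// _Generic (thorough, sorts allocations by size) and _Fast (single linear
// pass over block metadata).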
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure for remaining empty space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
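// FreeSpaceDatabase is a tiny fixed-size (MAX_COUNT = 4) cache of the largest
// free ranges seen while scanning blocks. Register() keeps the biggest
// candidates; Fetch() picks the entry that leaves the most space after the
// aligned allocation, then shrinks or invalidates it. For example, fetching
// size 64 with alignment 16 from a registered range {offset=8, size=100}
// returns dstOffset=16 and, since the 28-byte remainder is at least
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER, keeps {offset=80, size=28}.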
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    uint32_t flags;
    VkBuffer hBuffer;

    VmaBlockDefragmentationContext() :
        flags(0),
        hBuffer(VK_NULL_HANDLE)
    {
    }
};
class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VkResult res;
    bool mutexLocked;
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex,
        uint32_t algorithmFlags);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

    void Begin(bool overlappingMoveSupported);

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_AlgorithmFlags;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    // Used between constructor and Begin.
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats);

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    VmaDefragmentationStats* const m_pStats;
    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
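// VmaDefragmentationContext_T fans a single vmaDefragmentationBegin() call out
// into one VmaBlockVectorDefragmentationContext per touched default memory
// type (m_DefaultPoolContexts) plus one per touched custom pool
// (m_CustomPoolContexts); Defragment() then drives each of them within the
// shared CPU/GPU byte and allocation budgets.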
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    VmaRecorder();
    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        bool dedicatedAllocationExtensionEnabled);
    ~VmaRecorder();

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordResizeAllocation(
        uint32_t frameIndex,
        VmaAllocation allocation,
        VkDeviceSize newSize);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }

    private:
        char m_PtrStr[17];
        const char* m_Str;
    };

    bool m_UseMutex;
    VmaRecordFlags m_Flags;
    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    int64_t m_Freq;
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
    void Flush();
};
#endif // #if VMA_RECORDING_ENABLED

// Thread-safe wrapper over VmaPoolAllocator free list, for allocation of VmaAllocation_T objects.
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    VmaAllocation Allocate();
    void Free(VmaAllocation hAlloc);

private:
    VMA_MUTEX m_Mutex;
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    bool m_UseMutex;
    bool m_UseKhrDedicatedAllocation;
    VkDevice m_hDevice;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Number of bytes free out of limit, or VK_WHOLE_SIZE if no limit for that heap.
    VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
    VMA_MUTEX m_HeapSizeLimitMutex;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(
        VmaDefragmentationContext context);

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult BindBufferMemory(VmaAllocation hAllocation, VkBuffer hBuffer);
    VkResult BindImageMemory(VmaAllocation hAllocation, VkImage hImage);

    void FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);
};
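// Allocation flow: AllocateMemory() decides between a dedicated allocation
// (its own VkDeviceMemory, used when required or preferred per
// VK_KHR_dedicated_allocation, or when requested via allocation flags) and a
// block allocation sub-allocated from the VmaBlockVector of the chosen memory
// type via AllocateMemoryOfType(). Dedicated allocations are tracked per
// memory type in m_pDedicatedAllocations under m_DedicatedAllocationsMutex.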
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
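// These overloads mirror the earlier VkAllocationCallbacks-based helpers:
// objects are constructed with placement new in memory obtained through the
// user's allocation callbacks (see vma_new_array used below for user-data
// strings), so they must be destroyed by an explicit destructor call followed
// by VmaFree() of the raw storage - a plain operator delete would bypass the
// user-provided allocator.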
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    VmaUint32ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    VmaUint64ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}
#endif // #if VMA_STATS_STRING_ENABLED

#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}
void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}
#endif // #if VMA_STATS_STRING_ENABLED

void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            const char* const newStrSrc = (char*)pUserData;
            const size_t newStrLen = strlen(newStrSrc);
            char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
            memcpy(newStrDst, newStrSrc, newStrLen + 1);
            m_pUserData = newStrDst;
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}
void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        {
            ++mapRefCount;
        }
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
{
    VMA_ASSERT(newSize > 0);
    m_Size = newSize;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}
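// m_MapCount packs two pieces of state: MAP_COUNT_FLAG_PERSISTENT_MAP marks
// allocations created with VMA_ALLOCATION_CREATE_MAPPED_BIT, while the
// remaining low bits count nested map calls (hence the "< 0x7F" overflow
// checks in BlockAllocMap/DedicatedAllocMap further below).
// ChangeBlockAllocation above re-maps the destination block with the same
// reference count so a persistently mapped allocation stays mapped across a
// defragmentation move.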
VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_MemoryTypeIndex;
    default:
        VMA_ASSERT(0);
        return UINT32_MAX;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you're doing :)
    */
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting the last-use frame index to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
                return true;
            }
        }
    }
}
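// MakeLost() retries a compare-exchange on the atomic last-use frame index:
// the allocation can only become lost if it was not used within the last
// frameInUseCount frames, and the CAS guarantees that a concurrent touch
// (which bumps the frame index) wins over losing the allocation - the loop
// then re-reads the new value and re-evaluates the conditions.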
#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    if(m_pUserData != VMA_NULL)
    {
        char* const oldStr = (char*)m_pUserData;
        const size_t oldStrLen = strlen(oldStr);
        vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
        m_pUserData = VMA_NULL;
    }
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}
#endif // #if VMA_STATS_STRING_ENABLED

struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};

////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata

VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED

////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Generic

VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}
VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}
bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in m_FreeSuballocationsBySize.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations - previous allocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't match expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}
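// Validate() recomputes every cached total (offsets, free count, free bytes,
// the contents of m_FreeSuballocationsBySize) from the suballocation list
// itself, so any drift between the incremental bookkeeping and the real list
// trips a VMA_VALIDATE. It is only exercised under VMA_HEAVY_ASSERT.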
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}

void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;

    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}
void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED

bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request:
    // Early return false.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    // Search free suballocations registered by size.
    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index], false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    it, false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search from largest to smallest free suballocation.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index], false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.

        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    suballocIt, canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
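// Alloc() carves the chosen free suballocation in place: the middle part
// becomes the new allocation, and any leftover bytes in front (paddingBegin,
// produced by alignment and VMA_DEBUG_MARGIN) or behind (paddingEnd) are
// re-inserted as their own free suballocations and re-registered in
// m_FreeSuballocationsBySize, keeping the by-size index consistent.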
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize)
{
    typedef VmaSuballocationList::iterator iter_type;
    for(iter_type suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == alloc)
        {
            iter_type nextItem = suballocItem;
            ++nextItem;

            // Should have been ensured on higher level.
            VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);

            // Shrinking.
            if(newSize < alloc->GetSize())
            {
                const VkDeviceSize sizeDiff = suballoc.size - newSize;

                // There is next item.
                if(nextItem != m_Suballocations.end())
                {
                    // Next item is free: grow it backward.
                    if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                    {
                        UnregisterFreeSuballocation(nextItem);
                        nextItem->offset -= sizeDiff;
                        nextItem->size += sizeDiff;
                        RegisterFreeSuballocation(nextItem);
                    }
                    // Next item is not free: create free item after current one.
                    else
                    {
                        VmaSuballocation newFreeSuballoc;
                        newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
                        newFreeSuballoc.offset = suballoc.offset + newSize;
                        newFreeSuballoc.size = sizeDiff;
                        newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                        iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
                        RegisterFreeSuballocation(newFreeSuballocIt);

                        ++m_FreeCount;
                    }
                }
                // This is the last item: create free item at the end.
                else
                {
                    VmaSuballocation newFreeSuballoc;
                    newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
                    newFreeSuballoc.offset = suballoc.offset + newSize;
                    newFreeSuballoc.size = sizeDiff;
                    newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                    m_Suballocations.push_back(newFreeSuballoc);

                    iter_type newFreeSuballocIt = m_Suballocations.end();
                    RegisterFreeSuballocation(--newFreeSuballocIt);

                    ++m_FreeCount;
                }

                suballoc.size = newSize;
                m_SumFreeSize += sizeDiff;
            }
            // Growing.
            else
            {
                const VkDeviceSize sizeDiff = newSize - suballoc.size;

                // There is next item.
                if(nextItem != m_Suballocations.end())
                {
                    // Next item is free.
                    if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                    {
                        // There is not enough free space, including margin.
                        if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
                        {
                            return false;
                        }

                        // There is more free space than required: move and shrink this next item.
                        if(nextItem->size > sizeDiff)
                        {
                            UnregisterFreeSuballocation(nextItem);
                            nextItem->offset += sizeDiff;
                            nextItem->size -= sizeDiff;
                            RegisterFreeSuballocation(nextItem);
                        }
                        // There is exactly the amount of free space required: remove this next free item.
                        else
                        {
                            UnregisterFreeSuballocation(nextItem);
                            m_Suballocations.erase(nextItem);
                            --m_FreeCount;
                        }
                    }
                    // Next item is not free - there is no space to grow.
                    else
                    {
                        return false;
                    }
                }
                // This is the last item - there is no space to grow.
                else
                {
                    return false;
                }

                suballoc.size = newSize;
                m_SumFreeSize -= sizeDiff;
            }

            // We cannot call Validate() here because alloc object is updated to the new size outside of this call.
            return true;
        }
    }
    VMA_ASSERT(0 && "Not found!");
    return false;
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from the offset at the beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for bufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have the final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for bufferImageGranularity conflicts.
        // If a conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from the offset at the beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for bufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for bufferImageGranularity conflicts.
        // If a conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. *pOffset is already filled.
    return true;
}
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}
VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
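// FreeSuballocation() is the coalescing step: the freed item is merged with a
// free successor and/or predecessor, so the invariant checked in Validate()
// (no two adjacent free suballocations) always holds after a free. The merged
// range is re-registered in m_FreeSuballocationsBySize under its new size.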
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }
}
void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item->size,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
    {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
            {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
8810 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8811 VmaBlockMetadata(hAllocator),
8813 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8814 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8815 m_1stVectorIndex(0),
8816 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8817 m_1stNullItemsBeginCount(0),
8818 m_1stNullItemsMiddleCount(0),
8819 m_2ndNullItemsCount(0)
8823 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}
bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty())
    {
        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty())
    {
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i)
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree)
        {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else
        {
            ++nullItem1stCount;
        }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; )
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}
VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();

    // Gaps inside the vectors (freed "null items") are not suitable for reuse
    // in the linear allocator; only space available for new allocations counts.
    if(IsEmpty())
    {
        return size;
    }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        // Available space is after end of 1st, as well as before beginning of 1st
        // (which would then become a ring buffer).
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }

    case SECOND_VECTOR_RING_BUFFER:
        // Available space is only between end of 2nd and beginning of 1st.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }

    case SECOND_VECTOR_DOUBLE_STACK:
        // Available space is only between end of 1st and top of 2nd.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }

    default:
        VMA_ASSERT(0);
        return 0;
    }
}
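
// The statistics and printing functions below all walk the block the same
// way: first the 2nd vector when it acts as a ring buffer (lowest offsets),
// then the 1st vector, then the 2nd vector again when it acts as an upper
// stack (highest offsets, iterated backward). Gaps between lastOffset and the
// next live suballocation are reported as unused ranges.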
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else // We are at the end.
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }

            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else // We are at the end.
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else // We are at the end.
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    outInfo.unusedBytes = size - outInfo.usedBytes;
}
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                ++inoutStats.allocationCount;

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else // We are at the end.
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            ++inoutStats.allocationCount;

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else // We are at the end.
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                ++inoutStats.allocationCount;

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else // We are at the end.
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: Calculate overall statistics.

    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;

    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                ++alloc2ndCount;
                usedBytes += suballoc.size;

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    ++unusedRangeCount;
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                ++unusedRangeCount;
            }

            ++alloc1stCount;
            usedBytes += suballoc.size;

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < size)
            {
                ++unusedRangeCount;
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                ++alloc2ndCount;
                usedBytes += suballoc.size;

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    ++unusedRangeCount;
                }
                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: Print the same regions, this time emitting JSON entries.
    lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }

            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED

bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}
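
// Lower address: grow the 1st vector forward (possibly wrapping into a ring
// buffer at offset 0). Upper address: grow the 2nd vector downward from the
// end of the block (double stack). The two modes are mutually exclusive per
// block; mixing them is rejected by asserts in the two implementations below.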
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
    if(allocSize > size)
    {
        return false;
    }
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty())
    {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset)
        {
            return false;
        }
    }

    // Start from offset equal to end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0)
    {
        if(resultOffset < VMA_DEBUG_MARGIN)
        {
            return false;
        }
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for BufferImageGranularity conflicts.
    // Make bigger alignment if necessary.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty())
    {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
        {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
            {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else
            {
                // Already on previous page.
                break;
            }
        }
        if(bufferImageGranularityConflict)
        {
            resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
        }
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
    {
        // Check previous suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
            }
        }

        // All tests passed: Success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        // Try to allocate at the end of 1st vector.

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
                {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on previous page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }

    // Second try: wrap around and allocate at the end of 2nd vector, i.e. at the
    // beginning of the block, turning it into a ring buffer.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost)
        {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
            {
                // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    // No problem.
                }
                else
                {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else
                    {
                        return false;
                    }
                }
                ++index1st;
            }

            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, we must mark more allocations lost or fail.
            if(bufferImageGranularity > 1)
            {
                while(index1st < suballocations1st.size())
                {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
                    {
                        if(suballoc.hAllocation != VK_NULL_HANDLE)
                        {
                            // Not checking the actual type conflict here; conservatively try to make it lost.
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else
                            {
                                return false;
                            }
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                    ++index1st;
                }
            }

            // Special case: there is not enough room at the end for this allocation,
            // even after making all collidable allocations from 1st lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
            {
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1)
            {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++)
                {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            return true;
        }
    }

    return false;
}
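
// "Lost allocations" background: allocations created with
// VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT may be evicted to make room for
// new ones once they haven't been used for more than frameInUseCount frames.
// CreateAllocationRequest_LowerAddress above only counts such candidates;
// the two functions below actually flip them to free "null items".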
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0)
    {
        return true;
    }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from 1st.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
    {
        if(index == suballocations->size())
        {
            index = 0;
            // If we get to the end of 1st, we wrap around to beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                suballocations = &AccessSuballocations2nd();
            }
            // else: m_2ndVectorMode == SECOND_VECTOR_EMPTY:
            // suballocations continues pointing at AccessSuballocations1st().
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st())
                {
                    ++m_1stNullItemsMiddleCount;
                }
                else
                {
                    ++m_2ndNullItemsCount;
                }
                ++madeLostCount;
            }
            else
            {
                return false;
            }
        }
        ++index;
    }

    CleanupAfterFree();

    return true;
}
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount)
    {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}
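
// Corruption detection: when VMA_DEBUG_MARGIN > 0 and
// VMA_DEBUG_DETECT_CORRUPTION is enabled, a magic value is written into the
// margin before and after every allocation. CheckCorruption() below
// re-validates those markers for all live allocations in both vectors.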
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}
void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: Mark it as next empty at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc);
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
            VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
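
// Example of the heuristic above: with suballocCount = 100 of which
// nullItemCount = 60 are freed "null items", 60 * 2 = 120 >= (100 - 60) * 3 = 120
// holds, so the vector is compacted. In other words, compaction starts once
// null items outnumber live items by a factor of 1.5, and only for vectors
// longer than 32 entries.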
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.remove(0);
        }

        if(ShouldCompact1st())
        {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex)
                {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
        {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
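
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Buddy

/*
VmaBlockMetadata_Buddy implements a classic buddy allocator (a summary of the
code below, not new behavior): the block's usable size is rounded down to a
power of two and represented as a binary tree. Each node is FREE, SPLIT (two
children of half the size), or ALLOCATION. Allocation sizes are rounded up to
a level's node size; freeing merges a node with its buddy whenever both become
free. m_FreeList keeps a doubly-linked list of free nodes per level so pushing
and popping free nodes is O(1).
*/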
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}
VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}
void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
    {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that levels beyond m_LevelCount have empty free lists.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}
VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL)
        {
            return LevelToNodeSize(level);
        }
    }
    return 0;
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;
    outInfo.allocationCount = outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = outInfo.unusedBytes = 0;
    outInfo.allocationSizeMax = outInfo.unusedRangeSizeMax = 0;
    outInfo.allocationSizeMin = outInfo.unusedRangeSizeMin = UINT64_MAX;

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0)
    {
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
    }
}
void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        ++inoutStats.unusedRangeCount;
        // Not updating unusedRangeSizeMax with unusableSize because this space is not available for allocations.
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED

bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity: whenever the allocation
    // might be an OPTIMAL image, pad it up to a full granularity page.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    // Lost allocations are not supported in the buddy allocator.
    return pAllocationRequest->itemsToMakeLostCount == 0;
}
uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    // Lost allocations are not supported in the buddy allocator.
    return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down, splitting free nodes.
    while(currLevel < targetLevel)
    {
        // currNode is already the first free node at currLevel.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
    const Node* parent,
    const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev / next are validated separately while walking the free lists.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    // Find the deepest level whose node size still fits allocSize.
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
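
// Worked example for AllocSizeToLevel: with m_UsableSize = 1024 the level
// node sizes are 1024, 512, 256, 128, ... For allocSize = 200 the loop stops
// when nextLevelNodeSize = 128 < 200, returning level 2 (node size 256) - the
// smallest node that still fits the request.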
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find node and level.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes (buddies) if possible.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo,
    const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);

            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json,
    const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED

////////////////////////////////////////////////////////////////////////////////
// class VmaDeviceMemoryBlock

VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}
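
// Init() below picks the metadata implementation for this VkDeviceMemory
// block from the pool's algorithm flag: VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT
// selects VmaBlockMetadata_Linear, VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT selects
// VmaBlockMetadata_Buddy, and the default (0) is VmaBlockMetadata_Generic.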
void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // This is the most important assert in the entire library.
    // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}
bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
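
// Map()/Unmap() below are reference-counted: the block is vkMapMemory'ed only
// on the 0 -> 1 transition and vkUnmapMemory'ed on the 1 -> 0 transition, so
// nested persistent mappings of the same VkDeviceMemory stay cheap and legal
// from VMA's point of view (Vulkan itself forbids mapping the same memory
// object twice).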
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkBuffer hBuffer)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    // This lock is important so that we don't call vkBind... and/or vkMap...
    // simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
        hAllocator->m_hDevice,
        hBuffer,
        m_hMemory,
        hAllocation->GetOffset());
}
VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkImage hImage)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    // This lock is important so that we don't call vkBind... and/or vkMap...
    // simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->GetVulkanFunctions().vkBindImageMemory(
        hAllocator->m_hDevice,
        hImage,
        m_hMemory,
        hAllocation->GetOffset());
}
static void InitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
{
    inoutInfo.blockCount += srcInfo.blockCount;
    inoutInfo.allocationCount += srcInfo.allocationCount;
    inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    inoutInfo.usedBytes += srcInfo.usedBytes;
    inoutInfo.unusedBytes += srcInfo.unusedBytes;
    inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        true, // isCustomPool
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0)
{
}

VmaPool_T::~VmaPool_T()
{
}
#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED

VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool isCustomPool,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_IsCustomPool(isCustomPool),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}
VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}

bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
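// Note: corruption detection is an opt-in debug feature. A minimal sketch of
// how a user enables it, using the standard VMA configuration macros (shown
// here only for illustration):
//
//   #define VMA_DEBUG_MARGIN 16
//   #define VMA_DEBUG_DETECT_CORRUPTION 1
//   #define VMA_IMPLEMENTATION
//   #include "vk_mem_alloc.h"
//
// With both macros set, every allocation is surrounded by a margin filled with
// a magic value, and vmaCheckCorruption() / vmaCheckPoolCorruption() validate
// those margins. As the check above shows, this only works for memory types
// that are both HOST_VISIBLE and HOST_COHERENT.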
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;

VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
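// The cleanup loop above gives Allocate() all-or-nothing semantics: if any
// page fails, every page allocated so far is freed and the output array is
// zeroed. A user-side sketch (names of the locals are illustrative only):
//
//   VmaAllocation allocs[8];
//   VkResult res = vmaAllocateMemoryPages(
//       allocator, &memReq, &allocCreateInfo, 8, allocs, VMA_NULL);
//   // On failure, none of allocs[] stays allocated.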
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If linearAlgorithm is used, canMakeOtherLost is available only when used as ring buffer,
    // which in turn is available only when maxBlockCount = 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", (uint32_t)blockIndex);
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", (uint32_t)blockIndex);
                        return VK_SUCCESS;
                    }
                }
            }
        }

        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = CreateBlock(newBlockSize, &newBlockIndex);
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block Size=%llu", newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }

    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // We no longer have an empty Allocation.
                    if(pBestRequestBlock->m_pMetadata->IsEmpty())
                    {
                        m_HasEmptyBlock = false;
                    }
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
                    (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment,
                        size,
                        suballocType,
                        mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations must have been touched while we are here. Next try.
            }
            else
            {
                // Could not find place in any of the blocks - break outer loop.
                break;
            }
        }
        // Maximum number of tries exceeded - a very unlikely situation. May happen only
        // when many other threads are simultaneously touching allocations making them lost.
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
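// Note on the new-block sizing above: when the block size is not explicit,
// the first blocks are created at 1/8, 1/4, 1/2 of m_PreferredBlockSize
// (at most NEW_BLOCK_SIZE_SHIFT_MAX halvings), as long as the candidate size
// is still at least twice the requested size. For example, with a 256 MiB
// preferred block size the first small allocation lands in a 32 MiB block;
// blocks only reach the full preferred size as the pool grows. If
// vkAllocateMemory fails, the size is halved again, down to the requested
// size, before giving up.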
void VmaBlockVector::Free(VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already has empty block. We don't want to have two, so delete this one.
            if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // We now have first empty block.
            else
            {
                m_HasEmptyBlock = true;
            }
        }
        // pBlock didn't become empty, but we have another empty block - find and free that one.
        // (This is optional, heuristics.)
        else if(m_HasEmptyBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
                m_HasEmptyBlock = false;
            }
        }

        IncrementallySortBlocks();
    }

    // Destruction of a memory block. Important to do it outside of the lock and after
    // m_HasEmptyBlock was updated, in case this block was the only remaining empty one.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty allocation");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
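// Note on the empty-block policy above: the vector keeps at most one empty
// block alive (tracked by m_HasEmptyBlock), so a free-then-allocate pattern
// does not repeatedly destroy and recreate VkDeviceMemory. A second block
// that becomes empty is destroyed, but only while more than m_MinBlockCount
// blocks remain.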
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}

void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}

void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size,
        alignment,
        isUpperAddress,
        suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from pBlock.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        // We no longer have an empty block.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = false;
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
        (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment,
            size,
            suballocType,
            mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}

VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;
    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Create new block for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
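// Each block created above owns exactly one VkDeviceMemory. Keeping blocks
// large and few matters because implementations limit the total number of
// device memory allocations (VkPhysicalDeviceLimits::maxMemoryAllocationCount,
// commonly 4096), which is far below the number of buffers and images a
// typical application creates.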
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
            memmove(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
    // Regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
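// Note on the invalidate/flush pattern above: for non-coherent memory, the
// ranges passed to vkInvalidateMappedMemoryRanges / vkFlushMappedMemoryRanges
// must respect VkPhysicalDeviceLimits::nonCoherentAtomSize, so each range is
// widened:
//
//   memRange.offset = VmaAlignDown(moveOffset, atomSize);
//   memRange.size   = VMA_MIN(
//       VmaAlignUp(moveSize + (moveOffset - memRange.offset), atomSize),
//       blockSize - memRange.offset);
//
// i.e. the start is rounded down, the size rounded up, and the result clamped
// so it never reaches past the end of the block.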
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind buffer for whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
            VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Save buffers to defrag context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    m_HasEmptyBlock = false;
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                m_HasEmptyBlock = true;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(m_IsCustomPool)
    {
        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}
#endif // #if VMA_STATS_STRING_ENABLED

void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
    const bool isHostCoherent = (memPropFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled();

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: Heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            m_Mutex.LockWrite();
            pCtx->mutexLocked = true;
        }

        pCtx->Begin(overlappingMoveSupported);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
            VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
        pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, moves);
            }
        }
    }
}
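// Note on the path selection above: CPU defragmentation requires HOST_VISIBLE
// memory (blocks are mapped and data is moved with memmove), while the GPU
// path records vkCmdCopyBuffer transfers into the caller's command buffer and
// suits DEVICE_LOCAL memory. When both are possible, DEVICE_LOCAL memory or
// an integrated GPU tips the choice toward the GPU path.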
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats)
{
    // Destroy buffers.
    for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
    {
        VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
        if(blockCtx.hBuffer)
        {
            (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
                m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
        }
    }

    if(pCtx->res >= VK_SUCCESS)
    {
        FreeEmptyBlocks(pStats);
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}

bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}

VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}

VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET,
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move;
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);
                pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);

                allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pSrcBlockInfo->m_Allocations for next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}

VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks by the main criterium, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // This is a choice based on research.
    const uint32_t roundCount = 2;

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
    }

    return result;
}

bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
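// MoveMakesSense() above is a lexicographic comparison on
// (blockIndex, offset): a move is only worthwhile if it transports the
// allocation to an earlier block, or to a lower offset within the same block.
// This guarantees that every accepted move compacts data toward the front and
// that the algorithm cannot oscillate between rounds.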
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}

VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most destination.

    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;

    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            // Try to place it in one of free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register remaining free space at the end of dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If destination and source place overlap, skip if it would move it
                        // by only < 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;
                        VmaDefragmentationMove move = {
                            srcOrigBlockIndex, dstOrigBlockIndex,
                            srcAllocOffset, dstAllocOffset,
                            srcAllocSize };
                        moves.push_back(move);
                    }
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, dstOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
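// The "fast" algorithm above is a single linear sweep: blocks are ordered from
// most-full ("destination") to most-free ("source"), and each allocation is
// packed at the lowest offset that fits, either in the current destination
// block or in a gap remembered in FreeSpaceDatabase. Because it edits the
// suballocation lists directly, the metadata is stripped of free entries
// beforehand and rebuilt afterwards (Preprocess/PostprocessMetadata below).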
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    // Find the position where the suballocation belongs, sorted by offset.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end())
    {
        if(it->offset < suballoc.offset)
        {
            ++it;
        }
        else
        {
            break;
        }
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex,
    uint32_t algorithmFlags) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_AlgorithmFlags(algorithmFlags),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}

VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}

void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    // Fast algorithm is supported only when certain criteria are met:
    // - VMA_DEBUG_MARGIN is 0.
    // - All allocations in this block vector are moveable.
    // - There is no possibility of image/buffer granularity conflict.
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible())
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
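// Algorithm selection above, summarized: the fast single-sweep path is only
// safe when there is no debug margin to preserve, the context covers every
// allocation of the block vector, and no buffer/image granularity conflict is
// possible. Otherwise the generic round-based algorithm is used, which can
// also defragment a user-supplied subset of allocations.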
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}

VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with algorithm other than default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex,
                    m_Flags);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentators. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // DedicatedAlloc cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocation cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with algorithm other than default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex,
                            m_Flags);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex,
                        m_Flags);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
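// Note on the result codes above: res aggregates per-block-vector results, and
// ApplyDefragmentationMovesGpu() sets a context's res to VK_NOT_READY when
// copy commands were recorded. vmaDefragmentationBegin() therefore returns
// VK_NOT_READY to tell the caller that the command buffer must be submitted
// and have completed execution before vmaDefragmentationEnd() is called.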
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,5");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordResizeAllocation(
    uint32_t frameIndex,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, newSize);
    Flush();
}

void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}

VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            sprintf_s(m_PtrStr, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    bool dedicatedAllocationExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}

void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}
void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED

VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

VmaAllocation VmaAllocationObjectAllocator::Allocate()
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc();
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Margin must be a multiple of sizeof(uint32_t) because corruption markers are written as uint32_t.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);

#if !(VMA_DEDICATED_ALLOCATION)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));

    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
    }

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimit[heapIndex] = limit;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0, // minBlockCount
            SIZE_MAX, // maxBlockCount
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // isCustomPool
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call CreateMinBlocks here, because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}

VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_UseKhrDedicatedAllocation);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

    return res;
}
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif // #if VMA_DEDICATED_ALLOCATION
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
#if VMA_DEDICATED_ALLOCATION
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
    }

#undef VMA_COPY_IF_NOT_NULL

    // If any of these asserts is hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif
}
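
/*
A minimal sketch of filling VmaVulkanFunctions by hand, for the case where
VMA_STATIC_VULKAN_FUNCTIONS is 0 (e.g. the application loads device-level
entry points dynamically). The handles `device`, `physicalDevice` and the
surrounding setup are assumed to exist; only members checked by the asserts
above need to be filled.

    VmaVulkanFunctions funcs = {};
    funcs.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
    funcs.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
    funcs.vkAllocateMemory = (PFN_vkAllocateMemory)vkGetDeviceProcAddr(device, "vkAllocateMemory");
    funcs.vkFreeMemory = (PFN_vkFreeMemory)vkGetDeviceProcAddr(device, "vkFreeMemory");
    // ...and so on for the remaining members listed above.

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.pVulkanFunctions = &funcs;
*/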
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
}
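
/*
Worked example of the heuristic above, assuming the default macro values
(VMA_SMALL_HEAP_MAX_SIZE = 1 GiB, VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 MiB):
a 512 MiB heap counts as "small", so its preferred block size is
512 MiB / 8 = 64 MiB, while an 8 GiB heap gets the full 256 MiB default
(or VmaAllocatorCreateInfo::preferredLargeHeapBlockSize if the user set one).
*/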
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If the memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristic: allocate dedicated memory if requested size is greater than half of preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        else
        {
            return AllocateDedicatedMemory(
                size,
                suballocType,
                memTypeIndex,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
                finalCreateInfo.pUserData,
                dedicatedBuffer,
                dedicatedImage,
                allocationCount,
                pAllocations);
        }
    }
    else
    {
        VkResult res = blockVector->Allocate(
            m_CurrentFrameIndex.load(),
            size,
            alignment,
            finalCreateInfo,
            suballocType,
            allocationCount,
            pAllocations);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Block vector allocation failed. Try dedicated memory, unless forbidden.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        else
        {
            res = AllocateDedicatedMemory(
                size,
                suballocType,
                memTypeIndex,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
                finalCreateInfo.pUserData,
                dedicatedBuffer,
                dedicatedImage,
                allocationCount,
                pAllocations);
            if(res == VK_SUCCESS)
            {
                VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
                return VK_SUCCESS;
            }
            else
            {
                // Everything failed: return error code.
                VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
                return res;
            }
        }
    }
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation)
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size,
            suballocType,
            memTypeIndex,
            allocInfo,
            map,
            isUserDataString,
            pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register the allocations in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all allocations created so far.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            // Explicit unmapping is not needed: destroying VkDeviceMemory implicitly unmaps it.
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            currAlloc->SetUserData(this, VMA_NULL);
            currAlloc->Dtor();
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate();
    (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}

void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));
        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfo,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory Vulkan types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer,
                dedicatedImage,
                createInfo,
                memTypeIndex,
                suballocType,
                allocationCount,
                pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }

            // Allocation from this memory type failed. Try other compatible memory types.
            for(;;)
            {
                // Remove old memTypeIndex from the list of possibilities.
                memoryTypeBits &= ~(1u << memTypeIndex);
                // Find an alternative memTypeIndex.
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res == VK_SUCCESS)
                {
                    alignmentForMemType = VMA_MAX(
                        vkMemReq.alignment,
                        GetMemoryTypeMinAlignment(memTypeIndex));

                    res = AllocateMemoryOfType(
                        vkMemReq.size,
                        alignmentForMemType,
                        requiresDedicatedAllocation || prefersDedicatedAllocation,
                        dedicatedBuffer,
                        dedicatedImage,
                        createInfo,
                        memTypeIndex,
                        suballocType,
                        allocationCount,
                        pAllocations);
                    // Allocation from this alternative memory type succeeded.
                    if(res == VK_SUCCESS)
                    {
                        return res;
                    }
                    // else: try the next one in the next loop iteration.
                }
                else
                {
                    // No other matching memory type index could be found.
                    // Not returning res (VK_ERROR_FEATURE_NOT_PRESENT) because we already failed to allocate once.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
        else
        {
            return res;
        }
    }
}
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            allocation->SetUserData(this, VMA_NULL);
            allocation->Dtor();
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }

    switch(alloc->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
        {
            alloc->ChangeSize(newSize);
            VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
            return VK_SUCCESS;
        }
        else
        {
            return VK_ERROR_OUT_OF_POOL_MEMORY;
        }
    default:
        VMA_ASSERT(0);
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
static const uint32_t VMA_VENDOR_ID_AMD = 4098;

VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}

VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}

void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}
void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}

void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate();
    (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    VkResult res;
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
        {
            res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
            if(res == VK_SUCCESS)
            {
                m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
            }
        }
        else
        {
            res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    else
    {
        res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    }

    // Informative callback.
    if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
    }

    return res;
}

void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        m_HeapSizeLimit[heapIndex] += size;
    }
}
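
/*
A brief sketch of how the per-heap budget above is driven from the public
API: setting VmaAllocatorCreateInfo::pHeapSizeLimit caps what
AllocateVulkanMemory will hand out per heap. The 256 MiB figure is purely
illustrative; `physicalDevice` and `device` are assumed to exist.

    VkDeviceSize heapLimits[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        heapLimits[i] = VK_WHOLE_SIZE;       // unlimited by default
    heapLimits[0] = 256ull * 1024 * 1024;    // cap heap 0 at 256 MiB

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.pHeapSizeLimit = heapLimits;
    // Allocations beyond the cap fail with VK_ERROR_OUT_OF_DEVICE_MEMORY.
*/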
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
VkResult VmaAllocator_T::BindBufferMemory(VmaAllocation hAllocation, VkBuffer hBuffer)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = GetVulkanFunctions().vkBindBufferMemory(
            m_hDevice,
            hBuffer,
            hAllocation->GetMemory(),
            0); // memoryOffset
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, hBuffer);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(VmaAllocation hAllocation, VkImage hImage)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = GetVulkanFunctions().vkBindImageMemory(
            m_hDevice,
            hImage,
            hAllocation->GetMemory(),
            0); // memoryOffset
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, hImage);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
void VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize allocationSize = hAllocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;

        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
        memRange.memory = hAllocation->GetMemory();

        switch(hAllocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                memRange.size = allocationSize - memRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
                    allocationSize - memRange.offset);
            }
            break;

        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            // 1. Still within this allocation.
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                size = allocationSize - offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
            }
            memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);

            // 2. Adjust to the whole block.
            const VkDeviceSize allocationOffset = hAllocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
            memRange.offset += allocationOffset;
            memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);

            break;
        }

        default:
            VMA_ASSERT(0);
        }

        switch(op)
        {
        case VMA_CACHE_FLUSH:
            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: just ignore this call.
}
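
/*
Worked example of the alignment math above: with nonCoherentAtomSize = 64,
flushing offset = 100, size = 200 inside an allocation yields
memRange.offset = VmaAlignDown(100, 64) = 64 and
memRange.size = VmaAlignUp(200 + (100 - 64), 64) = VmaAlignUp(236, 64) = 256,
i.e. the range grows outward to whole atoms, as the Vulkan spec requires for
vkFlushMappedMemoryRanges / vkInvalidateMappedMemoryRanges.
*/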
void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();
    // Explicit unmapping is not needed: destroying VkDeviceMemory implicitly unmaps it.
    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}
void vmaDestroyAllocator(VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}

void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

void vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}
#if VMA_STATS_STRING_ENABLED

void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}
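
/*
A minimal usage sketch of the statistics pair above (the allocator handle is
assumed to be created already):

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = detailed JSON map
    printf("%s\n", statsString);
    vmaFreeStatsString(allocator, statsString);
*/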
#endif // #if VMA_STATS_STRING_ENABLED

VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;

    const bool mapped = (pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    if(mapped)
    {
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    default:
        break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to the memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Cost is the number of bits from preferredFlags not present in this memory type.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
                // Remember the memory type with the lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
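
/*
A minimal usage sketch of the search above: find a memory type for a
host-visible staging allocation. memoryTypeBits would normally come from
vkGetBufferMemoryRequirements; `allocator` and `memReq` are assumed to exist.

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex;
    VkResult res = vmaFindMemoryTypeIndex(
        allocator, memReq.memoryTypeBits, &allocCreateInfo, &memTypeIndex);
    // VK_ERROR_FEATURE_NOT_PRESENT means no type satisfied requiredFlags.
*/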
VkResult vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}

VkResult vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VkResult vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}

void vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

void vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

void vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VkResult vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
VkResult vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
16308 VMA_ASSERT(allocator);
16310 if(allocation == VK_NULL_HANDLE)
16315 VMA_DEBUG_LOG(
"vmaFreeMemory");
16317 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16319 #if VMA_RECORDING_ENABLED 16320 if(allocator->GetRecorder() != VMA_NULL)
16322 allocator->GetRecorder()->RecordFreeMemory(
16323 allocator->GetCurrentFrameIndex(),
16328 allocator->FreeMemory(
16335 size_t allocationCount,
16338 if(allocationCount == 0)
16343 VMA_ASSERT(allocator);
16345 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16347 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16349 #if VMA_RECORDING_ENABLED 16350 if(allocator->GetRecorder() != VMA_NULL)
16352 allocator->GetRecorder()->RecordFreeMemoryPages(
16353 allocator->GetCurrentFrameIndex(),
16354 (uint64_t)allocationCount,
16359 allocator->FreeMemory(allocationCount, pAllocations);
VkResult vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordResizeAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation,
            newSize);
    }
#endif

    return allocator->ResizeAllocation(allocation, newSize);
}
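/*
Editor's usage sketch: vmaResizeAllocation() tries to grow or shrink an
allocation in place and fails without side effects when the surrounding block
layout does not allow it. The new size is illustrative; `allocator` and
`alloc` are assumed valid.
*/
static void ExampleShrinkAllocation(VmaAllocator allocator, VmaAllocation alloc)
{
    if(vmaResizeAllocation(allocator, alloc, 4096) != VK_SUCCESS)
    {
        // Resizing is best-effort: keep using the allocation at its old size.
    }
}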
void vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}
VkBool32 vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}
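/*
Editor's usage sketch: the per-frame pattern for allocations created with
VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT. vmaTouchAllocation() checks that
the allocation is not lost and marks it as used in the current frame.
Handles are assumed valid; the Example* name is hypothetical.
*/
static bool ExampleEnsureNotLost(VmaAllocator allocator, VmaAllocation alloc)
{
    if(vmaTouchAllocation(allocator, alloc) == VK_TRUE)
    {
        return true; // Safe to use during this frame.
    }
    // The allocation was lost: release it and recreate the resource.
    vmaFreeMemory(allocator, alloc);
    return false;
}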
void vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}
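/*
Editor's usage sketch: attaching an application-side pointer to an allocation
and reading it back through VmaAllocationInfo. If the allocation was created
with VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT, the pointer is instead
interpreted as a null-terminated string to copy. Handles are assumed valid.
*/
static void ExampleTagAllocation(VmaAllocator allocator, VmaAllocation alloc, void* myObject)
{
    vmaSetAllocationUserData(allocator, alloc, myObject);

    VmaAllocationInfo info = {};
    vmaGetAllocationInfo(allocator, alloc, &info);
    VMA_ASSERT(info.pUserData == myObject);
}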
void vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VkResult vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}
void vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
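/*
Editor's usage sketch: a host-to-allocation copy through a temporary mapping.
Mapping is reference-counted internally, so nested Map/Unmap pairs on the same
allocation are legal. Assumes `alloc` lives in a HOST_VISIBLE memory type and
that `data`/`size` fit inside it.
*/
static VkResult ExampleUpload(VmaAllocator allocator, VmaAllocation alloc, const void* data, size_t size)
{
    void* mapped = VMA_NULL;
    VkResult res = vmaMapMemory(allocator, alloc, &mapped);
    if(res == VK_SUCCESS)
    {
        memcpy(mapped, data, size);
        vmaUnmapMemory(allocator, alloc);
    }
    return res;
}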
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}
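/*
Editor's usage sketch: flush after CPU writes and invalidate before CPU reads
when the allocation's memory type lacks VK_MEMORY_PROPERTY_HOST_COHERENT_BIT.
On coherent memory both calls are unnecessary but harmless. Assumes a
HOST_VISIBLE, currently mapped allocation.
*/
static void ExampleFlushThenInvalidate(VmaAllocator allocator, VmaAllocation alloc)
{
    // The CPU finished writing through the mapped pointer:
    vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);

    // ... submit GPU work that reads and rewrites the memory, wait on a fence ...

    // Before the CPU reads what the GPU wrote:
    vmaInvalidateAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
}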
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
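/*
Editor's usage sketch: a periodic corruption sweep over all memory types.
This only does real work when margins and corruption detection are enabled at
compile time (VMA_DEBUG_MARGIN and VMA_DEBUG_DETECT_CORRUPTION); otherwise it
returns VK_ERROR_FEATURE_NOT_PRESENT. `allocator` is assumed valid.
*/
static void ExampleCheckCorruption(VmaAllocator allocator)
{
    const VkResult res = vmaCheckCorruption(allocator, UINT32_MAX);
    // VK_ERROR_VALIDATION_FAILED_EXT would indicate detected corruption.
    VMA_ASSERT(res == VK_SUCCESS || res == VK_ERROR_FEATURE_NOT_PRESENT);
}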
VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented on top of vmaDefragmentationBegin/End.
    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // GPU defragmentation parameters are left at zero; the legacy interface does not use them.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}
VkResult vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}
VkResult vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}
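/*
Editor's usage sketch: CPU-side defragmentation over a set of allocations with
the Begin/End pair. Resources bound to moved allocations must be destroyed
before this and recreated/rebound afterwards; that bookkeeping is elided here.
Assumes `allocs`/`count` describe live allocations not in use by the GPU.
*/
static VkResult ExampleDefragment(VmaAllocator allocator, VmaAllocation* allocs, uint32_t count)
{
    VmaDefragmentationInfo2 info = {};
    info.allocationCount = count;
    info.pAllocations = allocs;
    info.maxCpuBytesToMove = VK_WHOLE_SIZE;
    info.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext ctx = VK_NULL_HANDLE;
    VkResult res = vmaDefragmentationBegin(allocator, &info, VMA_NULL, &ctx);
    if(res == VK_NOT_READY)
    {
        // No command buffer was given, so finish the operation immediately.
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}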
VkResult vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, buffer);
}
VkResult vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, image);
}
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. Query memory requirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in alignment reported by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
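/*
Editor's usage sketch: the common one-call path that creates a VkBuffer,
allocates device memory for it, and binds the two together. Size and usage
flags are illustrative; `allocator` is assumed valid.
*/
static VkResult ExampleCreateVertexBuffer(VmaAllocator allocator, VkBuffer* pBuffer, VmaAllocation* pAllocation)
{
    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = 1024 * 1024;
    bufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    return vmaCreateBuffer(allocator, &bufInfo, &allocInfo, pBuffer, pAllocation, VMA_NULL);
}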
void vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            res = allocator->BindImageMemory(*pAllocation, *pImage);
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
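/*
Editor's usage sketch: creating a sampled 2D texture with memory allocated and
bound in one call, then destroying image and allocation together. Format and
extent are illustrative; `allocator` is assumed valid.
*/
static VkResult ExampleCreateTexture(VmaAllocator allocator)
{
    VkImageCreateInfo imgInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgInfo.imageType = VK_IMAGE_TYPE_2D;
    imgInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgInfo.extent = { 1024, 1024, 1 };
    imgInfo.mipLevels = 1;
    imgInfo.arrayLayers = 1;
    imgInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imgInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage image = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VkResult res = vmaCreateImage(allocator, &imgInfo, &allocInfo, &image, &alloc, VMA_NULL);
    if(res == VK_SUCCESS)
    {
        // ... upload and use the texture ...
        vmaDestroyImage(allocator, image, alloc);
    }
    return res;
}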
void vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
#endif // #ifdef VMA_IMPLEMENTATION
diff --git a/src/vk_mem_alloc.h b/src/vk_mem_alloc.h
index ffd2473..92c4c89 100644
--- a/src/vk_mem_alloc.h
+++ b/src/vk_mem_alloc.h
@@ -3177,6 +3177,10 @@ the containers.
 #endif
 #endif
 
+/*
+THESE INCLUDES ARE NOT ENABLED BY DEFAULT.
+Library has its own container implementation.
+*/
 #if VMA_USE_STL_VECTOR
 #include <vector>
 #endif