23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1622 #ifndef VMA_RECORDING_ENABLED 1624 #define VMA_RECORDING_ENABLED 1 1626 #define VMA_RECORDING_ENABLED 0 1631 #define NOMINMAX // For windows.h 1635 #include <vulkan/vulkan.h> 1638 #if VMA_RECORDING_ENABLED 1639 #include <windows.h> 1642 #if !defined(VMA_DEDICATED_ALLOCATION) 1643 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1644 #define VMA_DEDICATED_ALLOCATION 1 1646 #define VMA_DEDICATED_ALLOCATION 0 1664 uint32_t memoryType,
1665 VkDeviceMemory memory,
1670 uint32_t memoryType,
1671 VkDeviceMemory memory,
1744 #if VMA_DEDICATED_ALLOCATION 1745 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1746 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1873 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1881 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1891 uint32_t memoryTypeIndex,
1892 VkMemoryPropertyFlags* pFlags);
1904 uint32_t frameIndex);
1937 #define VMA_STATS_STRING_ENABLED 1 1939 #if VMA_STATS_STRING_ENABLED 1946 char** ppStatsString,
1947 VkBool32 detailedMap);
1951 char* pStatsString);
1953 #endif // #if VMA_STATS_STRING_ENABLED 2185 uint32_t memoryTypeBits,
2187 uint32_t* pMemoryTypeIndex);
2203 const VkBufferCreateInfo* pBufferCreateInfo,
2205 uint32_t* pMemoryTypeIndex);
2221 const VkImageCreateInfo* pImageCreateInfo,
2223 uint32_t* pMemoryTypeIndex);
2395 size_t* pLostAllocationCount);
2494 const VkMemoryRequirements* pVkMemoryRequirements,
2548 VkDeviceSize newSize);
2917 size_t allocationCount,
2918 VkBool32* pAllocationsChanged,
2984 const VkBufferCreateInfo* pBufferCreateInfo,
3009 const VkImageCreateInfo* pImageCreateInfo,
3035 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3038 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3039 #define VMA_IMPLEMENTATION 3042 #ifdef VMA_IMPLEMENTATION 3043 #undef VMA_IMPLEMENTATION 3065 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3066 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3078 #if VMA_USE_STL_CONTAINERS 3079 #define VMA_USE_STL_VECTOR 1 3080 #define VMA_USE_STL_UNORDERED_MAP 1 3081 #define VMA_USE_STL_LIST 1 3084 #ifndef VMA_USE_STL_SHARED_MUTEX 3086 #if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 3087 #define VMA_USE_STL_SHARED_MUTEX 1 3091 #if VMA_USE_STL_VECTOR 3095 #if VMA_USE_STL_UNORDERED_MAP 3096 #include <unordered_map> 3099 #if VMA_USE_STL_LIST 3108 #include <algorithm> 3114 #define VMA_NULL nullptr 3117 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3119 void *aligned_alloc(
size_t alignment,
size_t size)
3122 if(alignment <
sizeof(
void*))
3124 alignment =
sizeof(
void*);
3127 return memalign(alignment, size);
3129 #elif defined(__APPLE__) || defined(__ANDROID__) 3131 void *aligned_alloc(
size_t alignment,
size_t size)
3134 if(alignment <
sizeof(
void*))
3136 alignment =
sizeof(
void*);
3140 if(posix_memalign(&pointer, alignment, size) == 0)
3154 #define VMA_ASSERT(expr) assert(expr) 3156 #define VMA_ASSERT(expr) 3162 #ifndef VMA_HEAVY_ASSERT 3164 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3166 #define VMA_HEAVY_ASSERT(expr) 3170 #ifndef VMA_ALIGN_OF 3171 #define VMA_ALIGN_OF(type) (__alignof(type)) 3174 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3176 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3178 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3182 #ifndef VMA_SYSTEM_FREE 3184 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3186 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3191 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3195 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3199 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3203 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3206 #ifndef VMA_DEBUG_LOG 3207 #define VMA_DEBUG_LOG(format, ...) 3217 #if VMA_STATS_STRING_ENABLED 3218 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
3220 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats `num` as decimal text into outStr, writing at most strLen bytes
// (including the terminating NUL), with snprintf truncation semantics.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long value = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", value);
}
// Formats the pointer `ptr` into outStr (at most strLen bytes including the
// NUL) using the platform's "%p" representation.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
3236 void Lock() { m_Mutex.lock(); }
3237 void Unlock() { m_Mutex.unlock(); }
3241 #define VMA_MUTEX VmaMutex 3245 #ifndef VMA_RW_MUTEX 3246 #if VMA_USE_STL_SHARED_MUTEX 3248 #include <shared_mutex> 3252 void LockRead() { m_Mutex.lock_shared(); }
3253 void UnlockRead() { m_Mutex.unlock_shared(); }
3254 void LockWrite() { m_Mutex.lock(); }
3255 void UnlockWrite() { m_Mutex.unlock(); }
3257 std::shared_mutex m_Mutex;
3259 #define VMA_RW_MUTEX VmaRWMutex 3260 #elif defined(_WIN32) 3265 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3266 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3267 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3268 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3269 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3273 #define VMA_RW_MUTEX VmaRWMutex 3279 void LockRead() { m_Mutex.Lock(); }
3280 void UnlockRead() { m_Mutex.Unlock(); }
3281 void LockWrite() { m_Mutex.Lock(); }
3282 void UnlockWrite() { m_Mutex.Unlock(); }
3286 #define VMA_RW_MUTEX VmaRWMutex 3287 #endif // #if VMA_USE_STL_SHARED_MUTEX 3288 #endif // #ifndef VMA_RW_MUTEX 3298 #ifndef VMA_ATOMIC_UINT32 3299 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3302 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3307 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3310 #ifndef VMA_DEBUG_ALIGNMENT 3315 #define VMA_DEBUG_ALIGNMENT (1) 3318 #ifndef VMA_DEBUG_MARGIN 3323 #define VMA_DEBUG_MARGIN (0) 3326 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3331 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3334 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3340 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3343 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3348 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3351 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3356 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3359 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3360 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3364 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3365 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3369 #ifndef VMA_CLASS_NO_COPY 3370 #define VMA_CLASS_NO_COPY(className) \ 3372 className(const className&) = delete; \ 3373 className& operator=(const className&) = delete; 3376 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3379 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3381 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3382 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3388 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3390 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3391 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count), using the
// classic branch-free SWAR reduction: pairwise sums of ever-wider fields.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);       // sums of adjacent bit pairs
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333); // 4-bit field sums
    c = ((c >> 4) + c) & 0x0F0F0F0F;                // 8-bit field sums
    c = ((c >> 8) + c) & 0x00FF00FF;                // 16-bit field sums
    c = ((c >> 16) + c) & 0x0000FFFF;               // final total
    return c; // restored: the return statement was lost in the garbled source
}
// Rounds `val` up to the nearest multiple of `align`. Requires align > 0;
// works for any alignment value, not only powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return (bumped / align) * align;
}
// Rounds `val` down to the nearest multiple of `align`. Requires align > 0;
// works for any alignment value, not only powers of two.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    const T wholeUnits = val / align;
    return wholeUnits * align;
}
// Integer division x / y with the result rounded to nearest: adds half the
// divisor before dividing. Intended for non-negative integer operands.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Returns true when x is a power of two, via the x & (x-1) trick.
// NOTE: like the classic bit trick, this also reports true for x == 0;
// callers are expected to pass x > 0.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T maskBelow = x - 1;
    return (x & maskBelow) == 0;
}
3438 static inline uint32_t VmaNextPow2(uint32_t v)
3449 static inline uint64_t VmaNextPow2(uint64_t v)
3463 static inline uint32_t VmaPrevPow2(uint32_t v)
3473 static inline uint64_t VmaPrevPow2(uint64_t v)
// Returns true when pStr is null or points at a zero-length string.
static inline bool VmaStrIsEmpty(const char* pStr)
{
    if(pStr == nullptr)
    {
        return true;
    }
    return *pStr == '\0';
}
3490 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition step for VMA_SORT's quicksort: uses *(end - 1) as the
// pivot, moves every element for which cmp(elem, pivot) holds in front of it,
// and returns the pivot's final position. Requires beg < end.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg;                // next slot for a "less than pivot" element
    // Renamed loop variable (was the copy-pasted name "memTypeIndex").
    for(Iterator it = beg; it < centerValue; ++it)
    {
        if(cmp(*it, *centerValue))
        {
            if(insertIndex != it)
            {
                VMA_SWAP(*it, *insertIndex);
            }
            ++insertIndex;
        }
    }
    // Move the pivot into its final sorted position.
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex; // restored: return statement was lost in the garbled source
}
3531 template<
typename Iterator,
typename Compare>
3532 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3536 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3537 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3538 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3542 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3544 #endif // #ifndef VMA_SORT 3553 static inline bool VmaBlocksOnSamePage(
3554 VkDeviceSize resourceAOffset,
3555 VkDeviceSize resourceASize,
3556 VkDeviceSize resourceBOffset,
3557 VkDeviceSize pageSize)
3559 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3560 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3561 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3562 VkDeviceSize resourceBStart = resourceBOffset;
3563 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3564 return resourceAEndPage == resourceBStartPage;
3567 enum VmaSuballocationType
3569 VMA_SUBALLOCATION_TYPE_FREE = 0,
3570 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
3571 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
3572 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
3573 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
3574 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
3575 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
3584 static inline bool VmaIsBufferImageGranularityConflict(
3585 VmaSuballocationType suballocType1,
3586 VmaSuballocationType suballocType2)
3588 if(suballocType1 > suballocType2)
3590 VMA_SWAP(suballocType1, suballocType2);
3593 switch(suballocType1)
3595 case VMA_SUBALLOCATION_TYPE_FREE:
3597 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3599 case VMA_SUBALLOCATION_TYPE_BUFFER:
3601 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3602 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3603 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3605 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3606 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3607 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3608 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3610 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3611 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3619 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3621 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3622 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3623 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3625 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3629 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3631 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3632 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3633 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3635 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3646 VMA_CLASS_NO_COPY(VmaMutexLock)
3648 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3649 m_pMutex(useMutex ? &mutex : VMA_NULL)
3650 {
if(m_pMutex) { m_pMutex->Lock(); } }
3652 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3654 VMA_MUTEX* m_pMutex;
3658 struct VmaMutexLockRead
3660 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3662 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3663 m_pMutex(useMutex ? &mutex : VMA_NULL)
3664 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3665 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3667 VMA_RW_MUTEX* m_pMutex;
3671 struct VmaMutexLockWrite
3673 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3675 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3676 m_pMutex(useMutex ? &mutex : VMA_NULL)
3677 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3678 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3680 VMA_RW_MUTEX* m_pMutex;
3683 #if VMA_DEBUG_GLOBAL_MUTEX 3684 static VMA_MUTEX gDebugGlobalMutex;
3685 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3687 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3691 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search with std::lower_bound semantics: returns an iterator to the
// first element in the sorted range [beg, end) that is NOT less than `key`
// according to cmp, or `end` when every element is less.
// NOTE(review): the while-loop body was truncated in the garbled source;
// restored here per the upstream VMA implementation.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1; // mid is less than key: answer lies to the right
        }
        else
        {
            up = mid;       // mid is not less: mid itself may be the answer
        }
    }
    return beg + down;
}
// Validates an array of `count` handles/pointers: every element must be
// non-null and all elements must be pairwise distinct. Returns true when the
// array is valid. O(count^2) — intended for small arrays in debug validation.
// NOTE(review): the return statements were truncated in the garbled source;
// restored here.
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == nullptr)
        {
            return false; // null handle is invalid
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false; // duplicate handles are invalid
            }
        }
    }
    return true;
}
3750 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3752 if((pAllocationCallbacks != VMA_NULL) &&
3753 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3755 return (*pAllocationCallbacks->pfnAllocation)(
3756 pAllocationCallbacks->pUserData,
3759 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3763 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3767 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3769 if((pAllocationCallbacks != VMA_NULL) &&
3770 (pAllocationCallbacks->pfnFree != VMA_NULL))
3772 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3776 VMA_SYSTEM_FREE(ptr);
3780 template<
typename T>
3781 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3783 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3786 template<
typename T>
3787 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3789 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3792 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3794 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3796 template<
typename T>
3797 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3800 VmaFree(pAllocationCallbacks, ptr);
3803 template<
typename T>
3804 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3808 for(
size_t i = count; i--; )
3812 VmaFree(pAllocationCallbacks, ptr);
3817 template<
typename T>
3818 class VmaStlAllocator
3821 const VkAllocationCallbacks*
const m_pCallbacks;
3822 typedef T value_type;
3824 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3825 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3827 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3828 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3830 template<
typename U>
3831 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3833 return m_pCallbacks == rhs.m_pCallbacks;
3835 template<
typename U>
3836 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3838 return m_pCallbacks != rhs.m_pCallbacks;
3841 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3844 #if VMA_USE_STL_VECTOR 3846 #define VmaVector std::vector 3848 template<
typename T,
typename allocatorT>
3849 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3851 vec.insert(vec.begin() + index, item);
3854 template<
typename T,
typename allocatorT>
3855 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3857 vec.erase(vec.begin() + index);
3860 #else // #if VMA_USE_STL_VECTOR 3865 template<
typename T,
typename AllocatorT>
3869 typedef T value_type;
3871 VmaVector(
const AllocatorT& allocator) :
3872 m_Allocator(allocator),
3879 VmaVector(
size_t count,
const AllocatorT& allocator) :
3880 m_Allocator(allocator),
3881 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3887 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3888 m_Allocator(src.m_Allocator),
3889 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3890 m_Count(src.m_Count),
3891 m_Capacity(src.m_Count)
3895 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3901 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3904 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3908 resize(rhs.m_Count);
3911 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3917 bool empty()
const {
return m_Count == 0; }
3918 size_t size()
const {
return m_Count; }
3919 T* data() {
return m_pArray; }
3920 const T* data()
const {
return m_pArray; }
3922 T& operator[](
size_t index)
3924 VMA_HEAVY_ASSERT(index < m_Count);
3925 return m_pArray[index];
3927 const T& operator[](
size_t index)
const 3929 VMA_HEAVY_ASSERT(index < m_Count);
3930 return m_pArray[index];
3935 VMA_HEAVY_ASSERT(m_Count > 0);
3938 const T& front()
const 3940 VMA_HEAVY_ASSERT(m_Count > 0);
3945 VMA_HEAVY_ASSERT(m_Count > 0);
3946 return m_pArray[m_Count - 1];
3948 const T& back()
const 3950 VMA_HEAVY_ASSERT(m_Count > 0);
3951 return m_pArray[m_Count - 1];
3954 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3956 newCapacity = VMA_MAX(newCapacity, m_Count);
3958 if((newCapacity < m_Capacity) && !freeMemory)
3960 newCapacity = m_Capacity;
3963 if(newCapacity != m_Capacity)
3965 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3968 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3970 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3971 m_Capacity = newCapacity;
3972 m_pArray = newArray;
3976 void resize(
size_t newCount,
bool freeMemory =
false)
3978 size_t newCapacity = m_Capacity;
3979 if(newCount > m_Capacity)
3981 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3985 newCapacity = newCount;
3988 if(newCapacity != m_Capacity)
3990 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3991 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3992 if(elementsToCopy != 0)
3994 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3996 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3997 m_Capacity = newCapacity;
3998 m_pArray = newArray;
4004 void clear(
bool freeMemory =
false)
4006 resize(0, freeMemory);
4009 void insert(
size_t index,
const T& src)
4011 VMA_HEAVY_ASSERT(index <= m_Count);
4012 const size_t oldCount = size();
4013 resize(oldCount + 1);
4014 if(index < oldCount)
4016 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4018 m_pArray[index] = src;
4021 void remove(
size_t index)
4023 VMA_HEAVY_ASSERT(index < m_Count);
4024 const size_t oldCount = size();
4025 if(index < oldCount - 1)
4027 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4029 resize(oldCount - 1);
4032 void push_back(
const T& src)
4034 const size_t newIndex = size();
4035 resize(newIndex + 1);
4036 m_pArray[newIndex] = src;
4041 VMA_HEAVY_ASSERT(m_Count > 0);
4045 void push_front(
const T& src)
4052 VMA_HEAVY_ASSERT(m_Count > 0);
4056 typedef T* iterator;
4058 iterator begin() {
return m_pArray; }
4059 iterator end() {
return m_pArray + m_Count; }
4062 AllocatorT m_Allocator;
4068 template<
typename T,
typename allocatorT>
4069 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4071 vec.insert(index, item);
4074 template<
typename T,
typename allocatorT>
4075 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4080 #endif // #if VMA_USE_STL_VECTOR 4082 template<
typename CmpLess,
typename VectorT>
4083 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4085 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4087 vector.data() + vector.size(),
4089 CmpLess()) - vector.data();
4090 VmaVectorInsert(vector, indexToInsert, value);
4091 return indexToInsert;
4094 template<
typename CmpLess,
typename VectorT>
4095 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
4098 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
4103 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
4105 size_t indexToRemove = it - vector.begin();
4106 VmaVectorRemove(vector, indexToRemove);
4112 template<
typename CmpLess,
typename IterT,
typename KeyT>
4113 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
4116 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4117 beg, end, value, comparator);
4119 (!comparator(*it, value) && !comparator(value, *it)))
4134 template<
typename T>
4135 class VmaPoolAllocator
4137 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4139 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
4140 ~VmaPoolAllocator();
4148 uint32_t NextFreeIndex;
4155 uint32_t FirstFreeIndex;
4158 const VkAllocationCallbacks* m_pAllocationCallbacks;
4159 size_t m_ItemsPerBlock;
4160 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4162 ItemBlock& CreateNewBlock();
4165 template<
typename T>
4166 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
4167 m_pAllocationCallbacks(pAllocationCallbacks),
4168 m_ItemsPerBlock(itemsPerBlock),
4169 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4171 VMA_ASSERT(itemsPerBlock > 0);
4174 template<
typename T>
4175 VmaPoolAllocator<T>::~VmaPoolAllocator()
4180 template<
typename T>
4181 void VmaPoolAllocator<T>::Clear()
4183 for(
size_t i = m_ItemBlocks.size(); i--; )
4184 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
4185 m_ItemBlocks.clear();
4188 template<
typename T>
4189 T* VmaPoolAllocator<T>::Alloc()
4191 for(
size_t i = m_ItemBlocks.size(); i--; )
4193 ItemBlock& block = m_ItemBlocks[i];
4195 if(block.FirstFreeIndex != UINT32_MAX)
4197 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4198 block.FirstFreeIndex = pItem->NextFreeIndex;
4199 return &pItem->Value;
4204 ItemBlock& newBlock = CreateNewBlock();
4205 Item*
const pItem = &newBlock.pItems[0];
4206 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4207 return &pItem->Value;
4210 template<
typename T>
4211 void VmaPoolAllocator<T>::Free(T* ptr)
4214 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
4216 ItemBlock& block = m_ItemBlocks[i];
4220 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4223 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
4225 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
4226 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4227 block.FirstFreeIndex = index;
4231 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4234 template<
typename T>
4235 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4237 ItemBlock newBlock = {
4238 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
4240 m_ItemBlocks.push_back(newBlock);
4243 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
4244 newBlock.pItems[i].NextFreeIndex = i + 1;
4245 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
4246 return m_ItemBlocks.back();
4252 #if VMA_USE_STL_LIST 4254 #define VmaList std::list 4256 #else // #if VMA_USE_STL_LIST 4258 template<
typename T>
4267 template<
typename T>
4270 VMA_CLASS_NO_COPY(VmaRawList)
4272 typedef VmaListItem<T> ItemType;
4274 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4278 size_t GetCount()
const {
return m_Count; }
4279 bool IsEmpty()
const {
return m_Count == 0; }
4281 ItemType* Front() {
return m_pFront; }
4282 const ItemType* Front()
const {
return m_pFront; }
4283 ItemType* Back() {
return m_pBack; }
4284 const ItemType* Back()
const {
return m_pBack; }
4286 ItemType* PushBack();
4287 ItemType* PushFront();
4288 ItemType* PushBack(
const T& value);
4289 ItemType* PushFront(
const T& value);
4294 ItemType* InsertBefore(ItemType* pItem);
4296 ItemType* InsertAfter(ItemType* pItem);
4298 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4299 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4301 void Remove(ItemType* pItem);
4304 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4305 VmaPoolAllocator<ItemType> m_ItemAllocator;
4311 template<
typename T>
4312 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4313 m_pAllocationCallbacks(pAllocationCallbacks),
4314 m_ItemAllocator(pAllocationCallbacks, 128),
4321 template<
typename T>
4322 VmaRawList<T>::~VmaRawList()
4328 template<
typename T>
4329 void VmaRawList<T>::Clear()
4331 if(IsEmpty() ==
false)
4333 ItemType* pItem = m_pBack;
4334 while(pItem != VMA_NULL)
4336 ItemType*
const pPrevItem = pItem->pPrev;
4337 m_ItemAllocator.Free(pItem);
4340 m_pFront = VMA_NULL;
4346 template<
typename T>
4347 VmaListItem<T>* VmaRawList<T>::PushBack()
4349 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4350 pNewItem->pNext = VMA_NULL;
4353 pNewItem->pPrev = VMA_NULL;
4354 m_pFront = pNewItem;
4360 pNewItem->pPrev = m_pBack;
4361 m_pBack->pNext = pNewItem;
4368 template<
typename T>
4369 VmaListItem<T>* VmaRawList<T>::PushFront()
4371 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4372 pNewItem->pPrev = VMA_NULL;
4375 pNewItem->pNext = VMA_NULL;
4376 m_pFront = pNewItem;
4382 pNewItem->pNext = m_pFront;
4383 m_pFront->pPrev = pNewItem;
4384 m_pFront = pNewItem;
4390 template<
typename T>
4391 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4393 ItemType*
const pNewItem = PushBack();
4394 pNewItem->Value = value;
4398 template<
typename T>
4399 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4401 ItemType*
const pNewItem = PushFront();
4402 pNewItem->Value = value;
4406 template<
typename T>
4407 void VmaRawList<T>::PopBack()
4409 VMA_HEAVY_ASSERT(m_Count > 0);
4410 ItemType*
const pBackItem = m_pBack;
4411 ItemType*
const pPrevItem = pBackItem->pPrev;
4412 if(pPrevItem != VMA_NULL)
4414 pPrevItem->pNext = VMA_NULL;
4416 m_pBack = pPrevItem;
4417 m_ItemAllocator.Free(pBackItem);
4421 template<
typename T>
4422 void VmaRawList<T>::PopFront()
4424 VMA_HEAVY_ASSERT(m_Count > 0);
4425 ItemType*
const pFrontItem = m_pFront;
4426 ItemType*
const pNextItem = pFrontItem->pNext;
4427 if(pNextItem != VMA_NULL)
4429 pNextItem->pPrev = VMA_NULL;
4431 m_pFront = pNextItem;
4432 m_ItemAllocator.Free(pFrontItem);
4436 template<
typename T>
4437 void VmaRawList<T>::Remove(ItemType* pItem)
4439 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4440 VMA_HEAVY_ASSERT(m_Count > 0);
4442 if(pItem->pPrev != VMA_NULL)
4444 pItem->pPrev->pNext = pItem->pNext;
4448 VMA_HEAVY_ASSERT(m_pFront == pItem);
4449 m_pFront = pItem->pNext;
4452 if(pItem->pNext != VMA_NULL)
4454 pItem->pNext->pPrev = pItem->pPrev;
4458 VMA_HEAVY_ASSERT(m_pBack == pItem);
4459 m_pBack = pItem->pPrev;
4462 m_ItemAllocator.Free(pItem);
4466 template<
typename T>
4467 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4469 if(pItem != VMA_NULL)
4471 ItemType*
const prevItem = pItem->pPrev;
4472 ItemType*
const newItem = m_ItemAllocator.Alloc();
4473 newItem->pPrev = prevItem;
4474 newItem->pNext = pItem;
4475 pItem->pPrev = newItem;
4476 if(prevItem != VMA_NULL)
4478 prevItem->pNext = newItem;
4482 VMA_HEAVY_ASSERT(m_pFront == pItem);
4492 template<
typename T>
4493 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4495 if(pItem != VMA_NULL)
4497 ItemType*
const nextItem = pItem->pNext;
4498 ItemType*
const newItem = m_ItemAllocator.Alloc();
4499 newItem->pNext = nextItem;
4500 newItem->pPrev = pItem;
4501 pItem->pNext = newItem;
4502 if(nextItem != VMA_NULL)
4504 nextItem->pPrev = newItem;
4508 VMA_HEAVY_ASSERT(m_pBack == pItem);
4518 template<
typename T>
4519 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4521 ItemType*
const newItem = InsertBefore(pItem);
4522 newItem->Value = value;
4526 template<
typename T>
4527 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4529 ItemType*
const newItem = InsertAfter(pItem);
4530 newItem->Value = value;
4534 template<
typename T,
typename AllocatorT>
4537 VMA_CLASS_NO_COPY(VmaList)
4548 T& operator*()
const 4550 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4551 return m_pItem->Value;
4553 T* operator->()
const 4555 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4556 return &m_pItem->Value;
4559 iterator& operator++()
4561 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4562 m_pItem = m_pItem->pNext;
4565 iterator& operator--()
4567 if(m_pItem != VMA_NULL)
4569 m_pItem = m_pItem->pPrev;
4573 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4574 m_pItem = m_pList->Back();
4579 iterator operator++(
int)
4581 iterator result = *
this;
4585 iterator operator--(
int)
4587 iterator result = *
this;
4592 bool operator==(
const iterator& rhs)
const 4594 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4595 return m_pItem == rhs.m_pItem;
4597 bool operator!=(
const iterator& rhs)
const 4599 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4600 return m_pItem != rhs.m_pItem;
4604 VmaRawList<T>* m_pList;
4605 VmaListItem<T>* m_pItem;
4607 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4613 friend class VmaList<T, AllocatorT>;
4616 class const_iterator
4625 const_iterator(
const iterator& src) :
4626 m_pList(src.m_pList),
4627 m_pItem(src.m_pItem)
4631 const T& operator*()
const 4633 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4634 return m_pItem->Value;
4636 const T* operator->()
const 4638 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4639 return &m_pItem->Value;
4642 const_iterator& operator++()
4644 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4645 m_pItem = m_pItem->pNext;
4648 const_iterator& operator--()
4650 if(m_pItem != VMA_NULL)
4652 m_pItem = m_pItem->pPrev;
4656 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4657 m_pItem = m_pList->Back();
4662 const_iterator operator++(
int)
4664 const_iterator result = *
this;
4668 const_iterator operator--(
int)
4670 const_iterator result = *
this;
4675 bool operator==(
const const_iterator& rhs)
const 4677 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4678 return m_pItem == rhs.m_pItem;
4680 bool operator!=(
const const_iterator& rhs)
const 4682 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4683 return m_pItem != rhs.m_pItem;
4687 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4693 const VmaRawList<T>* m_pList;
4694 const VmaListItem<T>* m_pItem;
4696 friend class VmaList<T, AllocatorT>;
4699 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4701 bool empty()
const {
return m_RawList.IsEmpty(); }
4702 size_t size()
const {
return m_RawList.GetCount(); }
4704 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4705 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4707 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4708 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4710 void clear() { m_RawList.Clear(); }
4711 void push_back(
const T& value) { m_RawList.PushBack(value); }
4712 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4713 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4716 VmaRawList<T> m_RawList;
4719 #endif // #if VMA_USE_STL_LIST 4727 #if VMA_USE_STL_UNORDERED_MAP 4729 #define VmaPair std::pair 4731 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4732 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4734 #else // #if VMA_USE_STL_UNORDERED_MAP 4736 template<
typename T1,
typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    // Value-initializes both members.
    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
4749 template<
typename KeyT,
typename ValueT>
4753 typedef VmaPair<KeyT, ValueT> PairType;
4754 typedef PairType* iterator;
4756 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4758 iterator begin() {
return m_Vector.begin(); }
4759 iterator end() {
return m_Vector.end(); }
4761 void insert(
const PairType& pair);
4762 iterator find(
const KeyT& key);
4763 void erase(iterator it);
4766 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4769 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4771 template<
typename FirstT,
typename SecondT>
struct VmaPairFirstLess
{
    // Orders pairs by their `first` member only.
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        const FirstT& a = lhs.first;
        const FirstT& b = rhs.first;
        return a < b;
    }
    // Heterogeneous overload: compare a pair directly against a bare key.
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
4784 template<
typename KeyT,
typename ValueT>
4785 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4787 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4789 m_Vector.data() + m_Vector.size(),
4791 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4792 VmaVectorInsert(m_Vector, indexToInsert, pair);
4795 template<
typename KeyT,
typename ValueT>
4796 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4798 PairType* it = VmaBinaryFindFirstNotLess(
4800 m_Vector.data() + m_Vector.size(),
4802 VmaPairFirstLess<KeyT, ValueT>());
4803 if((it != m_Vector.end()) && (it->first == key))
4809 return m_Vector.end();
4813 template<
typename KeyT,
typename ValueT>
4814 void VmaMap<KeyT, ValueT>::erase(iterator it)
4816 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4819 #endif // #if VMA_USE_STL_UNORDERED_MAP 4825 class VmaDeviceMemoryBlock;
4827 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4829 struct VmaAllocation_T
4831 VMA_CLASS_NO_COPY(VmaAllocation_T)
4833 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4837 FLAG_USER_DATA_STRING = 0x01,
4841 enum ALLOCATION_TYPE
4843 ALLOCATION_TYPE_NONE,
4844 ALLOCATION_TYPE_BLOCK,
4845 ALLOCATION_TYPE_DEDICATED,
4848 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4851 m_pUserData(VMA_NULL),
4852 m_LastUseFrameIndex(currentFrameIndex),
4853 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4854 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4856 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4858 #if VMA_STATS_STRING_ENABLED 4859 m_CreationFrameIndex = currentFrameIndex;
4860 m_BufferImageUsage = 0;
4866 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4869 VMA_ASSERT(m_pUserData == VMA_NULL);
4872 void InitBlockAllocation(
4874 VmaDeviceMemoryBlock* block,
4875 VkDeviceSize offset,
4876 VkDeviceSize alignment,
4878 VmaSuballocationType suballocationType,
4882 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4883 VMA_ASSERT(block != VMA_NULL);
4884 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4885 m_Alignment = alignment;
4887 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4888 m_SuballocationType = (uint8_t)suballocationType;
4889 m_BlockAllocation.m_hPool = hPool;
4890 m_BlockAllocation.m_Block = block;
4891 m_BlockAllocation.m_Offset = offset;
4892 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4897 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4898 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4899 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4900 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4901 m_BlockAllocation.m_Block = VMA_NULL;
4902 m_BlockAllocation.m_Offset = 0;
4903 m_BlockAllocation.m_CanBecomeLost =
true;
4906 void ChangeBlockAllocation(
4908 VmaDeviceMemoryBlock* block,
4909 VkDeviceSize offset);
4911 void ChangeSize(VkDeviceSize newSize);
4912 void ChangeOffset(VkDeviceSize newOffset);
4915 void InitDedicatedAllocation(
4916 uint32_t memoryTypeIndex,
4917 VkDeviceMemory hMemory,
4918 VmaSuballocationType suballocationType,
4922 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4923 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4924 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4927 m_SuballocationType = (uint8_t)suballocationType;
4928 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4929 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4930 m_DedicatedAllocation.m_hMemory = hMemory;
4931 m_DedicatedAllocation.m_pMappedData = pMappedData;
4934 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4935 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4936 VkDeviceSize GetSize()
const {
return m_Size; }
4937 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4938 void* GetUserData()
const {
return m_pUserData; }
4939 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4940 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4942 VmaDeviceMemoryBlock* GetBlock()
const 4944 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4945 return m_BlockAllocation.m_Block;
4947 VkDeviceSize GetOffset()
const;
4948 VkDeviceMemory GetMemory()
const;
4949 uint32_t GetMemoryTypeIndex()
const;
4950 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4951 void* GetMappedData()
const;
4952 bool CanBecomeLost()
const;
4955 uint32_t GetLastUseFrameIndex()
const 4957 return m_LastUseFrameIndex.load();
4959 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4961 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4971 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4973 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4975 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4986 void BlockAllocMap();
4987 void BlockAllocUnmap();
4988 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4991 #if VMA_STATS_STRING_ENABLED 4992 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4993 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4995 void InitBufferImageUsage(uint32_t bufferImageUsage)
4997 VMA_ASSERT(m_BufferImageUsage == 0);
4998 m_BufferImageUsage = bufferImageUsage;
5001 void PrintParameters(
class VmaJsonWriter& json)
const;
5005 VkDeviceSize m_Alignment;
5006 VkDeviceSize m_Size;
5008 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5010 uint8_t m_SuballocationType;
5017 struct BlockAllocation
5020 VmaDeviceMemoryBlock* m_Block;
5021 VkDeviceSize m_Offset;
5022 bool m_CanBecomeLost;
5026 struct DedicatedAllocation
5028 uint32_t m_MemoryTypeIndex;
5029 VkDeviceMemory m_hMemory;
5030 void* m_pMappedData;
5036 BlockAllocation m_BlockAllocation;
5038 DedicatedAllocation m_DedicatedAllocation;
5041 #if VMA_STATS_STRING_ENABLED 5042 uint32_t m_CreationFrameIndex;
5043 uint32_t m_BufferImageUsage;
5053 struct VmaSuballocation
5055 VkDeviceSize offset;
5058 VmaSuballocationType type;
5062 struct VmaSuballocationOffsetLess
5064 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5066 return lhs.offset < rhs.offset;
5069 struct VmaSuballocationOffsetGreater
5071 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5073 return lhs.offset > rhs.offset;
5077 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
5080 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5095 struct VmaAllocationRequest
5097 VkDeviceSize offset;
5098 VkDeviceSize sumFreeSize;
5099 VkDeviceSize sumItemSize;
5100 VmaSuballocationList::iterator item;
5101 size_t itemsToMakeLostCount;
5104 VkDeviceSize CalcCost()
const 5106 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
5114 class VmaBlockMetadata
5118 virtual ~VmaBlockMetadata() { }
5119 virtual void Init(VkDeviceSize size) { m_Size = size; }
5122 virtual bool Validate()
const = 0;
5123 VkDeviceSize GetSize()
const {
return m_Size; }
5124 virtual size_t GetAllocationCount()
const = 0;
5125 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5126 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5128 virtual bool IsEmpty()
const = 0;
5130 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5132 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5134 #if VMA_STATS_STRING_ENABLED 5135 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
5141 virtual bool CreateAllocationRequest(
5142 uint32_t currentFrameIndex,
5143 uint32_t frameInUseCount,
5144 VkDeviceSize bufferImageGranularity,
5145 VkDeviceSize allocSize,
5146 VkDeviceSize allocAlignment,
5148 VmaSuballocationType allocType,
5149 bool canMakeOtherLost,
5152 VmaAllocationRequest* pAllocationRequest) = 0;
5154 virtual bool MakeRequestedAllocationsLost(
5155 uint32_t currentFrameIndex,
5156 uint32_t frameInUseCount,
5157 VmaAllocationRequest* pAllocationRequest) = 0;
5159 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5161 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
5165 const VmaAllocationRequest& request,
5166 VmaSuballocationType type,
5167 VkDeviceSize allocSize,
5173 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5176 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5179 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
5181 #if VMA_STATS_STRING_ENABLED 5182 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5183 VkDeviceSize unusedBytes,
5184 size_t allocationCount,
5185 size_t unusedRangeCount)
const;
5186 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5187 VkDeviceSize offset,
5189 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5190 VkDeviceSize offset,
5191 VkDeviceSize size)
const;
5192 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5196 VkDeviceSize m_Size;
5197 const VkAllocationCallbacks* m_pAllocationCallbacks;
5200 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5201 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5205 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5207 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5210 virtual ~VmaBlockMetadata_Generic();
5211 virtual void Init(VkDeviceSize size);
5213 virtual bool Validate()
const;
5214 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5215 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5216 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5217 virtual bool IsEmpty()
const;
5219 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5220 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5222 #if VMA_STATS_STRING_ENABLED 5223 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5226 virtual bool CreateAllocationRequest(
5227 uint32_t currentFrameIndex,
5228 uint32_t frameInUseCount,
5229 VkDeviceSize bufferImageGranularity,
5230 VkDeviceSize allocSize,
5231 VkDeviceSize allocAlignment,
5233 VmaSuballocationType allocType,
5234 bool canMakeOtherLost,
5236 VmaAllocationRequest* pAllocationRequest);
5238 virtual bool MakeRequestedAllocationsLost(
5239 uint32_t currentFrameIndex,
5240 uint32_t frameInUseCount,
5241 VmaAllocationRequest* pAllocationRequest);
5243 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5245 virtual VkResult CheckCorruption(
const void* pBlockData);
5248 const VmaAllocationRequest& request,
5249 VmaSuballocationType type,
5250 VkDeviceSize allocSize,
5255 virtual void FreeAtOffset(VkDeviceSize offset);
5257 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5262 bool IsBufferImageGranularityConflictPossible(
5263 VkDeviceSize bufferImageGranularity,
5264 VmaSuballocationType& inOutPrevSuballocType)
const;
5267 friend class VmaDefragmentationAlgorithm_Generic;
5268 friend class VmaDefragmentationAlgorithm_Fast;
5270 uint32_t m_FreeCount;
5271 VkDeviceSize m_SumFreeSize;
5272 VmaSuballocationList m_Suballocations;
5275 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5277 bool ValidateFreeSuballocationList()
const;
5281 bool CheckAllocation(
5282 uint32_t currentFrameIndex,
5283 uint32_t frameInUseCount,
5284 VkDeviceSize bufferImageGranularity,
5285 VkDeviceSize allocSize,
5286 VkDeviceSize allocAlignment,
5287 VmaSuballocationType allocType,
5288 VmaSuballocationList::const_iterator suballocItem,
5289 bool canMakeOtherLost,
5290 VkDeviceSize* pOffset,
5291 size_t* itemsToMakeLostCount,
5292 VkDeviceSize* pSumFreeSize,
5293 VkDeviceSize* pSumItemSize)
const;
5295 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5299 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5302 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5305 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5386 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5388 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5391 virtual ~VmaBlockMetadata_Linear();
5392 virtual void Init(VkDeviceSize size);
5394 virtual bool Validate()
const;
5395 virtual size_t GetAllocationCount()
const;
5396 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5397 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5398 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5400 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5401 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5403 #if VMA_STATS_STRING_ENABLED 5404 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5407 virtual bool CreateAllocationRequest(
5408 uint32_t currentFrameIndex,
5409 uint32_t frameInUseCount,
5410 VkDeviceSize bufferImageGranularity,
5411 VkDeviceSize allocSize,
5412 VkDeviceSize allocAlignment,
5414 VmaSuballocationType allocType,
5415 bool canMakeOtherLost,
5417 VmaAllocationRequest* pAllocationRequest);
5419 virtual bool MakeRequestedAllocationsLost(
5420 uint32_t currentFrameIndex,
5421 uint32_t frameInUseCount,
5422 VmaAllocationRequest* pAllocationRequest);
5424 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5426 virtual VkResult CheckCorruption(
const void* pBlockData);
5429 const VmaAllocationRequest& request,
5430 VmaSuballocationType type,
5431 VkDeviceSize allocSize,
5436 virtual void FreeAtOffset(VkDeviceSize offset);
5446 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5448 enum SECOND_VECTOR_MODE
5450 SECOND_VECTOR_EMPTY,
5455 SECOND_VECTOR_RING_BUFFER,
5461 SECOND_VECTOR_DOUBLE_STACK,
5464 VkDeviceSize m_SumFreeSize;
5465 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5466 uint32_t m_1stVectorIndex;
5467 SECOND_VECTOR_MODE m_2ndVectorMode;
5469 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5470 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5471 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5472 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5475 size_t m_1stNullItemsBeginCount;
5477 size_t m_1stNullItemsMiddleCount;
5479 size_t m_2ndNullItemsCount;
5481 bool ShouldCompact1st()
const;
5482 void CleanupAfterFree();
5496 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5498 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5501 virtual ~VmaBlockMetadata_Buddy();
5502 virtual void Init(VkDeviceSize size);
5504 virtual bool Validate()
const;
5505 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5506 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5507 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5508 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5510 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5511 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5513 #if VMA_STATS_STRING_ENABLED 5514 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5517 virtual bool CreateAllocationRequest(
5518 uint32_t currentFrameIndex,
5519 uint32_t frameInUseCount,
5520 VkDeviceSize bufferImageGranularity,
5521 VkDeviceSize allocSize,
5522 VkDeviceSize allocAlignment,
5524 VmaSuballocationType allocType,
5525 bool canMakeOtherLost,
5527 VmaAllocationRequest* pAllocationRequest);
5529 virtual bool MakeRequestedAllocationsLost(
5530 uint32_t currentFrameIndex,
5531 uint32_t frameInUseCount,
5532 VmaAllocationRequest* pAllocationRequest);
5534 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5536 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5539 const VmaAllocationRequest& request,
5540 VmaSuballocationType type,
5541 VkDeviceSize allocSize,
5545 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5546 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5549 static const VkDeviceSize MIN_NODE_SIZE = 32;
5550 static const size_t MAX_LEVELS = 30;
5552 struct ValidationContext
5554 size_t calculatedAllocationCount;
5555 size_t calculatedFreeCount;
5556 VkDeviceSize calculatedSumFreeSize;
5558 ValidationContext() :
5559 calculatedAllocationCount(0),
5560 calculatedFreeCount(0),
5561 calculatedSumFreeSize(0) { }
5566 VkDeviceSize offset;
5596 VkDeviceSize m_UsableSize;
5597 uint32_t m_LevelCount;
5603 } m_FreeList[MAX_LEVELS];
5605 size_t m_AllocationCount;
5609 VkDeviceSize m_SumFreeSize;
5611 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5612 void DeleteNode(Node* node);
5613 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5614 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5615 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5617 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5618 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5622 void AddToFreeListFront(uint32_t level, Node* node);
5626 void RemoveFromFreeList(uint32_t level, Node* node);
5628 #if VMA_STATS_STRING_ENABLED 5629 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5639 class VmaDeviceMemoryBlock
5641 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5643 VmaBlockMetadata* m_pMetadata;
5647 ~VmaDeviceMemoryBlock()
5649 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5650 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5656 uint32_t newMemoryTypeIndex,
5657 VkDeviceMemory newMemory,
5658 VkDeviceSize newSize,
5660 uint32_t algorithm);
5664 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5665 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5666 uint32_t GetId()
const {
return m_Id; }
5667 void* GetMappedData()
const {
return m_pMappedData; }
5670 bool Validate()
const;
5675 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5678 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5679 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5681 VkResult BindBufferMemory(
5685 VkResult BindImageMemory(
5691 uint32_t m_MemoryTypeIndex;
5693 VkDeviceMemory m_hMemory;
5701 uint32_t m_MapCount;
5702 void* m_pMappedData;
5705 struct VmaPointerLess
5707 bool operator()(
const void* lhs,
const void* rhs)
const 5713 struct VmaDefragmentationMove
5715 size_t srcBlockIndex;
5716 size_t dstBlockIndex;
5717 VkDeviceSize srcOffset;
5718 VkDeviceSize dstOffset;
5722 class VmaDefragmentationAlgorithm;
5730 struct VmaBlockVector
5732 VMA_CLASS_NO_COPY(VmaBlockVector)
5736 uint32_t memoryTypeIndex,
5737 VkDeviceSize preferredBlockSize,
5738 size_t minBlockCount,
5739 size_t maxBlockCount,
5740 VkDeviceSize bufferImageGranularity,
5741 uint32_t frameInUseCount,
5743 bool explicitBlockSize,
5744 uint32_t algorithm);
5747 VkResult CreateMinBlocks();
5749 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5750 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5751 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5752 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5753 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5757 bool IsEmpty()
const {
return m_Blocks.empty(); }
5758 bool IsCorruptionDetectionEnabled()
const;
5762 uint32_t currentFrameIndex,
5764 VkDeviceSize alignment,
5766 VmaSuballocationType suballocType,
5775 #if VMA_STATS_STRING_ENABLED 5776 void PrintDetailedMap(
class VmaJsonWriter& json);
5779 void MakePoolAllocationsLost(
5780 uint32_t currentFrameIndex,
5781 size_t* pLostAllocationCount);
5782 VkResult CheckCorruption();
5786 class VmaBlockVectorDefragmentationContext* pCtx,
5788 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5789 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5790 VkCommandBuffer commandBuffer);
5791 void DefragmentationEnd(
5792 class VmaBlockVectorDefragmentationContext* pCtx,
5798 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5799 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5800 size_t CalcAllocationCount()
const;
5801 bool IsBufferImageGranularityConflictPossible()
const;
5804 friend class VmaDefragmentationAlgorithm_Generic;
5807 const uint32_t m_MemoryTypeIndex;
5808 const VkDeviceSize m_PreferredBlockSize;
5809 const size_t m_MinBlockCount;
5810 const size_t m_MaxBlockCount;
5811 const VkDeviceSize m_BufferImageGranularity;
5812 const uint32_t m_FrameInUseCount;
5813 const bool m_IsCustomPool;
5814 const bool m_ExplicitBlockSize;
5815 const uint32_t m_Algorithm;
5819 bool m_HasEmptyBlock;
5820 VMA_RW_MUTEX m_Mutex;
5822 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5823 uint32_t m_NextBlockId;
5825 VkDeviceSize CalcMaxBlockSize()
const;
5828 void Remove(VmaDeviceMemoryBlock* pBlock);
5832 void IncrementallySortBlocks();
5835 VkResult AllocateFromBlock(
5836 VmaDeviceMemoryBlock* pBlock,
5838 uint32_t currentFrameIndex,
5840 VkDeviceSize alignment,
5843 VmaSuballocationType suballocType,
5847 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5850 void ApplyDefragmentationMovesCpu(
5851 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5852 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
5854 void ApplyDefragmentationMovesGpu(
5855 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5856 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5857 VkCommandBuffer commandBuffer);
5868 VMA_CLASS_NO_COPY(VmaPool_T)
5870 VmaBlockVector m_BlockVector;
5875 VkDeviceSize preferredBlockSize);
5878 uint32_t GetId()
const {
return m_Id; }
5879 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5881 #if VMA_STATS_STRING_ENABLED 5896 class VmaDefragmentationAlgorithm
5898 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
5900 VmaDefragmentationAlgorithm(
5902 VmaBlockVector* pBlockVector,
5903 uint32_t currentFrameIndex) :
5904 m_hAllocator(hAllocator),
5905 m_pBlockVector(pBlockVector),
5906 m_CurrentFrameIndex(currentFrameIndex)
5909 virtual ~VmaDefragmentationAlgorithm()
5913 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
5914 virtual void AddAll() = 0;
5916 virtual VkResult Defragment(
5917 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5918 VkDeviceSize maxBytesToMove,
5919 uint32_t maxAllocationsToMove) = 0;
5921 virtual VkDeviceSize GetBytesMoved()
const = 0;
5922 virtual uint32_t GetAllocationsMoved()
const = 0;
5926 VmaBlockVector*
const m_pBlockVector;
5927 const uint32_t m_CurrentFrameIndex;
5929 struct AllocationInfo
5932 VkBool32* m_pChanged;
5935 m_hAllocation(VK_NULL_HANDLE),
5936 m_pChanged(VMA_NULL)
5940 m_hAllocation(hAlloc),
5941 m_pChanged(pChanged)
5947 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
5949 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
5951 VmaDefragmentationAlgorithm_Generic(
5953 VmaBlockVector* pBlockVector,
5954 uint32_t currentFrameIndex,
5955 bool overlappingMoveSupported);
5956 virtual ~VmaDefragmentationAlgorithm_Generic();
5958 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5959 virtual void AddAll() { m_AllAllocations =
true; }
5961 virtual VkResult Defragment(
5962 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5963 VkDeviceSize maxBytesToMove,
5964 uint32_t maxAllocationsToMove);
5966 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5967 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5970 uint32_t m_AllocationCount;
5971 bool m_AllAllocations;
5973 VkDeviceSize m_BytesMoved;
5974 uint32_t m_AllocationsMoved;
5976 struct AllocationInfoSizeGreater
5978 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5980 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5984 struct AllocationInfoOffsetGreater
5986 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5988 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
5994 size_t m_OriginalBlockIndex;
5995 VmaDeviceMemoryBlock* m_pBlock;
5996 bool m_HasNonMovableAllocations;
5997 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5999 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6000 m_OriginalBlockIndex(SIZE_MAX),
6002 m_HasNonMovableAllocations(true),
6003 m_Allocations(pAllocationCallbacks)
6007 void CalcHasNonMovableAllocations()
6009 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6010 const size_t defragmentAllocCount = m_Allocations.size();
6011 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6014 void SortAllocationsBySizeDescending()
6016 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6019 void SortAllocationsByOffsetDescending()
6021 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
6025 struct BlockPointerLess
6027 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6029 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6031 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6033 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
6039 struct BlockInfoCompareMoveDestination
6041 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6043 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6047 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6051 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6059 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6060 BlockInfoVector m_Blocks;
6062 VkResult DefragmentRound(
6063 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6064 VkDeviceSize maxBytesToMove,
6065 uint32_t maxAllocationsToMove);
6067 size_t CalcBlocksWithNonMovableCount()
const;
6069 static bool MoveMakesSense(
6070 size_t dstBlockIndex, VkDeviceSize dstOffset,
6071 size_t srcBlockIndex, VkDeviceSize srcOffset);
6074 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6076 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6078 VmaDefragmentationAlgorithm_Fast(
6080 VmaBlockVector* pBlockVector,
6081 uint32_t currentFrameIndex,
6082 bool overlappingMoveSupported);
6083 virtual ~VmaDefragmentationAlgorithm_Fast();
6085 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6086 virtual void AddAll() { m_AllAllocations =
true; }
6088 virtual VkResult Defragment(
6089 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6090 VkDeviceSize maxBytesToMove,
6091 uint32_t maxAllocationsToMove);
6093 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6094 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6099 size_t origBlockIndex;
6102 class FreeSpaceDatabase
6108 s.blockInfoIndex = SIZE_MAX;
6109 for(
size_t i = 0; i < MAX_COUNT; ++i)
6111 m_FreeSpaces[i] = s;
6115 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6117 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6123 size_t bestIndex = SIZE_MAX;
6124 for(
size_t i = 0; i < MAX_COUNT; ++i)
6127 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6132 if(m_FreeSpaces[i].size < size &&
6133 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6139 if(bestIndex != SIZE_MAX)
6141 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6142 m_FreeSpaces[bestIndex].offset = offset;
6143 m_FreeSpaces[bestIndex].size = size;
6147 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6148 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6150 size_t bestIndex = SIZE_MAX;
6151 VkDeviceSize bestFreeSpaceAfter = 0;
6152 for(
size_t i = 0; i < MAX_COUNT; ++i)
6155 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6157 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6159 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6161 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6163 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6166 bestFreeSpaceAfter = freeSpaceAfter;
6172 if(bestIndex != SIZE_MAX)
6174 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6175 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6177 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6180 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6181 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6182 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6187 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
// NOTE(review): this region is extraction-garbled - original header line
// numbers are fused into the code and several declarations are elided.
// Below: remaining members of a small fixed-capacity free-space cache,
// then counters/helpers of the enclosing defragmentation algorithm class
// (its header is not visible in this chunk).
6197 static const size_t MAX_COUNT = 4;
// One tracked free region: owning block index plus offset (size elided here).
6201 size_t blockInfoIndex;
6202 VkDeviceSize offset;
6204 } m_FreeSpaces[MAX_COUNT];
// Configuration and running totals for the defragmentation pass.
6207 const bool m_OverlappingMoveSupported;
6209 uint32_t m_AllocationCount;
6210 bool m_AllAllocations;
6212 VkDeviceSize m_BytesMoved;
6213 uint32_t m_AllocationsMoved;
6215 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
// Internal helpers - declarations only; definitions appear elsewhere.
6217 void PreprocessMetadata();
6218 void PostprocessMetadata();
6219 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
// Per-block state used while defragmenting one device memory block.
// NOTE(review): declaration is partially elided in this chunk (flag/buffer
// members and the closing brace are not visible).
6222 struct VmaBlockDefragmentationContext
6225 VMA_CLASS_NO_COPY(VmaBlockDefragmentationContext)
// Bit flag marking a block that is in use by the defragmentation pass.
6229 BLOCK_FLAG_USED = 0x00000001,
6234 VmaBlockDefragmentationContext() :
6236 hBuffer(VK_NULL_HANDLE)
// Defragmentation state for one block vector (a default memory-type vector
// or a custom pool's vector). Owns the per-block contexts and the chosen
// defragmentation algorithm.
// NOTE(review): several member declarations are elided in this chunk.
6241 class VmaBlockVectorDefragmentationContext
6243 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6247 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6249 VmaBlockVectorDefragmentationContext(
6252 VmaBlockVector* pBlockVector,
6253 uint32_t currFrameIndex,
6255 ~VmaBlockVectorDefragmentationContext();
// Simple accessors for the owning pool / vector / algorithm.
6257 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6258 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6259 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
// Registers a single allocation, or flags the whole vector for processing.
6261 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6262 void AddAll() { m_AllAllocations =
true; }
6264 void Begin(
bool overlappingMoveSupported);
6271 VmaBlockVector*
const m_pBlockVector;
6272 const uint32_t m_CurrFrameIndex;
6273 const uint32_t m_AlgorithmFlags;
// Algorithm instance created in Begin(); owned by this context.
6275 VmaDefragmentationAlgorithm* m_pAlgorithm;
// Allocations explicitly registered via AddAllocation().
6283 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6284 bool m_AllAllocations;
// Top-level defragmentation context: aggregates block-vector contexts for
// the default per-memory-type vectors and for custom pools, and drives the
// whole operation via Defragment().
// NOTE(review): parameters and several members are elided in this chunk.
6287 struct VmaDefragmentationContext_T
6290 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6292 VmaDefragmentationContext_T(
6294 uint32_t currFrameIndex,
6297 ~VmaDefragmentationContext_T();
6299 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6300 void AddAllocations(
6301 uint32_t allocationCount,
6303 VkBool32* pAllocationsChanged);
// Performs the pass within the given CPU/GPU byte and allocation budgets.
6311 VkResult Defragment(
6312 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6313 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6318 const uint32_t m_CurrFrameIndex;
6319 const uint32_t m_Flags;
// One slot per memory type for default vectors; custom pools go in a vector.
6322 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6324 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
// Interior of the VmaRecorder class (class header elided from this chunk):
// records allocator API calls to a file when VMA_RECORDING_ENABLED is set.
// NOTE(review): region is extraction-garbled; many parameter lines elided.
6327 #if VMA_RECORDING_ENABLED 6334 void WriteConfiguration(
6335 const VkPhysicalDeviceProperties& devProps,
6336 const VkPhysicalDeviceMemoryProperties& memProps,
6337 bool dedicatedAllocationExtensionEnabled);
// One Record* method per public allocator entry point, each tagged with the
// current frame index.
6340 void RecordCreateAllocator(uint32_t frameIndex);
6341 void RecordDestroyAllocator(uint32_t frameIndex);
6342 void RecordCreatePool(uint32_t frameIndex,
6345 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6346 void RecordAllocateMemory(uint32_t frameIndex,
6347 const VkMemoryRequirements& vkMemReq,
6350 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6351 const VkMemoryRequirements& vkMemReq,
6352 bool requiresDedicatedAllocation,
6353 bool prefersDedicatedAllocation,
6356 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6357 const VkMemoryRequirements& vkMemReq,
6358 bool requiresDedicatedAllocation,
6359 bool prefersDedicatedAllocation,
6362 void RecordFreeMemory(uint32_t frameIndex,
6364 void RecordResizeAllocation(
6365 uint32_t frameIndex,
6367 VkDeviceSize newSize);
6368 void RecordSetAllocationUserData(uint32_t frameIndex,
6370 const void* pUserData);
6371 void RecordCreateLostAllocation(uint32_t frameIndex,
6373 void RecordMapMemory(uint32_t frameIndex,
6375 void RecordUnmapMemory(uint32_t frameIndex,
6377 void RecordFlushAllocation(uint32_t frameIndex,
6378 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6379 void RecordInvalidateAllocation(uint32_t frameIndex,
6380 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6381 void RecordCreateBuffer(uint32_t frameIndex,
6382 const VkBufferCreateInfo& bufCreateInfo,
6385 void RecordCreateImage(uint32_t frameIndex,
6386 const VkImageCreateInfo& imageCreateInfo,
6389 void RecordDestroyBuffer(uint32_t frameIndex,
6391 void RecordDestroyImage(uint32_t frameIndex,
6393 void RecordTouchAllocation(uint32_t frameIndex,
6395 void RecordGetAllocationInfo(uint32_t frameIndex,
6397 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6399 void RecordDefragmentationBegin(uint32_t frameIndex,
6402 void RecordDefragmentationEnd(uint32_t frameIndex,
// Helper wrapping a user-data pointer as a printable string.
6412 class UserDataString
6416 const char* GetString()
const {
return m_Str; }
// Output file is guarded by a mutex; timestamps are relative to m_StartCounter.
6426 VMA_MUTEX m_FileMutex;
6428 int64_t m_StartCounter;
6430 void GetBasicParams(CallParams& outParams);
// Prints a space-separated list of pointers to the recording file.
6433 template<
typename T>
6434 void PrintPointerList(uint64_t count,
const T* pItems)
6438 fprintf(m_File,
"%p", pItems[0]);
6439 for(uint64_t i = 1; i < count; ++i)
6441 fprintf(m_File,
" %p", pItems[i]);
// The central allocator object behind the VmaAllocator handle.
// NOTE(review): extraction-garbled; access specifiers, several members and
// most method bodies/braces are elided in this chunk.
6449 #endif // #if VMA_RECORDING_ENABLED 6452 struct VmaAllocator_T
6454 VMA_CLASS_NO_COPY(VmaAllocator_T)
6457 bool m_UseKhrDedicatedAllocation;
6459 bool m_AllocationCallbacksSpecified;
6460 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap size limits, guarded by their own mutex.
6464 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6465 VMA_MUTEX m_HeapSizeLimitMutex;
6467 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6468 VkPhysicalDeviceMemoryProperties m_MemProps;
// One default block vector per memory type.
6471 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated allocations tracked per memory type, each with its own RW lock.
6474 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6475 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6476 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user-supplied callbacks, or 0 when none were specified.
6482 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6484 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6488 return m_VulkanFunctions;
// Granularity used to avoid buffer/image aliasing within a block.
6491 VkDeviceSize GetBufferImageGranularity()
const 6494 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6495 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6498 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6499 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6501 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6503 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6504 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// True when the type is HOST_VISIBLE but not HOST_COHERENT.
6507 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6509 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6510 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Non-coherent types must be aligned to nonCoherentAtomSize for flushes.
6513 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6515 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6516 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6517 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6520 bool IsIntegratedGpu()
const 6522 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6525 #if VMA_RECORDING_ENABLED 6526 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
// Queries requirements and whether a dedicated allocation is required/preferred.
6529 void GetBufferMemoryRequirements(
6531 VkMemoryRequirements& memReq,
6532 bool& requiresDedicatedAllocation,
6533 bool& prefersDedicatedAllocation)
const;
6534 void GetImageMemoryRequirements(
6536 VkMemoryRequirements& memReq,
6537 bool& requiresDedicatedAllocation,
6538 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point used by all public vmaAllocate* functions.
6541 VkResult AllocateMemory(
6542 const VkMemoryRequirements& vkMemReq,
6543 bool requiresDedicatedAllocation,
6544 bool prefersDedicatedAllocation,
6545 VkBuffer dedicatedBuffer,
6546 VkImage dedicatedImage,
6548 VmaSuballocationType suballocType,
6554 VkResult ResizeAllocation(
6556 VkDeviceSize newSize);
6558 void CalculateStats(
VmaStats* pStats);
6560 #if VMA_STATS_STRING_ENABLED 6561 void PrintDetailedMap(
class VmaJsonWriter& json);
6564 VkResult DefragmentationBegin(
6568 VkResult DefragmentationEnd(
6575 void DestroyPool(
VmaPool pool);
6578 void SetCurrentFrameIndex(uint32_t frameIndex);
6579 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6581 void MakePoolAllocationsLost(
6583 size_t* pLostAllocationCount);
6584 VkResult CheckPoolCorruption(
VmaPool hPool);
6585 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Thin wrappers over vkAllocateMemory/vkFreeMemory with budget accounting.
6589 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6590 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6595 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6596 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6598 void FlushOrInvalidateAllocation(
6600 VkDeviceSize offset, VkDeviceSize size,
6601 VMA_CACHE_OPERATION op);
6603 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6606 VkDeviceSize m_PreferredLargeHeapBlockSize;
6608 VkPhysicalDevice m_PhysicalDevice;
6609 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by an RW mutex; m_NextPoolId names new pools.
6611 VMA_RW_MUTEX m_PoolsMutex;
6613 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6614 uint32_t m_NextPoolId;
6618 #if VMA_RECORDING_ENABLED 6619 VmaRecorder* m_pRecorder;
6624 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation within one chosen memory type (block vector or dedicated).
6626 VkResult AllocateMemoryOfType(
6628 VkDeviceSize alignment,
6629 bool dedicatedAllocation,
6630 VkBuffer dedicatedBuffer,
6631 VkImage dedicatedImage,
6633 uint32_t memTypeIndex,
6634 VmaSuballocationType suballocType,
6638 VkResult AllocateDedicatedMemory(
6640 VmaSuballocationType suballocType,
6641 uint32_t memTypeIndex,
6643 bool isUserDataString,
6645 VkBuffer dedicatedBuffer,
6646 VkImage dedicatedImage,
6656 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6658 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6661 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6663 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
6666 template<
typename T>
6669 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6672 template<
typename T>
6673 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6675 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
6678 template<
typename T>
6679 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6684 VmaFree(hAllocator, ptr);
6688 template<
typename T>
6689 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6693 for(
size_t i = count; i--; )
6695 VmaFree(hAllocator, ptr);
6702 #if VMA_STATS_STRING_ENABLED 6704 class VmaStringBuilder
6707 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6708 size_t GetLength()
const {
return m_Data.size(); }
6709 const char* GetData()
const {
return m_Data.data(); }
6711 void Add(
char ch) { m_Data.push_back(ch); }
6712 void Add(
const char* pStr);
6713 void AddNewLine() { Add(
'\n'); }
6714 void AddNumber(uint32_t num);
6715 void AddNumber(uint64_t num);
6716 void AddPointer(
const void* ptr);
6719 VmaVector< char, VmaStlAllocator<char> > m_Data;
6722 void VmaStringBuilder::Add(
const char* pStr)
6724 const size_t strLen = strlen(pStr);
6727 const size_t oldCount = m_Data.size();
6728 m_Data.resize(oldCount + strLen);
6729 memcpy(m_Data.data() + oldCount, pStr, strLen);
6733 void VmaStringBuilder::AddNumber(uint32_t num)
6736 VmaUint32ToStr(buf,
sizeof(buf), num);
6740 void VmaStringBuilder::AddNumber(uint64_t num)
6743 VmaUint64ToStr(buf,
sizeof(buf), num);
6747 void VmaStringBuilder::AddPointer(
const void* ptr)
6750 VmaPtrToStr(buf,
sizeof(buf), ptr);
6754 #endif // #if VMA_STATS_STRING_ENABLED 6759 #if VMA_STATS_STRING_ENABLED 6763 VMA_CLASS_NO_COPY(VmaJsonWriter)
6765 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
6768 void BeginObject(
bool singleLine =
false);
6771 void BeginArray(
bool singleLine =
false);
6774 void WriteString(
const char* pStr);
6775 void BeginString(
const char* pStr = VMA_NULL);
6776 void ContinueString(
const char* pStr);
6777 void ContinueString(uint32_t n);
6778 void ContinueString(uint64_t n);
6779 void ContinueString_Pointer(
const void* ptr);
6780 void EndString(
const char* pStr = VMA_NULL);
6782 void WriteNumber(uint32_t n);
6783 void WriteNumber(uint64_t n);
6784 void WriteBool(
bool b);
6788 static const char*
const INDENT;
6790 enum COLLECTION_TYPE
6792 COLLECTION_TYPE_OBJECT,
6793 COLLECTION_TYPE_ARRAY,
6797 COLLECTION_TYPE type;
6798 uint32_t valueCount;
6799 bool singleLineMode;
6802 VmaStringBuilder& m_SB;
6803 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6804 bool m_InsideString;
6806 void BeginValue(
bool isString);
6807 void WriteIndent(
bool oneLess =
false);
6810 const char*
const VmaJsonWriter::INDENT =
" ";
6812 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6814 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6815 m_InsideString(false)
6819 VmaJsonWriter::~VmaJsonWriter()
6821 VMA_ASSERT(!m_InsideString);
6822 VMA_ASSERT(m_Stack.empty());
6825 void VmaJsonWriter::BeginObject(
bool singleLine)
6827 VMA_ASSERT(!m_InsideString);
6833 item.type = COLLECTION_TYPE_OBJECT;
6834 item.valueCount = 0;
6835 item.singleLineMode = singleLine;
6836 m_Stack.push_back(item);
6839 void VmaJsonWriter::EndObject()
6841 VMA_ASSERT(!m_InsideString);
6846 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
6850 void VmaJsonWriter::BeginArray(
bool singleLine)
6852 VMA_ASSERT(!m_InsideString);
6858 item.type = COLLECTION_TYPE_ARRAY;
6859 item.valueCount = 0;
6860 item.singleLineMode = singleLine;
6861 m_Stack.push_back(item);
6864 void VmaJsonWriter::EndArray()
6866 VMA_ASSERT(!m_InsideString);
6871 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
6875 void VmaJsonWriter::WriteString(
const char* pStr)
6881 void VmaJsonWriter::BeginString(
const char* pStr)
6883 VMA_ASSERT(!m_InsideString);
6887 m_InsideString =
true;
6888 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6890 ContinueString(pStr);
6894 void VmaJsonWriter::ContinueString(
const char* pStr)
6896 VMA_ASSERT(m_InsideString);
6898 const size_t strLen = strlen(pStr);
6899 for(
size_t i = 0; i < strLen; ++i)
6932 VMA_ASSERT(0 &&
"Character not currently supported.");
6938 void VmaJsonWriter::ContinueString(uint32_t n)
6940 VMA_ASSERT(m_InsideString);
6944 void VmaJsonWriter::ContinueString(uint64_t n)
6946 VMA_ASSERT(m_InsideString);
6950 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6952 VMA_ASSERT(m_InsideString);
6953 m_SB.AddPointer(ptr);
6956 void VmaJsonWriter::EndString(
const char* pStr)
6958 VMA_ASSERT(m_InsideString);
6959 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6961 ContinueString(pStr);
6964 m_InsideString =
false;
6967 void VmaJsonWriter::WriteNumber(uint32_t n)
6969 VMA_ASSERT(!m_InsideString);
6974 void VmaJsonWriter::WriteNumber(uint64_t n)
6976 VMA_ASSERT(!m_InsideString);
6981 void VmaJsonWriter::WriteBool(
bool b)
6983 VMA_ASSERT(!m_InsideString);
6985 m_SB.Add(b ?
"true" :
"false");
6988 void VmaJsonWriter::WriteNull()
6990 VMA_ASSERT(!m_InsideString);
6995 void VmaJsonWriter::BeginValue(
bool isString)
6997 if(!m_Stack.empty())
6999 StackItem& currItem = m_Stack.back();
7000 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7001 currItem.valueCount % 2 == 0)
7003 VMA_ASSERT(isString);
7006 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7007 currItem.valueCount % 2 != 0)
7011 else if(currItem.valueCount > 0)
7020 ++currItem.valueCount;
7024 void VmaJsonWriter::WriteIndent(
bool oneLess)
7026 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7030 size_t count = m_Stack.size();
7031 if(count > 0 && oneLess)
7035 for(
size_t i = 0; i < count; ++i)
7042 #endif // #if VMA_STATS_STRING_ENABLED 7046 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7048 if(IsUserDataString())
7050 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7052 FreeUserDataString(hAllocator);
7054 if(pUserData != VMA_NULL)
7056 const char*
const newStrSrc = (
char*)pUserData;
7057 const size_t newStrLen = strlen(newStrSrc);
7058 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7059 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7060 m_pUserData = newStrDst;
7065 m_pUserData = pUserData;
7069 void VmaAllocation_T::ChangeBlockAllocation(
7071 VmaDeviceMemoryBlock* block,
7072 VkDeviceSize offset)
7074 VMA_ASSERT(block != VMA_NULL);
7075 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7078 if(block != m_BlockAllocation.m_Block)
7080 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7081 if(IsPersistentMap())
7083 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7084 block->Map(hAllocator, mapRefCount, VMA_NULL);
7087 m_BlockAllocation.m_Block = block;
7088 m_BlockAllocation.m_Offset = offset;
7091 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7093 VMA_ASSERT(newSize > 0);
7097 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7099 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7100 m_BlockAllocation.m_Offset = newOffset;
7103 VkDeviceSize VmaAllocation_T::GetOffset()
const 7107 case ALLOCATION_TYPE_BLOCK:
7108 return m_BlockAllocation.m_Offset;
7109 case ALLOCATION_TYPE_DEDICATED:
7117 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7121 case ALLOCATION_TYPE_BLOCK:
7122 return m_BlockAllocation.m_Block->GetDeviceMemory();
7123 case ALLOCATION_TYPE_DEDICATED:
7124 return m_DedicatedAllocation.m_hMemory;
7127 return VK_NULL_HANDLE;
7131 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7135 case ALLOCATION_TYPE_BLOCK:
7136 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7137 case ALLOCATION_TYPE_DEDICATED:
7138 return m_DedicatedAllocation.m_MemoryTypeIndex;
7145 void* VmaAllocation_T::GetMappedData()
const 7149 case ALLOCATION_TYPE_BLOCK:
7152 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7153 VMA_ASSERT(pBlockData != VMA_NULL);
7154 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7161 case ALLOCATION_TYPE_DEDICATED:
7162 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7163 return m_DedicatedAllocation.m_pMappedData;
7170 bool VmaAllocation_T::CanBecomeLost()
const 7174 case ALLOCATION_TYPE_BLOCK:
7175 return m_BlockAllocation.m_CanBecomeLost;
7176 case ALLOCATION_TYPE_DEDICATED:
7184 VmaPool VmaAllocation_T::GetPool()
const 7186 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7187 return m_BlockAllocation.m_hPool;
7190 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7192 VMA_ASSERT(CanBecomeLost());
7198 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7201 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7206 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7212 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
7222 #if VMA_STATS_STRING_ENABLED 7225 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
7234 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7236 json.WriteString(
"Type");
7237 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7239 json.WriteString(
"Size");
7240 json.WriteNumber(m_Size);
7242 if(m_pUserData != VMA_NULL)
7244 json.WriteString(
"UserData");
7245 if(IsUserDataString())
7247 json.WriteString((
const char*)m_pUserData);
7252 json.ContinueString_Pointer(m_pUserData);
7257 json.WriteString(
"CreationFrameIndex");
7258 json.WriteNumber(m_CreationFrameIndex);
7260 json.WriteString(
"LastUseFrameIndex");
7261 json.WriteNumber(GetLastUseFrameIndex());
7263 if(m_BufferImageUsage != 0)
7265 json.WriteString(
"Usage");
7266 json.WriteNumber(m_BufferImageUsage);
7272 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7274 VMA_ASSERT(IsUserDataString());
7275 if(m_pUserData != VMA_NULL)
7277 char*
const oldStr = (
char*)m_pUserData;
7278 const size_t oldStrLen = strlen(oldStr);
7279 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7280 m_pUserData = VMA_NULL;
7284 void VmaAllocation_T::BlockAllocMap()
7286 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7288 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7294 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
7298 void VmaAllocation_T::BlockAllocUnmap()
7300 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7302 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7308 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
7312 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7314 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7318 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7320 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7321 *ppData = m_DedicatedAllocation.m_pMappedData;
7327 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7328 return VK_ERROR_MEMORY_MAP_FAILED;
7333 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7334 hAllocator->m_hDevice,
7335 m_DedicatedAllocation.m_hMemory,
7340 if(result == VK_SUCCESS)
7342 m_DedicatedAllocation.m_pMappedData = *ppData;
7349 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7351 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7353 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7358 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7359 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7360 hAllocator->m_hDevice,
7361 m_DedicatedAllocation.m_hMemory);
7366 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
7370 #if VMA_STATS_STRING_ENABLED 7372 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7376 json.WriteString(
"Blocks");
7379 json.WriteString(
"Allocations");
7382 json.WriteString(
"UnusedRanges");
7385 json.WriteString(
"UsedBytes");
7388 json.WriteString(
"UnusedBytes");
7393 json.WriteString(
"AllocationSize");
7394 json.BeginObject(
true);
7395 json.WriteString(
"Min");
7397 json.WriteString(
"Avg");
7399 json.WriteString(
"Max");
7406 json.WriteString(
"UnusedRangeSize");
7407 json.BeginObject(
true);
7408 json.WriteString(
"Min");
7410 json.WriteString(
"Avg");
7412 json.WriteString(
"Max");
7420 #endif // #if VMA_STATS_STRING_ENABLED 7422 struct VmaSuballocationItemSizeLess
7425 const VmaSuballocationList::iterator lhs,
7426 const VmaSuballocationList::iterator rhs)
const 7428 return lhs->size < rhs->size;
7431 const VmaSuballocationList::iterator lhs,
7432 VkDeviceSize rhsSize)
const 7434 return lhs->size < rhsSize;
7442 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7444 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
7448 #if VMA_STATS_STRING_ENABLED 7450 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7451 VkDeviceSize unusedBytes,
7452 size_t allocationCount,
7453 size_t unusedRangeCount)
const 7457 json.WriteString(
"TotalBytes");
7458 json.WriteNumber(GetSize());
7460 json.WriteString(
"UnusedBytes");
7461 json.WriteNumber(unusedBytes);
7463 json.WriteString(
"Allocations");
7464 json.WriteNumber((uint64_t)allocationCount);
7466 json.WriteString(
"UnusedRanges");
7467 json.WriteNumber((uint64_t)unusedRangeCount);
7469 json.WriteString(
"Suballocations");
7473 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7474 VkDeviceSize offset,
7477 json.BeginObject(
true);
7479 json.WriteString(
"Offset");
7480 json.WriteNumber(offset);
7482 hAllocation->PrintParameters(json);
7487 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7488 VkDeviceSize offset,
7489 VkDeviceSize size)
const 7491 json.BeginObject(
true);
7493 json.WriteString(
"Offset");
7494 json.WriteNumber(offset);
7496 json.WriteString(
"Type");
7497 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7499 json.WriteString(
"Size");
7500 json.WriteNumber(size);
7505 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7511 #endif // #if VMA_STATS_STRING_ENABLED 7516 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7517 VmaBlockMetadata(hAllocator),
7520 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7521 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7525 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
7529 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7531 VmaBlockMetadata::Init(size);
7534 m_SumFreeSize = size;
7536 VmaSuballocation suballoc = {};
7537 suballoc.offset = 0;
7538 suballoc.size = size;
7539 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7540 suballoc.hAllocation = VK_NULL_HANDLE;
7542 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7543 m_Suballocations.push_back(suballoc);
7544 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7546 m_FreeSuballocationsBySize.push_back(suballocItem);
7549 bool VmaBlockMetadata_Generic::Validate()
const 7551 VMA_VALIDATE(!m_Suballocations.empty());
7554 VkDeviceSize calculatedOffset = 0;
7556 uint32_t calculatedFreeCount = 0;
7558 VkDeviceSize calculatedSumFreeSize = 0;
7561 size_t freeSuballocationsToRegister = 0;
7563 bool prevFree =
false;
7565 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7566 suballocItem != m_Suballocations.cend();
7569 const VmaSuballocation& subAlloc = *suballocItem;
7572 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7574 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
7576 VMA_VALIDATE(!prevFree || !currFree);
7578 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7582 calculatedSumFreeSize += subAlloc.size;
7583 ++calculatedFreeCount;
7584 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7586 ++freeSuballocationsToRegister;
7590 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7594 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7595 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
7598 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7601 calculatedOffset += subAlloc.size;
7602 prevFree = currFree;
7607 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7609 VkDeviceSize lastSize = 0;
7610 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7612 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7615 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7617 VMA_VALIDATE(suballocItem->size >= lastSize);
7619 lastSize = suballocItem->size;
7623 VMA_VALIDATE(ValidateFreeSuballocationList());
7624 VMA_VALIDATE(calculatedOffset == GetSize());
7625 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7626 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
7631 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7633 if(!m_FreeSuballocationsBySize.empty())
7635 return m_FreeSuballocationsBySize.back()->size;
7643 bool VmaBlockMetadata_Generic::IsEmpty()
const 7645 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
7648 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7652 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7664 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7665 suballocItem != m_Suballocations.cend();
7668 const VmaSuballocation& suballoc = *suballocItem;
7669 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7682 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7684 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7686 inoutStats.
size += GetSize();
7693 #if VMA_STATS_STRING_ENABLED 7695 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7697 PrintDetailedMap_Begin(json,
7699 m_Suballocations.size() - (size_t)m_FreeCount,
7703 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7704 suballocItem != m_Suballocations.cend();
7705 ++suballocItem, ++i)
7707 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7709 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7713 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7717 PrintDetailedMap_End(json);
// Searches this block for a place to put a new allocation, possibly marking
// existing lost-capable allocations for reclamation. Fills *pAllocationRequest
// and returns true on success.
// NOTE(review): extraction-garbled - several parameters (upperAddress,
// strategy), braces and CheckAllocation argument lines are elided below;
// do not edit without the original file at hand.
7720 #endif // #if VMA_STATS_STRING_ENABLED 7722 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7723 uint32_t currentFrameIndex,
7724 uint32_t frameInUseCount,
7725 VkDeviceSize bufferImageGranularity,
7726 VkDeviceSize allocSize,
7727 VkDeviceSize allocAlignment,
7729 VmaSuballocationType allocType,
7730 bool canMakeOtherLost,
7732 VmaAllocationRequest* pAllocationRequest)
// Preconditions: positive size, lower-address allocation, non-free type.
7734 VMA_ASSERT(allocSize > 0);
7735 VMA_ASSERT(!upperAddress);
7736 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7737 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7738 VMA_HEAVY_ASSERT(Validate());
// Early reject: when other allocations cannot be made lost, the block must
// have enough total free bytes (including debug margins on both sides).
7741 if(canMakeOtherLost ==
false &&
7742 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
// Best-fit path: binary-search the size-sorted free list for the first
// range large enough, then probe forward until one passes CheckAllocation.
7748 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7749 if(freeSuballocCount > 0)
7754 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7755 m_FreeSuballocationsBySize.data(),
7756 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7757 allocSize + 2 * VMA_DEBUG_MARGIN,
7758 VmaSuballocationItemSizeLess());
7759 size_t index = it - m_FreeSuballocationsBySize.data();
7760 for(; index < freeSuballocCount; ++index)
7765 bufferImageGranularity,
7769 m_FreeSuballocationsBySize[index],
7771 &pAllocationRequest->offset,
7772 &pAllocationRequest->itemsToMakeLostCount,
7773 &pAllocationRequest->sumFreeSize,
7774 &pAllocationRequest->sumItemSize))
7776 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Internal strategy: scan in offset order and take the lowest-offset fit.
7781 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7783 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7784 it != m_Suballocations.end();
7787 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
7790 bufferImageGranularity,
7796 &pAllocationRequest->offset,
7797 &pAllocationRequest->itemsToMakeLostCount,
7798 &pAllocationRequest->sumFreeSize,
7799 &pAllocationRequest->sumItemSize))
7801 pAllocationRequest->item = it;
// Worst-fit path: walk the size-sorted free list from the largest down.
7809 for(
size_t index = freeSuballocCount; index--; )
7814 bufferImageGranularity,
7818 m_FreeSuballocationsBySize[index],
7820 &pAllocationRequest->offset,
7821 &pAllocationRequest->itemsToMakeLostCount,
7822 &pAllocationRequest->sumFreeSize,
7823 &pAllocationRequest->sumItemSize))
7825 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Brute-force path: consider every position, allowing lost-capable
// allocations to be sacrificed; keep the cheapest candidate by CalcCost().
7832 if(canMakeOtherLost)
7836 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
7837 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
7839 VmaAllocationRequest tmpAllocRequest = {};
7840 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7841 suballocIt != m_Suballocations.end();
7844 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7845 suballocIt->hAllocation->CanBecomeLost())
7850 bufferImageGranularity,
7856 &tmpAllocRequest.offset,
7857 &tmpAllocRequest.itemsToMakeLostCount,
7858 &tmpAllocRequest.sumFreeSize,
7859 &tmpAllocRequest.sumItemSize))
7861 tmpAllocRequest.item = suballocIt;
7863 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7866 *pAllocationRequest = tmpAllocRequest;
// VK_WHOLE_SIZE sentinel still present => no candidate was found.
7872 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
7881 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7882 uint32_t currentFrameIndex,
7883 uint32_t frameInUseCount,
7884 VmaAllocationRequest* pAllocationRequest)
7886 while(pAllocationRequest->itemsToMakeLostCount > 0)
7888 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7890 ++pAllocationRequest->item;
7892 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7893 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7894 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7895 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7897 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7898 --pAllocationRequest->itemsToMakeLostCount;
7906 VMA_HEAVY_ASSERT(Validate());
7907 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7908 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
7913 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7915 uint32_t lostAllocationCount = 0;
7916 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7917 it != m_Suballocations.end();
7920 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7921 it->hAllocation->CanBecomeLost() &&
7922 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7924 it = FreeSuballocation(it);
7925 ++lostAllocationCount;
7928 return lostAllocationCount;
// Validates the magic-value margins written before and after every used
// suballocation in pBlockData (the mapped block memory). Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin; the success
// return falls outside the extracted lines.
7931 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7933 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7934 it != m_Suballocations.end();
7937 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Margin immediately before the allocation (offset - VMA_DEBUG_MARGIN).
7939 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7941 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7942 return VK_ERROR_VALIDATION_FAILED_EXT;
// Margin immediately after the allocation (offset + size).
7944 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7946 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7947 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits an allocation previously found by CreateAllocationRequest: carves
// allocSize bytes out of the free suballocation request.item points at, and
// re-registers any leftover free space before/after it as new free suballocations.
7955 void VmaBlockMetadata_Generic::Alloc(
7956 const VmaAllocationRequest& request,
7957 VmaSuballocationType type,
7958 VkDeviceSize allocSize,
// This metadata type only supports lower-address allocation.
7962 VMA_ASSERT(!upperAddress);
7963 VMA_ASSERT(request.item != m_Suballocations.end());
7964 VmaSuballocation& suballoc = *request.item;
7966 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// request.offset may be aligned up past suballoc.offset; the gap is paddingBegin.
7968 VMA_ASSERT(request.offset >= suballoc.offset);
7969 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7970 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7971 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove the free item from the by-size registry before mutating its size.
7975 UnregisterFreeSuballocation(request.item);
7977 suballoc.offset = request.offset;
7978 suballoc.size = allocSize;
7979 suballoc.type = type;
7980 suballoc.hAllocation = hAllocation;
// Leftover space after the allocation becomes a new free suballocation.
7985 VmaSuballocation paddingSuballoc = {};
7986 paddingSuballoc.offset = request.offset + allocSize;
7987 paddingSuballoc.size = paddingEnd;
7988 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7989 VmaSuballocationList::iterator next = request.item;
7991 const VmaSuballocationList::iterator paddingEndItem =
7992 m_Suballocations.insert(next, paddingSuballoc);
7993 RegisterFreeSuballocation(paddingEndItem);
// Leftover space before the allocation (alignment gap) likewise becomes free.
7999 VmaSuballocation paddingSuballoc = {};
8000 paddingSuballoc.offset = request.offset - paddingBegin;
8001 paddingSuballoc.size = paddingBegin;
8002 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8003 const VmaSuballocationList::iterator paddingBeginItem =
8004 m_Suballocations.insert(request.item, paddingSuballoc);
8005 RegisterFreeSuballocation(paddingBeginItem);
// Bookkeeping: one free item consumed; padding items re-add to the count
// (increments fall outside the extracted lines), and free bytes shrink by allocSize.
8009 m_FreeCount = m_FreeCount - 1;
8010 if(paddingBegin > 0)
8018 m_SumFreeSize -= allocSize;
// Frees the suballocation holding the given allocation handle. Linear search over
// the suballocation list; asserts if the handle is not found in this block.
8021 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8023 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8024 suballocItem != m_Suballocations.end();
8027 VmaSuballocation& suballoc = *suballocItem;
8028 if(suballoc.hAllocation == allocation)
// FreeSuballocation converts the item to free space and merges neighbors.
8030 FreeSuballocation(suballocItem);
8031 VMA_HEAVY_ASSERT(Validate());
// Reached only if no suballocation matched the handle.
8035 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts exactly at the given byte offset.
// Same linear-search pattern as Free(); asserts if no suballocation matches.
8038 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8040 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8041 suballocItem != m_Suballocations.end();
8044 VmaSuballocation& suballoc = *suballocItem;
8045 if(suballoc.offset == offset)
8047 FreeSuballocation(suballocItem);
8051 VMA_ASSERT(0 &&
"Not found!");
// Resizes an existing allocation in place to newSize. Shrinking always succeeds:
// the freed tail is merged into a following free suballocation or becomes a new
// one. Growing succeeds only when the next suballocation is free and large enough
// (including VMA_DEBUG_MARGIN). Returns true on success; several return statements
// fall outside the extracted lines (embedded numbering skips).
8054 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
8056 typedef VmaSuballocationList::iterator iter_type;
8057 for(iter_type suballocItem = m_Suballocations.begin();
8058 suballocItem != m_Suballocations.end();
8061 VmaSuballocation& suballoc = *suballocItem;
8062 if(suballoc.hAllocation == alloc)
8064 iter_type nextItem = suballocItem;
8068 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// --- Shrinking path ---
8071 if(newSize < alloc->GetSize())
8073 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8076 if(nextItem != m_Suballocations.end())
// Next item is free: grow it backwards to absorb the freed tail.
8079 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Unregister/re-register because its size (the sort key) changes.
8082 UnregisterFreeSuballocation(nextItem);
8083 nextItem->offset -= sizeDiff;
8084 nextItem->size += sizeDiff;
8085 RegisterFreeSuballocation(nextItem);
// Next item is used: insert a brand-new free suballocation for the tail.
8091 VmaSuballocation newFreeSuballoc;
8092 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8093 newFreeSuballoc.offset = suballoc.offset + newSize;
8094 newFreeSuballoc.size = sizeDiff;
8095 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8096 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8097 RegisterFreeSuballocation(newFreeSuballocIt);
// This allocation was last in the block: append the freed tail at the end.
8106 VmaSuballocation newFreeSuballoc;
8107 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8108 newFreeSuballoc.offset = suballoc.offset + newSize;
8109 newFreeSuballoc.size = sizeDiff;
8110 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8111 m_Suballocations.push_back(newFreeSuballoc);
8113 iter_type newFreeSuballocIt = m_Suballocations.end();
8114 RegisterFreeSuballocation(--newFreeSuballocIt);
8119 suballoc.size = newSize;
8120 m_SumFreeSize += sizeDiff;
// --- Growing path ---
8125 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8128 if(nextItem != m_Suballocations.end())
8131 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Next free chunk must cover the growth plus the debug margin, else fail.
8134 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
// Next free chunk strictly larger: shrink it from the front.
8140 if(nextItem->size > sizeDiff)
8143 UnregisterFreeSuballocation(nextItem)
8144 nextItem->offset += sizeDiff;
8145 nextItem->size -= sizeDiff;
8146 RegisterFreeSuballocation(nextItem);
// Next free chunk exactly consumed: remove it entirely.
8152 UnregisterFreeSuballocation(nextItem);
8153 m_Suballocations.erase(nextItem);
8169 suballoc.size = newSize;
8170 m_SumFreeSize -= sizeDiff;
// Reached only when the allocation handle was not found in this block.
8177 VMA_ASSERT(0 &&
"Not found!");
// Sanity-checks m_FreeSuballocationsBySize: every entry must be free, at least
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and sorted ascending by size.
// The success return falls outside the extracted lines.
8181 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8183 VkDeviceSize lastSize = 0;
8184 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8186 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8188 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8189 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
// Ascending order is required for the binary search in UnregisterFreeSuballocation.
8190 VMA_VALIDATE(it->size >= lastSize);
8191 lastSize = it->size;
// Core fit test: determines whether an allocation of allocSize/allocAlignment/
// allocType can be placed starting at suballocItem. Two major branches:
// - canMakeOtherLost == true: the region may span used suballocations that can be
//   made lost; outputs how many (*itemsToMakeLostCount) and the free/used byte
//   sums used for cost comparison by the caller.
// - canMakeOtherLost == false: suballocItem must itself be a free chunk big enough.
// In both branches *pOffset receives the aligned start offset, adjusted for
// VMA_DEBUG_MARGIN and bufferImageGranularity conflicts with neighbors.
// The bool returns fall outside the extracted lines (embedded numbering skips).
8196 bool VmaBlockMetadata_Generic::CheckAllocation(
8197 uint32_t currentFrameIndex,
8198 uint32_t frameInUseCount,
8199 VkDeviceSize bufferImageGranularity,
8200 VkDeviceSize allocSize,
8201 VkDeviceSize allocAlignment,
8202 VmaSuballocationType allocType,
8203 VmaSuballocationList::const_iterator suballocItem,
8204 bool canMakeOtherLost,
8205 VkDeviceSize* pOffset,
8206 size_t* itemsToMakeLostCount,
8207 VkDeviceSize* pSumFreeSize,
8208 VkDeviceSize* pSumItemSize)
const 8210 VMA_ASSERT(allocSize > 0);
8211 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8212 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8213 VMA_ASSERT(pOffset != VMA_NULL);
8215 *itemsToMakeLostCount = 0;
// ---------- Branch 1: may make other allocations lost ----------
8219 if(canMakeOtherLost)
8221 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8223 *pSumFreeSize = suballocItem->size;
// Starting item is used: it only qualifies if it can become lost and is old
// enough relative to the frame-in-use window.
8227 if(suballocItem->hAllocation->CanBecomeLost() &&
8228 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8230 ++*itemsToMakeLostCount;
8231 *pSumItemSize = suballocItem->size;
// Quick reject: not enough space from this offset to the end of the block.
8240 if(GetSize() - suballocItem->offset < allocSize)
8246 *pOffset = suballocItem->offset;
// Reserve the debug margin before the allocation.
8249 if(VMA_DEBUG_MARGIN > 0)
8251 *pOffset += VMA_DEBUG_MARGIN;
8255 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Check previous suballocations on the same "page" for a resource-type
// conflict that would require bufferImageGranularity alignment.
8259 if(bufferImageGranularity > 1)
8261 bool bufferImageGranularityConflict =
false;
8262 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8263 while(prevSuballocItem != m_Suballocations.cbegin())
8266 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8267 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8269 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8271 bufferImageGranularityConflict =
true;
8279 if(bufferImageGranularityConflict)
8281 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// If alignment pushed the offset beyond this suballocation, the caller must
// retry from a later item.
8287 if(*pOffset >= suballocItem->offset + suballocItem->size)
8293 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8296 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8298 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8300 if(suballocItem->offset + totalSize > GetSize())
// Walk forward over as many suballocations as needed to cover totalSize,
// accumulating free bytes and lost-candidate bytes/counts.
8307 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8308 if(totalSize > suballocItem->size)
8310 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8311 while(remainingSize > 0)
8314 if(lastSuballocItem == m_Suballocations.cend())
8318 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8320 *pSumFreeSize += lastSuballocItem->size;
8324 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8325 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8326 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8328 ++*itemsToMakeLostCount;
8329 *pSumItemSize += lastSuballocItem->size;
8336 remainingSize = (lastSuballocItem->size < remainingSize) ?
8337 remainingSize - lastSuballocItem->size : 0;
// Following suballocations on the same page that conflict must also be made
// lost (they cannot stay where granularity would be violated).
8343 if(bufferImageGranularity > 1)
8345 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8347 while(nextSuballocItem != m_Suballocations.cend())
8349 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8350 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8352 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8354 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8355 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8356 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8358 ++*itemsToMakeLostCount;
// ---------- Branch 2: must fit entirely inside this free suballocation ----------
8377 const VmaSuballocation& suballoc = *suballocItem;
8378 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8380 *pSumFreeSize = suballoc.size;
// Quick size reject before doing any alignment work.
8383 if(suballoc.size < allocSize)
8389 *pOffset = suballoc.offset;
8392 if(VMA_DEBUG_MARGIN > 0)
8394 *pOffset += VMA_DEBUG_MARGIN;
8398 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backward granularity-conflict scan as in branch 1.
8402 if(bufferImageGranularity > 1)
8404 bool bufferImageGranularityConflict =
false;
8405 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8406 while(prevSuballocItem != m_Suballocations.cbegin())
8409 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8410 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8412 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8414 bufferImageGranularityConflict =
true;
8422 if(bufferImageGranularityConflict)
8424 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8429 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8432 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fail if padding + allocation + end margin exceed this free chunk.
8435 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward scan: a later allocation on the same page with a conflicting type
// makes this placement invalid (cannot make anything lost in this branch).
8442 if(bufferImageGranularity > 1)
8444 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8446 while(nextSuballocItem != m_Suballocations.cend())
8448 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8449 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8451 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a free suballocation with the immediately following free suballocation:
// grows item by next's size and erases next from the list. Both must be free.
8470 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8472 VMA_ASSERT(item != m_Suballocations.end());
8473 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8475 VmaSuballocationList::iterator nextItem = item;
8477 VMA_ASSERT(nextItem != m_Suballocations.end());
8478 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8480 item->size += nextItem->size;
8482 m_Suballocations.erase(nextItem);
// Converts a used suballocation to free space, coalesces it with free neighbors
// on either side, registers the result in the by-size list, and returns an
// iterator to the final (possibly merged) free suballocation.
8485 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8488 VmaSuballocation& suballoc = *suballocItem;
8489 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8490 suballoc.hAllocation = VK_NULL_HANDLE;
8494 m_SumFreeSize += suballoc.size;
// Decide which neighbors to merge with before mutating the list.
8497 bool mergeWithNext =
false;
8498 bool mergeWithPrev =
false;
8500 VmaSuballocationList::iterator nextItem = suballocItem;
8502 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8504 mergeWithNext =
true;
8507 VmaSuballocationList::iterator prevItem = suballocItem;
8508 if(suballocItem != m_Suballocations.begin())
8511 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8513 mergeWithPrev =
true;
// Merged neighbors must leave the by-size registry before their size changes.
8519 UnregisterFreeSuballocation(nextItem);
8520 MergeFreeWithNext(suballocItem);
8525 UnregisterFreeSuballocation(prevItem);
8526 MergeFreeWithNext(prevItem);
8527 RegisterFreeSuballocation(prevItem);
// No previous merge: register the item itself as a free suballocation.
8532 RegisterFreeSuballocation(suballocItem);
8533 return suballocItem;
// Inserts a free suballocation into m_FreeSuballocationsBySize, keeping it sorted
// by size. Chunks smaller than VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are
// deliberately not tracked there.
8537 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8539 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8540 VMA_ASSERT(item->size > 0);
8544 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8546 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8548 if(m_FreeSuballocationsBySize.empty())
8550 m_FreeSuballocationsBySize.push_back(item);
// Non-empty: binary-search insertion keeps the ascending-size order.
8554 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from m_FreeSuballocationsBySize. Binary-searches
// to the first entry of equal size, then scans forward for the exact iterator
// (several entries can share a size). Asserts if the item is not found.
8562 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8564 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8565 VMA_ASSERT(item->size > 0);
8569 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only registered chunks (>= minimum size) need removal.
8571 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8573 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8574 m_FreeSuballocationsBySize.data(),
8575 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8577 VmaSuballocationItemSizeLess());
8578 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8579 index < m_FreeSuballocationsBySize.size();
8582 if(m_FreeSuballocationsBySize[index] == item)
8584 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run, otherwise the item cannot be ahead.
8587 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8589 VMA_ASSERT(0 &&
"Not found.");
// Heuristic used by defragmentation: returns whether moving allocations could
// create bufferImageGranularity conflicts in this block. Scans all used
// suballocations, tracking the minimum alignment and whether adjacent types
// already conflict; inOutPrevSuballocType carries state across blocks.
8595 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8596 VkDeviceSize bufferImageGranularity,
8597 VmaSuballocationType& inOutPrevSuballocType)
// Granularity of 1 never conflicts; an empty block has nothing to conflict with.
const 8599 if(bufferImageGranularity == 1 || IsEmpty())
8604 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8605 bool typeConflictFound =
false;
8606 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8607 it != m_Suballocations.cend();
8610 const VmaSuballocationType suballocType = it->type;
8611 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8613 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8614 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8616 typeConflictFound =
true;
8618 inOutPrevSuballocType = suballocType;
// Conflict possible if types already conflict, or some allocation's alignment is
// smaller than the granularity (moving could place it on a shared page).
8622 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Linear (ring-buffer / stack / double-stack) block metadata. State starts empty:
// vector 0 is the 1st vector, the 2nd vector is unused, and all null-item
// (already-freed placeholder) counters are zero.
8628 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8629 VmaBlockMetadata(hAllocator),
// Both suballocation vectors use the allocator's host allocation callbacks.
8631 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8632 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8633 m_1stVectorIndex(0),
8634 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8635 m_1stNullItemsBeginCount(0),
8636 m_1stNullItemsMiddleCount(0),
8637 m_2ndNullItemsCount(0)
// Destructor — no explicit cleanup visible; members release themselves.
8641 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes metadata for a block of the given size; the whole block is free.
8645 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8647 VmaBlockMetadata::Init(size);
8648 m_SumFreeSize = size;
// Full consistency check of the linear metadata: vector/mode invariants, offset
// monotonicity in address order (2nd ring part, then 1st, then 2nd stack part),
// null-item counters, and that m_SumFreeSize matches size minus used bytes.
8651 bool VmaBlockMetadata_Linear::Validate()
const 8653 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8654 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is non-empty exactly when a 2nd-vector mode is active; a ring
// buffer cannot exist with an empty 1st vector.
8656 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8657 VMA_VALIDATE(!suballocations1st.empty() ||
8658 suballocations2nd.empty() ||
8659 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8661 if(!suballocations1st.empty())
// First non-null item must be used; last item must never be null.
8664 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8666 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8668 if(!suballocations2nd.empty())
8671 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
8674 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8675 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8677 VkDeviceSize sumUsedSize = 0;
8678 const size_t suballoc1stCount = suballocations1st.size();
// Running cursor through the block's address space, honoring debug margins.
8679 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies addresses below the 1st vector.
8681 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8683 const size_t suballoc2ndCount = suballocations2nd.size();
8684 size_t nullItem2ndCount = 0;
8685 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8687 const VmaSuballocation& suballoc = suballocations2nd[i];
8688 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Free <=> null handle; offsets must be non-decreasing.
8690 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8691 VMA_VALIDATE(suballoc.offset >= offset);
8695 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8696 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8697 sumUsedSize += suballoc.size;
8704 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8707 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must be free placeholders.
8710 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8712 const VmaSuballocation& suballoc = suballocations1st[i];
8713 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8714 suballoc.hAllocation == VK_NULL_HANDLE);
8717 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Remainder of the 1st vector, continuing the address cursor.
8719 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8721 const VmaSuballocation& suballoc = suballocations1st[i];
8722 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8724 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8725 VMA_VALIDATE(suballoc.offset >= offset);
8726 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8730 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8731 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8732 sumUsedSize += suballoc.size;
8739 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8741 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows downward from the end; iterate reversed
// so offsets are visited in ascending address order.
8743 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8745 const size_t suballoc2ndCount = suballocations2nd.size();
8746 size_t nullItem2ndCount = 0;
8747 for(
size_t i = suballoc2ndCount; i--; )
8749 const VmaSuballocation& suballoc = suballocations2nd[i];
8750 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8752 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8753 VMA_VALIDATE(suballoc.offset >= offset);
8757 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8758 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8759 sumUsedSize += suballoc.size;
8766 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8769 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Final invariants: cursor within the block, free-size bookkeeping consistent.
8772 VMA_VALIDATE(offset <= GetSize());
8773 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Live allocation count = total items in both vectors minus the null (freed
// placeholder) items tracked by the three counters.
8778 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8780 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8781 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the largest contiguous free range, computed per 2nd-vector mode from
// the gaps at the ends of the vectors (interior gaps are not considered here;
// earlier early-outs fall outside the extracted lines).
8784 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8786 const VkDeviceSize size = GetSize();
8798 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8800 switch(m_2ndVectorMode)
8802 case SECOND_VECTOR_EMPTY:
// Only the 1st vector: free space is before its first item or after its last.
8808 const size_t suballocations1stCount = suballocations1st.size();
8809 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8810 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8811 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8813 firstSuballoc.offset,
8814 size - (lastSuballoc.offset + lastSuballoc.size));
8818 case SECOND_VECTOR_RING_BUFFER:
// Ring buffer: the free gap lies between the end of the 2nd vector and the
// start of the 1st vector.
8823 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8824 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8825 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8826 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
8830 case SECOND_VECTOR_DOUBLE_STACK:
// Double stack: free gap between the top of the 1st (bottom) stack and the
// top of the 2nd (top) stack.
8835 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8836 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
8837 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
8838 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Accumulates per-block statistics (allocation and unused-range counts/sizes)
// into outInfo by walking the block's address space in order: 2nd vector (ring
// part), then 1st vector, then 2nd vector (double-stack part). The actual
// outInfo updates fall outside the extracted lines; the skeleton below shows
// the traversal and gap computation.
8848 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 8850 const VkDeviceSize size = GetSize();
8851 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8852 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8853 const size_t suballoc1stCount = suballocations1st.size();
8854 const size_t suballoc2ndCount = suballocations2nd.size();
// Cursor through the block; every gap between it and the next used suballocation
// is an unused range.
8865 VkDeviceSize lastOffset = 0;
8867 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8869 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8870 size_t nextAlloc2ndIndex = 0;
8871 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items to find the next real allocation.
8874 while(nextAlloc2ndIndex < suballoc2ndCount &&
8875 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8877 ++nextAlloc2ndIndex;
8881 if(nextAlloc2ndIndex < suballoc2ndCount)
8883 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8886 if(lastOffset < suballoc.offset)
8889 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8903 lastOffset = suballoc.offset + suballoc.size;
8904 ++nextAlloc2ndIndex;
// No more 2nd-vector allocations: the remainder up to the 1st vector is unused.
8910 if(lastOffset < freeSpace2ndTo1stEnd)
8912 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8920 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector: same skip-null / count-gap pattern, ending at the 2nd stack's
// bottom (double-stack mode) or the block end.
8925 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8926 const VkDeviceSize freeSpace1stTo2ndEnd =
8927 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8928 while(lastOffset < freeSpace1stTo2ndEnd)
8931 while(nextAlloc1stIndex < suballoc1stCount &&
8932 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8934 ++nextAlloc1stIndex;
8938 if(nextAlloc1stIndex < suballoc1stCount)
8940 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8943 if(lastOffset < suballoc.offset)
8946 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8960 lastOffset = suballoc.offset + suballoc.size;
8961 ++nextAlloc1stIndex;
8967 if(lastOffset < freeSpace1stTo2ndEnd)
8969 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8977 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack part of the 2nd vector, iterated from the back (ascending address).
8981 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8983 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8984 while(lastOffset < size)
8987 while(nextAlloc2ndIndex != SIZE_MAX &&
8988 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8990 --nextAlloc2ndIndex;
8994 if(nextAlloc2ndIndex != SIZE_MAX)
8996 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8999 if(lastOffset < suballoc.offset)
9002 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9016 lastOffset = suballoc.offset + suballoc.size;
9017 --nextAlloc2ndIndex;
9023 if(lastOffset < size)
9025 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Adds this block's contribution to pool-level statistics (inoutStats), using
// the same three-phase address-order traversal as CalcAllocationStatInfo.
// The actual inoutStats field updates fall outside the extracted lines.
9041 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9043 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9044 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9045 const VkDeviceSize size = GetSize();
9046 const size_t suballoc1stCount = suballocations1st.size();
9047 const size_t suballoc2ndCount = suballocations2nd.size();
9049 inoutStats.
size += size;
9051 VkDeviceSize lastOffset = 0;
9053 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9055 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): this scan starts at m_1stNullItemsBeginCount — a 1st-vector
// counter — while the parallel loop in CalcAllocationStatInfo starts at 0.
// Looks like a copy/paste slip; confirm against upstream before changing.
9056 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9057 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items to find the next real allocation.
9060 while(nextAlloc2ndIndex < suballoc2ndCount &&
9061 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9063 ++nextAlloc2ndIndex;
9067 if(nextAlloc2ndIndex < suballoc2ndCount)
9069 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9072 if(lastOffset < suballoc.offset)
9075 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9086 lastOffset = suballoc.offset + suballoc.size;
9087 ++nextAlloc2ndIndex;
9092 if(lastOffset < freeSpace2ndTo1stEnd)
9095 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9102 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector scan, up to the 2nd stack's bottom or the block end.
9107 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9108 const VkDeviceSize freeSpace1stTo2ndEnd =
9109 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9110 while(lastOffset < freeSpace1stTo2ndEnd)
9113 while(nextAlloc1stIndex < suballoc1stCount &&
9114 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9116 ++nextAlloc1stIndex;
9120 if(nextAlloc1stIndex < suballoc1stCount)
9122 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9125 if(lastOffset < suballoc.offset)
9128 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9139 lastOffset = suballoc.offset + suballoc.size;
9140 ++nextAlloc1stIndex;
9145 if(lastOffset < freeSpace1stTo2ndEnd)
9148 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9155 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack part of the 2nd vector, iterated from the back (ascending address).
9159 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9161 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9162 while(lastOffset < size)
9165 while(nextAlloc2ndIndex != SIZE_MAX &&
9166 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9168 --nextAlloc2ndIndex;
9172 if(nextAlloc2ndIndex != SIZE_MAX)
9174 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9177 if(lastOffset < suballoc.offset)
9180 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9191 lastOffset = suballoc.offset + suballoc.size;
9192 --nextAlloc2ndIndex;
9197 if(lastOffset < size)
9200 const VkDeviceSize unusedRangeSize = size - lastOffset;
// NOTE(review): this region is extraction-damaged — the original source line
// numbers are fused into the statements and brace/blank lines are missing.
// Restore from upstream VulkanMemoryAllocator before editing behavior.
//
// Dumps this linear block as JSON. FIRST PASS walks the three regions in
// offset order (2nd vector in ring-buffer mode, then the 1st vector, then the
// 2nd vector in double-stack mode) to count allocations, used bytes and
// unused ranges. SECOND PASS repeats the walk, emitting each allocation and
// unused range via the PrintDetailedMap_* helpers.
9213 #if VMA_STATS_STRING_ENABLED 9214 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9216 const VkDeviceSize size = GetSize();
9217 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9218 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9219 const size_t suballoc1stCount = suballocations1st.size();
9220 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: statistics only (no JSON output yet).
9224 size_t unusedRangeCount = 0;
9225 VkDeviceSize usedBytes = 0;
9227 VkDeviceSize lastOffset = 0;
// Ring-buffer part of the 2nd vector occupies [0, first used item of 1st).
9229 size_t alloc2ndCount = 0;
9230 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9232 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9233 size_t nextAlloc2ndIndex = 0;
9234 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip free (null) items to find the next real allocation.
9237 while(nextAlloc2ndIndex < suballoc2ndCount &&
9238 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9240 ++nextAlloc2ndIndex;
9244 if(nextAlloc2ndIndex < suballoc2ndCount)
9246 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9249 if(lastOffset < suballoc.offset)
9258 usedBytes += suballoc.size;
9261 lastOffset = suballoc.offset + suballoc.size;
9262 ++nextAlloc2ndIndex;
9267 if(lastOffset < freeSpace2ndTo1stEnd)
9274 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector: from its first used item up to the start of the double stack
// (or to the end of the block when there is no double stack).
9279 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9280 size_t alloc1stCount = 0;
9281 const VkDeviceSize freeSpace1stTo2ndEnd =
9282 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9283 while(lastOffset < freeSpace1stTo2ndEnd)
9286 while(nextAlloc1stIndex < suballoc1stCount &&
9287 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9289 ++nextAlloc1stIndex;
9293 if(nextAlloc1stIndex < suballoc1stCount)
9295 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9298 if(lastOffset < suballoc.offset)
9307 usedBytes += suballoc.size;
9310 lastOffset = suballoc.offset + suballoc.size;
9311 ++nextAlloc1stIndex;
9316 if(lastOffset < size)
9323 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack part of the 2nd vector: iterated from the back so offsets
// are visited in increasing order.
9327 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9329 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9330 while(lastOffset < size)
9333 while(nextAlloc2ndIndex != SIZE_MAX &&
9334 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9336 --nextAlloc2ndIndex;
9340 if(nextAlloc2ndIndex != SIZE_MAX)
9342 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9345 if(lastOffset < suballoc.offset)
9354 usedBytes += suballoc.size;
9357 lastOffset = suballoc.offset + suballoc.size;
9358 --nextAlloc2ndIndex;
9363 if(lastOffset < size)
// Open the JSON object with the aggregated totals.
9375 const VkDeviceSize unusedBytes = size - usedBytes;
9376 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// SECOND PASS: same traversal, now emitting JSON entries.
9381 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9383 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9384 size_t nextAlloc2ndIndex = 0;
9385 while(lastOffset < freeSpace2ndTo1stEnd)
9388 while(nextAlloc2ndIndex < suballoc2ndCount &&
9389 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9391 ++nextAlloc2ndIndex;
9395 if(nextAlloc2ndIndex < suballoc2ndCount)
9397 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9400 if(lastOffset < suballoc.offset)
9403 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9404 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9409 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9412 lastOffset = suballoc.offset + suballoc.size;
9413 ++nextAlloc2ndIndex;
9418 if(lastOffset < freeSpace2ndTo1stEnd)
9421 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9422 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9426 lastOffset = freeSpace2ndTo1stEnd;
9431 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9432 while(lastOffset < freeSpace1stTo2ndEnd)
9435 while(nextAlloc1stIndex < suballoc1stCount &&
9436 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9438 ++nextAlloc1stIndex;
9442 if(nextAlloc1stIndex < suballoc1stCount)
9444 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9447 if(lastOffset < suballoc.offset)
9450 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9451 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9456 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9459 lastOffset = suballoc.offset + suballoc.size;
9460 ++nextAlloc1stIndex;
9465 if(lastOffset < freeSpace1stTo2ndEnd)
9468 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9469 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9473 lastOffset = freeSpace1stTo2ndEnd;
9477 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9479 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9480 while(lastOffset < size)
9483 while(nextAlloc2ndIndex != SIZE_MAX &&
9484 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9486 --nextAlloc2ndIndex;
9490 if(nextAlloc2ndIndex != SIZE_MAX)
9492 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9495 if(lastOffset < suballoc.offset)
9498 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9499 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9504 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9507 lastOffset = suballoc.offset + suballoc.size;
9508 --nextAlloc2ndIndex;
9513 if(lastOffset < size)
9516 const VkDeviceSize unusedRangeSize = size - lastOffset;
9517 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9526 PrintDetailedMap_End(json);
// NOTE(review): extraction damage — original line numbers are fused into the
// statements; the `upperAddress` parameter (original line 9536) and the
// if(upperAddress)/else skeleton appear to have been dropped. Restore from
// upstream before editing.
//
// Tries to find a place for a new allocation of allocSize/allocAlignment.
// Three strategies, in order:
//  1. Upper-address (double stack): grow the 2nd vector downward from the top.
//  2. End of the 1st vector (lower address), bounded by the double stack.
//  3. Wrap-around: start the 2nd vector as a ring buffer at offset 0,
//     optionally making old 1st-vector allocations lost (canMakeOtherLost).
// On success fills *pAllocationRequest; presumably returns true/false
// (return statements lost in extraction — confirm against upstream).
9528 #endif // #if VMA_STATS_STRING_ENABLED 9530 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9531 uint32_t currentFrameIndex,
9532 uint32_t frameInUseCount,
9533 VkDeviceSize bufferImageGranularity,
9534 VkDeviceSize allocSize,
9535 VkDeviceSize allocAlignment,
9537 VmaSuballocationType allocType,
9538 bool canMakeOtherLost,
9540 VmaAllocationRequest* pAllocationRequest)
9542 VMA_ASSERT(allocSize > 0);
9543 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9544 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9545 VMA_HEAVY_ASSERT(Validate());
9547 const VkDeviceSize size = GetSize();
9548 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9549 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Strategy 1 (presumably guarded by the lost upperAddress check): a block
// already used as a ring buffer cannot also serve as a double stack.
9553 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9555 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9560 if(allocSize > size)
// Candidate offset: just below the previous top allocation (or block end).
9564 VkDeviceSize resultBaseOffset = size - allocSize;
9565 if(!suballocations2nd.empty())
9567 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9568 resultBaseOffset = lastSuballoc.offset - allocSize;
9569 if(allocSize > lastSuballoc.offset)
9576 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve a debug margin below the previous allocation.
9579 if(VMA_DEBUG_MARGIN > 0)
9581 if(resultOffset < VMA_DEBUG_MARGIN)
9585 resultOffset -= VMA_DEBUG_MARGIN;
// Aligning DOWN because this allocation grows from the top.
9589 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Respect bufferImageGranularity against the next (higher-offset) neighbors.
9593 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9595 bool bufferImageGranularityConflict =
false;
9596 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9598 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9599 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9601 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9603 bufferImageGranularityConflict =
true;
9611 if(bufferImageGranularityConflict)
9613 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// The candidate must not collide with the end of the 1st vector.
9618 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9619 suballocations1st.back().offset + suballocations1st.back().size :
9621 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
9625 if(bufferImageGranularity > 1)
9627 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9629 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9630 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9632 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success for the upper-address strategy.
9646 pAllocationRequest->offset = resultOffset;
9647 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9648 pAllocationRequest->sumItemSize = 0;
9650 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 2: append after the last item of the 1st vector.
9656 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9660 VkDeviceSize resultBaseOffset = 0;
9661 if(!suballocations1st.empty())
9663 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9664 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9668 VkDeviceSize resultOffset = resultBaseOffset;
9671 if(VMA_DEBUG_MARGIN > 0)
9673 resultOffset += VMA_DEBUG_MARGIN;
9677 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Respect bufferImageGranularity against previous (lower-offset) neighbors.
9681 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9683 bool bufferImageGranularityConflict =
false;
9684 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9686 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9687 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9689 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9691 bufferImageGranularityConflict =
true;
9699 if(bufferImageGranularityConflict)
9701 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends where the double stack begins (or at block end).
9705 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9706 suballocations2nd.back().offset : size;
9709 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
9713 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9715 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9717 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9718 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9720 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success for the end-of-1st strategy.
9734 pAllocationRequest->offset = resultOffset;
9735 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9736 pAllocationRequest->sumItemSize = 0;
9738 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 3: wrap around, using the 2nd vector as a ring buffer.
9745 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9747 VMA_ASSERT(!suballocations1st.empty());
9749 VkDeviceSize resultBaseOffset = 0;
9750 if(!suballocations2nd.empty())
9752 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9753 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9757 VkDeviceSize resultOffset = resultBaseOffset;
9760 if(VMA_DEBUG_MARGIN > 0)
9762 resultOffset += VMA_DEBUG_MARGIN;
9766 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9770 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9772 bool bufferImageGranularityConflict =
false;
9773 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9775 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9776 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9778 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9780 bufferImageGranularityConflict =
true;
9788 if(bufferImageGranularityConflict)
9790 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
9794 pAllocationRequest->itemsToMakeLostCount = 0;
9795 pAllocationRequest->sumItemSize = 0;
9796 size_t index1st = m_1stNullItemsBeginCount;
// Count 1st-vector allocations that overlap the candidate range and could
// be made lost (old enough per frameInUseCount) to free the space.
9798 if(canMakeOtherLost)
9800 while(index1st < suballocations1st.size() &&
9801 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
9804 const VmaSuballocation& suballoc = suballocations1st[index1st];
9805 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
9811 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9812 if(suballoc.hAllocation->CanBecomeLost() &&
9813 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9815 ++pAllocationRequest->itemsToMakeLostCount;
9816 pAllocationRequest->sumItemSize += suballoc.size;
// Also lose allocations sharing the same granularity page, if needed.
9828 if(bufferImageGranularity > 1)
9830 while(index1st < suballocations1st.size())
9832 const VmaSuballocation& suballoc = suballocations1st[index1st];
9833 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
9835 if(suballoc.hAllocation != VK_NULL_HANDLE)
9838 if(suballoc.hAllocation->CanBecomeLost() &&
9839 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9841 ++pAllocationRequest->itemsToMakeLostCount;
9842 pAllocationRequest->sumItemSize += suballoc.size;
// The candidate fits if it ends before block end or before the next
// surviving 1st-vector item.
9861 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
9862 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
9866 if(bufferImageGranularity > 1)
9868 for(
size_t nextSuballocIndex = index1st;
9869 nextSuballocIndex < suballocations1st.size();
9870 nextSuballocIndex++)
9872 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9873 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9875 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success for the ring-buffer strategy.
9889 pAllocationRequest->offset = resultOffset;
9890 pAllocationRequest->sumFreeSize =
9891 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
9893 - pAllocationRequest->sumItemSize;
9903 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
9904 uint32_t currentFrameIndex,
9905 uint32_t frameInUseCount,
9906 VmaAllocationRequest* pAllocationRequest)
9908 if(pAllocationRequest->itemsToMakeLostCount == 0)
9913 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
9915 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9916 size_t index1st = m_1stNullItemsBeginCount;
9917 size_t madeLostCount = 0;
9918 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
9920 VMA_ASSERT(index1st < suballocations1st.size());
9921 VmaSuballocation& suballoc = suballocations1st[index1st];
9922 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9924 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9925 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
9926 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9928 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9929 suballoc.hAllocation = VK_NULL_HANDLE;
9930 m_SumFreeSize += suballoc.size;
9931 ++m_1stNullItemsMiddleCount;
9948 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9950 uint32_t lostAllocationCount = 0;
9952 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9953 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9955 VmaSuballocation& suballoc = suballocations1st[i];
9956 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9957 suballoc.hAllocation->CanBecomeLost() &&
9958 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9960 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9961 suballoc.hAllocation = VK_NULL_HANDLE;
9962 ++m_1stNullItemsMiddleCount;
9963 m_SumFreeSize += suballoc.size;
9964 ++lostAllocationCount;
9968 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9969 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9971 VmaSuballocation& suballoc = suballocations2nd[i];
9972 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9973 suballoc.hAllocation->CanBecomeLost() &&
9974 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9976 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9977 suballoc.hAllocation = VK_NULL_HANDLE;
9978 ++m_2ndNullItemsCount;
9979 ++lostAllocationCount;
9983 if(lostAllocationCount)
9988 return lostAllocationCount;
9991 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
9993 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9994 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9996 const VmaSuballocation& suballoc = suballocations1st[i];
9997 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9999 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10001 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10002 return VK_ERROR_VALIDATION_FAILED_EXT;
10004 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10006 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10007 return VK_ERROR_VALIDATION_FAILED_EXT;
10012 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10013 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10015 const VmaSuballocation& suballoc = suballocations2nd[i];
10016 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10018 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10020 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10021 return VK_ERROR_VALIDATION_FAILED_EXT;
10023 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10025 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10026 return VK_ERROR_VALIDATION_FAILED_EXT;
// NOTE(review): extraction damage — the hAllocation parameter and the
// if(upperAddress)/else skeleton appear to have been dropped; restore from
// upstream before editing.
//
// Commits the allocation described by `request` into this linear block:
// upper-address requests push to the 2nd vector (double stack), end requests
// append to the 1st vector, wrap-around requests push to the 2nd vector in
// ring-buffer mode. Finally deducts the size from m_SumFreeSize.
10034 void VmaBlockMetadata_Linear::Alloc(
10035 const VmaAllocationRequest& request,
10036 VmaSuballocationType type,
10037 VkDeviceSize allocSize,
10041 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address path: switch (or keep) the 2nd vector in double-stack mode.
10045 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10046 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10047 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10048 suballocations2nd.push_back(newSuballoc);
10049 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
10053 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// First allocation in an empty block.
10056 if(suballocations1st.empty())
10058 suballocations1st.push_back(newSuballoc);
// New allocation lies at the end of the 1st vector.
10063 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
10066 VMA_ASSERT(request.offset + allocSize <= GetSize());
10067 suballocations1st.push_back(newSuballoc);
// Wrap-around: the allocation fits before the first used 1st-vector item,
// so it goes to the 2nd vector used as a ring buffer.
10070 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
10072 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10074 switch(m_2ndVectorMode)
10076 case SECOND_VECTOR_EMPTY:
// First wrap-around allocation starts ring-buffer mode.
10078 VMA_ASSERT(suballocations2nd.empty());
10079 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10081 case SECOND_VECTOR_RING_BUFFER:
10083 VMA_ASSERT(!suballocations2nd.empty());
10085 case SECOND_VECTOR_DOUBLE_STACK:
10086 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10092 suballocations2nd.push_back(newSuballoc);
// Offset fits in neither region — the request is inconsistent.
10096 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10101 m_SumFreeSize -= newSuballoc.size;
10104 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10106 FreeAtOffset(allocation->GetOffset());
// NOTE(review): extraction damage — early-return statements after the fast
// paths and the third argument of the first VmaVectorFindSorted call appear
// to have been dropped; restore from upstream before editing.
//
// Frees the suballocation at `offset`. Fast paths first (front of the 1st
// vector, back of the 2nd or 1st vector), then binary search in the middle
// of either vector. Updates null-item counters / m_SumFreeSize and compacts
// via CleanupAfterFree().
10109 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10111 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10112 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Fast path: first used item of the 1st vector.
10114 if(!suballocations1st.empty())
10117 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10118 if(firstSuballoc.offset == offset)
10120 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10121 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10122 m_SumFreeSize += firstSuballoc.size;
10123 ++m_1stNullItemsBeginCount;
10124 CleanupAfterFree();
// Fast path: top of the 2nd vector (ring buffer or double stack).
10130 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10131 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10133 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10134 if(lastSuballoc.offset == offset)
10136 m_SumFreeSize += lastSuballoc.size;
10137 suballocations2nd.pop_back();
10138 CleanupAfterFree();
// Fast path: last item of the 1st vector when there is no 2nd vector.
10143 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10145 VmaSuballocation& lastSuballoc = suballocations1st.back();
10146 if(lastSuballoc.offset == offset)
10148 m_SumFreeSize += lastSuballoc.size;
10149 suballocations1st.pop_back();
10150 CleanupAfterFree();
// Slow path: binary search the middle of the 1st vector (sorted by offset).
10157 VmaSuballocation refSuballoc;
10158 refSuballoc.offset = offset;
10160 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
10161 suballocations1st.begin() + m_1stNullItemsBeginCount,
10162 suballocations1st.end(),
10164 if(it != suballocations1st.end())
10166 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10167 it->hAllocation = VK_NULL_HANDLE;
10168 ++m_1stNullItemsMiddleCount;
10169 m_SumFreeSize += it->size;
10170 CleanupAfterFree();
// Slow path: search the 2nd vector — ascending order in ring-buffer mode,
// descending in double-stack mode.
10175 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
10178 VmaSuballocation refSuballoc;
10179 refSuballoc.offset = offset;
10181 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10182 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
10183 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
10184 if(it != suballocations2nd.end())
10186 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10187 it->hAllocation = VK_NULL_HANDLE;
10188 ++m_2ndNullItemsCount;
10189 m_SumFreeSize += it->size;
10190 CleanupAfterFree();
10195 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
10198 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10200 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10201 const size_t suballocCount = AccessSuballocations1st().size();
10202 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// NOTE(review): extraction damage — the guard around the everything-empty
// branch (presumably if(IsEmpty()) in upstream), the ++srcIndex advances in
// the compaction loop, and several else/return lines appear to have been
// dropped; restore from upstream before editing.
//
// Housekeeping after any free: trims null items from the edges of both
// vectors, optionally compacts the 1st vector, and when the 1st vector runs
// empty promotes the ring-buffer 2nd vector to become the new 1st vector.
10205 void VmaBlockMetadata_Linear::CleanupAfterFree()
10207 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10208 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Block entirely empty — reset everything to the initial state.
10212 suballocations1st.clear();
10213 suballocations2nd.clear();
10214 m_1stNullItemsBeginCount = 0;
10215 m_1stNullItemsMiddleCount = 0;
10216 m_2ndNullItemsCount = 0;
10217 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10221 const size_t suballoc1stCount = suballocations1st.size();
10222 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10223 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Absorb leading null items of the 1st vector into the begin-count.
10226 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10227 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10229 ++m_1stNullItemsBeginCount;
10230 --m_1stNullItemsMiddleCount;
// Pop trailing null items of the 1st vector.
10234 while(m_1stNullItemsMiddleCount > 0 &&
10235 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10237 --m_1stNullItemsMiddleCount;
10238 suballocations1st.pop_back();
// Pop trailing null items of the 2nd vector.
10242 while(m_2ndNullItemsCount > 0 &&
10243 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10245 --m_2ndNullItemsCount;
10246 suballocations2nd.pop_back();
// Compact the 1st vector: slide live items to the front, drop the nulls.
10249 if(ShouldCompact1st())
10251 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10252 size_t srcIndex = m_1stNullItemsBeginCount;
10253 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10255 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10259 if(dstIndex != srcIndex)
10261 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10265 suballocations1st.resize(nonNullItemCount)
10266 m_1stNullItemsBeginCount = 0;
10267 m_1stNullItemsMiddleCount = 0;
// 2nd vector exhausted — leave only the 1st.
10271 if(suballocations2nd.empty())
10273 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector exhausted — swap roles so the ring-buffer 2nd vector becomes
// the new 1st vector.
10277 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10279 suballocations1st.clear();
10280 m_1stNullItemsBeginCount = 0;
10282 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10285 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10286 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10287 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10288 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10290 ++m_1stNullItemsBeginCount;
10291 --m_1stNullItemsMiddleCount;
10293 m_2ndNullItemsCount = 0;
// Flip which internal vector is considered "1st".
10294 m_1stVectorIndex ^= 1;
10299 VMA_HEAVY_ASSERT(Validate());
// Constructs empty buddy-algorithm metadata; the tree is built later in Init().
// NOTE(review): several member initializers seem to have been lost in
// extraction (only m_AllocationCount is visible) — confirm against upstream.
10306 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10307 VmaBlockMetadata(hAllocator),
10309 m_AllocationCount(0),
// Zero all per-level free-list heads/tails.
10313 memset(m_FreeList, 0,
sizeof(m_FreeList));
10316 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10318 DeleteNode(m_Root);
// Initializes the buddy tree for a block of the given size.
// NOTE(review): extraction damage — the m_LevelCount initialization/increment
// and (presumably) the m_Root assignment are missing; confirm against upstream.
10321 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10323 VmaBlockMetadata::Init(size);
// Usable size is the size rounded DOWN to a power of two; the remainder of
// the block is unusable by this algorithm.
10325 m_UsableSize = VmaPrevPow2(size);
10326 m_SumFreeSize = m_UsableSize;
// Grow the level count while the next level's node size stays >= MIN_NODE_SIZE.
10330 while(m_LevelCount < MAX_LEVELS &&
10331 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Create the root node spanning the whole usable size and register it as free.
10336 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10337 rootNode->offset = 0;
10338 rootNode->type = Node::TYPE_FREE;
10339 rootNode->parent = VMA_NULL;
10340 rootNode->buddy = VMA_NULL;
10343 AddToFreeListFront(0, rootNode);
// Consistency check for the buddy metadata: validates the whole node tree,
// cross-checks the cached counters against recomputed ones, and verifies the
// doubly-linked free list of every level.
// NOTE(review): extraction damage — loop conditions/returns partially missing.
10346 bool VmaBlockMetadata_Buddy::Validate()
const 10349 ValidationContext ctx;
10350 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10352 VMA_VALIDATE(
false &&
"ValidateNode failed.");
// Cached aggregates must match what the tree walk computed.
10354 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10355 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Validate free-list linkage per level: front has no prev, back is the last
// node, and next/prev pointers are mutually consistent.
10358 for(uint32_t level = 0; level < m_LevelCount; ++level)
10360 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10361 m_FreeList[level].front->free.prev == VMA_NULL);
10363 for(Node* node = m_FreeList[level].front;
10365 node = node->free.next)
10367 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10369 if(node->free.next == VMA_NULL)
10371 VMA_VALIDATE(m_FreeList[level].back == node);
10375 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must stay unused.
10381 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10383 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
10389 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10391 for(uint32_t level = 0; level < m_LevelCount; ++level)
10393 if(m_FreeList[level].front != VMA_NULL)
10395 return LevelToNodeSize(level);
// Fills outInfo by recursively accumulating statistics over the node tree.
// NOTE(review): extraction damage — the outInfo initialization lines and the
// body of the unusableSize branch are missing; confirm against upstream.
10401 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10403 const VkDeviceSize unusableSize = GetUnusableSize();
10414 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
// Account for the tail of the block that the power-of-two tree cannot use.
10416 if(unusableSize > 0)
// Accumulates this block's totals into inoutStats. The unusable tail (size
// rounded down to a power of two) counts as unused space.
// NOTE(review): extraction damage — several field updates and the body of
// the unusableSize branch are missing; confirm against upstream.
10425 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10427 const VkDeviceSize unusableSize = GetUnusableSize();
10429 inoutStats.
size += GetSize();
10430 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10435 if(unusableSize > 0)
// Emits a JSON map of the buddy block: header from computed statistics, then
// every node recursively, then the unusable tail as one unused range.
// NOTE(review): extraction damage — the stat variable declaration and the
// argument lists of the *_Begin / *_UnusedRange calls are incomplete;
// confirm against upstream.
10442 #if VMA_STATS_STRING_ENABLED 10444 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10448 CalcAllocationStatInfo(stat);
10450 PrintDetailedMap_Begin(
10456 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10458 const VkDeviceSize unusableSize = GetUnusableSize();
10459 if(unusableSize > 0)
10461 PrintDetailedMap_UnusedRange(json,
10466 PrintDetailedMap_End(json);
// Finds a free node for the request: bumps size/alignment up to
// bufferImageGranularity for image-like types (a conservative way to avoid
// granularity conflicts), then scans free lists from the target level upward
// for a node whose offset satisfies the alignment. The chosen level is
// smuggled to Alloc() through pAllocationRequest->customData.
// NOTE(review): extraction damage — the upperAddress parameter line and the
// final return statements are missing; confirm against upstream.
10469 #endif // #if VMA_STATS_STRING_ENABLED 10471 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10472 uint32_t currentFrameIndex,
10473 uint32_t frameInUseCount,
10474 VkDeviceSize bufferImageGranularity,
10475 VkDeviceSize allocSize,
10476 VkDeviceSize allocAlignment,
10478 VmaSuballocationType allocType,
10479 bool canMakeOtherLost,
10481 VmaAllocationRequest* pAllocationRequest)
10483 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// For types whose granularity class is unknown or optimal-image, round both
// size and alignment up so neighbors can never conflict.
10487 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10488 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10489 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10491 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10492 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10495 if(allocSize > m_UsableSize)
// Scan from the smallest level that fits (targetLevel) up to level 0.
10500 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10501 for(uint32_t level = targetLevel + 1; level--; )
10503 for(Node* freeNode = m_FreeList[level].front;
10504 freeNode != VMA_NULL;
10505 freeNode = freeNode->free.next)
10507 if(freeNode->offset % allocAlignment == 0)
10509 pAllocationRequest->offset = freeNode->offset;
10510 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10511 pAllocationRequest->sumItemSize = 0;
10512 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember which level the node was found on for Alloc().
10513 pAllocationRequest->customData = (
void*)(uintptr_t)level;
10522 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10523 uint32_t currentFrameIndex,
10524 uint32_t frameInUseCount,
10525 VmaAllocationRequest* pAllocationRequest)
10531 return pAllocationRequest->itemsToMakeLostCount == 0;
10534 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits an allocation: locates the free node chosen by
// CreateAllocationRequest (its level travels in request.customData), splits
// it repeatedly until reaching the target level, then converts the final
// node to TYPE_ALLOCATION and updates the counters.
// NOTE(review): extraction damage — the hAllocation parameter line, the
// currLevel increments and m_FreeCount updates are missing; confirm against
// upstream before editing.
10543 void VmaBlockMetadata_Buddy::Alloc(
10544 const VmaAllocationRequest& request,
10545 VmaSuballocationType type,
10546 VkDeviceSize allocSize,
10550 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10551 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the free node with the requested offset on the recorded level.
10553 Node* currNode = m_FreeList[currLevel].front;
10554 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10555 while(currNode->offset != request.offset)
10557 currNode = currNode->free.next;
10558 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split the node until its level matches the allocation's target level.
10562 while(currLevel < targetLevel)
10566 RemoveFromFreeList(currLevel, currNode);
10568 const uint32_t childrenLevel = currLevel + 1;
// Create the two buddies covering the halves of the current node.
10571 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10572 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10574 leftChild->offset = currNode->offset;
10575 leftChild->type = Node::TYPE_FREE;
10576 leftChild->parent = currNode;
10577 leftChild->buddy = rightChild;
10579 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10580 rightChild->type = Node::TYPE_FREE;
10581 rightChild->parent = currNode;
10582 rightChild->buddy = leftChild;
// The current node becomes an interior SPLIT node.
10585 currNode->type = Node::TYPE_SPLIT;
10586 currNode->split.leftChild = leftChild;
// Push right then left so the left child ends up at the front.
10589 AddToFreeListFront(childrenLevel, rightChild);
10590 AddToFreeListFront(childrenLevel, leftChild);
10595 currNode = m_FreeList[currLevel].front;
// We should now hold a free node exactly at the target level.
10604 VMA_ASSERT(currLevel == targetLevel &&
10605 currNode != VMA_NULL &&
10606 currNode->type == Node::TYPE_FREE);
10607 RemoveFromFreeList(currLevel, currNode);
// Convert to an allocation node and update aggregates.
10610 currNode->type = Node::TYPE_ALLOCATION;
10611 currNode->allocation.alloc = hAllocation;
10613 ++m_AllocationCount;
10615 m_SumFreeSize -= allocSize;
10618 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10620 if(node->type == Node::TYPE_SPLIT)
10622 DeleteNode(node->split.leftChild->buddy);
10623 DeleteNode(node->split.leftChild);
10626 vma_delete(GetAllocationCallbacks(), node);
// Recursively validates one node of the buddy tree: parent/buddy linkage,
// then per-type invariants, accumulating counters into ctx for the caller's
// cross-check.
// NOTE(review): extraction damage — the switch(curr->type) header, break
// statements and the final return are missing; confirm against upstream.
10629 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10631 VMA_VALIDATE(level < m_LevelCount);
10632 VMA_VALIDATE(curr->parent == parent);
// Only the root (parent == NULL) has no buddy; buddies are mutual.
10633 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10634 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
// Per-type checks (presumably a switch on curr->type in upstream).
10637 case Node::TYPE_FREE:
// A free node contributes its whole level size to the free total.
10639 ctx.calculatedSumFreeSize += levelNodeSize;
10640 ++ctx.calculatedFreeCount;
10642 case Node::TYPE_ALLOCATION:
10643 ++ctx.calculatedAllocationCount;
// Internal fragmentation (node size minus allocation size) counts as free.
10644 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10645 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10647 case Node::TYPE_SPLIT:
10649 const uint32_t childrenLevel = level + 1;
10650 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
// Left child shares the parent's offset; right child is offset by half.
10651 const Node*
const leftChild = curr->split.leftChild;
10652 VMA_VALIDATE(leftChild != VMA_NULL);
10653 VMA_VALIDATE(leftChild->offset == curr->offset);
10654 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10656 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10658 const Node*
const rightChild = leftChild->buddy;
10659 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10660 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10662 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
10673 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10676 uint32_t level = 0;
10677 VkDeviceSize currLevelNodeSize = m_UsableSize;
10678 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
10679 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10682 currLevelNodeSize = nextLevelNodeSize;
10683 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation occupying `offset`: walks the split tree down to the
// owning leaf, marks it free, and merges it with its buddy upward while both
// halves of a parent are free.
// NOTE(review): this extraction elides several original lines (else-branch
// braces and per-iteration level/counter bookkeeping inside both loops) —
// reconstruct against upstream before relying on exact accounting.
10688 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
// Descend from the root to the allocated leaf that contains `offset`.
10691 Node* node = m_Root;
10692 VkDeviceSize nodeOffset = 0;
10693 uint32_t level = 0;
10694 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
10695 while(node->type == Node::TYPE_SPLIT)
10697 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10698 if(offset < nodeOffset + nextLevelSize)
10700 node = node->split.leftChild;
// Right child: advance the running offset by the child node size.
10704 node = node->split.leftChild->buddy;
10705 nodeOffset += nextLevelSize;
10708 levelNodeSize = nextLevelSize;
// `alloc` may be VK_NULL_HANDLE per this assert — presumably for lost
// allocations; TODO confirm against callers (not visible here).
10711 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
10712 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
// Update cached counters before releasing the node.
10715 --m_AllocationCount;
10716 m_SumFreeSize += alloc->GetSize();
10718 node->type = Node::TYPE_FREE;
// Merge with the buddy while both halves of a parent are free; the parent
// becomes the new free node one level up.
10721 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
10723 RemoveFromFreeList(level, node->buddy);
10724 Node*
const parent = node->parent;
10726 vma_delete(GetAllocationCallbacks(), node->buddy);
10727 vma_delete(GetAllocationCallbacks(), node);
10728 parent->type = Node::TYPE_FREE;
// Finally publish the (possibly merged) node on its level's free list.
10736 AddToFreeListFront(level, node);
// Recursively accumulates per-node statistics (used/unused bytes, range
// counts) into `outInfo`. Free nodes and the internal-fragmentation tail of
// allocation nodes contribute to the unused totals; split nodes recurse into
// both children at half the node size.
// NOTE(review): the switch header, the TYPE_FREE case body, and the
// stat-increment lines are elided in this extraction — compare with upstream.
10739 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 10743 case Node::TYPE_FREE:
10749 case Node::TYPE_ALLOCATION:
10751 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Tail of the node not covered by the allocation counts as an unused range.
10757 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
10758 if(unusedRangeSize > 0)
10767 case Node::TYPE_SPLIT:
10769 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10770 const Node*
const leftChild = node->split.leftChild;
10771 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
10772 const Node*
const rightChild = leftChild->buddy;
10773 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
10781 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
10783 VMA_ASSERT(node->type == Node::TYPE_FREE);
10786 Node*
const frontNode = m_FreeList[level].front;
10787 if(frontNode == VMA_NULL)
10789 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
10790 node->free.prev = node->free.next = VMA_NULL;
10791 m_FreeList[level].front = m_FreeList[level].back = node;
10795 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
10796 node->free.prev = VMA_NULL;
10797 node->free.next = frontNode;
10798 frontNode->free.prev = node;
10799 m_FreeList[level].front = node;
10803 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
10805 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
10808 if(node->free.prev == VMA_NULL)
10810 VMA_ASSERT(m_FreeList[level].front == node);
10811 m_FreeList[level].front = node->free.next;
10815 Node*
const prevFreeNode = node->free.prev;
10816 VMA_ASSERT(prevFreeNode->free.next == node);
10817 prevFreeNode->free.next = node->free.next;
10821 if(node->free.next == VMA_NULL)
10823 VMA_ASSERT(m_FreeList[level].back == node);
10824 m_FreeList[level].back = node->free.prev;
10828 Node*
const nextFreeNode = node->free.next;
10829 VMA_ASSERT(nextFreeNode->free.prev == node);
10830 nextFreeNode->free.prev = node->free.prev;
10834 #if VMA_STATS_STRING_ENABLED 10835 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 10839 case Node::TYPE_FREE:
10840 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
10842 case Node::TYPE_ALLOCATION:
10844 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
10845 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
10846 if(allocSize < levelNodeSize)
10848 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
10852 case Node::TYPE_SPLIT:
10854 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10855 const Node*
const leftChild = node->split.leftChild;
10856 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
10857 const Node*
const rightChild = leftChild->buddy;
10858 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: initializes the block to an empty, uninitialized state.
// Real initialization happens later in Init().
// NOTE(review): some member initializers (original lines 10874/10876 —
// presumably m_Id and m_MapCount) are elided in this extraction.
10865 #endif // #if VMA_STATS_STRING_ENABLED 10871 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
10872 m_pMetadata(VMA_NULL),
10873 m_MemoryTypeIndex(UINT32_MAX),
10875 m_hMemory(VK_NULL_HANDLE),
10877 m_pMappedData(VMA_NULL)
// Takes ownership of an already-allocated VkDeviceMemory and creates the
// suballocation metadata object matching the requested algorithm:
// Linear, Buddy, or the default Generic implementation.
// NOTE(review): the switch/case headers selecting among the three vma_new
// calls (and an hAllocator parameter line) are elided in this extraction.
10881 void VmaDeviceMemoryBlock::Init(
10883 uint32_t newMemoryTypeIndex,
10884 VkDeviceMemory newMemory,
10885 VkDeviceSize newSize,
10887 uint32_t algorithm)
// Init() must be called exactly once on a fresh block.
10889 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
10891 m_MemoryTypeIndex = newMemoryTypeIndex;
10893 m_hMemory = newMemory;
10898 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
10901 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
// Default algorithm: generic free-list metadata.
10907 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
10909 m_pMetadata->Init(newSize);
10912 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
10916 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
10918 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
10919 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
10920 m_hMemory = VK_NULL_HANDLE;
10922 vma_delete(allocator, m_pMetadata);
10923 m_pMetadata = VMA_NULL;
10926 bool VmaDeviceMemoryBlock::Validate()
const 10928 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
10929 (m_pMetadata->GetSize() != 0));
10931 return m_pMetadata->Validate();
10934 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
10936 void* pData =
nullptr;
10937 VkResult res = Map(hAllocator, 1, &pData);
10938 if(res != VK_SUCCESS)
10943 res = m_pMetadata->CheckCorruption(pData);
10945 Unmap(hAllocator, 1);
// Maps the block's device memory, reference-counted: if already mapped, only
// bumps m_MapCount and returns the cached pointer; otherwise calls
// vkMapMemory. `ppData` may be null when the caller only needs the refcount.
// NOTE(review): this extraction elides an early-out (presumably for
// count == 0), the middle arguments of the vkMapMemory call, and the return
// statements — compare with upstream.
10950 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
// Per-block mutex guards m_MapCount/m_pMappedData.
10957 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10958 if(m_MapCount != 0)
10960 m_MapCount += count;
10961 VMA_ASSERT(m_pMappedData != VMA_NULL);
10962 if(ppData != VMA_NULL)
10964 *ppData = m_pMappedData;
// First mapping: call into Vulkan.
10970 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
10971 hAllocator->m_hDevice,
10977 if(result == VK_SUCCESS)
10979 if(ppData != VMA_NULL)
10981 *ppData = m_pMappedData;
10983 m_MapCount = count;
// Decrements the mapping reference count and calls vkUnmapMemory when it
// reaches zero. Asserts if the block was not mapped.
// NOTE(review): an early-out (presumably for count == 0) and the else-branch
// braces are elided in this extraction.
10989 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
10996 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10997 if(m_MapCount >= count)
10999 m_MapCount -= count;
11000 if(m_MapCount == 0)
// Last reference gone: drop the cached pointer and unmap.
11002 m_pMappedData = VMA_NULL;
11003 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11008 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
11012 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11014 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11015 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11018 VkResult res = Map(hAllocator, 1, &pData);
11019 if(res != VK_SUCCESS)
11024 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11025 VmaWriteMagicValue(pData, allocOffset + allocSize);
11027 Unmap(hAllocator, 1);
11032 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11034 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11035 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11038 VkResult res = Map(hAllocator, 1, &pData);
11039 if(res != VK_SUCCESS)
11044 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11046 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11048 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11050 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11053 Unmap(hAllocator, 1);
// Binds a buffer to this block's memory at the allocation's offset, under
// the block mutex (multiple allocations may bind concurrently).
// NOTE(review): the parameter list (presumably hAllocator, hAllocation and
// the VkBuffer handle) and the memory-handle argument of the
// vkBindBufferMemory call are elided in this extraction.
11058 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
// Only block-type allocations belonging to this block may be bound here.
11063 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11064 hAllocation->GetBlock() ==
this);
// Serialize vkBind* calls on the same VkDeviceMemory.
11066 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11067 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11068 hAllocator->m_hDevice,
11071 hAllocation->GetOffset());
// Binds an image to this block's memory at the allocation's offset, under
// the block mutex. Mirrors BindBufferMemory.
// NOTE(review): the parameter list and the memory-handle argument of the
// vkBindImageMemory call are elided in this extraction.
11074 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11079 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11080 hAllocation->GetBlock() ==
this);
// Serialize vkBind* calls on the same VkDeviceMemory.
11082 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11083 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11084 hAllocator->m_hDevice,
11087 hAllocation->GetOffset());
// Fragments of the stat-info helpers and the VmaPool_T constructor.
// NOTE(review): the header of the InitStatInfo-style function (only its
// memset body is visible), the body of VmaPostprocessCalcStatInfo, and
// several constructor parameters/initializers are elided in this extraction.
11092 memset(&outInfo, 0,
sizeof(outInfo));
11111 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the pool create-info to the internal block
// vector. A zero blockSize selects the allocator's preferred size; a
// nonzero one marks the block size as explicit (no auto-shrinking).
11119 VmaPool_T::VmaPool_T(
11122 VkDeviceSize preferredBlockSize) :
11125 createInfo.memoryTypeIndex,
11126 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11127 createInfo.minBlockCount,
11128 createInfo.maxBlockCount,
11130 createInfo.frameInUseCount,
11132 createInfo.blockSize != 0,
// Pool destructor (body elided in this extraction) followed by the
// VmaBlockVector constructor, which only stores its configuration; blocks
// are created lazily (or via CreateMinBlocks).
11138 VmaPool_T::~VmaPool_T()
// NOTE(review): the constructor's hAllocator/isCustomPool parameter lines
// and at least one trailing member initializer are elided here.
11142 #if VMA_STATS_STRING_ENABLED 11144 #endif // #if VMA_STATS_STRING_ENABLED 11146 VmaBlockVector::VmaBlockVector(
11148 uint32_t memoryTypeIndex,
11149 VkDeviceSize preferredBlockSize,
11150 size_t minBlockCount,
11151 size_t maxBlockCount,
11152 VkDeviceSize bufferImageGranularity,
11153 uint32_t frameInUseCount,
11155 bool explicitBlockSize,
11156 uint32_t algorithm) :
11157 m_hAllocator(hAllocator),
11158 m_MemoryTypeIndex(memoryTypeIndex),
11159 m_PreferredBlockSize(preferredBlockSize),
11160 m_MinBlockCount(minBlockCount),
11161 m_MaxBlockCount(maxBlockCount),
11162 m_BufferImageGranularity(bufferImageGranularity),
11163 m_FrameInUseCount(frameInUseCount),
11164 m_IsCustomPool(isCustomPool),
11165 m_ExplicitBlockSize(explicitBlockSize),
11166 m_Algorithm(algorithm),
11167 m_HasEmptyBlock(false),
// Block vector uses the allocator's custom allocation callbacks.
11168 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
11173 VmaBlockVector::~VmaBlockVector()
11175 for(
size_t i = m_Blocks.size(); i--; )
11177 m_Blocks[i]->Destroy(m_hAllocator);
11178 vma_delete(m_hAllocator, m_Blocks[i]);
11182 VkResult VmaBlockVector::CreateMinBlocks()
11184 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11186 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11187 if(res != VK_SUCCESS)
// Aggregates pool statistics over all blocks under a read lock.
// NOTE(review): the lines that reset the pStats fields before accumulation
// (original lines ~11200-11207) are elided in this extraction.
11195 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11197 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11199 const size_t blockCount = m_Blocks.size();
11208 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11210 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11211 VMA_ASSERT(pBlock);
11212 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each block's metadata adds its own totals into *pStats.
11213 pBlock->m_pMetadata->AddPoolStats(*pStats);
11217 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11219 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11220 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11221 (VMA_DEBUG_MARGIN > 0) &&
11222 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retries of the make-allocations-lost strategy below.
11225 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Main allocation entry for a block vector. Strategy, in order:
//  1. try the last block, then all existing blocks (forward, then backward);
//  2. create a new block (shrinking its size up to 3 halvings if needed);
//  3. as a last resort, make other allocations "lost" to reclaim space.
// NOTE(review): many parameter lines and the argument lists of the
// AllocateFromBlock / CreateAllocationRequest / InitBlockAllocation calls
// are elided in this extraction — reconstruct against upstream.
11227 VkResult VmaBlockVector::Allocate(
11229 uint32_t currentFrameIndex,
11231 VkDeviceSize alignment,
11233 VmaSuballocationType suballocType,
11240 const bool canCreateNewBlock =
11242 (m_Blocks.size() < m_MaxBlockCount);
11249 canMakeOtherLost =
false;
// Upper-address allocation is not supported in this configuration.
11253 if(isUpperAddress &&
11256 return VK_ERROR_FEATURE_NOT_PRESENT;
11270 return VK_ERROR_FEATURE_NOT_PRESENT;
// Request plus both debug margins can never fit in one block.
11274 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11276 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11279 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11286 if(!canMakeOtherLost || canCreateNewBlock)
// 1a. Fast path: the most recently used (last) block.
11295 if(!m_Blocks.empty())
11297 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11298 VMA_ASSERT(pCurrBlock);
11299 VkResult res = AllocateFromBlock(
11310 if(res == VK_SUCCESS)
11312 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 1b. Scan existing blocks forward (presumably the best-fit strategy).
11322 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11324 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11325 VMA_ASSERT(pCurrBlock);
11326 VkResult res = AllocateFromBlock(
11337 if(res == VK_SUCCESS)
11339 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 1c. Or scan backward (the alternative strategy branch).
11347 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11349 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11350 VMA_ASSERT(pCurrBlock);
11351 VkResult res = AllocateFromBlock(
11362 if(res == VK_SUCCESS)
11364 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. Create a new block.
11372 if(canCreateNewBlock)
11375 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11376 uint32_t newBlockSizeShift = 0;
11377 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic pre-shrink: start small when existing blocks are small.
11379 if(!m_ExplicitBlockSize)
11382 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11383 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11385 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11386 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11388 newBlockSize = smallerNewBlockSize;
11389 ++newBlockSizeShift;
11398 size_t newBlockIndex = 0;
11399 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On allocation failure retry with progressively halved block sizes.
11401 if(!m_ExplicitBlockSize)
11403 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11405 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11406 if(smallerNewBlockSize >= size)
11408 newBlockSize = smallerNewBlockSize;
11409 ++newBlockSizeShift;
11410 res = CreateBlock(newBlockSize, &newBlockIndex);
11419 if(res == VK_SUCCESS)
11421 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11422 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11424 res = AllocateFromBlock(
11435 if(res == VK_SUCCESS)
11437 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11443 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Make other (old-frame) allocations lost to free up space.
11450 if(canMakeOtherLost)
11452 uint32_t tryIndex = 0;
11453 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11455 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11456 VmaAllocationRequest bestRequest = {};
11457 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Search all blocks for the cheapest request (fewest losses).
11463 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11465 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11466 VMA_ASSERT(pCurrBlock);
11467 VmaAllocationRequest currRequest = {};
11468 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11471 m_BufferImageGranularity,
11480 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11481 if(pBestRequestBlock == VMA_NULL ||
11482 currRequestCost < bestRequestCost)
11484 pBestRequestBlock = pCurrBlock;
11485 bestRequest = currRequest;
11486 bestRequestCost = currRequestCost;
// Cost 0 means nothing would be lost: cannot do better.
11488 if(bestRequestCost == 0)
// Alternative strategy branch: scan backward instead.
11499 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11501 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11502 VMA_ASSERT(pCurrBlock);
11503 VmaAllocationRequest currRequest = {};
11504 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11507 m_BufferImageGranularity,
11516 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11517 if(pBestRequestBlock == VMA_NULL ||
11518 currRequestCost < bestRequestCost ||
11521 pBestRequestBlock = pCurrBlock;
11522 bestRequest = currRequest;
11523 bestRequestCost = currRequestCost;
11525 if(bestRequestCost == 0 ||
11535 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations need the block mapped up front.
11539 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11540 if(res != VK_SUCCESS)
// Losing the chosen allocations may fail if frames moved on meanwhile.
11546 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11552 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11554 m_HasEmptyBlock =
false;
11557 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
11558 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
11559 (*pAllocation)->InitBlockAllocation(
11562 bestRequest.offset,
11568 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11569 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
11570 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11571 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11573 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11575 if(IsCorruptionDetectionEnabled())
11577 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11578 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Retries exhausted: some other thread kept winning the race.
11593 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11595 return VK_ERROR_TOO_MANY_OBJECTS;
11599 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a single allocation back to its block. At most one empty block is
// kept alive (m_HasEmptyBlock); a second empty block — or the pre-existing
// one when this free empties another — is destroyed outside the lock.
// NOTE(review): the parameter line (presumably the VmaAllocation) and
// several brace/else lines are elided in this extraction.
11602 void VmaBlockVector::Free(
11605 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the write lock; actual destruction happens after it is released.
11609 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11611 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11613 if(IsCorruptionDetectionEnabled())
11615 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11616 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Drop the mapping reference held by a persistently mapped allocation.
11619 if(hAllocation->IsPersistentMap())
11621 pBlock->Unmap(m_hAllocator, 1);
11624 pBlock->m_pMetadata->Free(hAllocation);
11625 VMA_HEAVY_ASSERT(pBlock->Validate());
11627 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// This block just became empty.
11630 if(pBlock->m_pMetadata->IsEmpty())
// Already have one empty block and are above the minimum: delete this one.
11633 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11635 pBlockToDelete = pBlock;
11641 m_HasEmptyBlock =
true;
// Block not empty, but an empty block exists: it may now be redundant.
11646 else if(m_HasEmptyBlock)
11648 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11649 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11651 pBlockToDelete = pLastBlock;
11652 m_Blocks.pop_back();
11653 m_HasEmptyBlock =
false;
11657 IncrementallySortBlocks();
// Destruction of the VkDeviceMemory is deliberately done outside the lock.
11662 if(pBlockToDelete != VMA_NULL)
11664 VMA_DEBUG_LOG(
" Deleted empty allocation");
11665 pBlockToDelete->Destroy(m_hAllocator);
11666 vma_delete(m_hAllocator, pBlockToDelete);
11670 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 11672 VkDeviceSize result = 0;
11673 for(
size_t i = m_Blocks.size(); i--; )
11675 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
11676 if(result >= m_PreferredBlockSize)
// Removes the given block from m_Blocks (linear search by pointer identity).
// NOTE(review): the lines following VmaVectorRemove (presumably an early
// return and a not-found assertion) are elided in this extraction.
11684 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
11686 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11688 if(m_Blocks[blockIndex] == pBlock)
11690 VmaVectorRemove(m_Blocks, blockIndex);
11697 void VmaBlockVector::IncrementallySortBlocks()
11702 for(
size_t i = 1; i < m_Blocks.size(); ++i)
11704 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
11706 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts an allocation inside one specific block: builds an allocation
// request from the metadata, maps the block for persistently-mapped
// allocations, commits the request, and initializes the VmaAllocation.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when the block has no suitable
// free region.
// NOTE(review): several parameter lines and argument-list lines of
// CreateAllocationRequest / InitBlockAllocation are elided in this
// extraction.
11713 VkResult VmaBlockVector::AllocateFromBlock(
11714 VmaDeviceMemoryBlock* pBlock,
11716 uint32_t currentFrameIndex,
11718 VkDeviceSize alignment,
11721 VmaSuballocationType suballocType,
11730 VmaAllocationRequest currRequest = {};
11731 if(pBlock->m_pMetadata->CreateAllocationRequest(
11734 m_BufferImageGranularity,
// This path never makes other allocations lost.
11744 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
11748 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
11749 if(res != VK_SUCCESS)
// The block is about to hold an allocation, so it is no longer empty.
11756 if(pBlock->m_pMetadata->IsEmpty())
11758 m_HasEmptyBlock =
false;
11761 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
11762 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
11763 (*pAllocation)->InitBlockAllocation(
11766 currRequest.offset,
11772 VMA_HEAVY_ASSERT(pBlock->Validate());
11773 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
// Optional debug fill and corruption-margin write.
11774 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11776 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11778 if(IsCorruptionDetectionEnabled())
11780 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
11781 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
11785 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of `blockSize`, wraps it in a
// VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally reports its
// index through `pNewBlockIndex`.
// NOTE(review): the early return on AllocateVulkanMemory failure and most
// arguments of pBlock->Init (and the final return) are elided in this
// extraction.
11788 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
11790 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
11791 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
11792 allocInfo.allocationSize = blockSize;
11793 VkDeviceMemory mem = VK_NULL_HANDLE;
11794 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Wrap the fresh memory in a block object and initialize its metadata.
11803 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
11808 allocInfo.allocationSize,
11812 m_Blocks.push_back(pBlock);
11813 if(pNewBlockIndex != VMA_NULL)
11815 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU via memcpy between mapped
// blocks: (1) marks blocks touched by any move, (2) maps them (remembering
// which mappings this function created), (3) for each move invalidates the
// source range on non-coherent memory, memcpy's, rewrites corruption
// margins, flushes the destination range, and (4) unmaps what it mapped.
// NOTE(review): brace lines and a few struct-definition lines are elided in
// this extraction.
11821 void VmaBlockVector::ApplyDefragmentationMovesCpu(
11822 class VmaBlockVectorDefragmentationContext* pDefragCtx,
11823 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
11825 const size_t blockCount = m_Blocks.size();
11826 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
// Per-block flags: whether the block participates in any move, and whether
// the mapping was created here (and must be undone here).
11830 BLOCK_FLAG_USED = 0x00000001,
11831 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
11839 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
11840 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
11841 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Pass 1: mark every block referenced by a move.
11844 const size_t moveCount = moves.size();
11845 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11847 const VmaDefragmentationMove& move = moves[moveIndex];
11848 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
11849 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
11852 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: ensure every used block is mapped.
11855 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
11857 BlockInfo& currBlockInfo = blockInfo[blockIndex];
11858 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11859 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
11861 currBlockInfo.pMappedData = pBlock->GetMappedData();
// Not persistently mapped: map it now and remember to unmap later.
11863 if(currBlockInfo.pMappedData == VMA_NULL)
11865 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
11866 if(pDefragCtx->res == VK_SUCCESS)
11868 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Pass 3: perform the actual copies.
11875 if(pDefragCtx->res == VK_SUCCESS)
11877 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
11878 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
11880 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11882 const VmaDefragmentationMove& move = moves[moveIndex];
11884 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
11885 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
11887 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range, aligned to nonCoherentAtomSize and clamped
// to the block size as the Vulkan spec requires.
11892 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
11893 memRange.memory = pSrcBlock->GetDeviceMemory();
11894 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
11895 memRange.size = VMA_MIN(
11896 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
11897 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
11898 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
11903 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
11904 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
11905 static_cast<size_t>(move.size));
// Re-stamp the corruption margins at the new location.
11907 if(IsCorruptionDetectionEnabled())
11909 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
11910 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range on non-coherent memory.
11916 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
11917 memRange.memory = pDstBlock->GetDeviceMemory();
11918 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
11919 memRange.size = VMA_MIN(
11920 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
11921 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
11922 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Pass 4: undo only the mappings created by this function.
11929 for(
size_t blockIndex = blockCount; blockIndex--; )
11931 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
11932 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
11934 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11935 pBlock->Unmap(m_hAllocator, 1);
// Records defragmentation moves into a command buffer for GPU execution:
// marks the blocks touched by any move, creates a transfer buffer bound to
// each such block's entire memory, then records vkCmdCopyBuffer for every
// move. Sets pDefragCtx->res = VK_NOT_READY when commands were recorded and
// still need to be submitted and completed.
// NOTE(review): brace lines and the VkBufferCopy field initializers are
// elided in this extraction.
11940 void VmaBlockVector::ApplyDefragmentationMovesGpu(
11941 class VmaBlockVectorDefragmentationContext* pDefragCtx,
11942 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
11943 VkCommandBuffer commandBuffer)
11945 const size_t blockCount = m_Blocks.size();
11947 pDefragCtx->blockContexts.resize(blockCount);
11948 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Pass 1: mark every block referenced by a move.
11951 const size_t moveCount = moves.size();
11952 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11954 const VmaDefragmentationMove& move = moves[moveIndex];
11955 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
11956 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
11959 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: create a transfer-src|dst buffer covering each used block.
11963 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
11964 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
11965 VK_BUFFER_USAGE_TRANSFER_DST_BIT;
11967 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
11969 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
11970 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11971 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
11973 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
11974 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
11975 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
11976 if(pDefragCtx->res == VK_SUCCESS)
11978 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
11979 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Pass 3: record one buffer-copy command per move.
11986 if(pDefragCtx->res == VK_SUCCESS)
11988 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
11989 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
11991 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11993 const VmaDefragmentationMove& move = moves[moveIndex];
11995 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
11996 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
11998 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12000 VkBufferCopy region = {
12004 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12005 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
// Commands are only recorded here; completion is signalled later.
12010 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12012 pDefragCtx->res = VK_NOT_READY;
// Body of a block-reclamation pass (its function header — presumably
// VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats*) — is elided in
// this extraction): destroys empty blocks above the minimum count, crediting
// their size to the defragmentation stats, and re-establishes
// m_HasEmptyBlock for any empty block that is kept.
12018 m_HasEmptyBlock =
false;
12019 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12021 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12022 if(pBlock->m_pMetadata->IsEmpty())
12024 if(m_Blocks.size() > m_MinBlockCount)
12026 if(pDefragmentationStats != VMA_NULL)
// Account the whole block as freed bytes.
12029 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12032 VmaVectorRemove(m_Blocks, blockIndex);
12033 pBlock->Destroy(m_hAllocator);
12034 vma_delete(m_hAllocator, pBlock);
// At the minimum block count: keep the empty block and remember it exists.
12038 m_HasEmptyBlock =
true;
// Writes this block vector as a JSON object: configuration (block size,
// count limits, frame-in-use count, algorithm) followed by a "Blocks" object
// mapping each block's id to its detailed metadata dump. Custom pools and
// default pools emit slightly different key sets.
// NOTE(review): the closing json.EndObject()/EndString() calls and the
// branch headers distinguishing custom vs. default pools are elided in this
// extraction.
12044 #if VMA_STATS_STRING_ENABLED 12046 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12048 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12050 json.BeginObject();
// Branch: custom-pool layout (explicit memory type, block size, counts).
12054 json.WriteString(
"MemoryTypeIndex");
12055 json.WriteNumber(m_MemoryTypeIndex);
12057 json.WriteString(
"BlockSize");
12058 json.WriteNumber(m_PreferredBlockSize);
12060 json.WriteString(
"BlockCount");
12061 json.BeginObject(
true);
12062 if(m_MinBlockCount > 0)
12064 json.WriteString(
"Min");
12065 json.WriteNumber((uint64_t)m_MinBlockCount);
12067 if(m_MaxBlockCount < SIZE_MAX)
12069 json.WriteString(
"Max");
12070 json.WriteNumber((uint64_t)m_MaxBlockCount);
12072 json.WriteString(
"Cur");
12073 json.WriteNumber((uint64_t)m_Blocks.size());
12076 if(m_FrameInUseCount > 0)
12078 json.WriteString(
"FrameInUseCount");
12079 json.WriteNumber(m_FrameInUseCount);
12082 if(m_Algorithm != 0)
12084 json.WriteString(
"Algorithm");
12085 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Branch: default-pool layout.
12090 json.WriteString(
"PreferredBlockSize");
12091 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed dumps, keyed by block id.
12094 json.WriteString(
"Blocks");
12095 json.BeginObject();
12096 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12098 json.BeginString();
12099 json.ContinueString(m_Blocks[i]->GetId());
12102 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
12109 #endif // #if VMA_STATS_STRING_ENABLED 12111 void VmaBlockVector::Defragment(
// Runs one defragmentation round for this block vector. Chooses CPU
// (memcpy through mappings) or GPU (vkCmdCopyBuffer) execution based on the
// memory type's properties and the caller's byte/allocation budgets, takes
// the write lock for the whole operation (released in DefragmentationEnd),
// computes the moves via the context's algorithm, updates the budgets and
// stats, and applies the moves.
// NOTE(review): the pStats parameter line and several brace lines are
// elided in this extraction.
12112 class VmaBlockVectorDefragmentationContext* pCtx,
12114 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12115 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12116 VkCommandBuffer commandBuffer)
12118 pCtx->res = VK_SUCCESS;
12120 const VkMemoryPropertyFlags memPropFlags =
12121 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12122 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12123 const bool isHostCoherent = (memPropFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;
// CPU path requires a nonzero budget (and, per the elided condition,
// presumably host visibility); GPU path is disabled under corruption
// detection on host-visible+coherent memory, where margins live in mappings.
12125 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
12127 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12128 (VMA_DEBUG_DETECT_CORRUPTION == 0 || !(isHostVisible && isHostCoherent));
12131 if(canDefragmentOnCpu || canDefragmentOnGpu)
12133 bool defragmentOnGpu;
// If only one path is possible, use it.
12135 if(canDefragmentOnGpu != canDefragmentOnCpu)
12137 defragmentOnGpu = canDefragmentOnGpu;
// Otherwise prefer GPU for device-local memory or integrated GPUs.
12142 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12143 m_hAllocator->IsIntegratedGpu();
// GPU copies may not overlap; CPU memmove-style copies may.
12146 bool overlappingMoveSupported = !defragmentOnGpu;
12148 if(m_hAllocator->m_UseMutex)
12150 m_Mutex.LockWrite();
12151 pCtx->mutexLocked =
true;
12154 pCtx->Begin(overlappingMoveSupported);
// Let the algorithm compute the move list within the chosen budgets.
12158 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12159 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12160 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12161 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
12162 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Charge the moved bytes/allocations against the caller's budgets.
12165 if(pStats != VMA_NULL)
12167 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12168 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12169 pStats->bytesMoved += bytesMoved;
12170 pStats->allocationsMoved += allocationsMoved;
12171 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12172 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12173 if(defragmentOnGpu)
12175 maxGpuBytesToMove -= bytesMoved;
12176 maxGpuAllocationsToMove -= allocationsMoved;
12180 maxCpuBytesToMove -= bytesMoved;
12181 maxCpuAllocationsToMove -= allocationsMoved;
12185 if(pCtx->res >= VK_SUCCESS)
12187 if(defragmentOnGpu)
12189 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12193 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finishes a defragmentation round: destroys the temporary buffers created
// for GPU copies, frees blocks that became empty (on success), and releases
// the write lock taken in Defragment().
// NOTE(review): the stats parameter line (used by FreeEmptyBlocks) is
// elided in this extraction.
12199 void VmaBlockVector::DefragmentationEnd(
12200 class VmaBlockVectorDefragmentationContext* pCtx,
// Destroy temporary GPU-copy buffers in reverse creation order.
12204 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12206 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12207 if(blockCtx.hBuffer)
12209 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12210 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12214 if(pCtx->res >= VK_SUCCESS)
12216 FreeEmptyBlocks(pStats);
// Release the lock only if Defragment() actually took it.
12219 if(pCtx->mutexLocked)
12221 VMA_ASSERT(m_hAllocator->m_UseMutex);
12222 m_Mutex.UnlockWrite();
12226 size_t VmaBlockVector::CalcAllocationCount()
const 12229 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12231 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
12236 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12238 if(m_BufferImageGranularity == 1)
12242 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12243 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12245 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
12246 VMA_ASSERT(m_Algorithm == 0);
12247 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12248 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
12256 void VmaBlockVector::MakePoolAllocationsLost(
12257 uint32_t currentFrameIndex,
12258 size_t* pLostAllocationCount)
12260 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12261 size_t lostAllocationCount = 0;
12262 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12264 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12265 VMA_ASSERT(pBlock);
12266 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
12268 if(pLostAllocationCount != VMA_NULL)
12270 *pLostAllocationCount = lostAllocationCount;
12274 VkResult VmaBlockVector::CheckCorruption()
12276 if(!IsCorruptionDetectionEnabled())
12278 return VK_ERROR_FEATURE_NOT_PRESENT;
12281 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12282 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12284 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12285 VMA_ASSERT(pBlock);
12286 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12287 if(res != VK_SUCCESS)
12295 void VmaBlockVector::AddStats(
VmaStats* pStats)
12297 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12298 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12300 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12302 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12304 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12305 VMA_ASSERT(pBlock);
12306 VMA_HEAVY_ASSERT(pBlock->Validate());
12308 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12309 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12310 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12311 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
12318 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12320 VmaBlockVector* pBlockVector,
12321 uint32_t currentFrameIndex,
12322 bool overlappingMoveSupported) :
12323 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12324 m_AllAllocations(false),
12325 m_AllocationCount(0),
12327 m_AllocationsMoved(0),
12328 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12331 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12332 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12334 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12335 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12336 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12337 m_Blocks.push_back(pBlockInfo);
12341 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
12344 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12346 for(
size_t i = m_Blocks.size(); i--; )
12348 vma_delete(m_hAllocator, m_Blocks[i]);
12352 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12355 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12357 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12358 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12359 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12361 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12362 (*it)->m_Allocations.push_back(allocInfo);
12369 ++m_AllocationCount;
// Performs one round of the generic defragmentation algorithm: repeatedly takes
// the last allocation of the most-"source" block and tries to re-place it at a
// lower offset in the same or a more-"destination" block, recording each
// relocation into `moves`, until maxBytesToMove / maxAllocationsToMove would be
// exceeded or no more moves make sense.
// NOTE(review): this chunk is a lossy extraction — the fused original line
// numbers jump (12378 -> 12391, 12448 -> 12455, ...), so braces, `return`
// statements and several argument lines are missing. Restore the dropped lines
// from the upstream source before compiling.
12373 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12374 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12375 VkDeviceSize maxBytesToMove,
12376 uint32_t maxAllocationsToMove)
// Nothing to do for an empty block list (the body of this `if` was dropped).
12378 if(m_Blocks.empty())
12391 size_t srcBlockMinIndex = 0;
// Scan source candidates from the last block / last allocation backwards.
12404 size_t srcBlockIndex = m_Blocks.size() - 1;
12405 size_t srcAllocIndex = SIZE_MAX;
12411 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12413 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// Reached the lowest block with no allocations left: the round is finished.
12416 if(srcBlockIndex == srcBlockMinIndex)
12423 srcAllocIndex = SIZE_MAX;
12428 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12432 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12433 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12435 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12436 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12437 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12438 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try each candidate destination block, from most-"destination" (index 0)
// up to and including the source block itself.
12441 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12443 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12444 VmaAllocationRequest dstAllocRequest;
// NOTE(review): several CreateAllocationRequest arguments (presumably size,
// alignment, upperAddress, suballocType, canMakeOtherLost, strategy — TODO
// confirm against upstream) were dropped between original lines 12448 and
// 12455, as was the `MoveMakesSense(` call on 12456.
12445 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12446 m_CurrentFrameIndex,
12447 m_pBlockVector->GetFrameInUseCount(),
12448 m_pBlockVector->GetBufferImageGranularity(),
12455 &dstAllocRequest) &&
12457 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12459 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop the round when either budget would be exceeded by this move.
12462 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12463 (m_BytesMoved + size > maxBytesToMove))
// Record the move (the `move.size` assignment on original line 12473 was dropped).
12468 VmaDefragmentationMove move;
12469 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12470 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12471 move.srcOffset = srcOffset;
12472 move.dstOffset = dstAllocRequest.offset;
12474 moves.push_back(move);
// Commit the move: allocate in destination metadata, free in source,
// then repoint the allocation handle to its new block/offset.
12476 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12481 allocInfo.m_hAllocation);
12482 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12484 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12486 if(allocInfo.m_pChanged != VMA_NULL)
12488 *allocInfo.m_pChanged = VK_TRUE;
12491 ++m_AllocationsMoved;
12492 m_BytesMoved += size;
12494 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous allocation, or to the previous block when exhausted.
12502 if(srcAllocIndex > 0)
12508 if(srcBlockIndex > 0)
12511 srcAllocIndex = SIZE_MAX;
12521 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12524 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12526 if(m_Blocks[i]->m_HasNonMovableAllocations)
12534 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12535 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12536 VkDeviceSize maxBytesToMove,
12537 uint32_t maxAllocationsToMove)
12539 if(!m_AllAllocations && m_AllocationCount == 0)
12544 const size_t blockCount = m_Blocks.size();
12545 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12547 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
12549 if(m_AllAllocations)
12551 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12552 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12553 it != pMetadata->m_Suballocations.end();
12556 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12558 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12559 pBlockInfo->m_Allocations.push_back(allocInfo);
12564 pBlockInfo->CalcHasNonMovableAllocations();
12568 pBlockInfo->SortAllocationsByOffsetDescending();
12574 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
12577 const uint32_t roundCount = 2;
12580 VkResult result = VK_SUCCESS;
12581 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12583 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
12589 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12590 size_t dstBlockIndex, VkDeviceSize dstOffset,
12591 size_t srcBlockIndex, VkDeviceSize srcOffset)
12593 if(dstBlockIndex < srcBlockIndex)
12597 if(dstBlockIndex > srcBlockIndex)
12601 if(dstOffset < srcOffset)
12611 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12613 VmaBlockVector* pBlockVector,
12614 uint32_t currentFrameIndex,
12615 bool overlappingMoveSupported) :
12616 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12617 m_OverlappingMoveSupported(overlappingMoveSupported),
12618 m_AllocationCount(0),
12619 m_AllAllocations(false),
12621 m_AllocationsMoved(0),
12622 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
12624 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
12628 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
12632 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12633 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12634 VkDeviceSize maxBytesToMove,
12635 uint32_t maxAllocationsToMove)
12637 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12639 const size_t blockCount = m_pBlockVector->GetBlockCount();
12640 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
12645 PreprocessMetadata();
12649 m_BlockInfos.resize(blockCount);
12650 for(
size_t i = 0; i < blockCount; ++i)
12652 m_BlockInfos[i].origBlockIndex = i;
12655 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12656 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12657 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
12662 FreeSpaceDatabase freeSpaceDb;
12664 size_t dstBlockInfoIndex = 0;
12665 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12666 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12667 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12668 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12669 VkDeviceSize dstOffset = 0;
12672 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12674 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12675 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12676 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12677 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12678 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12680 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12681 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12682 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
12683 if(m_AllocationsMoved == maxAllocationsToMove ||
12684 m_BytesMoved + srcAllocSize > maxBytesToMove)
12689 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
12692 size_t freeSpaceInfoIndex;
12693 VkDeviceSize dstAllocOffset;
12694 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
12695 freeSpaceInfoIndex, dstAllocOffset))
12697 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
12698 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
12699 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
12700 VkDeviceSize freeSpaceBlockSize = pFreeSpaceMetadata->GetSize();
12703 if(freeSpaceInfoIndex == srcBlockInfoIndex)
12705 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12709 VmaSuballocation suballoc = *srcSuballocIt;
12710 suballoc.offset = dstAllocOffset;
12711 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
12712 m_BytesMoved += srcAllocSize;
12713 ++m_AllocationsMoved;
12715 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12717 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12718 srcSuballocIt = nextSuballocIt;
12720 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12722 VmaDefragmentationMove move = {
12723 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12724 srcAllocOffset, dstAllocOffset,
12726 moves.push_back(move);
12733 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
12735 VmaSuballocation suballoc = *srcSuballocIt;
12736 suballoc.offset = dstAllocOffset;
12737 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
12738 m_BytesMoved += srcAllocSize;
12739 ++m_AllocationsMoved;
12741 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12743 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12744 srcSuballocIt = nextSuballocIt;
12746 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12748 VmaDefragmentationMove move = {
12749 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12750 srcAllocOffset, dstAllocOffset,
12752 moves.push_back(move);
12757 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
12760 while(dstBlockInfoIndex < srcBlockInfoIndex &&
12761 dstAllocOffset + srcAllocSize > dstBlockSize)
12764 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
12766 ++dstBlockInfoIndex;
12767 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12768 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12769 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12770 dstBlockSize = pDstMetadata->GetSize();
12772 dstAllocOffset = 0;
12776 if(dstBlockInfoIndex == srcBlockInfoIndex)
12778 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12780 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
12782 bool skipOver = overlap;
12783 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
12787 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
12792 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
12794 dstOffset = srcAllocOffset + srcAllocSize;
12800 srcSuballocIt->offset = dstAllocOffset;
12801 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
12802 dstOffset = dstAllocOffset + srcAllocSize;
12803 m_BytesMoved += srcAllocSize;
12804 ++m_AllocationsMoved;
12806 VmaDefragmentationMove move = {
12807 srcOrigBlockIndex, dstOrigBlockIndex,
12808 srcAllocOffset, dstAllocOffset,
12810 moves.push_back(move);
12818 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
12819 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
12821 VmaSuballocation suballoc = *srcSuballocIt;
12822 suballoc.offset = dstAllocOffset;
12823 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
12824 dstOffset = dstAllocOffset + srcAllocSize;
12825 m_BytesMoved += srcAllocSize;
12826 ++m_AllocationsMoved;
12828 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12830 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12831 srcSuballocIt = nextSuballocIt;
12833 pDstMetadata->m_Suballocations.push_back(suballoc);
12835 VmaDefragmentationMove move = {
12836 srcOrigBlockIndex, dstOrigBlockIndex,
12837 srcAllocOffset, dstAllocOffset,
12839 moves.push_back(move);
12845 m_BlockInfos.clear();
12847 PostprocessMetadata();
12852 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
12854 const size_t blockCount = m_pBlockVector->GetBlockCount();
12855 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12857 VmaBlockMetadata_Generic*
const pMetadata =
12858 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
12859 pMetadata->m_FreeCount = 0;
12860 pMetadata->m_SumFreeSize = pMetadata->GetSize();
12861 pMetadata->m_FreeSuballocationsBySize.clear();
12862 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
12863 it != pMetadata->m_Suballocations.end(); )
12865 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
12867 VmaSuballocationList::iterator nextIt = it;
12869 pMetadata->m_Suballocations.erase(it);
12880 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
12882 const size_t blockCount = m_pBlockVector->GetBlockCount();
12883 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12885 VmaBlockMetadata_Generic*
const pMetadata =
12886 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
12887 const VkDeviceSize blockSize = pMetadata->GetSize();
12890 if(pMetadata->m_Suballocations.empty())
12892 pMetadata->m_FreeCount = 1;
12894 VmaSuballocation suballoc = {
12898 VMA_SUBALLOCATION_TYPE_FREE };
12899 pMetadata->m_Suballocations.push_back(suballoc);
12900 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
12905 VkDeviceSize offset = 0;
12906 VmaSuballocationList::iterator it;
12907 for(it = pMetadata->m_Suballocations.begin();
12908 it != pMetadata->m_Suballocations.end();
12911 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
12912 VMA_ASSERT(it->offset >= offset);
12915 if(it->offset > offset)
12917 ++pMetadata->m_FreeCount;
12918 const VkDeviceSize freeSize = it->offset - offset;
12919 VmaSuballocation suballoc = {
12923 VMA_SUBALLOCATION_TYPE_FREE };
12924 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
12925 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
12927 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
12931 pMetadata->m_SumFreeSize -= it->size;
12932 offset = it->offset + it->size;
12936 if(offset < blockSize)
12938 ++pMetadata->m_FreeCount;
12939 const VkDeviceSize freeSize = blockSize - offset;
12940 VmaSuballocation suballoc = {
12944 VMA_SUBALLOCATION_TYPE_FREE };
12945 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
12946 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
12947 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
12949 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
12954 pMetadata->m_FreeSuballocationsBySize.begin(),
12955 pMetadata->m_FreeSuballocationsBySize.end(),
12956 VmaSuballocationItemSizeLess());
12959 VMA_HEAVY_ASSERT(pMetadata->Validate());
12963 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
12966 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
12967 while(it != pMetadata->m_Suballocations.end())
12969 if(it->offset < suballoc.offset)
12974 pMetadata->m_Suballocations.insert(it, suballoc);
12980 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
12983 VmaBlockVector* pBlockVector,
12984 uint32_t currFrameIndex,
12985 uint32_t algorithmFlags) :
12987 mutexLocked(false),
12988 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
12989 m_hAllocator(hAllocator),
12990 m_hCustomPool(hCustomPool),
12991 m_pBlockVector(pBlockVector),
12992 m_CurrFrameIndex(currFrameIndex),
12993 m_AlgorithmFlags(algorithmFlags),
12994 m_pAlgorithm(VMA_NULL),
12995 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
12996 m_AllAllocations(false)
13000 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13002 vma_delete(m_hAllocator, m_pAlgorithm);
13005 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13007 AllocInfo info = { hAlloc, pChanged };
13008 m_Allocations.push_back(info);
13011 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13013 const bool allAllocations = m_AllAllocations ||
13014 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13026 if(VMA_DEBUG_MARGIN == 0 &&
13028 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13030 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13031 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13035 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13036 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13041 m_pAlgorithm->AddAll();
13045 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13047 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
13055 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13057 uint32_t currFrameIndex,
13060 m_hAllocator(hAllocator),
13061 m_CurrFrameIndex(currFrameIndex),
13064 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
13066 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
13069 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13071 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13073 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13074 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13075 vma_delete(m_hAllocator, pBlockVectorCtx);
13077 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13079 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13080 if(pBlockVectorCtx)
13082 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13083 vma_delete(m_hAllocator, pBlockVectorCtx);
13088 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13090 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13092 VmaPool pool = pPools[poolIndex];
13095 if(pool->m_BlockVector.GetAlgorithm() == 0)
13097 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13099 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13101 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13103 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13108 if(!pBlockVectorDefragCtx)
13110 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13113 &pool->m_BlockVector,
13116 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13119 pBlockVectorDefragCtx->AddAll();
13124 void VmaDefragmentationContext_T::AddAllocations(
13125 uint32_t allocationCount,
13127 VkBool32* pAllocationsChanged)
13130 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13133 VMA_ASSERT(hAlloc);
13135 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13137 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13139 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13141 const VmaPool hAllocPool = hAlloc->GetPool();
13143 if(hAllocPool != VK_NULL_HANDLE)
13146 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13148 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13150 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13152 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13156 if(!pBlockVectorDefragCtx)
13158 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13161 &hAllocPool->m_BlockVector,
13164 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13171 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13172 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13173 if(!pBlockVectorDefragCtx)
13175 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13178 m_hAllocator->m_pBlockVectors[memTypeIndex],
13181 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13185 if(pBlockVectorDefragCtx)
13187 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13188 &pAllocationsChanged[allocIndex] : VMA_NULL;
13189 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
13195 VkResult VmaDefragmentationContext_T::Defragment(
13196 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13197 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
13205 if(commandBuffer == VK_NULL_HANDLE)
13207 maxGpuBytesToMove = 0;
13208 maxGpuAllocationsToMove = 0;
13211 VkResult res = VK_SUCCESS;
13214 for(uint32_t memTypeIndex = 0;
13215 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13218 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13219 if(pBlockVectorCtx)
13221 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13222 pBlockVectorCtx->GetBlockVector()->Defragment(
13225 maxCpuBytesToMove, maxCpuAllocationsToMove,
13226 maxGpuBytesToMove, maxGpuAllocationsToMove,
13228 if(pBlockVectorCtx->res != VK_SUCCESS)
13230 res = pBlockVectorCtx->res;
13236 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13237 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13240 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13241 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13242 pBlockVectorCtx->GetBlockVector()->Defragment(
13245 maxCpuBytesToMove, maxCpuAllocationsToMove,
13246 maxGpuBytesToMove, maxGpuAllocationsToMove,
13248 if(pBlockVectorCtx->res != VK_SUCCESS)
13250 res = pBlockVectorCtx->res;
// NOTE(review): extraction artifact — `#if VMA_RECORDING_ENABLED` is fused onto
// the VmaRecorder default constructor's line, and the init-list members between
// original lines 13262 and 13267 (presumably m_UseMutex / m_Flags / m_File /
// m_Freq — TODO confirm against upstream) were dropped, as was the empty
// constructor body. Restore the dropped lines before compiling.
#if VMA_RECORDING_ENABLED 13262 VmaRecorder::VmaRecorder() :
13267 m_StartCounter(INT64_MAX)
13273 m_UseMutex = useMutex;
13274 m_Flags = settings.
flags;
13276 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13277 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13280 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
13283 return VK_ERROR_INITIALIZATION_FAILED;
13287 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13288 fprintf(m_File,
"%s\n",
"1,5");
13293 VmaRecorder::~VmaRecorder()
13295 if(m_File != VMA_NULL)
13301 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13303 CallParams callParams;
13304 GetBasicParams(callParams);
13306 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13307 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
13311 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13313 CallParams callParams;
13314 GetBasicParams(callParams);
13316 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13317 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
13323 CallParams callParams;
13324 GetBasicParams(callParams);
13326 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13327 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
13338 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13340 CallParams callParams;
13341 GetBasicParams(callParams);
13343 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13344 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
13349 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13350 const VkMemoryRequirements& vkMemReq,
13354 CallParams callParams;
13355 GetBasicParams(callParams);
13357 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13358 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13359 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13361 vkMemReq.alignment,
13362 vkMemReq.memoryTypeBits,
13370 userDataStr.GetString());
13374 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13375 const VkMemoryRequirements& vkMemReq,
13376 bool requiresDedicatedAllocation,
13377 bool prefersDedicatedAllocation,
13381 CallParams callParams;
13382 GetBasicParams(callParams);
13384 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13385 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13386 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13388 vkMemReq.alignment,
13389 vkMemReq.memoryTypeBits,
13390 requiresDedicatedAllocation ? 1 : 0,
13391 prefersDedicatedAllocation ? 1 : 0,
13399 userDataStr.GetString());
13403 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13404 const VkMemoryRequirements& vkMemReq,
13405 bool requiresDedicatedAllocation,
13406 bool prefersDedicatedAllocation,
13410 CallParams callParams;
13411 GetBasicParams(callParams);
13413 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13414 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13415 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13417 vkMemReq.alignment,
13418 vkMemReq.memoryTypeBits,
13419 requiresDedicatedAllocation ? 1 : 0,
13420 prefersDedicatedAllocation ? 1 : 0,
13428 userDataStr.GetString());
13432 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13435 CallParams callParams;
13436 GetBasicParams(callParams);
13438 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13439 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13444 void VmaRecorder::RecordResizeAllocation(
13445 uint32_t frameIndex,
13447 VkDeviceSize newSize)
13449 CallParams callParams;
13450 GetBasicParams(callParams);
13452 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13453 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13454 allocation, newSize);
13458 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13460 const void* pUserData)
13462 CallParams callParams;
13463 GetBasicParams(callParams);
13465 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13466 UserDataString userDataStr(
13469 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13471 userDataStr.GetString());
13475 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13478 CallParams callParams;
13479 GetBasicParams(callParams);
13481 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13482 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13487 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13490 CallParams callParams;
13491 GetBasicParams(callParams);
13493 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13494 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13499 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13502 CallParams callParams;
13503 GetBasicParams(callParams);
13505 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13506 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13511 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13512 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13514 CallParams callParams;
13515 GetBasicParams(callParams);
13517 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13518 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
13525 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13526 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13528 CallParams callParams;
13529 GetBasicParams(callParams);
13531 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13532 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
13539 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13540 const VkBufferCreateInfo& bufCreateInfo,
13544 CallParams callParams;
13545 GetBasicParams(callParams);
13547 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13548 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13549 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13550 bufCreateInfo.flags,
13551 bufCreateInfo.size,
13552 bufCreateInfo.usage,
13553 bufCreateInfo.sharingMode,
13554 allocCreateInfo.
flags,
13555 allocCreateInfo.
usage,
13559 allocCreateInfo.
pool,
13561 userDataStr.GetString());
13565 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13566 const VkImageCreateInfo& imageCreateInfo,
13570 CallParams callParams;
13571 GetBasicParams(callParams);
13573 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13574 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13575 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13576 imageCreateInfo.flags,
13577 imageCreateInfo.imageType,
13578 imageCreateInfo.format,
13579 imageCreateInfo.extent.width,
13580 imageCreateInfo.extent.height,
13581 imageCreateInfo.extent.depth,
13582 imageCreateInfo.mipLevels,
13583 imageCreateInfo.arrayLayers,
13584 imageCreateInfo.samples,
13585 imageCreateInfo.tiling,
13586 imageCreateInfo.usage,
13587 imageCreateInfo.sharingMode,
13588 imageCreateInfo.initialLayout,
13589 allocCreateInfo.
flags,
13590 allocCreateInfo.
usage,
13594 allocCreateInfo.
pool,
13596 userDataStr.GetString());
13600 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13603 CallParams callParams;
13604 GetBasicParams(callParams);
13606 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13607 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
13612 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13615 CallParams callParams;
13616 GetBasicParams(callParams);
13618 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13619 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
13624 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13627 CallParams callParams;
13628 GetBasicParams(callParams);
13630 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13631 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13636 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13639 CallParams callParams;
13640 GetBasicParams(callParams);
13642 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13643 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
13648 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
13651 CallParams callParams;
13652 GetBasicParams(callParams);
13654 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13655 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
13660 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
13664 CallParams callParams;
13665 GetBasicParams(callParams);
13667 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13668 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
13671 fprintf(m_File,
",");
13673 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
13683 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
13686 CallParams callParams;
13687 GetBasicParams(callParams);
13689 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13690 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// NOTE(review): fragment of VmaRecorder::UserDataString's constructor — the
// signature and surrounding branch structure (presumably checking the
// USER_DATA_COPY_STRING flag — TODO confirm against upstream) were dropped by
// extraction. Visible logic: a non-null pUserData is either kept as a C string
// (m_Str) or formatted as a pointer into m_PtrStr via sprintf_s.
13697 if(pUserData != VMA_NULL)
13701 m_Str = (
const char*)pUserData;
13705 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" .. "Config,End" preamble of a recording file:
// physical-device identity and limits, every memory heap and type, whether
// VK_KHR_dedicated_allocation is enabled, and the values of the VMA_DEBUG_*
// compile-time macros — so a trace can be replayed under matching conditions.
13715 void VmaRecorder::WriteConfiguration(
13716 const VkPhysicalDeviceProperties& devProps,
13717 const VkPhysicalDeviceMemoryProperties& memProps,
13718 bool dedicatedAllocationExtensionEnabled)
13720 fprintf(m_File,
"Config,Begin\n");
13722 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
13723 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
13724 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
13725 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
13726 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
13727 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
13729 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
13730 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
13731 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// One line per memory heap (size + flags), then one per memory type.
13733 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
13734 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
13736 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
13737 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
13739 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
13740 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
13742 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
13743 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
13746 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time configuration; casts to VkDeviceSize match the %llu format.
13748 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
13749 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
13750 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
13751 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
13752 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
13753 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
13754 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
13755 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
13756 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
13758 fprintf(m_File,
"Config,End\n");
// Fills the per-call CSV prefix: the calling thread's id and a timestamp in
// seconds relative to m_StartCounter, using the Win32 high-resolution
// performance counter (recording is Windows-only in this build).
13761 void VmaRecorder::GetBasicParams(CallParams& outParams)
13763 outParams.threadId = GetCurrentThreadId();
13765 LARGE_INTEGER counter;
13766 QueryPerformanceCounter(&counter);
13767 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Flushes the recording file stream. Body not visible in this view.
13770 void VmaRecorder::Flush()
// VmaAllocator_T constructor (fragment — the signature and some initializers
// precede this view). Copies the creation info, zero-initializes the per-type
// tables, imports Vulkan function pointers, queries device properties, applies
// optional per-heap size limits, creates one default VmaBlockVector and one
// dedicated-allocation list per memory type, and optionally starts recording.
13778 #endif // #if VMA_RECORDING_ENABLED 13786 m_hDevice(pCreateInfo->device),
13787 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
13788 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
13789 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
13790 m_PreferredLargeHeapBlockSize(0),
13791 m_PhysicalDevice(pCreateInfo->physicalDevice),
13792 m_CurrentFrameIndex(0),
13793 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
13796 ,m_pRecorder(VMA_NULL)
// Corruption detection writes/validates uint32_t markers inside the margin,
// so the margin must be a multiple of 4 bytes.
13799 if(VMA_DEBUG_DETECT_CORRUPTION)
13802 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
13807 #if !(VMA_DEDICATED_ALLOCATION) 13810 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
13814 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
13815 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
13816 memset(&m_MemProps, 0,
sizeof(m_MemProps));
13818 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
13819 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE means "no limit" for a heap.
13821 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
13823 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
13834 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
13835 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
13837 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
13838 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
13839 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
13840 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Optional user-imposed heap limits also shrink the reported heap sizes so
// preferred block sizes are computed against the effective capacity.
13847 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
13849 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
13850 if(limit != VK_WHOLE_SIZE)
13852 m_HeapSizeLimit[heapIndex] = limit;
13853 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
13855 m_MemProps.memoryHeaps[heapIndex].size = limit;
13861 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13863 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
13865 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
13868 preferredBlockSize,
13871 GetBufferImageGranularity(),
13878 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
13885 VkResult res = VK_SUCCESS;
// Recording support is compiled in only with VMA_RECORDING_ENABLED; otherwise
// requesting it is a hard error.
13890 #if VMA_RECORDING_ENABLED 13891 m_pRecorder = vma_new(
this, VmaRecorder)();
13893 if(res != VK_SUCCESS)
13897 m_pRecorder->WriteConfiguration(
13898 m_PhysicalDeviceProperties,
13900 m_UseKhrDedicatedAllocation);
13901 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
13903 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
13904 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records destruction (if recording), asserts all user pools were
// destroyed, then frees the per-memory-type dedicated-allocation lists and
// block vectors in reverse order.
13911 VmaAllocator_T::~VmaAllocator_T()
13913 #if VMA_RECORDING_ENABLED 13914 if(m_pRecorder != VMA_NULL)
13916 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
13917 vma_delete(
this, m_pRecorder);
13921 VMA_ASSERT(m_Pools.empty());
13923 for(
size_t i = GetMemoryTypeCount(); i--; )
13925 vma_delete(
this, m_pDedicatedAllocations[i]);
13926 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions in three stages: (1) statically-linked Vulkan
// prototypes when VMA_STATIC_VULKAN_FUNCTIONS is enabled (KHR extension entry
// points fetched via vkGetDeviceProcAddr), (2) any non-null pointers the user
// supplied, which override stage 1, (3) asserts that every required pointer
// ended up non-null.
13930 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
13932 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 13933 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
13934 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
13935 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
13936 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
13937 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
13938 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
13939 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
13940 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
13941 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
13942 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
13943 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
13944 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
13945 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
13946 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
13947 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
13948 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
13949 m_VulkanFunctions.vkCmdCopyBuffer = &vkCmdCopyBuffer;
// Extension entry points have no static prototypes; resolve them at runtime.
13950 #if VMA_DEDICATED_ALLOCATION 13951 if(m_UseKhrDedicatedAllocation)
13953 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
13954 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
13955 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
13956 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
13958 #endif // #if VMA_DEDICATED_ALLOCATION 13959 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 13961 #define VMA_COPY_IF_NOT_NULL(funcName) \ 13962 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 13964 if(pVulkanFunctions != VMA_NULL)
13966 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
13967 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
13968 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
13969 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
13970 VMA_COPY_IF_NOT_NULL(vkMapMemory);
13971 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
13972 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
13973 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
13974 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
13975 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
13976 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
13977 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
13978 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
13979 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
13980 VMA_COPY_IF_NOT_NULL(vkCreateImage);
13981 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
13982 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
13983 #if VMA_DEDICATED_ALLOCATION 13984 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
13985 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every required entry point must be resolved by now.
13989 #undef VMA_COPY_IF_NOT_NULL 13993 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
13994 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
13995 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
13996 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
13997 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
13998 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
13999 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14000 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14001 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14002 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14003 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14004 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14005 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14006 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14007 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14008 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14009 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14010 #if VMA_DEDICATED_ALLOCATION 14011 if(m_UseKhrDedicatedAllocation)
14013 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14014 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14019 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14021 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14022 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14023 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14024 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: first tries a dedicated
// VkDeviceMemory when that is preferred (always-dedicated debug mode, caller
// request, or size > half the preferred block size) and no custom pool is
// used; otherwise sub-allocates from the type's default block vector, falling
// back to dedicated memory if the block vector fails.
// NOTE(review): several argument and branch lines were dropped by extraction.
14027 VkResult VmaAllocator_T::AllocateMemoryOfType(
14029 VkDeviceSize alignment,
14030 bool dedicatedAllocation,
14031 VkBuffer dedicatedBuffer,
14032 VkImage dedicatedImage,
14034 uint32_t memTypeIndex,
14035 VmaSuballocationType suballocType,
14038 VMA_ASSERT(pAllocation != VMA_NULL);
14039 VMA_DEBUG_LOG(
"  AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
14045 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14050 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14051 VMA_ASSERT(blockVector);
14053 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
14054 bool preferDedicatedMemory =
14055 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14056 dedicatedAllocation ||
14058 size > preferredBlockSize / 2;
14060 if(preferDedicatedMemory &&
14062 finalCreateInfo.
pool == VK_NULL_HANDLE)
14071 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14075 return AllocateDedicatedMemory(
// Default path: sub-allocate from the memory type's block vector.
14089 VkResult res = blockVector->Allocate(
14091 m_CurrentFrameIndex.load(),
14097 if(res == VK_SUCCESS)
14105 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block-vector allocation failed, try a dedicated allocation.
14109 res = AllocateDedicatedMemory(
14115 finalCreateInfo.pUserData,
14119 if(res == VK_SUCCESS)
14122 VMA_DEBUG_LOG(
"    Allocated as DedicatedMemory");
14128 VMA_DEBUG_LOG(
"    vkAllocateMemory FAILED");
// Creates one VkDeviceMemory dedicated to a single allocation. Chains
// VkMemoryDedicatedAllocateInfoKHR when the extension is in use and a buffer
// or image handle is provided, optionally persistently maps the memory,
// wraps the result in a VmaAllocation_T, and registers it in the sorted
// per-memory-type dedicated-allocation list under its read/write mutex.
14135 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14137 VmaSuballocationType suballocType,
14138 uint32_t memTypeIndex,
14140 bool isUserDataString,
14142 VkBuffer dedicatedBuffer,
14143 VkImage dedicatedImage,
14146 VMA_ASSERT(pAllocation);
14148 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14149 allocInfo.memoryTypeIndex = memTypeIndex;
14150 allocInfo.allocationSize = size;
// At most one of dedicatedBuffer / dedicatedImage may be set.
14152 #if VMA_DEDICATED_ALLOCATION 14153 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14154 if(m_UseKhrDedicatedAllocation)
14156 if(dedicatedBuffer != VK_NULL_HANDLE)
14158 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14159 dedicatedAllocInfo.buffer = dedicatedBuffer;
14160 allocInfo.pNext = &dedicatedAllocInfo;
14162 else if(dedicatedImage != VK_NULL_HANDLE)
14164 dedicatedAllocInfo.image = dedicatedImage;
14165 allocInfo.pNext = &dedicatedAllocInfo;
14168 #endif // #if VMA_DEDICATED_ALLOCATION 14171 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14172 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14175 VMA_DEBUG_LOG(
"    vkAllocateMemory FAILED");
// Optional persistent mapping; on failure the fresh memory is released.
14179 void* pMappedData = VMA_NULL;
14182 res = (*m_VulkanFunctions.vkMapMemory)(
14191 VMA_DEBUG_LOG(
"    vkMapMemory FAILED");
14192 FreeVulkanMemory(memTypeIndex, size, hMemory);
14197 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
14198 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14199 (*pAllocation)->SetUserData(
this, pUserData);
14200 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14202 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register in the sorted dedicated-allocation list for this memory type.
14207 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14208 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14209 VMA_ASSERT(pDedicatedAllocations);
14210 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
14213 VMA_DEBUG_LOG(
"    Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR and also reports whether
// a dedicated allocation is required/preferred; otherwise it falls back to
// the core function and reports both flags as false.
14218 void VmaAllocator_T::GetBufferMemoryRequirements(
14220 VkMemoryRequirements& memReq,
14221 bool& requiresDedicatedAllocation,
14222 bool& prefersDedicatedAllocation)
const 14224 #if VMA_DEDICATED_ALLOCATION 14225 if(m_UseKhrDedicatedAllocation)
14227 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14228 memReqInfo.buffer = hBuffer;
14230 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14232 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14233 memReq2.pNext = &memDedicatedReq;
14235 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14237 memReq = memReq2.memoryRequirements;
14238 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14239 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core function, no dedicated-allocation information available.
14242 #endif // #if VMA_DEDICATED_ALLOCATION 14244 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14245 requiresDedicatedAllocation =
false;
14246 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with VkMemoryDedicatedRequirementsKHR when
// the extension is enabled, otherwise the core query with both flags false.
14250 void VmaAllocator_T::GetImageMemoryRequirements(
14252 VkMemoryRequirements& memReq,
14253 bool& requiresDedicatedAllocation,
14254 bool& prefersDedicatedAllocation)
const 14256 #if VMA_DEDICATED_ALLOCATION 14257 if(m_UseKhrDedicatedAllocation)
14259 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14260 memReqInfo.image = hImage;
14262 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14264 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14265 memReq2.pNext = &memDedicatedReq;
14267 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14269 memReq = memReq2.memoryRequirements;
14270 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14271 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core function, no dedicated-allocation information available.
14274 #endif // #if VMA_DEDICATED_ALLOCATION 14276 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14277 requiresDedicatedAllocation =
false;
14278 prefersDedicatedAllocation =
false;
// Top-level allocation entry point. Validates the create-info flag
// combinations, routes custom-pool allocations to the pool's block vector,
// and otherwise iterates candidate memory types (masking each failed type out
// of memoryTypeBits) calling AllocateMemoryOfType with the per-type minimum
// alignment applied. NOTE(review): several lines (flag checks, loop
// structure, find-memory-type calls) were dropped by extraction.
14282 VkResult VmaAllocator_T::AllocateMemory(
14283 const VkMemoryRequirements& vkMemReq,
14284 bool requiresDedicatedAllocation,
14285 bool prefersDedicatedAllocation,
14286 VkBuffer dedicatedBuffer,
14287 VkImage dedicatedImage,
14289 VmaSuballocationType suballocType,
14292 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
14294 if(vkMemReq.size == 0)
14296 return VK_ERROR_VALIDATION_FAILED_EXT;
// Reject contradictory flag combinations up front.
14301 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14302 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14307 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14308 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14310 if(requiresDedicatedAllocation)
14314 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14315 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14317 if(createInfo.
pool != VK_NULL_HANDLE)
14319 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14320 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14323 if((createInfo.
pool != VK_NULL_HANDLE) &&
14326 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14327 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool: delegate directly to the pool's block vector.
14330 if(createInfo.
pool != VK_NULL_HANDLE)
14332 const VkDeviceSize alignmentForPool = VMA_MAX(
14333 vkMemReq.alignment,
14334 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14335 return createInfo.
pool->m_BlockVector.Allocate(
14337 m_CurrentFrameIndex.load(),
// Default pools: try each acceptable memory type in turn.
14347 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14348 uint32_t memTypeIndex = UINT32_MAX;
14350 if(res == VK_SUCCESS)
14352 VkDeviceSize alignmentForMemType = VMA_MAX(
14353 vkMemReq.alignment,
14354 GetMemoryTypeMinAlignment(memTypeIndex));
14356 res = AllocateMemoryOfType(
14358 alignmentForMemType,
14359 requiresDedicatedAllocation || prefersDedicatedAllocation,
14367 if(res == VK_SUCCESS)
// Remove the failed memory type from the candidate set and retry.
14377 memoryTypeBits &= ~(1u << memTypeIndex);
14380 if(res == VK_SUCCESS)
14382 alignmentForMemType = VMA_MAX(
14383 vkMemReq.alignment,
14384 GetMemoryTypeMinAlignment(memTypeIndex));
14386 res = AllocateMemoryOfType(
14388 alignmentForMemType,
14389 requiresDedicatedAllocation || prefersDedicatedAllocation,
14397 if(res == VK_SUCCESS)
14407 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: if it is not lost (TouchAllocation succeeds), the
// underlying memory is returned to its block vector (custom pool or default)
// or the dedicated allocation is released; then the user-data and the
// VmaAllocation_T object itself are destroyed unconditionally.
14418 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
14420 VMA_ASSERT(allocation);
14422 if(TouchAllocation(allocation))
14424 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14426 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14429 switch(allocation->GetType())
14431 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14433 VmaBlockVector* pBlockVector = VMA_NULL;
14434 VmaPool hPool = allocation->GetPool();
14435 if(hPool != VK_NULL_HANDLE)
14437 pBlockVector = &hPool->m_BlockVector;
14441 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14442 pBlockVector = m_pBlockVectors[memTypeIndex];
14444 pBlockVector->Free(allocation);
14447 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14448 FreeDedicatedMemory(allocation);
// Destroy the handle object even if the allocation was lost.
14455 allocation->SetUserData(
this, VMA_NULL);
14456 vma_delete(
this, allocation);
// Attempts to resize an existing allocation in place. Fails validation for a
// zero size or a lost allocation; succeeds trivially when the size is
// unchanged; dedicated allocations cannot be resized; block allocations
// delegate to the block metadata and update the handle on success.
14459 VkResult VmaAllocator_T::ResizeAllocation(
14461 VkDeviceSize newSize)
14463 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14465 return VK_ERROR_VALIDATION_FAILED_EXT;
14467 if(newSize == alloc->GetSize())
14472 switch(alloc->GetType())
14474 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14475 return VK_ERROR_FEATURE_NOT_PRESENT;
14476 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14477 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
14479 alloc->ChangeSize(newSize);
14480 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
14485 return VK_ERROR_OUT_OF_POOL_MEMORY;
14489 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates global statistics: initializes all StatInfo accumulators, adds
// stats from every default block vector, every custom pool (under the pools
// read lock), and every dedicated allocation (under each type's read lock),
// then post-processes totals and the per-type / per-heap entries.
14493 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
14496 InitStatInfo(pStats->
total);
14497 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14499 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
14503 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14505 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14506 VMA_ASSERT(pBlockVector);
14507 pBlockVector->AddStats(pStats);
// Custom pools.
14512 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14513 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14515 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, folded into total / per-type / per-heap buckets.
14520 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14522 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14523 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14524 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
14525 VMA_ASSERT(pDedicatedAllocVector);
14526 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
14529 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
14530 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14531 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14532 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Compute derived values (averages, etc.) for each accumulator.
14537 VmaPostprocessCalcStatInfo(pStats->
total);
14538 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
14539 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
14540 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
14541 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (0x1002 == 4098), used to identify AMD devices.
static const uint32_t VMA_VENDOR_ID_AMD = 0x1002;
// Starts a defragmentation pass: creates a context, registers the caller's
// allocations, runs Defragment(), and destroys the context unless it reports
// VK_NOT_READY (i.e. work remains and the caller must call End later).
14546 VkResult VmaAllocator_T::DefragmentationBegin(
14556 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
14557 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
14560 (*pContext)->AddAllocations(
14563 VkResult res = (*pContext)->Defragment(
14568 if(res != VK_NOT_READY)
14570 vma_delete(
this, *pContext);
14571 *pContext = VMA_NULL;
// Finishes a defragmentation pass by destroying its context object.
14577 VkResult VmaAllocator_T::DefragmentationEnd(
14580 vma_delete(
this, context);
// GetAllocationInfo (fragment — signature precedes this view). For lostable
// allocations it loops on a compare-exchange of the last-use frame index:
// a lost allocation reports zeroed memory fields, a current one reports real
// values, otherwise the frame index is bumped and the loop retries. The
// non-lostable path simply copies the allocation's fields out.
14586 if(hAllocation->CanBecomeLost())
14592 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14593 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14596 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Lost allocation: report size and user data only.
14600 pAllocationInfo->
offset = 0;
14601 pAllocationInfo->
size = hAllocation->GetSize();
14603 pAllocationInfo->
pUserData = hAllocation->GetUserData();
14606 else if(localLastUseFrameIndex == localCurrFrameIndex)
14608 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14609 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14610 pAllocationInfo->
offset = hAllocation->GetOffset();
14611 pAllocationInfo->
size = hAllocation->GetSize();
14613 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Try to stamp the current frame as last use; retry on contention.
14618 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14620 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lostable path: with stats enabled, still update the last-use frame.
14627 #if VMA_STATS_STRING_ENABLED 14628 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14629 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14632 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
14633 if(localLastUseFrameIndex == localCurrFrameIndex)
14639 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14641 localLastUseFrameIndex = localCurrFrameIndex;
14647 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14648 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14649 pAllocationInfo->
offset = hAllocation->GetOffset();
14650 pAllocationInfo->
size = hAllocation->GetSize();
14651 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
14652 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks an allocation as used in the current frame. For lostable allocations
// it compare-exchanges the last-use frame index, returning false if the
// allocation is already lost; non-lostable allocations always succeed (frame
// index still refreshed when stats are enabled).
14656 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
14659 if(hAllocation->CanBecomeLost())
14661 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14662 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14665 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
14669 else if(localLastUseFrameIndex == localCurrFrameIndex)
14675 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14677 localLastUseFrameIndex = localCurrFrameIndex;
14684 #if VMA_STATS_STRING_ENABLED 14685 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14686 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14689 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
14690 if(localLastUseFrameIndex == localCurrFrameIndex)
14696 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14698 localLastUseFrameIndex = localCurrFrameIndex;
// CreatePool (fragment — signature precedes this view). Validates the create
// info, computes the preferred block size for the pool's memory type,
// constructs the VmaPool_T, creates its minimum block count (rolling back on
// failure), then registers the pool (with a fresh id) in the sorted pool list
// under the pools write lock.
14710 VMA_DEBUG_LOG(
"  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
14720 return VK_ERROR_INITIALIZATION_FAILED;
14723 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
14725 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
14727 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
14728 if(res != VK_SUCCESS)
14730 vma_delete(
this, *pPool);
14737 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
14738 (*pPool)->SetId(m_NextPoolId++);
14739 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Removes the pool from the sorted registry (under the write lock) and
// destroys it; asserts if the pool was not registered with this allocator.
14745 void VmaAllocator_T::DestroyPool(
VmaPool pool)
14749 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
14750 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
14751 VMA_ASSERT(success &&
"Pool not found in Allocator.");
14754 vma_delete(
this, pool);
// GetPoolStats (fragment): delegates to the pool's block vector.
14759 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation machinery.
14762 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
14764 m_CurrentFrameIndex.store(frameIndex);
// Marks eligible allocations in the given pool as lost relative to the
// current frame; the count of newly lost allocations is returned via
// pLostAllocationCount.
14767 void VmaAllocator_T::MakePoolAllocationsLost(
14769 size_t* pLostAllocationCount)
14771 hPool->m_BlockVector.MakePoolAllocationsLost(
14772 m_CurrentFrameIndex.load(),
14773 pLostAllocationCount);
// Runs margin-corruption detection over a single custom pool.
14776 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
14778 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption detection over every default block vector and custom pool
// whose memory type is selected by memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT and upgrades to VK_SUCCESS once any checked
// vector supports the feature; other local errors propagate.
14781 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
14783 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
14786 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14788 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
14790 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14791 VMA_ASSERT(pBlockVector);
14792 VkResult localRes = pBlockVector->CheckCorruption();
14795 case VK_ERROR_FEATURE_NOT_PRESENT:
14798 finalRes = VK_SUCCESS;
// Custom pools, filtered by the same memory-type mask.
14808 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14809 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14811 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
14813 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
14816 case VK_ERROR_FEATURE_NOT_PRESENT:
14819 finalRes = VK_SUCCESS;
// Creates a placeholder allocation that is permanently in the "lost" state
// (frame index VMA_FRAME_INDEX_LOST, no user-data string).
14831 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
14833 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
14834 (*pAllocation)->InitLost();
// Calls vkAllocateMemory, honoring an optional per-heap budget: when the
// heap has a finite limit, the remaining budget is checked and decremented
// under m_HeapSizeLimitMutex, and exceeding it yields
// VK_ERROR_OUT_OF_DEVICE_MEMORY without calling the driver. Fires the user's
// pfnAllocate device-memory callback on success.
14837 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
14839 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
14842 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
14844 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
14845 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
14847 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
14848 if(res == VK_SUCCESS)
14850 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
14855 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited heap: call the driver directly.
14860 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
14863 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
14865 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Frees a VkDeviceMemory: fires the user's pfnFree callback first, calls
// vkFreeMemory, then returns the size to the heap's remaining budget (under
// the limit mutex) if that heap has a finite limit.
14871 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
14873 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
14875 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
14878 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
14880 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
14881 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
14883 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
14884 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation's memory for CPU access. Lostable allocations cannot be
// mapped. Block allocations map the whole block (ref-counted) and offset the
// returned pointer by the allocation's offset; dedicated allocations map
// their own VkDeviceMemory.
14888 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
14890 if(hAllocation->CanBecomeLost())
14892 return VK_ERROR_MEMORY_MAP_FAILED;
14895 switch(hAllocation->GetType())
14897 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14899 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
14900 char *pBytes = VMA_NULL;
14901 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
14902 if(res == VK_SUCCESS)
14904 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
14905 hAllocation->BlockAllocMap();
14909 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14910 return hAllocation->DedicatedAllocMap(
this, ppData);
14913 return VK_ERROR_MEMORY_MAP_FAILED;
// Unmap (fragment — signature precedes this view): decrements the map
// ref-count of the owning block, or unmaps a dedicated allocation directly.
14919 switch(hAllocation->GetType())
14921 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14923 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
14924 hAllocation->BlockAllocUnmap();
14925 pBlock->Unmap(
this, 1);
14928 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14929 hAllocation->DedicatedAllocUnmap(
this);
14936 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
14938 VkResult res = VK_SUCCESS;
14939 switch(hAllocation->GetType())
14941 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14942 res = GetVulkanFunctions().vkBindBufferMemory(
14945 hAllocation->GetMemory(),
14948 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14950 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
14951 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
14952 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
14961 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
14963 VkResult res = VK_SUCCESS;
14964 switch(hAllocation->GetType())
14966 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14967 res = GetVulkanFunctions().vkBindImageMemory(
14970 hAllocation->GetMemory(),
14973 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14975 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
14976 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
14977 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
14986 void VmaAllocator_T::FlushOrInvalidateAllocation(
14988 VkDeviceSize offset, VkDeviceSize size,
14989 VMA_CACHE_OPERATION op)
14991 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
14992 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
14994 const VkDeviceSize allocationSize = hAllocation->GetSize();
14995 VMA_ASSERT(offset <= allocationSize);
14997 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
14999 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15000 memRange.memory = hAllocation->GetMemory();
15002 switch(hAllocation->GetType())
15004 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15005 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15006 if(size == VK_WHOLE_SIZE)
15008 memRange.size = allocationSize - memRange.offset;
15012 VMA_ASSERT(offset + size <= allocationSize);
15013 memRange.size = VMA_MIN(
15014 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15015 allocationSize - memRange.offset);
15019 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15022 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15023 if(size == VK_WHOLE_SIZE)
15025 size = allocationSize - offset;
15029 VMA_ASSERT(offset + size <= allocationSize);
15031 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
15034 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15035 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15036 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15037 memRange.offset += allocationOffset;
15038 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
15049 case VMA_CACHE_FLUSH:
15050 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15052 case VMA_CACHE_INVALIDATE:
15053 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
15062 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15064 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15066 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15068 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15069 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15070 VMA_ASSERT(pDedicatedAllocations);
15071 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15072 VMA_ASSERT(success);
15075 VkDeviceMemory hMemory = allocation->GetMemory();
15087 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15089 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills the allocation's memory with a byte pattern (used to
// mark created/destroyed allocations when VMA_DEBUG_INITIALIZE_ALLOCATIONS is
// enabled). Only host-visible, non-lostable allocations can be filled; the
// memory is mapped, memset, flushed (in case it is non-coherent), and unmapped.
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
15113 #if VMA_STATS_STRING_ENABLED 15115 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15117 bool dedicatedAllocationsStarted =
false;
15118 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15120 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15121 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15122 VMA_ASSERT(pDedicatedAllocVector);
15123 if(pDedicatedAllocVector->empty() ==
false)
15125 if(dedicatedAllocationsStarted ==
false)
15127 dedicatedAllocationsStarted =
true;
15128 json.WriteString(
"DedicatedAllocations");
15129 json.BeginObject();
15132 json.BeginString(
"Type ");
15133 json.ContinueString(memTypeIndex);
15138 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15140 json.BeginObject(
true);
15142 hAlloc->PrintParameters(json);
15149 if(dedicatedAllocationsStarted)
15155 bool allocationsStarted =
false;
15156 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15158 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15160 if(allocationsStarted ==
false)
15162 allocationsStarted =
true;
15163 json.WriteString(
"DefaultPools");
15164 json.BeginObject();
15167 json.BeginString(
"Type ");
15168 json.ContinueString(memTypeIndex);
15171 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15174 if(allocationsStarted)
15182 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15183 const size_t poolCount = m_Pools.size();
15186 json.WriteString(
"Pools");
15187 json.BeginObject();
15188 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15190 json.BeginString();
15191 json.ContinueString(m_Pools[poolIndex]->GetId());
15194 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
15201 #endif // #if VMA_STATS_STRING_ENABLED 15210 VMA_ASSERT(pCreateInfo && pAllocator);
15211 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15213 return (*pAllocator)->Init(pCreateInfo);
15219 if(allocator != VK_NULL_HANDLE)
15221 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15222 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15223 vma_delete(&allocationCallbacks, allocator);
15229 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15231 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15232 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
15237 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15239 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15240 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
15245 uint32_t memoryTypeIndex,
15246 VkMemoryPropertyFlags* pFlags)
15248 VMA_ASSERT(allocator && pFlags);
15249 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15250 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
15255 uint32_t frameIndex)
15257 VMA_ASSERT(allocator);
15258 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15260 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15262 allocator->SetCurrentFrameIndex(frameIndex);
15269 VMA_ASSERT(allocator && pStats);
15270 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15271 allocator->CalculateStats(pStats);
15274 #if VMA_STATS_STRING_ENABLED 15278 char** ppStatsString,
15279 VkBool32 detailedMap)
15281 VMA_ASSERT(allocator && ppStatsString);
15282 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15284 VmaStringBuilder sb(allocator);
15286 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15287 json.BeginObject();
15290 allocator->CalculateStats(&stats);
15292 json.WriteString(
"Total");
15293 VmaPrintStatInfo(json, stats.
total);
15295 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15297 json.BeginString(
"Heap ");
15298 json.ContinueString(heapIndex);
15300 json.BeginObject();
15302 json.WriteString(
"Size");
15303 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15305 json.WriteString(
"Flags");
15306 json.BeginArray(
true);
15307 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15309 json.WriteString(
"DEVICE_LOCAL");
15315 json.WriteString(
"Stats");
15316 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
15319 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15321 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15323 json.BeginString(
"Type ");
15324 json.ContinueString(typeIndex);
15327 json.BeginObject();
15329 json.WriteString(
"Flags");
15330 json.BeginArray(
true);
15331 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15332 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15334 json.WriteString(
"DEVICE_LOCAL");
15336 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15338 json.WriteString(
"HOST_VISIBLE");
15340 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15342 json.WriteString(
"HOST_COHERENT");
15344 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15346 json.WriteString(
"HOST_CACHED");
15348 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15350 json.WriteString(
"LAZILY_ALLOCATED");
15356 json.WriteString(
"Stats");
15357 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
15366 if(detailedMap == VK_TRUE)
15368 allocator->PrintDetailedMap(json);
15374 const size_t len = sb.GetLength();
15375 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15378 memcpy(pChars, sb.GetData(), len);
15380 pChars[len] =
'\0';
15381 *ppStatsString = pChars;
15386 char* pStatsString)
15388 if(pStatsString != VMA_NULL)
15390 VMA_ASSERT(allocator);
15391 size_t len = strlen(pStatsString);
15392 vma_delete_array(allocator, pStatsString, len + 1);
15396 #endif // #if VMA_STATS_STRING_ENABLED 15403 uint32_t memoryTypeBits,
15405 uint32_t* pMemoryTypeIndex)
15407 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15408 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15409 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15416 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15417 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15422 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15426 switch(pAllocationCreateInfo->
usage)
15431 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15433 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15437 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15440 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15441 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15443 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15447 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15448 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
15454 *pMemoryTypeIndex = UINT32_MAX;
15455 uint32_t minCost = UINT32_MAX;
15456 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15457 memTypeIndex < allocator->GetMemoryTypeCount();
15458 ++memTypeIndex, memTypeBit <<= 1)
15461 if((memTypeBit & memoryTypeBits) != 0)
15463 const VkMemoryPropertyFlags currFlags =
15464 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
15466 if((requiredFlags & ~currFlags) == 0)
15469 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
15471 if(currCost < minCost)
15473 *pMemoryTypeIndex = memTypeIndex;
15478 minCost = currCost;
15483 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
15488 const VkBufferCreateInfo* pBufferCreateInfo,
15490 uint32_t* pMemoryTypeIndex)
15492 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15493 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
15494 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15495 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15497 const VkDevice hDev = allocator->m_hDevice;
15498 VkBuffer hBuffer = VK_NULL_HANDLE;
15499 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
15500 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
15501 if(res == VK_SUCCESS)
15503 VkMemoryRequirements memReq = {};
15504 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
15505 hDev, hBuffer, &memReq);
15509 memReq.memoryTypeBits,
15510 pAllocationCreateInfo,
15513 allocator->GetVulkanFunctions().vkDestroyBuffer(
15514 hDev, hBuffer, allocator->GetAllocationCallbacks());
15521 const VkImageCreateInfo* pImageCreateInfo,
15523 uint32_t* pMemoryTypeIndex)
15525 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15526 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
15527 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15528 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15530 const VkDevice hDev = allocator->m_hDevice;
15531 VkImage hImage = VK_NULL_HANDLE;
15532 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
15533 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
15534 if(res == VK_SUCCESS)
15536 VkMemoryRequirements memReq = {};
15537 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
15538 hDev, hImage, &memReq);
15542 memReq.memoryTypeBits,
15543 pAllocationCreateInfo,
15546 allocator->GetVulkanFunctions().vkDestroyImage(
15547 hDev, hImage, allocator->GetAllocationCallbacks());
15557 VMA_ASSERT(allocator && pCreateInfo && pPool);
15559 VMA_DEBUG_LOG(
"vmaCreatePool");
15561 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15563 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
15565 #if VMA_RECORDING_ENABLED 15566 if(allocator->GetRecorder() != VMA_NULL)
15568 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
15579 VMA_ASSERT(allocator);
15581 if(pool == VK_NULL_HANDLE)
15586 VMA_DEBUG_LOG(
"vmaDestroyPool");
15588 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15590 #if VMA_RECORDING_ENABLED 15591 if(allocator->GetRecorder() != VMA_NULL)
15593 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
15597 allocator->DestroyPool(pool);
15605 VMA_ASSERT(allocator && pool && pPoolStats);
15607 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15609 allocator->GetPoolStats(pool, pPoolStats);
15615 size_t* pLostAllocationCount)
15617 VMA_ASSERT(allocator && pool);
15619 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15621 #if VMA_RECORDING_ENABLED 15622 if(allocator->GetRecorder() != VMA_NULL)
15624 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
15628 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
15633 VMA_ASSERT(allocator && pool);
15635 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15637 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
15639 return allocator->CheckPoolCorruption(pool);
15644 const VkMemoryRequirements* pVkMemoryRequirements,
15649 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
15651 VMA_DEBUG_LOG(
"vmaAllocateMemory");
15653 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15655 VkResult result = allocator->AllocateMemory(
15656 *pVkMemoryRequirements,
15662 VMA_SUBALLOCATION_TYPE_UNKNOWN,
15665 #if VMA_RECORDING_ENABLED 15666 if(allocator->GetRecorder() != VMA_NULL)
15668 allocator->GetRecorder()->RecordAllocateMemory(
15669 allocator->GetCurrentFrameIndex(),
15670 *pVkMemoryRequirements,
15676 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
15678 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
15691 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
15693 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
15695 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15697 VkMemoryRequirements vkMemReq = {};
15698 bool requiresDedicatedAllocation =
false;
15699 bool prefersDedicatedAllocation =
false;
15700 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
15701 requiresDedicatedAllocation,
15702 prefersDedicatedAllocation);
15704 VkResult result = allocator->AllocateMemory(
15706 requiresDedicatedAllocation,
15707 prefersDedicatedAllocation,
15711 VMA_SUBALLOCATION_TYPE_BUFFER,
15714 #if VMA_RECORDING_ENABLED 15715 if(allocator->GetRecorder() != VMA_NULL)
15717 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
15718 allocator->GetCurrentFrameIndex(),
15720 requiresDedicatedAllocation,
15721 prefersDedicatedAllocation,
15727 if(pAllocationInfo && result == VK_SUCCESS)
15729 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
15742 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
15744 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
15746 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15748 VkMemoryRequirements vkMemReq = {};
15749 bool requiresDedicatedAllocation =
false;
15750 bool prefersDedicatedAllocation =
false;
15751 allocator->GetImageMemoryRequirements(image, vkMemReq,
15752 requiresDedicatedAllocation, prefersDedicatedAllocation);
15754 VkResult result = allocator->AllocateMemory(
15756 requiresDedicatedAllocation,
15757 prefersDedicatedAllocation,
15761 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
15764 #if VMA_RECORDING_ENABLED 15765 if(allocator->GetRecorder() != VMA_NULL)
15767 allocator->GetRecorder()->RecordAllocateMemoryForImage(
15768 allocator->GetCurrentFrameIndex(),
15770 requiresDedicatedAllocation,
15771 prefersDedicatedAllocation,
15777 if(pAllocationInfo && result == VK_SUCCESS)
15779 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
15789 VMA_ASSERT(allocator);
15791 if(allocation == VK_NULL_HANDLE)
15796 VMA_DEBUG_LOG(
"vmaFreeMemory");
15798 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15800 #if VMA_RECORDING_ENABLED 15801 if(allocator->GetRecorder() != VMA_NULL)
15803 allocator->GetRecorder()->RecordFreeMemory(
15804 allocator->GetCurrentFrameIndex(),
15809 allocator->FreeMemory(allocation);
15815 VkDeviceSize newSize)
15817 VMA_ASSERT(allocator && allocation);
15819 VMA_DEBUG_LOG(
"vmaResizeAllocation");
15821 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15823 #if VMA_RECORDING_ENABLED 15824 if(allocator->GetRecorder() != VMA_NULL)
15826 allocator->GetRecorder()->RecordResizeAllocation(
15827 allocator->GetCurrentFrameIndex(),
15833 return allocator->ResizeAllocation(allocation, newSize);
15841 VMA_ASSERT(allocator && allocation && pAllocationInfo);
15843 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15845 #if VMA_RECORDING_ENABLED 15846 if(allocator->GetRecorder() != VMA_NULL)
15848 allocator->GetRecorder()->RecordGetAllocationInfo(
15849 allocator->GetCurrentFrameIndex(),
15854 allocator->GetAllocationInfo(allocation, pAllocationInfo);
15861 VMA_ASSERT(allocator && allocation);
15863 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15865 #if VMA_RECORDING_ENABLED 15866 if(allocator->GetRecorder() != VMA_NULL)
15868 allocator->GetRecorder()->RecordTouchAllocation(
15869 allocator->GetCurrentFrameIndex(),
15874 return allocator->TouchAllocation(allocation);
15882 VMA_ASSERT(allocator && allocation);
15884 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15886 allocation->SetUserData(allocator, pUserData);
15888 #if VMA_RECORDING_ENABLED 15889 if(allocator->GetRecorder() != VMA_NULL)
15891 allocator->GetRecorder()->RecordSetAllocationUserData(
15892 allocator->GetCurrentFrameIndex(),
15903 VMA_ASSERT(allocator && pAllocation);
15905 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
15907 allocator->CreateLostAllocation(pAllocation);
15909 #if VMA_RECORDING_ENABLED 15910 if(allocator->GetRecorder() != VMA_NULL)
15912 allocator->GetRecorder()->RecordCreateLostAllocation(
15913 allocator->GetCurrentFrameIndex(),
15924 VMA_ASSERT(allocator && allocation && ppData);
15926 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15928 VkResult res = allocator->Map(allocation, ppData);
15930 #if VMA_RECORDING_ENABLED 15931 if(allocator->GetRecorder() != VMA_NULL)
15933 allocator->GetRecorder()->RecordMapMemory(
15934 allocator->GetCurrentFrameIndex(),
15946 VMA_ASSERT(allocator && allocation);
15948 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15950 #if VMA_RECORDING_ENABLED 15951 if(allocator->GetRecorder() != VMA_NULL)
15953 allocator->GetRecorder()->RecordUnmapMemory(
15954 allocator->GetCurrentFrameIndex(),
15959 allocator->Unmap(allocation);
15964 VMA_ASSERT(allocator && allocation);
15966 VMA_DEBUG_LOG(
"vmaFlushAllocation");
15968 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15970 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
15972 #if VMA_RECORDING_ENABLED 15973 if(allocator->GetRecorder() != VMA_NULL)
15975 allocator->GetRecorder()->RecordFlushAllocation(
15976 allocator->GetCurrentFrameIndex(),
15977 allocation, offset, size);
15984 VMA_ASSERT(allocator && allocation);
15986 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
15988 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15990 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
15992 #if VMA_RECORDING_ENABLED 15993 if(allocator->GetRecorder() != VMA_NULL)
15995 allocator->GetRecorder()->RecordInvalidateAllocation(
15996 allocator->GetCurrentFrameIndex(),
15997 allocation, offset, size);
16004 VMA_ASSERT(allocator);
16006 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16008 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16010 return allocator->CheckCorruption(memoryTypeBits);
16016 size_t allocationCount,
16017 VkBool32* pAllocationsChanged,
16027 if(pDefragmentationInfo != VMA_NULL)
16041 if(res == VK_NOT_READY)
16054 VMA_ASSERT(allocator && pInfo && pContext);
16065 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16067 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16069 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16071 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16073 #if VMA_RECORDING_ENABLED 16074 if(allocator->GetRecorder() != VMA_NULL)
16076 allocator->GetRecorder()->RecordDefragmentationBegin(
16077 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16088 VMA_ASSERT(allocator);
16090 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16092 if(context != VK_NULL_HANDLE)
16094 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16096 #if VMA_RECORDING_ENABLED 16097 if(allocator->GetRecorder() != VMA_NULL)
16099 allocator->GetRecorder()->RecordDefragmentationEnd(
16100 allocator->GetCurrentFrameIndex(), context);
16104 return allocator->DefragmentationEnd(context);
16117 VMA_ASSERT(allocator && allocation && buffer);
16119 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16121 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16123 return allocator->BindBufferMemory(allocation, buffer);
16131 VMA_ASSERT(allocator && allocation && image);
16133 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16135 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16137 return allocator->BindImageMemory(allocation, image);
16142 const VkBufferCreateInfo* pBufferCreateInfo,
16148 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16150 if(pBufferCreateInfo->size == 0)
16152 return VK_ERROR_VALIDATION_FAILED_EXT;
16155 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16157 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16159 *pBuffer = VK_NULL_HANDLE;
16160 *pAllocation = VK_NULL_HANDLE;
16163 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16164 allocator->m_hDevice,
16166 allocator->GetAllocationCallbacks(),
16171 VkMemoryRequirements vkMemReq = {};
16172 bool requiresDedicatedAllocation =
false;
16173 bool prefersDedicatedAllocation =
false;
16174 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16175 requiresDedicatedAllocation, prefersDedicatedAllocation);
16179 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16181 VMA_ASSERT(vkMemReq.alignment %
16182 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16184 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16186 VMA_ASSERT(vkMemReq.alignment %
16187 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16189 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16191 VMA_ASSERT(vkMemReq.alignment %
16192 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16196 res = allocator->AllocateMemory(
16198 requiresDedicatedAllocation,
16199 prefersDedicatedAllocation,
16202 *pAllocationCreateInfo,
16203 VMA_SUBALLOCATION_TYPE_BUFFER,
16206 #if VMA_RECORDING_ENABLED 16207 if(allocator->GetRecorder() != VMA_NULL)
16209 allocator->GetRecorder()->RecordCreateBuffer(
16210 allocator->GetCurrentFrameIndex(),
16211 *pBufferCreateInfo,
16212 *pAllocationCreateInfo,
16220 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16224 #if VMA_STATS_STRING_ENABLED 16225 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16227 if(pAllocationInfo != VMA_NULL)
16229 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16234 allocator->FreeMemory(*pAllocation);
16235 *pAllocation = VK_NULL_HANDLE;
16236 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16237 *pBuffer = VK_NULL_HANDLE;
16240 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16241 *pBuffer = VK_NULL_HANDLE;
16252 VMA_ASSERT(allocator);
16254 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16259 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16261 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16263 #if VMA_RECORDING_ENABLED 16264 if(allocator->GetRecorder() != VMA_NULL)
16266 allocator->GetRecorder()->RecordDestroyBuffer(
16267 allocator->GetCurrentFrameIndex(),
16272 if(buffer != VK_NULL_HANDLE)
16274 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16277 if(allocation != VK_NULL_HANDLE)
16279 allocator->FreeMemory(allocation);
16285 const VkImageCreateInfo* pImageCreateInfo,
16291 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16293 if(pImageCreateInfo->extent.width == 0 ||
16294 pImageCreateInfo->extent.height == 0 ||
16295 pImageCreateInfo->extent.depth == 0 ||
16296 pImageCreateInfo->mipLevels == 0 ||
16297 pImageCreateInfo->arrayLayers == 0)
16299 return VK_ERROR_VALIDATION_FAILED_EXT;
16302 VMA_DEBUG_LOG(
"vmaCreateImage");
16304 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16306 *pImage = VK_NULL_HANDLE;
16307 *pAllocation = VK_NULL_HANDLE;
16310 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16311 allocator->m_hDevice,
16313 allocator->GetAllocationCallbacks(),
16317 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16318 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16319 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16322 VkMemoryRequirements vkMemReq = {};
16323 bool requiresDedicatedAllocation =
false;
16324 bool prefersDedicatedAllocation =
false;
16325 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16326 requiresDedicatedAllocation, prefersDedicatedAllocation);
16328 res = allocator->AllocateMemory(
16330 requiresDedicatedAllocation,
16331 prefersDedicatedAllocation,
16334 *pAllocationCreateInfo,
16338 #if VMA_RECORDING_ENABLED 16339 if(allocator->GetRecorder() != VMA_NULL)
16341 allocator->GetRecorder()->RecordCreateImage(
16342 allocator->GetCurrentFrameIndex(),
16344 *pAllocationCreateInfo,
16352 res = allocator->BindImageMemory(*pAllocation, *pImage);
16356 #if VMA_STATS_STRING_ENABLED 16357 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
16359 if(pAllocationInfo != VMA_NULL)
16361 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16366 allocator->FreeMemory(*pAllocation);
16367 *pAllocation = VK_NULL_HANDLE;
16368 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16369 *pImage = VK_NULL_HANDLE;
16372 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16373 *pImage = VK_NULL_HANDLE;
16384 VMA_ASSERT(allocator);
16386 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16391 VMA_DEBUG_LOG(
"vmaDestroyImage");
16393 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16395 #if VMA_RECORDING_ENABLED 16396 if(allocator->GetRecorder() != VMA_NULL)
16398 allocator->GetRecorder()->RecordDestroyImage(
16399 allocator->GetCurrentFrameIndex(),
16404 if(image != VK_NULL_HANDLE)
16406 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
16408 if(allocation != VK_NULL_HANDLE)
16410 allocator->FreeMemory(allocation);
16414 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1727
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2030
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1785
diff --git a/src/Tests.cpp b/src/Tests.cpp
index 0f1392e..d0b3ea2 100644
--- a/src/Tests.cpp
+++ b/src/Tests.cpp
@@ -5049,7 +5049,7 @@ void Test()
{
wprintf(L"TESTING:\n");
- if(true)
+ if(false)
{
// # Temporarily insert custom tests here
// ########################################