23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1618 #ifndef VMA_RECORDING_ENABLED 1620 #define VMA_RECORDING_ENABLED 1 1622 #define VMA_RECORDING_ENABLED 0 1627 #define NOMINMAX // For windows.h 1631 #include <vulkan/vulkan.h> 1634 #if VMA_RECORDING_ENABLED 1635 #include <windows.h> 1638 #if !defined(VMA_DEDICATED_ALLOCATION) 1639 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1640 #define VMA_DEDICATED_ALLOCATION 1 1642 #define VMA_DEDICATED_ALLOCATION 0 1660 uint32_t memoryType,
1661 VkDeviceMemory memory,
1666 uint32_t memoryType,
1667 VkDeviceMemory memory,
1740 #if VMA_DEDICATED_ALLOCATION 1741 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1742 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1869 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1877 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1887 uint32_t memoryTypeIndex,
1888 VkMemoryPropertyFlags* pFlags);
1900 uint32_t frameIndex);
1933 #define VMA_STATS_STRING_ENABLED 1 1935 #if VMA_STATS_STRING_ENABLED 1942 char** ppStatsString,
1943 VkBool32 detailedMap);
1947 char* pStatsString);
1949 #endif // #if VMA_STATS_STRING_ENABLED 2181 uint32_t memoryTypeBits,
2183 uint32_t* pMemoryTypeIndex);
2199 const VkBufferCreateInfo* pBufferCreateInfo,
2201 uint32_t* pMemoryTypeIndex);
2217 const VkImageCreateInfo* pImageCreateInfo,
2219 uint32_t* pMemoryTypeIndex);
2391 size_t* pLostAllocationCount);
2490 const VkMemoryRequirements* pVkMemoryRequirements,
2516 const VkMemoryRequirements* pVkMemoryRequirements,
2518 size_t allocationCount,
2563 size_t allocationCount,
2589 VkDeviceSize newSize);
2958 size_t allocationCount,
2959 VkBool32* pAllocationsChanged,
3025 const VkBufferCreateInfo* pBufferCreateInfo,
3050 const VkImageCreateInfo* pImageCreateInfo,
3076 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3079 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3080 #define VMA_IMPLEMENTATION 3083 #ifdef VMA_IMPLEMENTATION 3084 #undef VMA_IMPLEMENTATION 3106 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3107 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3119 #if VMA_USE_STL_CONTAINERS 3120 #define VMA_USE_STL_VECTOR 1 3121 #define VMA_USE_STL_UNORDERED_MAP 1 3122 #define VMA_USE_STL_LIST 1 3125 #ifndef VMA_USE_STL_SHARED_MUTEX 3127 #if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 3128 #define VMA_USE_STL_SHARED_MUTEX 1 3132 #if VMA_USE_STL_VECTOR 3136 #if VMA_USE_STL_UNORDERED_MAP 3137 #include <unordered_map> 3140 #if VMA_USE_STL_LIST 3149 #include <algorithm> 3155 #define VMA_NULL nullptr 3158 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3160 void *aligned_alloc(
size_t alignment,
size_t size)
3163 if(alignment <
sizeof(
void*))
3165 alignment =
sizeof(
void*);
3168 return memalign(alignment, size);
3170 #elif defined(__APPLE__) || defined(__ANDROID__) 3172 void *aligned_alloc(
size_t alignment,
size_t size)
3175 if(alignment <
sizeof(
void*))
3177 alignment =
sizeof(
void*);
3181 if(posix_memalign(&pointer, alignment, size) == 0)
3195 #define VMA_ASSERT(expr) assert(expr) 3197 #define VMA_ASSERT(expr) 3203 #ifndef VMA_HEAVY_ASSERT 3205 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3207 #define VMA_HEAVY_ASSERT(expr) 3211 #ifndef VMA_ALIGN_OF 3212 #define VMA_ALIGN_OF(type) (__alignof(type)) 3215 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3217 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3219 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3223 #ifndef VMA_SYSTEM_FREE 3225 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3227 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3232 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3236 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3240 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3244 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3247 #ifndef VMA_DEBUG_LOG 3248 #define VMA_DEBUG_LOG(format, ...) 3258 #if VMA_STATS_STRING_ENABLED 3259 static inline void VmaUint32ToStr(
// Formats num as unsigned decimal into outStr (capacity strLen, incl. terminator).
static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    const unsigned int printable = static_cast<unsigned int>(num);
    snprintf(outStr, strLen, "%u", printable);
}
// Formats num as unsigned decimal into outStr (capacity strLen, incl. terminator).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long printable = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", printable);
}
// Formats the pointer value into outStr using "%p" (representation is
// implementation-defined) into a buffer of capacity strLen.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
3277 void Lock() { m_Mutex.lock(); }
3278 void Unlock() { m_Mutex.unlock(); }
3282 #define VMA_MUTEX VmaMutex 3286 #ifndef VMA_RW_MUTEX 3287 #if VMA_USE_STL_SHARED_MUTEX 3289 #include <shared_mutex> 3293 void LockRead() { m_Mutex.lock_shared(); }
3294 void UnlockRead() { m_Mutex.unlock_shared(); }
3295 void LockWrite() { m_Mutex.lock(); }
3296 void UnlockWrite() { m_Mutex.unlock(); }
3298 std::shared_mutex m_Mutex;
3300 #define VMA_RW_MUTEX VmaRWMutex 3301 #elif defined(_WIN32) 3306 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3307 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3308 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3309 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3310 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3314 #define VMA_RW_MUTEX VmaRWMutex 3320 void LockRead() { m_Mutex.Lock(); }
3321 void UnlockRead() { m_Mutex.Unlock(); }
3322 void LockWrite() { m_Mutex.Lock(); }
3323 void UnlockWrite() { m_Mutex.Unlock(); }
3327 #define VMA_RW_MUTEX VmaRWMutex 3328 #endif // #if VMA_USE_STL_SHARED_MUTEX 3329 #endif // #ifndef VMA_RW_MUTEX 3339 #ifndef VMA_ATOMIC_UINT32 3340 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3343 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3348 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3351 #ifndef VMA_DEBUG_ALIGNMENT 3356 #define VMA_DEBUG_ALIGNMENT (1) 3359 #ifndef VMA_DEBUG_MARGIN 3364 #define VMA_DEBUG_MARGIN (0) 3367 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3372 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3375 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3381 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3384 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3389 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3392 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3397 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3400 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3401 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3405 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3406 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3410 #ifndef VMA_CLASS_NO_COPY 3411 #define VMA_CLASS_NO_COPY(className) \ 3413 className(const className&) = delete; \ 3414 className& operator=(const className&) = delete; 3417 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3420 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3422 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3423 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3429 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3431 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3432 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count), using the
// classic SWAR reduction with a final multiply to sum the byte counts.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t bits = v;
    bits = bits - ((bits >> 1) & 0x55555555u);
    bits = (bits & 0x33333333u) + ((bits >> 2) & 0x33333333u);
    bits = (bits + (bits >> 4)) & 0x0F0F0F0Fu;
    return (bits * 0x01010101u) >> 24;
}
// Rounds val up to the nearest multiple of align.
// Works for any positive align, not only powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T blockCount = (val + align - 1) / align;
    return blockCount * align;
}
// Rounds val down to the nearest multiple of align.
// Works for any positive align, not only powers of two.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    // Equivalent to (val / align) * align by the C++ division/modulo identity.
    return val - (val % align);
}
// Division with rounding to nearest (ties resolved by the y/2 bias; exact
// behavior for ties depends on integer truncation, matching (x + y/2) / y).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Returns true if x is a power of two.
// NOTE: x == 0 also yields true, matching the original expression — callers
// are expected to pass nonzero values.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T lowBitsCleared = x & (x - 1);
    return lowBitsCleared == 0;
}
3479 static inline uint32_t VmaNextPow2(uint32_t v)
3490 static inline uint64_t VmaNextPow2(uint64_t v)
3504 static inline uint32_t VmaPrevPow2(uint32_t v)
3514 static inline uint64_t VmaPrevPow2(uint64_t v)
// Returns true if the string is null or empty.
// A null pointer is treated the same as "".
static inline bool VmaStrIsEmpty(const char* pStr)
{
    if(pStr == nullptr)
    {
        return true;
    }
    return *pStr == '\0';
}
3531 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition step for VmaQuickSort: takes the last element of
// [beg, end) as pivot, moves every element for which cmp(elem, pivot) holds
// in front of it, and returns the pivot's final position.
// NOTE(review): the `++insertIndex` advance and the final `return` were
// missing from the garbled source and have been restored per the partition
// invariant — confirm against upstream VMA.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
// Recursive quick sort of [beg, end) using comparator cmp.
// NOTE(review): the non-empty-range guard was missing from the garbled
// source and has been restored — without it the recursion would not
// terminate; confirm against upstream VMA.
template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}
3583 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3585 #endif // #ifndef VMA_SORT 3594 static inline bool VmaBlocksOnSamePage(
3595 VkDeviceSize resourceAOffset,
3596 VkDeviceSize resourceASize,
3597 VkDeviceSize resourceBOffset,
3598 VkDeviceSize pageSize)
3600 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3601 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3602 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3603 VkDeviceSize resourceBStart = resourceBOffset;
3604 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3605 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation within a memory block.
// Used by VmaIsBufferImageGranularityConflict to decide whether two
// neighboring suballocations must be separated by bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // Unused region.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // Resource kind not known.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Image, tiling not known.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
// Returns true if suballocations of the two given types placed next to each
// other must be separated to respect bufferImageGranularity.
// The relation is symmetric, so the pair is first normalized to type1 <= type2.
// NOTE(review): the per-case `return` statements were missing from the
// garbled source and have been restored — confirm against upstream VMA.
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        // Unknown resource: conservatively report a possible conflict.
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
3660 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3662 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3663 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3664 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3666 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3670 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3672 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3673 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3674 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3676 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
// RAII scope lock over VMA_MUTEX. When useMutex is false the lock becomes a
// no-op (m_pMutex stays null), which lets callers honor the allocator's
// "externally synchronized" mode without branching at every call site.
// NOTE(review): the struct header and access specifiers were missing from
// the garbled source and have been restored — confirm against upstream VMA.
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};
// RAII shared (read) lock over VMA_RW_MUTEX; no-op when useMutex is false.
// NOTE(review): access specifiers and closing brace restored from context —
// confirm against upstream VMA.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
// RAII exclusive (write) lock over VMA_RW_MUTEX; no-op when useMutex is false.
// NOTE(review): access specifiers and closing brace restored from context —
// confirm against upstream VMA.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
3724 #if VMA_DEBUG_GLOBAL_MUTEX 3725 static VMA_MUTEX gDebugGlobalMutex;
3726 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3728 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3732 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to the
// first element for which cmp(element, key) is false (i.e. lower_bound),
// or end if every element is less than key.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        // Midpoint written overflow-safely; identical result to (down+up)/2
        // for all reachable sizes.
        const size_t mid = down + (up - down) / 2;
        if(cmp(*(beg + mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
// Returns true if arr holds `count` pointers that are all non-null and
// pairwise distinct. O(count^2) — used only for debug validation of small
// arrays.
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == nullptr)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
// Allocates `size` bytes with the given `alignment`, preferring the
// user-provided VkAllocationCallbacks and falling back to the system aligned
// allocator (VMA_SYSTEM_ALIGNED_MALLOC) when no callback is installed.
// NOTE(review): the size/alignment argument lines were missing from the
// garbled source; restored per the PFN_vkAllocationFunction signature.
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}
3808 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3810 if((pAllocationCallbacks != VMA_NULL) &&
3811 (pAllocationCallbacks->pfnFree != VMA_NULL))
3813 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3817 VMA_SYSTEM_FREE(ptr);
3821 template<
typename T>
3822 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3824 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3827 template<
typename T>
3828 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3830 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3833 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3835 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3837 template<
// Destroys *ptr and releases its storage through VmaFree — the counterpart
// of vma_new.
// NOTE(review): the explicit destructor call was missing from the garbled
// source and has been restored — without it, non-trivial T would leak;
// confirm against upstream VMA.
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}
// Destroys `count` objects at ptr (in reverse order, mirroring delete[])
// and releases the storage — the counterpart of vma_new_array.
// NOTE(review): the null guard and per-element destructor call were missing
// from the garbled source and have been restored — confirm against upstream
// VMA.
template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
// Minimal STL-compatible allocator that forwards to the Vulkan-style
// allocation callbacks stored in m_pCallbacks (may be null — VmaMalloc then
// falls back to the system allocator). Used by the VMA container templates.
template<typename T>
class VmaStlAllocator
{
public:
    // Public and const so rebinding copies (and VmaVector) can read it.
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    // Rebinding copy constructor required by the Allocator concept.
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    // n is unused: VmaFree does not need the element count.
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    // Allocators compare equal iff they use the same callback set.
    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
3885 #if VMA_USE_STL_VECTOR 3887 #define VmaVector std::vector 3889 template<
typename T,
typename allocatorT>
3890 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3892 vec.insert(vec.begin() + index, item);
3895 template<
typename T,
typename allocatorT>
3896 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3898 vec.erase(vec.begin() + index);
3901 #else // #if VMA_USE_STL_VECTOR 3906 template<
typename T,
typename AllocatorT>
3910 typedef T value_type;
3912 VmaVector(
const AllocatorT& allocator) :
3913 m_Allocator(allocator),
3920 VmaVector(
size_t count,
const AllocatorT& allocator) :
3921 m_Allocator(allocator),
3922 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3928 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3929 m_Allocator(src.m_Allocator),
3930 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3931 m_Count(src.m_Count),
3932 m_Capacity(src.m_Count)
3936 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3942 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3945 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3949 resize(rhs.m_Count);
3952 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3958 bool empty()
const {
return m_Count == 0; }
3959 size_t size()
const {
return m_Count; }
3960 T* data() {
return m_pArray; }
3961 const T* data()
const {
return m_pArray; }
3963 T& operator[](
size_t index)
3965 VMA_HEAVY_ASSERT(index < m_Count);
3966 return m_pArray[index];
3968 const T& operator[](
size_t index)
const 3970 VMA_HEAVY_ASSERT(index < m_Count);
3971 return m_pArray[index];
3976 VMA_HEAVY_ASSERT(m_Count > 0);
3979 const T& front()
const 3981 VMA_HEAVY_ASSERT(m_Count > 0);
3986 VMA_HEAVY_ASSERT(m_Count > 0);
3987 return m_pArray[m_Count - 1];
3989 const T& back()
const 3991 VMA_HEAVY_ASSERT(m_Count > 0);
3992 return m_pArray[m_Count - 1];
3995 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3997 newCapacity = VMA_MAX(newCapacity, m_Count);
3999 if((newCapacity < m_Capacity) && !freeMemory)
4001 newCapacity = m_Capacity;
4004 if(newCapacity != m_Capacity)
4006 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4009 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4011 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4012 m_Capacity = newCapacity;
4013 m_pArray = newArray;
4017 void resize(
size_t newCount,
bool freeMemory =
false)
4019 size_t newCapacity = m_Capacity;
4020 if(newCount > m_Capacity)
4022 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4026 newCapacity = newCount;
4029 if(newCapacity != m_Capacity)
4031 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4032 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4033 if(elementsToCopy != 0)
4035 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4037 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4038 m_Capacity = newCapacity;
4039 m_pArray = newArray;
4045 void clear(
bool freeMemory =
false)
4047 resize(0, freeMemory);
4050 void insert(
size_t index,
const T& src)
4052 VMA_HEAVY_ASSERT(index <= m_Count);
4053 const size_t oldCount = size();
4054 resize(oldCount + 1);
4055 if(index < oldCount)
4057 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4059 m_pArray[index] = src;
4062 void remove(
size_t index)
4064 VMA_HEAVY_ASSERT(index < m_Count);
4065 const size_t oldCount = size();
4066 if(index < oldCount - 1)
4068 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4070 resize(oldCount - 1);
4073 void push_back(
const T& src)
4075 const size_t newIndex = size();
4076 resize(newIndex + 1);
4077 m_pArray[newIndex] = src;
4082 VMA_HEAVY_ASSERT(m_Count > 0);
4086 void push_front(
const T& src)
4093 VMA_HEAVY_ASSERT(m_Count > 0);
4097 typedef T* iterator;
4099 iterator begin() {
return m_pArray; }
4100 iterator end() {
return m_pArray + m_Count; }
4103 AllocatorT m_Allocator;
4109 template<
typename T,
typename allocatorT>
4110 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4112 vec.insert(index, item);
4115 template<
typename T,
typename allocatorT>
4116 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4121 #endif // #if VMA_USE_STL_VECTOR 4123 template<
typename CmpLess,
typename VectorT>
4124 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4126 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4128 vector.data() + vector.size(),
4130 CmpLess()) - vector.data();
4131 VmaVectorInsert(vector, indexToInsert, value);
4132 return indexToInsert;
4135 template<
typename CmpLess,
typename VectorT>
4136 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
4139 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
4144 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
4146 size_t indexToRemove = it - vector.begin();
4147 VmaVectorRemove(vector, indexToRemove);
4153 template<
typename CmpLess,
typename IterT,
typename KeyT>
4154 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
4157 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4158 beg, end, value, comparator);
4160 (!comparator(*it, value) && !comparator(value, *it)))
4175 template<
typename T>
4176 class VmaPoolAllocator
4178 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4180 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
4181 ~VmaPoolAllocator();
4189 uint32_t NextFreeIndex;
4196 uint32_t FirstFreeIndex;
4199 const VkAllocationCallbacks* m_pAllocationCallbacks;
4200 size_t m_ItemsPerBlock;
4201 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4203 ItemBlock& CreateNewBlock();
// Constructs an empty pool; item blocks are created lazily on first Alloc().
// itemsPerBlock must be positive — it fixes the capacity of every block.
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}
// Releases all item blocks.
// NOTE(review): destructor body was missing from the garbled source;
// reconstructed as Clear() — confirm against upstream VMA.
template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}
// Destroys every item block (vma_delete_array runs ~Item() on each element,
// allocated or free) and empties the block list.
template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    // Backward iteration over the block vector; order is not significant here.
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}
// Returns a pointer to an uninitialized T slot, popping it from the first
// block (searched newest-first) that has a free item; creates a new block
// when all are full. FirstFreeIndex == UINT32_MAX marks a full block.
template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has a free item: pop the head of its free list.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }

    // No block has a free item: create a new block and use its first slot.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}
// Returns the slot holding *ptr to its block's free list. Linear search over
// blocks by address range; asserts if ptr was not allocated from this pool.
// NOTE(review): the pItemPtr declaration and the early `return` after
// relinking were missing from the garbled source and have been restored —
// confirm against upstream VMA.
template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    {
        ItemBlock& block = m_ItemBlocks[i];

        // memcpy instead of a cast: Item stores T inside a union, so this
        // recovers the Item* from the T* without aliasing violations.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in the address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            // Push the slot onto the block's free list.
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}
// Allocates a new ItemBlock of m_ItemsPerBlock items, appends it to
// m_ItemBlocks, threads a free list through all its slots, and returns a
// reference to the stored block.
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // newBlock.pItems aliases the array owned by the pushed copy, so writing
    // through the local initializes the stored block's free list too.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
4293 #if VMA_USE_STL_LIST 4295 #define VmaList std::list 4297 #else // #if VMA_USE_STL_LIST 4299 template<
typename T>
4308 template<
typename T>
4311 VMA_CLASS_NO_COPY(VmaRawList)
4313 typedef VmaListItem<T> ItemType;
4315 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4319 size_t GetCount()
const {
return m_Count; }
4320 bool IsEmpty()
const {
return m_Count == 0; }
4322 ItemType* Front() {
return m_pFront; }
4323 const ItemType* Front()
const {
return m_pFront; }
4324 ItemType* Back() {
return m_pBack; }
4325 const ItemType* Back()
const {
return m_pBack; }
4327 ItemType* PushBack();
4328 ItemType* PushFront();
4329 ItemType* PushBack(
const T& value);
4330 ItemType* PushFront(
const T& value);
4335 ItemType* InsertBefore(ItemType* pItem);
4337 ItemType* InsertAfter(ItemType* pItem);
4339 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4340 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4342 void Remove(ItemType* pItem);
4345 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4346 VmaPoolAllocator<ItemType> m_ItemAllocator;
4352 template<
typename T>
4353 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4354 m_pAllocationCallbacks(pAllocationCallbacks),
4355 m_ItemAllocator(pAllocationCallbacks, 128),
4362 template<
typename T>
4363 VmaRawList<T>::~VmaRawList()
4369 template<
typename T>
4370 void VmaRawList<T>::Clear()
4372 if(IsEmpty() ==
false)
4374 ItemType* pItem = m_pBack;
4375 while(pItem != VMA_NULL)
4377 ItemType*
const pPrevItem = pItem->pPrev;
4378 m_ItemAllocator.Free(pItem);
4381 m_pFront = VMA_NULL;
4387 template<
typename T>
4388 VmaListItem<T>* VmaRawList<T>::PushBack()
4390 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4391 pNewItem->pNext = VMA_NULL;
4394 pNewItem->pPrev = VMA_NULL;
4395 m_pFront = pNewItem;
4401 pNewItem->pPrev = m_pBack;
4402 m_pBack->pNext = pNewItem;
4409 template<
typename T>
4410 VmaListItem<T>* VmaRawList<T>::PushFront()
4412 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4413 pNewItem->pPrev = VMA_NULL;
4416 pNewItem->pNext = VMA_NULL;
4417 m_pFront = pNewItem;
4423 pNewItem->pNext = m_pFront;
4424 m_pFront->pPrev = pNewItem;
4425 m_pFront = pNewItem;
4431 template<
typename T>
4432 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4434 ItemType*
const pNewItem = PushBack();
4435 pNewItem->Value = value;
4439 template<
typename T>
4440 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4442 ItemType*
const pNewItem = PushFront();
4443 pNewItem->Value = value;
4447 template<
typename T>
4448 void VmaRawList<T>::PopBack()
4450 VMA_HEAVY_ASSERT(m_Count > 0);
4451 ItemType*
const pBackItem = m_pBack;
4452 ItemType*
const pPrevItem = pBackItem->pPrev;
4453 if(pPrevItem != VMA_NULL)
4455 pPrevItem->pNext = VMA_NULL;
4457 m_pBack = pPrevItem;
4458 m_ItemAllocator.Free(pBackItem);
4462 template<
typename T>
4463 void VmaRawList<T>::PopFront()
4465 VMA_HEAVY_ASSERT(m_Count > 0);
4466 ItemType*
const pFrontItem = m_pFront;
4467 ItemType*
const pNextItem = pFrontItem->pNext;
4468 if(pNextItem != VMA_NULL)
4470 pNextItem->pPrev = VMA_NULL;
4472 m_pFront = pNextItem;
4473 m_ItemAllocator.Free(pFrontItem);
4477 template<
typename T>
4478 void VmaRawList<T>::Remove(ItemType* pItem)
4480 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4481 VMA_HEAVY_ASSERT(m_Count > 0);
4483 if(pItem->pPrev != VMA_NULL)
4485 pItem->pPrev->pNext = pItem->pNext;
4489 VMA_HEAVY_ASSERT(m_pFront == pItem);
4490 m_pFront = pItem->pNext;
4493 if(pItem->pNext != VMA_NULL)
4495 pItem->pNext->pPrev = pItem->pPrev;
4499 VMA_HEAVY_ASSERT(m_pBack == pItem);
4500 m_pBack = pItem->pPrev;
4503 m_ItemAllocator.Free(pItem);
4507 template<
typename T>
4508 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4510 if(pItem != VMA_NULL)
4512 ItemType*
const prevItem = pItem->pPrev;
4513 ItemType*
const newItem = m_ItemAllocator.Alloc();
4514 newItem->pPrev = prevItem;
4515 newItem->pNext = pItem;
4516 pItem->pPrev = newItem;
4517 if(prevItem != VMA_NULL)
4519 prevItem->pNext = newItem;
4523 VMA_HEAVY_ASSERT(m_pFront == pItem);
4533 template<
typename T>
4534 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4536 if(pItem != VMA_NULL)
4538 ItemType*
const nextItem = pItem->pNext;
4539 ItemType*
const newItem = m_ItemAllocator.Alloc();
4540 newItem->pNext = nextItem;
4541 newItem->pPrev = pItem;
4542 pItem->pNext = newItem;
4543 if(nextItem != VMA_NULL)
4545 nextItem->pPrev = newItem;
4549 VMA_HEAVY_ASSERT(m_pBack == pItem);
4559 template<
typename T>
4560 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4562 ItemType*
const newItem = InsertBefore(pItem);
4563 newItem->Value = value;
4567 template<
typename T>
4568 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4570 ItemType*
const newItem = InsertAfter(pItem);
4571 newItem->Value = value;
4575 template<
typename T,
typename AllocatorT>
4578 VMA_CLASS_NO_COPY(VmaList)
4589 T& operator*()
const 4591 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4592 return m_pItem->Value;
4594 T* operator->()
const 4596 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4597 return &m_pItem->Value;
4600 iterator& operator++()
4602 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4603 m_pItem = m_pItem->pNext;
4606 iterator& operator--()
4608 if(m_pItem != VMA_NULL)
4610 m_pItem = m_pItem->pPrev;
4614 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4615 m_pItem = m_pList->Back();
4620 iterator operator++(
int)
4622 iterator result = *
this;
4626 iterator operator--(
int)
4628 iterator result = *
this;
4633 bool operator==(
const iterator& rhs)
const 4635 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4636 return m_pItem == rhs.m_pItem;
4638 bool operator!=(
const iterator& rhs)
const 4640 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4641 return m_pItem != rhs.m_pItem;
4645 VmaRawList<T>* m_pList;
4646 VmaListItem<T>* m_pItem;
4648 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4654 friend class VmaList<T, AllocatorT>;
4657 class const_iterator
4666 const_iterator(
const iterator& src) :
4667 m_pList(src.m_pList),
4668 m_pItem(src.m_pItem)
4672 const T& operator*()
const 4674 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4675 return m_pItem->Value;
4677 const T* operator->()
const 4679 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4680 return &m_pItem->Value;
4683 const_iterator& operator++()
4685 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4686 m_pItem = m_pItem->pNext;
4689 const_iterator& operator--()
4691 if(m_pItem != VMA_NULL)
4693 m_pItem = m_pItem->pPrev;
4697 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4698 m_pItem = m_pList->Back();
4703 const_iterator operator++(
int)
4705 const_iterator result = *
this;
4709 const_iterator operator--(
int)
4711 const_iterator result = *
this;
4716 bool operator==(
const const_iterator& rhs)
const 4718 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4719 return m_pItem == rhs.m_pItem;
4721 bool operator!=(
const const_iterator& rhs)
const 4723 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4724 return m_pItem != rhs.m_pItem;
4728 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4734 const VmaRawList<T>* m_pList;
4735 const VmaListItem<T>* m_pItem;
4737 friend class VmaList<T, AllocatorT>;
4740 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4742 bool empty()
const {
return m_RawList.IsEmpty(); }
4743 size_t size()
const {
return m_RawList.GetCount(); }
4745 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4746 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4748 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4749 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4751 void clear() { m_RawList.Clear(); }
4752 void push_back(
const T& value) { m_RawList.PushBack(value); }
4753 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4754 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4757 VmaRawList<T> m_RawList;
4760 #endif // #if VMA_USE_STL_LIST 4768 #if VMA_USE_STL_UNORDERED_MAP 4770 #define VmaPair std::pair 4772 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4773 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4775 #else // #if VMA_USE_STL_UNORDERED_MAP 4777 template<
typename T1,
typename T2>
4783 VmaPair() : first(), second() { }
4784 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4790 template<
typename KeyT,
typename ValueT>
4794 typedef VmaPair<KeyT, ValueT> PairType;
4795 typedef PairType* iterator;
4797 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4799 iterator begin() {
return m_Vector.begin(); }
4800 iterator end() {
return m_Vector.end(); }
4802 void insert(
const PairType& pair);
4803 iterator find(
const KeyT& key);
4804 void erase(iterator it);
4807 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4810 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4812 template<
typename FirstT,
typename SecondT>
4813 struct VmaPairFirstLess
4815 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4817 return lhs.first < rhs.first;
4819 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4821 return lhs.first < rhsFirst;
4825 template<
typename KeyT,
typename ValueT>
4826 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4828 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4830 m_Vector.data() + m_Vector.size(),
4832 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4833 VmaVectorInsert(m_Vector, indexToInsert, pair);
4836 template<
typename KeyT,
typename ValueT>
4837 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4839 PairType* it = VmaBinaryFindFirstNotLess(
4841 m_Vector.data() + m_Vector.size(),
4843 VmaPairFirstLess<KeyT, ValueT>());
4844 if((it != m_Vector.end()) && (it->first == key))
4850 return m_Vector.end();
4854 template<
typename KeyT,
typename ValueT>
4855 void VmaMap<KeyT, ValueT>::erase(iterator it)
4857 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4860 #endif // #if VMA_USE_STL_UNORDERED_MAP 4866 class VmaDeviceMemoryBlock;
4868 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4870 struct VmaAllocation_T
4872 VMA_CLASS_NO_COPY(VmaAllocation_T)
4874 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4878 FLAG_USER_DATA_STRING = 0x01,
4882 enum ALLOCATION_TYPE
4884 ALLOCATION_TYPE_NONE,
4885 ALLOCATION_TYPE_BLOCK,
4886 ALLOCATION_TYPE_DEDICATED,
4889 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4892 m_pUserData(VMA_NULL),
4893 m_LastUseFrameIndex(currentFrameIndex),
4894 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4895 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4897 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4899 #if VMA_STATS_STRING_ENABLED 4900 m_CreationFrameIndex = currentFrameIndex;
4901 m_BufferImageUsage = 0;
4907 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4910 VMA_ASSERT(m_pUserData == VMA_NULL);
4913 void InitBlockAllocation(
4915 VmaDeviceMemoryBlock* block,
4916 VkDeviceSize offset,
4917 VkDeviceSize alignment,
4919 VmaSuballocationType suballocationType,
4923 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4924 VMA_ASSERT(block != VMA_NULL);
4925 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4926 m_Alignment = alignment;
4928 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4929 m_SuballocationType = (uint8_t)suballocationType;
4930 m_BlockAllocation.m_hPool = hPool;
4931 m_BlockAllocation.m_Block = block;
4932 m_BlockAllocation.m_Offset = offset;
4933 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4938 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4939 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4940 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4941 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4942 m_BlockAllocation.m_Block = VMA_NULL;
4943 m_BlockAllocation.m_Offset = 0;
4944 m_BlockAllocation.m_CanBecomeLost =
true;
4947 void ChangeBlockAllocation(
4949 VmaDeviceMemoryBlock* block,
4950 VkDeviceSize offset);
4952 void ChangeSize(VkDeviceSize newSize);
4953 void ChangeOffset(VkDeviceSize newOffset);
4956 void InitDedicatedAllocation(
4957 uint32_t memoryTypeIndex,
4958 VkDeviceMemory hMemory,
4959 VmaSuballocationType suballocationType,
4963 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4964 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4965 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4968 m_SuballocationType = (uint8_t)suballocationType;
4969 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4970 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4971 m_DedicatedAllocation.m_hMemory = hMemory;
4972 m_DedicatedAllocation.m_pMappedData = pMappedData;
4975 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4976 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4977 VkDeviceSize GetSize()
const {
return m_Size; }
4978 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4979 void* GetUserData()
const {
return m_pUserData; }
4980 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4981 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4983 VmaDeviceMemoryBlock* GetBlock()
const 4985 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4986 return m_BlockAllocation.m_Block;
4988 VkDeviceSize GetOffset()
const;
4989 VkDeviceMemory GetMemory()
const;
4990 uint32_t GetMemoryTypeIndex()
const;
4991 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4992 void* GetMappedData()
const;
4993 bool CanBecomeLost()
const;
4996 uint32_t GetLastUseFrameIndex()
const 4998 return m_LastUseFrameIndex.load();
5000 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5002 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5012 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5014 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5016 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
5027 void BlockAllocMap();
5028 void BlockAllocUnmap();
5029 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
5032 #if VMA_STATS_STRING_ENABLED 5033 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5034 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5036 void InitBufferImageUsage(uint32_t bufferImageUsage)
5038 VMA_ASSERT(m_BufferImageUsage == 0);
5039 m_BufferImageUsage = bufferImageUsage;
5042 void PrintParameters(
class VmaJsonWriter& json)
const;
5046 VkDeviceSize m_Alignment;
5047 VkDeviceSize m_Size;
5049 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5051 uint8_t m_SuballocationType;
5058 struct BlockAllocation
5061 VmaDeviceMemoryBlock* m_Block;
5062 VkDeviceSize m_Offset;
5063 bool m_CanBecomeLost;
5067 struct DedicatedAllocation
5069 uint32_t m_MemoryTypeIndex;
5070 VkDeviceMemory m_hMemory;
5071 void* m_pMappedData;
5077 BlockAllocation m_BlockAllocation;
5079 DedicatedAllocation m_DedicatedAllocation;
5082 #if VMA_STATS_STRING_ENABLED 5083 uint32_t m_CreationFrameIndex;
5084 uint32_t m_BufferImageUsage;
5094 struct VmaSuballocation
5096 VkDeviceSize offset;
5099 VmaSuballocationType type;
5103 struct VmaSuballocationOffsetLess
5105 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5107 return lhs.offset < rhs.offset;
5110 struct VmaSuballocationOffsetGreater
5112 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5114 return lhs.offset > rhs.offset;
5118 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
5121 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5136 struct VmaAllocationRequest
5138 VkDeviceSize offset;
5139 VkDeviceSize sumFreeSize;
5140 VkDeviceSize sumItemSize;
5141 VmaSuballocationList::iterator item;
5142 size_t itemsToMakeLostCount;
5145 VkDeviceSize CalcCost()
const 5147 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
5155 class VmaBlockMetadata
5159 virtual ~VmaBlockMetadata() { }
5160 virtual void Init(VkDeviceSize size) { m_Size = size; }
5163 virtual bool Validate()
const = 0;
5164 VkDeviceSize GetSize()
const {
return m_Size; }
5165 virtual size_t GetAllocationCount()
const = 0;
5166 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5167 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5169 virtual bool IsEmpty()
const = 0;
5171 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5173 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5175 #if VMA_STATS_STRING_ENABLED 5176 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
5182 virtual bool CreateAllocationRequest(
5183 uint32_t currentFrameIndex,
5184 uint32_t frameInUseCount,
5185 VkDeviceSize bufferImageGranularity,
5186 VkDeviceSize allocSize,
5187 VkDeviceSize allocAlignment,
5189 VmaSuballocationType allocType,
5190 bool canMakeOtherLost,
5193 VmaAllocationRequest* pAllocationRequest) = 0;
5195 virtual bool MakeRequestedAllocationsLost(
5196 uint32_t currentFrameIndex,
5197 uint32_t frameInUseCount,
5198 VmaAllocationRequest* pAllocationRequest) = 0;
5200 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5202 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
5206 const VmaAllocationRequest& request,
5207 VmaSuballocationType type,
5208 VkDeviceSize allocSize,
5214 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5217 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5220 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
5222 #if VMA_STATS_STRING_ENABLED 5223 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5224 VkDeviceSize unusedBytes,
5225 size_t allocationCount,
5226 size_t unusedRangeCount)
const;
5227 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5228 VkDeviceSize offset,
5230 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5231 VkDeviceSize offset,
5232 VkDeviceSize size)
const;
5233 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5237 VkDeviceSize m_Size;
5238 const VkAllocationCallbacks* m_pAllocationCallbacks;
5241 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5242 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5246 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5248 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5251 virtual ~VmaBlockMetadata_Generic();
5252 virtual void Init(VkDeviceSize size);
5254 virtual bool Validate()
const;
5255 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5256 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5257 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5258 virtual bool IsEmpty()
const;
5260 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5261 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5263 #if VMA_STATS_STRING_ENABLED 5264 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5267 virtual bool CreateAllocationRequest(
5268 uint32_t currentFrameIndex,
5269 uint32_t frameInUseCount,
5270 VkDeviceSize bufferImageGranularity,
5271 VkDeviceSize allocSize,
5272 VkDeviceSize allocAlignment,
5274 VmaSuballocationType allocType,
5275 bool canMakeOtherLost,
5277 VmaAllocationRequest* pAllocationRequest);
5279 virtual bool MakeRequestedAllocationsLost(
5280 uint32_t currentFrameIndex,
5281 uint32_t frameInUseCount,
5282 VmaAllocationRequest* pAllocationRequest);
5284 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5286 virtual VkResult CheckCorruption(
const void* pBlockData);
5289 const VmaAllocationRequest& request,
5290 VmaSuballocationType type,
5291 VkDeviceSize allocSize,
5296 virtual void FreeAtOffset(VkDeviceSize offset);
5298 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5303 bool IsBufferImageGranularityConflictPossible(
5304 VkDeviceSize bufferImageGranularity,
5305 VmaSuballocationType& inOutPrevSuballocType)
const;
5308 friend class VmaDefragmentationAlgorithm_Generic;
5309 friend class VmaDefragmentationAlgorithm_Fast;
5311 uint32_t m_FreeCount;
5312 VkDeviceSize m_SumFreeSize;
5313 VmaSuballocationList m_Suballocations;
5316 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5318 bool ValidateFreeSuballocationList()
const;
5322 bool CheckAllocation(
5323 uint32_t currentFrameIndex,
5324 uint32_t frameInUseCount,
5325 VkDeviceSize bufferImageGranularity,
5326 VkDeviceSize allocSize,
5327 VkDeviceSize allocAlignment,
5328 VmaSuballocationType allocType,
5329 VmaSuballocationList::const_iterator suballocItem,
5330 bool canMakeOtherLost,
5331 VkDeviceSize* pOffset,
5332 size_t* itemsToMakeLostCount,
5333 VkDeviceSize* pSumFreeSize,
5334 VkDeviceSize* pSumItemSize)
const;
5336 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5340 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5343 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5346 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5427 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5429 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5432 virtual ~VmaBlockMetadata_Linear();
5433 virtual void Init(VkDeviceSize size);
5435 virtual bool Validate()
const;
5436 virtual size_t GetAllocationCount()
const;
5437 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5438 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5439 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5441 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5442 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5444 #if VMA_STATS_STRING_ENABLED 5445 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5448 virtual bool CreateAllocationRequest(
5449 uint32_t currentFrameIndex,
5450 uint32_t frameInUseCount,
5451 VkDeviceSize bufferImageGranularity,
5452 VkDeviceSize allocSize,
5453 VkDeviceSize allocAlignment,
5455 VmaSuballocationType allocType,
5456 bool canMakeOtherLost,
5458 VmaAllocationRequest* pAllocationRequest);
5460 virtual bool MakeRequestedAllocationsLost(
5461 uint32_t currentFrameIndex,
5462 uint32_t frameInUseCount,
5463 VmaAllocationRequest* pAllocationRequest);
5465 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5467 virtual VkResult CheckCorruption(
const void* pBlockData);
5470 const VmaAllocationRequest& request,
5471 VmaSuballocationType type,
5472 VkDeviceSize allocSize,
5477 virtual void FreeAtOffset(VkDeviceSize offset);
5487 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5489 enum SECOND_VECTOR_MODE
5491 SECOND_VECTOR_EMPTY,
5496 SECOND_VECTOR_RING_BUFFER,
5502 SECOND_VECTOR_DOUBLE_STACK,
5505 VkDeviceSize m_SumFreeSize;
5506 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5507 uint32_t m_1stVectorIndex;
5508 SECOND_VECTOR_MODE m_2ndVectorMode;
5510 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5511 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5512 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5513 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5516 size_t m_1stNullItemsBeginCount;
5518 size_t m_1stNullItemsMiddleCount;
5520 size_t m_2ndNullItemsCount;
5522 bool ShouldCompact1st()
const;
5523 void CleanupAfterFree();
5537 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5539 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5542 virtual ~VmaBlockMetadata_Buddy();
5543 virtual void Init(VkDeviceSize size);
5545 virtual bool Validate()
const;
5546 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5547 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5548 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5549 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5551 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5552 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5554 #if VMA_STATS_STRING_ENABLED 5555 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5558 virtual bool CreateAllocationRequest(
5559 uint32_t currentFrameIndex,
5560 uint32_t frameInUseCount,
5561 VkDeviceSize bufferImageGranularity,
5562 VkDeviceSize allocSize,
5563 VkDeviceSize allocAlignment,
5565 VmaSuballocationType allocType,
5566 bool canMakeOtherLost,
5568 VmaAllocationRequest* pAllocationRequest);
5570 virtual bool MakeRequestedAllocationsLost(
5571 uint32_t currentFrameIndex,
5572 uint32_t frameInUseCount,
5573 VmaAllocationRequest* pAllocationRequest);
5575 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5577 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5580 const VmaAllocationRequest& request,
5581 VmaSuballocationType type,
5582 VkDeviceSize allocSize,
5586 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5587 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5590 static const VkDeviceSize MIN_NODE_SIZE = 32;
5591 static const size_t MAX_LEVELS = 30;
5593 struct ValidationContext
5595 size_t calculatedAllocationCount;
5596 size_t calculatedFreeCount;
5597 VkDeviceSize calculatedSumFreeSize;
5599 ValidationContext() :
5600 calculatedAllocationCount(0),
5601 calculatedFreeCount(0),
5602 calculatedSumFreeSize(0) { }
5607 VkDeviceSize offset;
5637 VkDeviceSize m_UsableSize;
5638 uint32_t m_LevelCount;
5644 } m_FreeList[MAX_LEVELS];
5646 size_t m_AllocationCount;
5650 VkDeviceSize m_SumFreeSize;
5652 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5653 void DeleteNode(Node* node);
5654 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5655 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5656 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5658 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5659 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5663 void AddToFreeListFront(uint32_t level, Node* node);
5667 void RemoveFromFreeList(uint32_t level, Node* node);
5669 #if VMA_STATS_STRING_ENABLED 5670 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5680 class VmaDeviceMemoryBlock
5682 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5684 VmaBlockMetadata* m_pMetadata;
5688 ~VmaDeviceMemoryBlock()
5690 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5691 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5697 uint32_t newMemoryTypeIndex,
5698 VkDeviceMemory newMemory,
5699 VkDeviceSize newSize,
5701 uint32_t algorithm);
5705 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5706 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5707 uint32_t GetId()
const {
return m_Id; }
5708 void* GetMappedData()
const {
return m_pMappedData; }
5711 bool Validate()
const;
5716 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5719 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5720 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5722 VkResult BindBufferMemory(
5726 VkResult BindImageMemory(
5732 uint32_t m_MemoryTypeIndex;
5734 VkDeviceMemory m_hMemory;
5742 uint32_t m_MapCount;
5743 void* m_pMappedData;
5746 struct VmaPointerLess
5748 bool operator()(
const void* lhs,
const void* rhs)
const 5754 struct VmaDefragmentationMove
5756 size_t srcBlockIndex;
5757 size_t dstBlockIndex;
5758 VkDeviceSize srcOffset;
5759 VkDeviceSize dstOffset;
5763 class VmaDefragmentationAlgorithm;
5771 struct VmaBlockVector
5773 VMA_CLASS_NO_COPY(VmaBlockVector)
5777 uint32_t memoryTypeIndex,
5778 VkDeviceSize preferredBlockSize,
5779 size_t minBlockCount,
5780 size_t maxBlockCount,
5781 VkDeviceSize bufferImageGranularity,
5782 uint32_t frameInUseCount,
5784 bool explicitBlockSize,
5785 uint32_t algorithm);
5788 VkResult CreateMinBlocks();
5790 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5791 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5792 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5793 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5794 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5798 bool IsEmpty()
const {
return m_Blocks.empty(); }
5799 bool IsCorruptionDetectionEnabled()
const;
5803 uint32_t currentFrameIndex,
5805 VkDeviceSize alignment,
5807 VmaSuballocationType suballocType,
5808 size_t allocationCount,
5817 #if VMA_STATS_STRING_ENABLED 5818 void PrintDetailedMap(
class VmaJsonWriter& json);
5821 void MakePoolAllocationsLost(
5822 uint32_t currentFrameIndex,
5823 size_t* pLostAllocationCount);
5824 VkResult CheckCorruption();
5828 class VmaBlockVectorDefragmentationContext* pCtx,
5830 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5831 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5832 VkCommandBuffer commandBuffer);
5833 void DefragmentationEnd(
5834 class VmaBlockVectorDefragmentationContext* pCtx,
5840 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5841 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5842 size_t CalcAllocationCount()
const;
5843 bool IsBufferImageGranularityConflictPossible()
const;
5846 friend class VmaDefragmentationAlgorithm_Generic;
5849 const uint32_t m_MemoryTypeIndex;
5850 const VkDeviceSize m_PreferredBlockSize;
5851 const size_t m_MinBlockCount;
5852 const size_t m_MaxBlockCount;
5853 const VkDeviceSize m_BufferImageGranularity;
5854 const uint32_t m_FrameInUseCount;
5855 const bool m_IsCustomPool;
5856 const bool m_ExplicitBlockSize;
5857 const uint32_t m_Algorithm;
5861 bool m_HasEmptyBlock;
5862 VMA_RW_MUTEX m_Mutex;
5864 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5865 uint32_t m_NextBlockId;
5867 VkDeviceSize CalcMaxBlockSize()
const;
5870 void Remove(VmaDeviceMemoryBlock* pBlock);
5874 void IncrementallySortBlocks();
5876 VkResult AllocatePage(
5878 uint32_t currentFrameIndex,
5880 VkDeviceSize alignment,
5882 VmaSuballocationType suballocType,
5886 VkResult AllocateFromBlock(
5887 VmaDeviceMemoryBlock* pBlock,
5889 uint32_t currentFrameIndex,
5891 VkDeviceSize alignment,
5894 VmaSuballocationType suballocType,
5898 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5901 void ApplyDefragmentationMovesCpu(
5902 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5903 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
5905 void ApplyDefragmentationMovesGpu(
5906 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5907 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5908 VkCommandBuffer commandBuffer);
5919 VMA_CLASS_NO_COPY(VmaPool_T)
5921 VmaBlockVector m_BlockVector;
5926 VkDeviceSize preferredBlockSize);
5929 uint32_t GetId()
const {
return m_Id; }
5930 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5932 #if VMA_STATS_STRING_ENABLED 5947 class VmaDefragmentationAlgorithm
5949 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
5951 VmaDefragmentationAlgorithm(
5953 VmaBlockVector* pBlockVector,
5954 uint32_t currentFrameIndex) :
5955 m_hAllocator(hAllocator),
5956 m_pBlockVector(pBlockVector),
5957 m_CurrentFrameIndex(currentFrameIndex)
5960 virtual ~VmaDefragmentationAlgorithm()
5964 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
5965 virtual void AddAll() = 0;
5967 virtual VkResult Defragment(
5968 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5969 VkDeviceSize maxBytesToMove,
5970 uint32_t maxAllocationsToMove) = 0;
5972 virtual VkDeviceSize GetBytesMoved()
const = 0;
5973 virtual uint32_t GetAllocationsMoved()
const = 0;
5977 VmaBlockVector*
const m_pBlockVector;
5978 const uint32_t m_CurrentFrameIndex;
5980 struct AllocationInfo
5983 VkBool32* m_pChanged;
5986 m_hAllocation(VK_NULL_HANDLE),
5987 m_pChanged(VMA_NULL)
5991 m_hAllocation(hAlloc),
5992 m_pChanged(pChanged)
5998 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6000 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6002 VmaDefragmentationAlgorithm_Generic(
6004 VmaBlockVector* pBlockVector,
6005 uint32_t currentFrameIndex,
6006 bool overlappingMoveSupported);
6007 virtual ~VmaDefragmentationAlgorithm_Generic();
6009 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6010 virtual void AddAll() { m_AllAllocations =
true; }
6012 virtual VkResult Defragment(
6013 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6014 VkDeviceSize maxBytesToMove,
6015 uint32_t maxAllocationsToMove);
6017 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6018 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6021 uint32_t m_AllocationCount;
6022 bool m_AllAllocations;
6024 VkDeviceSize m_BytesMoved;
6025 uint32_t m_AllocationsMoved;
6027 struct AllocationInfoSizeGreater
6029 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6031 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6035 struct AllocationInfoOffsetGreater
6037 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6039 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
6045 size_t m_OriginalBlockIndex;
6046 VmaDeviceMemoryBlock* m_pBlock;
6047 bool m_HasNonMovableAllocations;
6048 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6050 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6051 m_OriginalBlockIndex(SIZE_MAX),
6053 m_HasNonMovableAllocations(true),
6054 m_Allocations(pAllocationCallbacks)
6058 void CalcHasNonMovableAllocations()
6060 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6061 const size_t defragmentAllocCount = m_Allocations.size();
6062 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6065 void SortAllocationsBySizeDescending()
6067 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6070 void SortAllocationsByOffsetDescending()
6072 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
6076 struct BlockPointerLess
6078 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6080 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6082 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6084 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
6090 struct BlockInfoCompareMoveDestination
6092 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6094 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6098 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6102 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6110 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6111 BlockInfoVector m_Blocks;
6113 VkResult DefragmentRound(
6114 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6115 VkDeviceSize maxBytesToMove,
6116 uint32_t maxAllocationsToMove);
6118 size_t CalcBlocksWithNonMovableCount()
const;
6120 static bool MoveMakesSense(
6121 size_t dstBlockIndex, VkDeviceSize dstOffset,
6122 size_t srcBlockIndex, VkDeviceSize srcOffset);
6125 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6127 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6129 VmaDefragmentationAlgorithm_Fast(
6131 VmaBlockVector* pBlockVector,
6132 uint32_t currentFrameIndex,
6133 bool overlappingMoveSupported);
6134 virtual ~VmaDefragmentationAlgorithm_Fast();
6136 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6137 virtual void AddAll() { m_AllAllocations =
true; }
6139 virtual VkResult Defragment(
6140 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6141 VkDeviceSize maxBytesToMove,
6142 uint32_t maxAllocationsToMove);
6144 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6145 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6150 size_t origBlockIndex;
6153 class FreeSpaceDatabase
6159 s.blockInfoIndex = SIZE_MAX;
6160 for(
size_t i = 0; i < MAX_COUNT; ++i)
6162 m_FreeSpaces[i] = s;
6166 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6168 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6174 size_t bestIndex = SIZE_MAX;
6175 for(
size_t i = 0; i < MAX_COUNT; ++i)
6178 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6183 if(m_FreeSpaces[i].size < size &&
6184 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6190 if(bestIndex != SIZE_MAX)
6192 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6193 m_FreeSpaces[bestIndex].offset = offset;
6194 m_FreeSpaces[bestIndex].size = size;
6198 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6199 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6201 size_t bestIndex = SIZE_MAX;
6202 VkDeviceSize bestFreeSpaceAfter = 0;
6203 for(
size_t i = 0; i < MAX_COUNT; ++i)
6206 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6208 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6210 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6212 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6214 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6217 bestFreeSpaceAfter = freeSpaceAfter;
6223 if(bestIndex != SIZE_MAX)
6225 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6226 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6228 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6231 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6232 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6233 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6238 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6248 static const size_t MAX_COUNT = 4;
6252 size_t blockInfoIndex;
6253 VkDeviceSize offset;
6255 } m_FreeSpaces[MAX_COUNT];
// Private state of the fast defragmentation algorithm (enclosing class
// header is outside this view).
// True when moves within the same block may overlap (memmove semantics).
6258 const bool m_OverlappingMoveSupported;
6260 uint32_t m_AllocationCount;
// When set, all allocations in the block vector participate.
6261 bool m_AllAllocations;
// Statistics accumulated by the last run.
6263 VkDeviceSize m_BytesMoved;
6264 uint32_t m_AllocationsMoved;
6266 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6268 void PreprocessMetadata();
6269 void PostprocessMetadata();
6270 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
// Per-block state used during a defragmentation pass; `hBuffer` is a
// temporary staging buffer bound to the block's memory (created on demand).
// NOTE(review): extraction dropped lines 6274-6286 except those below.
6273 struct VmaBlockDefragmentationContext
6276 VMA_CLASS_NO_COPY(VmaBlockDefragmentationContext)
6280 BLOCK_FLAG_USED = 0x00000001,
6285 VmaBlockDefragmentationContext() :
6287 hBuffer(VK_NULL_HANDLE)
// Defragmentation state for one block vector (either a custom pool's vector
// or one of the allocator's default per-memory-type vectors). Owns the
// algorithm object and the list of per-block contexts.
6292 class VmaBlockVectorDefragmentationContext
6294 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6298 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6300 VmaBlockVectorDefragmentationContext(
6303 VmaBlockVector* pBlockVector,
6304 uint32_t currFrameIndex,
6306 ~VmaBlockVectorDefragmentationContext();
// Null for a default (per-memory-type) block vector.
6308 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6309 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6310 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
// Registers a single allocation; pChanged is written when it moves.
6312 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6313 void AddAll() { m_AllAllocations =
true; }
// Creates m_pAlgorithm and feeds it the registered allocations.
6315 void Begin(
bool overlappingMoveSupported);
6322 VmaBlockVector*
const m_pBlockVector;
6323 const uint32_t m_CurrFrameIndex;
6324 const uint32_t m_AlgorithmFlags;
// Owned; created in Begin(), destroyed in the destructor.
6326 VmaDefragmentationAlgorithm* m_pAlgorithm;
6334 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6335 bool m_AllAllocations;
// Top-level defragmentation context returned to the user as
// VmaDefragmentationContext. Aggregates one block-vector context per default
// memory type plus one per touched custom pool.
6338 struct VmaDefragmentationContext_T
6341 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6343 VmaDefragmentationContext_T(
6345 uint32_t currFrameIndex,
6348 ~VmaDefragmentationContext_T();
6350 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6351 void AddAllocations(
6352 uint32_t allocationCount,
// Optional per-allocation "was moved" output array.
6354 VkBool32* pAllocationsChanged)
6362 VkResult Defragment(
// Separate budgets for CPU-side (memmove) and GPU-side (command buffer) moves.
6363 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6364 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6369 const uint32_t m_CurrFrameIndex;
6370 const uint32_t m_Flags;
// Indexed by memory type; null until that type is first touched.
6373 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6375 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
// VmaRecorder: writes a CSV-style call trace to a file so allocator usage
// can be replayed offline. Declarations only; the class header and ctor
// (orig. lines 6380-6384) were dropped by the extraction.
6378 #if VMA_RECORDING_ENABLED 6385 void WriteConfiguration(
6386 const VkPhysicalDeviceProperties& devProps,
6387 const VkPhysicalDeviceMemoryProperties& memProps,
6388 bool dedicatedAllocationExtensionEnabled);
// One Record* method per public vmaXxx entry point; each takes the current
// frame index so the replay can reproduce lost-allocation behavior.
6391 void RecordCreateAllocator(uint32_t frameIndex);
6392 void RecordDestroyAllocator(uint32_t frameIndex);
6393 void RecordCreatePool(uint32_t frameIndex,
6396 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6397 void RecordAllocateMemory(uint32_t frameIndex,
6398 const VkMemoryRequirements& vkMemReq,
6401 void RecordAllocateMemoryPages(uint32_t frameIndex,
6402 const VkMemoryRequirements& vkMemReq,
6404 uint64_t allocationCount,
6406 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6407 const VkMemoryRequirements& vkMemReq,
6408 bool requiresDedicatedAllocation,
6409 bool prefersDedicatedAllocation,
6412 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6413 const VkMemoryRequirements& vkMemReq,
6414 bool requiresDedicatedAllocation,
6415 bool prefersDedicatedAllocation,
6418 void RecordFreeMemory(uint32_t frameIndex,
6420 void RecordFreeMemoryPages(uint32_t frameIndex,
6421 uint64_t allocationCount,
6423 void RecordResizeAllocation(
6424 uint32_t frameIndex,
6426 VkDeviceSize newSize);
6427 void RecordSetAllocationUserData(uint32_t frameIndex,
6429 const void* pUserData);
6430 void RecordCreateLostAllocation(uint32_t frameIndex,
6432 void RecordMapMemory(uint32_t frameIndex,
6434 void RecordUnmapMemory(uint32_t frameIndex,
6436 void RecordFlushAllocation(uint32_t frameIndex,
6437 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6438 void RecordInvalidateAllocation(uint32_t frameIndex,
6439 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6440 void RecordCreateBuffer(uint32_t frameIndex,
6441 const VkBufferCreateInfo& bufCreateInfo,
6444 void RecordCreateImage(uint32_t frameIndex,
6445 const VkImageCreateInfo& imageCreateInfo,
6448 void RecordDestroyBuffer(uint32_t frameIndex,
6450 void RecordDestroyImage(uint32_t frameIndex,
6452 void RecordTouchAllocation(uint32_t frameIndex,
6454 void RecordGetAllocationInfo(uint32_t frameIndex,
6456 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6458 void RecordDefragmentationBegin(uint32_t frameIndex,
6461 void RecordDefragmentationEnd(uint32_t frameIndex,
// Helper that converts a user-data pointer/string into a printable form.
6471 class UserDataString
6475 const char* GetString()
const {
return m_Str; }
// File handle is guarded by m_FileMutex; m_StartCounter anchors timestamps.
6485 VMA_MUTEX m_FileMutex;
6487 int64_t m_StartCounter;
6489 void GetBasicParams(CallParams& outParams);
// Prints `count` pointers space-separated; assumes count >= 1 on the
// fprintf of pItems[0] — presumably callers guarantee that (TODO confirm).
6492 template<
typename T>
6493 void PrintPointerList(uint64_t count,
const T* pItems)
6497 fprintf(m_File,
"%p", pItems[0]);
6498 for(uint64_t i = 1; i < count; ++i)
6500 fprintf(m_File,
" %p", pItems[i]);
6505 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
// VmaAllocator_T: the allocator implementation behind the opaque VmaAllocator
// handle. Owns one block vector per memory type, dedicated-allocation lists,
// custom pools, and cached device/physical-device properties.
6509 #endif // #if VMA_RECORDING_ENABLED 6512 struct VmaAllocator_T
6514 VMA_CLASS_NO_COPY(VmaAllocator_T)
// True when VK_KHR_dedicated_allocation is enabled on the device.
6517 bool m_UseKhrDedicatedAllocation;
6519 bool m_AllocationCallbacksSpecified;
6520 VkAllocationCallbacks m_AllocationCallbacks;
// Optional user-imposed cap per heap; guarded by m_HeapSizeLimitMutex.
6524 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6525 VMA_MUTEX m_HeapSizeLimitMutex;
6527 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6528 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default block vectors, one per memory type.
6531 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated (non-sub-allocated) allocations, per memory type, each list
// protected by its own RW mutex.
6534 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6535 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6536 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks only when the user actually supplied them.
6542 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6544 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6548 return m_VulkanFunctions;
// Effective granularity: device limit, floored by the debug minimum.
6551 VkDeviceSize GetBufferImageGranularity()
const 6554 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6555 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6558 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6559 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6561 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6563 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6564 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Non-coherent = host-visible without the host-coherent bit.
6567 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6569 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6570 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Non-coherent memory must honor nonCoherentAtomSize for flush/invalidate.
6573 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6575 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6576 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6577 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6580 bool IsIntegratedGpu()
const 6582 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6585 #if VMA_RECORDING_ENABLED 6586 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
// Queries buffer/image memory requirements, using the KHR "2" entry points
// (dedicated-allocation hints) when the extension is enabled.
6589 void GetBufferMemoryRequirements(
6591 VkMemoryRequirements& memReq,
6592 bool& requiresDedicatedAllocation,
6593 bool& prefersDedicatedAllocation)
const;
6594 void GetImageMemoryRequirements(
6596 VkMemoryRequirements& memReq,
6597 bool& requiresDedicatedAllocation,
6598 bool& prefersDedicatedAllocation)
const;
// Main allocation entry: picks memory type(s) and dispatches to block
// vectors or dedicated allocations.
6601 VkResult AllocateMemory(
6602 const VkMemoryRequirements& vkMemReq,
6603 bool requiresDedicatedAllocation,
6604 bool prefersDedicatedAllocation,
6605 VkBuffer dedicatedBuffer,
6606 VkImage dedicatedImage,
6608 VmaSuballocationType suballocType,
6609 size_t allocationCount,
6614 size_t allocationCount,
6617 VkResult ResizeAllocation(
6619 VkDeviceSize newSize);
6621 void CalculateStats(
VmaStats* pStats);
6623 #if VMA_STATS_STRING_ENABLED 6624 void PrintDetailedMap(
class VmaJsonWriter& json);
6627 VkResult DefragmentationBegin(
6631 VkResult DefragmentationEnd(
6638 void DestroyPool(
VmaPool pool);
6641 void SetCurrentFrameIndex(uint32_t frameIndex);
6642 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6644 void MakePoolAllocationsLost(
6646 size_t* pLostAllocationCount);
6647 VkResult CheckPoolCorruption(
VmaPool hPool);
6648 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Thin wrappers over vkAllocateMemory/vkFreeMemory that also enforce heap
// size limits and invoke user device-memory callbacks.
6652 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6653 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6658 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6659 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6661 void FlushOrInvalidateAllocation(
6663 VkDeviceSize offset, VkDeviceSize size,
6664 VMA_CACHE_OPERATION op);
// Debug helper: fills allocation memory with a byte pattern.
6666 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6669 VkDeviceSize m_PreferredLargeHeapBlockSize;
6671 VkPhysicalDevice m_PhysicalDevice;
6672 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by m_PoolsMutex.
6674 VMA_RW_MUTEX m_PoolsMutex;
6676 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6677 uint32_t m_NextPoolId;
6681 #if VMA_RECORDING_ENABLED 6682 VmaRecorder* m_pRecorder;
6687 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocates from one specific memory type; falls back between block and
// dedicated strategies.
6689 VkResult AllocateMemoryOfType(
6691 VkDeviceSize alignment,
6692 bool dedicatedAllocation,
6693 VkBuffer dedicatedBuffer,
6694 VkImage dedicatedImage,
6696 uint32_t memTypeIndex,
6697 VmaSuballocationType suballocType,
6698 size_t allocationCount,
6702 VkResult AllocateDedicatedMemoryPage(
6704 VmaSuballocationType suballocType,
6705 uint32_t memTypeIndex,
6706 const VkMemoryAllocateInfo& allocInfo,
6708 bool isUserDataString,
6713 VkResult AllocateDedicatedMemory(
6715 VmaSuballocationType suballocType,
6716 uint32_t memTypeIndex,
6718 bool isUserDataString,
6720 VkBuffer dedicatedBuffer,
6721 VkImage dedicatedImage,
6722 size_t allocationCount,
6732 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6734 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6737 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6739 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Single-object allocation helper (returns uninitialized, suitably aligned
// storage for one T). NOTE(review): the signature line (orig. 6744) was
// dropped by the extraction — recover the function name from upstream VMA.
6742 template<
typename T>
6745 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6748 template<
typename T>
6749 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6751 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// vma_delete: destroy one object allocated via the vma_new helpers and
// release its storage. NOTE(review): the explicit destructor call (orig.
// lines 6756-6759) was dropped by the extraction; only the free remains
// visible here.
6754 template<
typename T>
6755 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6760 VmaFree(hAllocator, ptr);
// vma_delete_array: destroy `count` elements (reverse order, per the
// descending loop) then free the array storage. The per-element destructor
// call inside the loop (orig. line 6770) was dropped by the extraction.
6764 template<
typename T>
6765 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6769 for(
size_t i = count; i--; )
6771 VmaFree(hAllocator, ptr);
// VmaStringBuilder: append-only character buffer used to build the JSON
// stats string. Backed by VmaVector so growth goes through the allocator's
// CPU callbacks; GetData() is NOT null-terminated (see GetLength()).
6778 #if VMA_STATS_STRING_ENABLED 6780 class VmaStringBuilder
6783 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6784 size_t GetLength()
const {
return m_Data.size(); }
6785 const char* GetData()
const {
return m_Data.data(); }
6787 void Add(
char ch) { m_Data.push_back(ch); }
6788 void Add(
const char* pStr);
6789 void AddNewLine() { Add(
'\n'); }
6790 void AddNumber(uint32_t num);
6791 void AddNumber(uint64_t num);
6792 void AddPointer(
const void* ptr);
6795 VmaVector< char, VmaStlAllocator<char> > m_Data;
6798 void VmaStringBuilder::Add(
const char* pStr)
6800 const size_t strLen = strlen(pStr);
6803 const size_t oldCount = m_Data.size();
6804 m_Data.resize(oldCount + strLen);
6805 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Numeric/pointer append helpers: format into a stack buffer then Add() the
// resulting string. NOTE(review): the local buffer declarations and the
// trailing Add(buf) calls (orig. lines 6810-6813 etc.) were dropped by the
// extraction.
6809 void VmaStringBuilder::AddNumber(uint32_t num)
6812 VmaUint32ToStr(buf,
sizeof(buf), num);
6816 void VmaStringBuilder::AddNumber(uint64_t num)
6819 VmaUint64ToStr(buf,
sizeof(buf), num);
6823 void VmaStringBuilder::AddPointer(
const void* ptr)
6826 VmaPtrToStr(buf,
sizeof(buf), ptr);
// VmaJsonWriter: minimal streaming JSON emitter over VmaStringBuilder.
// Maintains a stack of open objects/arrays so commas, quoting, and
// indentation are inserted automatically. The class header line (orig.
// 6837-6838) was dropped by the extraction.
6830 #endif // #if VMA_STATS_STRING_ENABLED 6835 #if VMA_STATS_STRING_ENABLED 6839 VMA_CLASS_NO_COPY(VmaJsonWriter)
6841 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// singleLine suppresses newlines/indent inside the collection.
6844 void BeginObject(
bool singleLine =
false);
6847 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString in one call. Begin/Continue/End
// allow composing one JSON string value from several pieces.
6850 void WriteString(
const char* pStr);
6851 void BeginString(
const char* pStr = VMA_NULL);
6852 void ContinueString(
const char* pStr);
6853 void ContinueString(uint32_t n);
6854 void ContinueString(uint64_t n);
6855 void ContinueString_Pointer(
const void* ptr);
6856 void EndString(
const char* pStr = VMA_NULL);
6858 void WriteNumber(uint32_t n);
6859 void WriteNumber(uint64_t n);
6860 void WriteBool(
bool b);
6864 static const char*
const INDENT;
6866 enum COLLECTION_TYPE
6868 COLLECTION_TYPE_OBJECT,
6869 COLLECTION_TYPE_ARRAY,
// Stack frame: what collection we're in, how many values written so far
// (objects alternate key/value on even/odd counts), and line mode.
6873 COLLECTION_TYPE type;
6874 uint32_t valueCount;
6875 bool singleLineMode;
6878 VmaStringBuilder& m_SB;
6879 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6880 bool m_InsideString;
6882 void BeginValue(
bool isString);
6883 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit.
6886 const char*
const VmaJsonWriter::INDENT =
"  ";
// Constructor: binds to an external string builder; starts outside any
// string. (The m_SB initializer line, orig. 6889, was dropped by the
// extraction.)
6888 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6890 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6891 m_InsideString(false)
// Destructor: all strings and collections must have been closed.
6895 VmaJsonWriter::~VmaJsonWriter()
6897 VMA_ASSERT(!m_InsideString);
6898 VMA_ASSERT(m_Stack.empty());
// BeginObject: emit '{' (emission lines dropped by extraction) and push a
// stack frame tracking this object.
6901 void VmaJsonWriter::BeginObject(
bool singleLine)
6903 VMA_ASSERT(!m_InsideString);
6909 item.type = COLLECTION_TYPE_OBJECT;
6910 item.valueCount = 0;
6911 item.singleLineMode = singleLine;
6912 m_Stack.push_back(item);
// EndObject: emit '}' and pop; top of stack must be an object.
6915 void VmaJsonWriter::EndObject()
6917 VMA_ASSERT(!m_InsideString);
6922 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// BeginArray / EndArray: same protocol with '[' / ']'.
6926 void VmaJsonWriter::BeginArray(
bool singleLine)
6928 VMA_ASSERT(!m_InsideString);
6934 item.type = COLLECTION_TYPE_ARRAY;
6935 item.valueCount = 0;
6936 item.singleLineMode = singleLine;
6937 m_Stack.push_back(item);
6940 void VmaJsonWriter::EndArray()
6942 VMA_ASSERT(!m_InsideString);
6947 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// WriteString: convenience — open, write, and close a JSON string value.
6951 void VmaJsonWriter::WriteString(
const char* pStr)
// BeginString: starts a string value (opening quote emitted in lines
// dropped by the extraction); optional initial content.
6957 void VmaJsonWriter::BeginString(
const char* pStr)
6959 VMA_ASSERT(!m_InsideString);
6963 m_InsideString =
true;
6964 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6966 ContinueString(pStr);
// ContinueString(const char*): append raw text, escaping per JSON; the
// per-character switch (orig. 6977-7007) was dropped by the extraction —
// only the unsupported-character assert remains visible.
6970 void VmaJsonWriter::ContinueString(
const char* pStr)
6972 VMA_ASSERT(m_InsideString);
6974 const size_t strLen = strlen(pStr);
6975 for(
size_t i = 0; i < strLen; ++i)
7008 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric / pointer continuations append formatted text inside the string.
7014 void VmaJsonWriter::ContinueString(uint32_t n)
7016 VMA_ASSERT(m_InsideString);
7020 void VmaJsonWriter::ContinueString(uint64_t n)
7022 VMA_ASSERT(m_InsideString);
7026 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7028 VMA_ASSERT(m_InsideString);
7029 m_SB.AddPointer(ptr);
// EndString: optional final content, then closing quote (dropped line) and
// leave string mode.
7032 void VmaJsonWriter::EndString(
const char* pStr)
7034 VMA_ASSERT(m_InsideString);
7035 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7037 ContinueString(pStr);
7040 m_InsideString =
false;
// Scalar value writers: each must be called outside a string; the actual
// emission lines were dropped by the extraction.
7043 void VmaJsonWriter::WriteNumber(uint32_t n)
7045 VMA_ASSERT(!m_InsideString);
7050 void VmaJsonWriter::WriteNumber(uint64_t n)
7052 VMA_ASSERT(!m_InsideString);
7057 void VmaJsonWriter::WriteBool(
bool b)
7059 VMA_ASSERT(!m_InsideString);
7061 m_SB.Add(b ?
"true" :
"false");
7064 void VmaJsonWriter::WriteNull()
7066 VMA_ASSERT(!m_InsideString);
// BeginValue: inserts the separator required before the next token. Inside
// an object, even valueCount means a key is expected (must be a string);
// odd means we're emitting the value after a key.
7071 void VmaJsonWriter::BeginValue(
bool isString)
7073 if(!m_Stack.empty())
7075 StackItem& currItem = m_Stack.back();
7076 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7077 currItem.valueCount % 2 == 0)
7079 VMA_ASSERT(isString);
7082 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7083 currItem.valueCount % 2 != 0)
7087 else if(currItem.valueCount > 0)
7096 ++currItem.valueCount;
// WriteIndent: newline + one INDENT per stack level (oneLess for closing
// brackets); suppressed in single-line collections.
7100 void VmaJsonWriter::WriteIndent(
bool oneLess)
7102 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7106 size_t count = m_Stack.size();
7107 if(count > 0 && oneLess)
7111 for(
size_t i = 0; i < count; ++i)
// SetUserData: for string-mode allocations, replaces the owned copy of the
// user string; otherwise just stores the raw pointer.
7118 #endif // #if VMA_STATS_STRING_ENABLED 7122 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7124 if(IsUserDataString())
// Caller must not pass the currently-owned string back in — it is about to
// be freed below.
7126 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData)
7128 FreeUserDataString(hAllocator);
7130 if(pUserData != VMA_NULL)
// Deep-copy the incoming NUL-terminated string through the allocator's
// CPU callbacks (+1 for the terminator).
7132 const char*
const newStrSrc = (
char*)pUserData;
7133 const size_t newStrLen = strlen(newStrSrc);
7134 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7135 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7136 m_pUserData = newStrDst;
// Non-string mode: the pointer is opaque, stored as-is.
7141 m_pUserData = pUserData;
// ChangeBlockAllocation: retargets a block-type allocation to a different
// block/offset (used by defragmentation). If this allocation holds a
// persistent map, the mapping is migrated: unmap old block, map new one.
7145 void VmaAllocation_T::ChangeBlockAllocation(
7147 VmaDeviceMemoryBlock* block,
7148 VkDeviceSize offset)
7150 VMA_ASSERT(block != VMA_NULL);
7151 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7154 if(block != m_BlockAllocation.m_Block)
// Strip the persistent-map flag to get the real reference count.
7156 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7157 if(IsPersistentMap())
7159 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7160 block->Map(hAllocator, mapRefCount, VMA_NULL);
7163 m_BlockAllocation.m_Block = block;
7164 m_BlockAllocation.m_Offset = offset;
// ChangeSize: the assignment to m_Size (orig. line 7170) was dropped by the
// extraction; only the precondition remains visible.
7167 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7169 VMA_ASSERT(newSize > 0);
// ChangeOffset: moves a block allocation within its current block.
7173 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7175 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7176 m_BlockAllocation.m_Offset = newOffset;
// Accessors dispatching on allocation type (block suballocation vs
// dedicated VkDeviceMemory). The switch scaffolding lines were dropped by
// the extraction; the per-case returns remain.
7179 VkDeviceSize VmaAllocation_T::GetOffset()
const 7183 case ALLOCATION_TYPE_BLOCK:
7184 return m_BlockAllocation.m_Offset;
// Dedicated allocations own whole VkDeviceMemory objects — offset is 0
// (return line dropped by extraction).
7185 case ALLOCATION_TYPE_DEDICATED:
7193 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7197 case ALLOCATION_TYPE_BLOCK:
7198 return m_BlockAllocation.m_Block->GetDeviceMemory();
7199 case ALLOCATION_TYPE_DEDICATED:
7200 return m_DedicatedAllocation.m_hMemory;
7203 return VK_NULL_HANDLE;
7207 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7211 case ALLOCATION_TYPE_BLOCK:
7212 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7213 case ALLOCATION_TYPE_DEDICATED:
7214 return m_DedicatedAllocation.m_MemoryTypeIndex;
// GetMappedData: for block allocations the mapped pointer is the block's
// mapping plus this allocation's offset; for dedicated, the stored pointer.
7221 void* VmaAllocation_T::GetMappedData()
const 7225 case ALLOCATION_TYPE_BLOCK:
7228 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7229 VMA_ASSERT(pBlockData != VMA_NULL);
7230 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7237 case ALLOCATION_TYPE_DEDICATED:
// Invariant: mapped pointer present exactly when map count is nonzero.
7238 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7239 return m_DedicatedAllocation.m_pMappedData;
7246 bool VmaAllocation_T::CanBecomeLost()
const 7250 case ALLOCATION_TYPE_BLOCK:
7251 return m_BlockAllocation.m_CanBecomeLost;
7252 case ALLOCATION_TYPE_DEDICATED:
7260 VmaPool VmaAllocation_T::GetPool()
const 7262 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7263 return m_BlockAllocation.m_hPool;
// MakeLost: atomically marks the allocation lost via CAS on the last-use
// frame index, unless it is already lost or was used too recently
// (within frameInUseCount frames). Retry loop lines dropped by extraction.
7266 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7268 VMA_ASSERT(CanBecomeLost());
7274 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7277 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7282 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7288 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names for suballocation types (array initializers dropped
// by the extraction), followed by the JSON dump of one allocation's
// parameters used in the detailed stats map.
7298 #if VMA_STATS_STRING_ENABLED 7301 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
7310 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7312 json.WriteString(
"Type");
7313 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7315 json.WriteString(
"Size");
7316 json.WriteNumber(m_Size);
7318 if(m_pUserData != VMA_NULL)
7320 json.WriteString(
"UserData");
// String user data is emitted verbatim; opaque pointers are emitted as a
// formatted address.
7321 if(IsUserDataString())
7323 json.WriteString((
const char*)m_pUserData);
7328 json.ContinueString_Pointer(m_pUserData);
7333 json.WriteString(
"CreationFrameIndex");
7334 json.WriteNumber(m_CreationFrameIndex);
7336 json.WriteString(
"LastUseFrameIndex");
7337 json.WriteNumber(GetLastUseFrameIndex());
7339 if(m_BufferImageUsage != 0)
7341 json.WriteString(
"Usage");
7342 json.WriteNumber(m_BufferImageUsage);
// Frees the owned copy of the user-data string (allocated in SetUserData
// with length strlen+1) and resets the pointer. Only valid in string mode.
7348 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7350 VMA_ASSERT(IsUserDataString());
7351 if(m_pUserData != VMA_NULL)
7353 char*
const oldStr = (
char*)m_pUserData;
// Must match the allocation size used in SetUserData: length + NUL.
7354 const size_t oldStrLen = strlen(oldStr);
7355 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7356 m_pUserData = VMA_NULL;
// Map/unmap bookkeeping. The low 7 bits of m_MapCount hold the reference
// count; MAP_COUNT_FLAG_PERSISTENT_MAP is kept out of the count via the
// mask. Increment/decrement lines were dropped by the extraction.
7360 void VmaAllocation_T::BlockAllocMap()
7362 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
// 0x7F is the saturation limit of the 7-bit map reference count.
7364 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7370 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
7374 void VmaAllocation_T::BlockAllocUnmap()
7376 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7378 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7384 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// DedicatedAllocMap: first map calls vkMapMemory and caches the pointer;
// subsequent maps just bump the count and return the cached pointer.
7388 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7390 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7394 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7396 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7397 *ppData = m_DedicatedAllocation.m_pMappedData;
7403 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7404 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: go through the (possibly user-overridden) vkMapMemory pointer.
7409 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7410 hAllocator->m_hDevice,
7411 m_DedicatedAllocation.m_hMemory,
7416 if(result == VK_SUCCESS)
7418 m_DedicatedAllocation.m_pMappedData = *ppData;
// DedicatedAllocUnmap: when the count drops to zero, clear the cached
// pointer and call vkUnmapMemory.
7425 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7427 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7429 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7434 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7435 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7436 hAllocator->m_hDevice,
7437 m_DedicatedAllocation.m_hMemory);
7442 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo as a JSON object: scalar counters plus nested
// Min/Avg/Max sub-objects for allocation and unused-range sizes. The
// WriteNumber argument lines were dropped by the extraction.
7446 #if VMA_STATS_STRING_ENABLED 7448 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7452 json.WriteString(
"Blocks");
7455 json.WriteString(
"Allocations");
7458 json.WriteString(
"UnusedRanges");
7461 json.WriteString(
"UsedBytes");
7464 json.WriteString(
"UnusedBytes");
7469 json.WriteString(
"AllocationSize");
7470 json.BeginObject(
true);
7471 json.WriteString(
"Min");
7473 json.WriteString(
"Avg");
7475 json.WriteString(
"Max");
7482 json.WriteString(
"UnusedRangeSize");
7483 json.BeginObject(
true);
7484 json.WriteString(
"Min");
7486 json.WriteString(
"Avg");
7488 json.WriteString(
"Max");
// Comparator ordering free-suballocation iterators by size; the second
// overload enables binary search against a plain VkDeviceSize key. The
// operator() signature lines were dropped by the extraction.
7496 #endif // #if VMA_STATS_STRING_ENABLED 7498 struct VmaSuballocationItemSizeLess
7501 const VmaSuballocationList::iterator lhs,
7502 const VmaSuballocationList::iterator rhs)
const 7504 return lhs->size < rhs->size;
7507 const VmaSuballocationList::iterator lhs,
7508 VkDeviceSize rhsSize)
const 7510 return lhs->size < rhsSize;
// Base metadata class: captures the allocator's CPU allocation callbacks
// for use by derived containers.
7518 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7520 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
// Shared JSON helpers for the per-block detailed map: header with totals,
// one entry per allocation, one entry per unused range, then the footer.
7524 #if VMA_STATS_STRING_ENABLED 7526 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7527 VkDeviceSize unusedBytes,
7528 size_t allocationCount,
7529 size_t unusedRangeCount)
const 7533 json.WriteString(
"TotalBytes");
7534 json.WriteNumber(GetSize());
7536 json.WriteString(
"UnusedBytes");
7537 json.WriteNumber(unusedBytes);
7539 json.WriteString(
"Allocations");
// size_t is cast to uint64_t to select the WriteNumber overload portably.
7540 json.WriteNumber((uint64_t)allocationCount);
7542 json.WriteString(
"UnusedRanges");
7543 json.WriteNumber((uint64_t)unusedRangeCount);
7545 json.WriteString(
"Suballocations");
// One used suballocation: offset plus the allocation's own parameters.
7549 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7550 VkDeviceSize offset,
7553 json.BeginObject(
true);
7555 json.WriteString(
"Offset");
7556 json.WriteNumber(offset);
7558 hAllocation->PrintParameters(json);
// One free range: reported with the FREE type name and its size.
7563 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7564 VkDeviceSize offset,
7565 VkDeviceSize size)
const 7567 json.BeginObject(
true);
7569 json.WriteString(
"Offset");
7570 json.WriteNumber(offset);
7572 json.WriteString(
"Type");
7573 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7575 json.WriteString(
"Size");
7576 json.WriteNumber(size);
// Footer, then the generic-metadata constructor: starts with the whole
// block represented implicitly (containers empty until Init()).
7581 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7587 #endif // #if VMA_STATS_STRING_ENABLED 7592 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7593 VmaBlockMetadata(hAllocator),
7596 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7597 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7601 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Init: represents a fresh block as a single FREE suballocation spanning the
// whole size, registered in the by-size free list.
7605 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7607 VmaBlockMetadata::Init(size);
7610 m_SumFreeSize = size;
7612 VmaSuballocation suballoc = {};
7613 suballoc.offset = 0;
7614 suballoc.size = size;
7615 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7616 suballoc.hAllocation = VK_NULL_HANDLE;
// A whole block is always large enough to appear in the by-size registry.
7618 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7619 m_Suballocations.push_back(suballoc);
7620 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7622 m_FreeSuballocationsBySize.push_back(suballocItem);
// Validate: heavyweight consistency check. Walks the suballocation list
// verifying contiguity, no two adjacent FREE ranges, allocation back-
// pointers, and debug margins; then checks the by-size free list is the
// right length and sorted ascending; finally compares recomputed totals
// against the cached counters.
7625 bool VmaBlockMetadata_Generic::Validate()
const 7627 VMA_VALIDATE(!m_Suballocations.empty());
// Running expected offset of the next suballocation.
7630 VkDeviceSize calculatedOffset = 0;
7632 uint32_t calculatedFreeCount = 0;
7634 VkDeviceSize calculatedSumFreeSize = 0;
// Number of free ranges large enough to be in m_FreeSuballocationsBySize.
7637 size_t freeSuballocationsToRegister = 0;
7639 bool prevFree =
false;
7641 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7642 suballocItem != m_Suballocations.cend();
7645 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must tile the block with no gaps or overlaps.
7648 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7650 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Adjacent free ranges must have been merged.
7652 VMA_VALIDATE(!prevFree || !currFree);
7654 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7658 calculatedSumFreeSize += subAlloc.size;
7659 ++calculatedFreeCount;
7660 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7662 ++freeSuballocationsToRegister;
// Every free range must at least cover the debug margin.
7666 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7670 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7671 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With debug margins enabled, every allocation must be preceded by free space.
7674 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7677 calculatedOffset += subAlloc.size;
7678 prevFree = currFree;
7683 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7685 VkDeviceSize lastSize = 0;
7686 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7688 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7691 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
// The registry must be sorted by size, ascending.
7693 VMA_VALIDATE(suballocItem->size >= lastSize);
7695 lastSize = suballocItem->size;
7699 VMA_VALIDATE(ValidateFreeSuballocationList());
7700 VMA_VALIDATE(calculatedOffset == GetSize());
7701 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7702 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range: the by-size list is sorted ascending, so it is the
// last entry (0 when there are no registered free ranges — return line
// dropped by extraction).
7707 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7709 if(!m_FreeSuballocationsBySize.empty())
7711 return m_FreeSuballocationsBySize.back()->size;
// Empty = exactly one suballocation and it is free.
7719 bool VmaBlockMetadata_Generic::IsEmpty()
const 7721 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Aggregates per-block statistics into a VmaStatInfo (accumulation lines
// dropped by extraction).
7724 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7728 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7740 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7741 suballocItem != m_Suballocations.cend();
7744 const VmaSuballocation& suballoc = *suballocItem;
7745 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Adds this block's totals into a pool-level statistics struct.
7758 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7760 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7762 inoutStats.
size += GetSize();
// Emits this block's detailed JSON map: header with totals, then one entry
// per suballocation (free ranges and allocations interleaved in order).
7769 #if VMA_STATS_STRING_ENABLED 7771 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7773 PrintDetailedMap_Begin(json,
// Used-allocation count = total ranges minus free ranges.
7775 m_Suballocations.size() - (size_t)m_FreeCount,
7779 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7780 suballocItem != m_Suballocations.cend();
7781 ++suballocItem, ++i)
7783 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7785 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7789 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7793 PrintDetailedMap_End(json);
7796 #endif // #if VMA_STATS_STRING_ENABLED 7798 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7799 uint32_t currentFrameIndex,
7800 uint32_t frameInUseCount,
7801 VkDeviceSize bufferImageGranularity,
7802 VkDeviceSize allocSize,
7803 VkDeviceSize allocAlignment,
7805 VmaSuballocationType allocType,
7806 bool canMakeOtherLost,
7808 VmaAllocationRequest* pAllocationRequest)
7810 VMA_ASSERT(allocSize > 0);
7811 VMA_ASSERT(!upperAddress);
7812 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7813 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7814 VMA_HEAVY_ASSERT(Validate());
7817 if(canMakeOtherLost ==
false &&
7818 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7824 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7825 if(freeSuballocCount > 0)
7830 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7831 m_FreeSuballocationsBySize.data(),
7832 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7833 allocSize + 2 * VMA_DEBUG_MARGIN,
7834 VmaSuballocationItemSizeLess());
7835 size_t index = it - m_FreeSuballocationsBySize.data();
7836 for(; index < freeSuballocCount; ++index)
7841 bufferImageGranularity,
7845 m_FreeSuballocationsBySize[index],
7847 &pAllocationRequest->offset,
7848 &pAllocationRequest->itemsToMakeLostCount,
7849 &pAllocationRequest->sumFreeSize,
7850 &pAllocationRequest->sumItemSize))
7852 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7857 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7859 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7860 it != m_Suballocations.end();
7863 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
7866 bufferImageGranularity,
7872 &pAllocationRequest->offset,
7873 &pAllocationRequest->itemsToMakeLostCount,
7874 &pAllocationRequest->sumFreeSize,
7875 &pAllocationRequest->sumItemSize))
7877 pAllocationRequest->item = it;
7885 for(
size_t index = freeSuballocCount; index--; )
7890 bufferImageGranularity,
7894 m_FreeSuballocationsBySize[index],
7896 &pAllocationRequest->offset,
7897 &pAllocationRequest->itemsToMakeLostCount,
7898 &pAllocationRequest->sumFreeSize,
7899 &pAllocationRequest->sumItemSize))
7901 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7908 if(canMakeOtherLost)
7912 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
7913 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
7915 VmaAllocationRequest tmpAllocRequest = {};
7916 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7917 suballocIt != m_Suballocations.end();
7920 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7921 suballocIt->hAllocation->CanBecomeLost())
7926 bufferImageGranularity,
7932 &tmpAllocRequest.offset,
7933 &tmpAllocRequest.itemsToMakeLostCount,
7934 &tmpAllocRequest.sumFreeSize,
7935 &tmpAllocRequest.sumItemSize))
7937 tmpAllocRequest.item = suballocIt;
7939 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7942 *pAllocationRequest = tmpAllocRequest;
7948 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// MakeRequestedAllocationsLost: walks forward from the request's item,
// evicting lost-enabled allocations until itemsToMakeLostCount reaches
// zero; each eviction frees (and may merge) the suballocation, so the
// iterator is refreshed from FreeSuballocation's return value.
7957 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7958 uint32_t currentFrameIndex,
7959 uint32_t frameInUseCount,
7960 VmaAllocationRequest* pAllocationRequest)
7962 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over free ranges; only real allocations count toward the quota.
7964 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7966 ++pAllocationRequest->item;
7968 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7969 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7970 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7971 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7973 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7974 --pAllocationRequest->itemsToMakeLostCount;
7982 VMA_HEAVY_ASSERT(Validate());
7983 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7984 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// MakeAllocationsLost: evicts every lost-enabled allocation in the block
// that qualifies for the given frame window; returns how many were lost.
7989 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7991 uint32_t lostAllocationCount = 0;
7992 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7993 it != m_Suballocations.end();
7996 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7997 it->hAllocation->CanBecomeLost() &&
7998 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8000 it = FreeSuballocation(it);
8001 ++lostAllocationCount;
8004 return lostAllocationCount;
// CheckCorruption: for every used suballocation, verifies the magic-value
// guard bytes written in the debug margins immediately before and after the
// allocation are intact in the mapped block data.
8007 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8009 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8010 it != m_Suballocations.end();
8013 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard region preceding the allocation.
8015 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8017 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8018 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard region following the allocation.
8020 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8022 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8023 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously validated allocation request: carves allocSize bytes
// out of the free suballocation request.item points at, converting it to a
// used suballocation and inserting new FREE suballocations for any leftover
// padding before and/or after the allocated range.
// NOTE(review): some lines (braces, paddingEnd > 0 condition, free-count
// bookkeeping branches) are elided in this excerpt.
8031 void VmaBlockMetadata_Generic::Alloc(
8032 const VmaAllocationRequest& request,
8033 VmaSuballocationType type,
8034 VkDeviceSize allocSize,
// Upper-address allocation is not supported by the generic metadata.
8038 VMA_ASSERT(!upperAddress);
8039 VMA_ASSERT(request.item != m_Suballocations.end());
8040 VmaSuballocation& suballoc = *request.item;
// The target must currently be a free range large enough for offset+size.
8042 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8044 VMA_ASSERT(request.offset >= suballoc.offset);
8045 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8046 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8047 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove the free range from the by-size registry before mutating it.
8051 UnregisterFreeSuballocation(request.item);
// Re-purpose the existing node as the used suballocation.
8053 suballoc.offset = request.offset;
8054 suballoc.size = allocSize;
8055 suballoc.type = type;
8056 suballoc.hAllocation = hAllocation;
// Leftover space after the allocation becomes a new registered free range.
8061 VmaSuballocation paddingSuballoc = {};
8062 paddingSuballoc.offset = request.offset + allocSize;
8063 paddingSuballoc.size = paddingEnd;
8064 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8065 VmaSuballocationList::iterator next = request.item;
8067 const VmaSuballocationList::iterator paddingEndItem =
8068 m_Suballocations.insert(next, paddingSuballoc);
8069 RegisterFreeSuballocation(paddingEndItem);
// Leftover space before the allocation likewise becomes a free range.
8075 VmaSuballocation paddingSuballoc = {};
8076 paddingSuballoc.offset = request.offset - paddingBegin;
8077 paddingSuballoc.size = paddingBegin;
8078 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8079 const VmaSuballocationList::iterator paddingBeginItem =
8080 m_Suballocations.insert(request.item, paddingSuballoc);
8081 RegisterFreeSuballocation(paddingBeginItem);
// One free range was consumed; padding branches (elided) re-add as needed.
8085 m_FreeCount = m_FreeCount - 1;
8086 if(paddingBegin > 0)
// Only the allocated bytes leave the free pool; padding stays free.
8094 m_SumFreeSize -= allocSize;
// Frees the suballocation holding the given allocation handle. Performs a
// linear search over the suballocation list; asserts if the handle is not
// found in this block.
8097 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8099 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8100 suballocItem != m_Suballocations.end();
8103 VmaSuballocation& suballoc = *suballocItem;
8104 if(suballoc.hAllocation == allocation)
// Marks the range free and merges with adjacent free neighbors.
8106 FreeSuballocation(suballocItem);
8107 VMA_HEAVY_ASSERT(Validate());
// Reaching this point means the allocation does not belong to this block.
8111 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts exactly at the given byte offset.
// Linear search, mirroring Free(); asserts if no suballocation matches.
8114 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8116 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8117 suballocItem != m_Suballocations.end();
8120 VmaSuballocation& suballoc = *suballocItem;
8121 if(suballoc.offset == offset)
8123 FreeSuballocation(suballocItem);
// No suballocation begins at `offset` — caller passed a bad offset.
8127 VMA_ASSERT(0 &&
"Not found!");
// Tries to resize an existing allocation in place to newSize bytes.
// Shrinking always succeeds: the reclaimed tail either grows an adjacent
// free suballocation or becomes a new one. Growing succeeds only when the
// next suballocation is free and large enough to absorb the difference.
// NOTE(review): braces, `else` branches and `return` statements are elided
// in this excerpt; the failure/success paths are inferred from structure.
8130 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
8132 typedef VmaSuballocationList::iterator iter_type;
8133 for(iter_type suballocItem = m_Suballocations.begin();
8134 suballocItem != m_Suballocations.end();
8137 VmaSuballocation& suballoc = *suballocItem;
8138 if(suballoc.hAllocation == alloc)
8140 iter_type nextItem = suballocItem;
// Caller must request an actual change to a non-zero size.
8144 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// --- Shrink path -------------------------------------------------------
8147 if(newSize < alloc->GetSize())
8149 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8152 if(nextItem != m_Suballocations.end())
// Next range is free: extend it backward to absorb the freed tail.
8155 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Unregister/re-register because its size key in the by-size vector changes.
8158 UnregisterFreeSuballocation(nextItem);
8159 nextItem->offset -= sizeDiff;
8160 nextItem->size += sizeDiff;
8161 RegisterFreeSuballocation(nextItem);
// Next range is used: insert a brand-new free range after this allocation.
8167 VmaSuballocation newFreeSuballoc;
8168 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8169 newFreeSuballoc.offset = suballoc.offset + newSize;
8170 newFreeSuballoc.size = sizeDiff;
8171 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8172 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8173 RegisterFreeSuballocation(newFreeSuballocIt);
// This allocation is the last suballocation: append the free tail at the end.
8182 VmaSuballocation newFreeSuballoc;
8183 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8184 newFreeSuballoc.offset = suballoc.offset + newSize;
8185 newFreeSuballoc.size = sizeDiff;
8186 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8187 m_Suballocations.push_back(newFreeSuballoc);
8189 iter_type newFreeSuballocIt = m_Suballocations.end();
8190 RegisterFreeSuballocation(--newFreeSuballocIt);
// Commit the shrink: freed bytes return to the free pool.
8195 suballoc.size = newSize;
8196 m_SumFreeSize += sizeDiff;
// --- Grow path ---------------------------------------------------------
8201 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8204 if(nextItem != m_Suballocations.end())
8207 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Free neighbor too small (margin included) — growth fails (branch elided).
8210 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
// Neighbor larger than needed: shift its start forward by sizeDiff.
8216 if(nextItem->size > sizeDiff)
8219 UnregisterFreeSuballocation(nextItem);
8220 nextItem->offset += sizeDiff;
8221 nextItem->size -= sizeDiff;
8222 RegisterFreeSuballocation(nextItem);
// Neighbor exactly consumed: remove the free suballocation entirely.
8228 UnregisterFreeSuballocation(nextItem);
8229 m_Suballocations.erase(nextItem);
// Commit the grow: the absorbed bytes leave the free pool.
8245 suballoc.size = newSize;
8246 m_SumFreeSize -= sizeDiff;
// Allocation handle not found in this block.
8253 VMA_ASSERT(0 &&
"Not found!");
// Debug check of m_FreeSuballocationsBySize: every registered entry must be
// a FREE suballocation of at least the registration threshold size, and the
// vector must be sorted by ascending size (required for binary search in
// UnregisterFreeSuballocation).
8257 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8259 VkDeviceSize lastSize = 0;
8260 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8262 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8264 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8265 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
// Sizes must be non-decreasing.
8266 VMA_VALIDATE(it->size >= lastSize);
8267 lastSize = it->size;
// Checks whether an allocation of allocSize/allocAlignment/allocType can be
// placed at (or after) the suballocation pointed to by suballocItem.
// Two modes:
//  - canMakeOtherLost == true: the candidate region may span occupied
//    suballocations that can be made lost; outputs how many must be lost
//    (*itemsToMakeLostCount) and the free/used byte sums for cost ranking.
//  - canMakeOtherLost == false: suballocItem must be a free range large
//    enough by itself.
// In both modes *pOffset receives the aligned start offset, adjusted for
// VMA_DEBUG_MARGIN and bufferImageGranularity conflicts with neighbors.
// NOTE(review): braces, `else` branches and `return` statements are elided
// in this excerpt.
8272 bool VmaBlockMetadata_Generic::CheckAllocation(
8273 uint32_t currentFrameIndex,
8274 uint32_t frameInUseCount,
8275 VkDeviceSize bufferImageGranularity,
8276 VkDeviceSize allocSize,
8277 VkDeviceSize allocAlignment,
8278 VmaSuballocationType allocType,
8279 VmaSuballocationList::const_iterator suballocItem,
8280 bool canMakeOtherLost,
8281 VkDeviceSize* pOffset,
8282 size_t* itemsToMakeLostCount,
8283 VkDeviceSize* pSumFreeSize,
8284 VkDeviceSize* pSumItemSize)
const 8286 VMA_ASSERT(allocSize > 0);
8287 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8288 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8289 VMA_ASSERT(pOffset != VMA_NULL);
8291 *itemsToMakeLostCount = 0;
// ===== Mode 1: the region may consume lost-able occupied suballocations ====
8295 if(canMakeOtherLost)
8297 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8299 *pSumFreeSize = suballocItem->size;
// Occupied start item: usable only if it can be made lost and is outside
// the frame-in-use window.
8303 if(suballocItem->hAllocation->CanBecomeLost() &&
8304 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8306 ++*itemsToMakeLostCount;
8307 *pSumItemSize = suballocItem->size;
// Quick reject: not enough room from this offset to the end of the block.
8316 if(GetSize() - suballocItem->offset < allocSize)
// Start at the item's offset, then push forward for margin and alignment.
8322 *pOffset = suballocItem->offset;
8325 if(VMA_DEBUG_MARGIN > 0)
8327 *pOffset += VMA_DEBUG_MARGIN;
8331 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Respect bufferImageGranularity against preceding suballocations: if a
// previous resource of a conflicting type shares the same "page", bump the
// offset up to a granularity boundary.
8335 if(bufferImageGranularity > 1)
8337 bool bufferImageGranularityConflict =
false;
8338 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8339 while(prevSuballocItem != m_Suballocations.cbegin())
8342 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8343 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8345 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8347 bufferImageGranularityConflict =
true;
8355 if(bufferImageGranularityConflict)
8357 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
8363 if(*pOffset >= suballocItem->offset + suballocItem->size)
// Total span needed, including front padding and the end debug margin.
8369 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8372 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8374 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8376 if(suballocItem->offset + totalSize > GetSize())
// Walk forward over as many suballocations as the span covers, accumulating
// free bytes and counting occupied items that must be made lost.
8383 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8384 if(totalSize > suballocItem->size)
8386 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8387 while(remainingSize > 0)
8390 if(lastSuballocItem == m_Suballocations.cend())
8394 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8396 *pSumFreeSize += lastSuballocItem->size;
8400 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8401 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8402 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8404 ++*itemsToMakeLostCount;
8405 *pSumItemSize += lastSuballocItem->size;
8412 remainingSize = (lastSuballocItem->size < remainingSize) ?
8413 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following resources: conflicting neighbors on
// the same page must also be lost-able, otherwise the placement fails.
8419 if(bufferImageGranularity > 1)
8421 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8423 while(nextSuballocItem != m_Suballocations.cend())
8425 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8426 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8428 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8430 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8431 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8432 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8434 ++*itemsToMakeLostCount;
// ===== Mode 2: plain check — the item itself must be a big-enough free range
8453 const VmaSuballocation& suballoc = *suballocItem;
8454 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8456 *pSumFreeSize = suballoc.size;
// Early reject on raw size before doing any alignment work.
8459 if(suballoc.size < allocSize)
8465 *pOffset = suballoc.offset;
8468 if(VMA_DEBUG_MARGIN > 0)
8470 *pOffset += VMA_DEBUG_MARGIN;
8474 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backward granularity scan as in mode 1.
8478 if(bufferImageGranularity > 1)
8480 bool bufferImageGranularityConflict =
false;
8481 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8482 while(prevSuballocItem != m_Suballocations.cbegin())
8485 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8486 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8488 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8490 bufferImageGranularityConflict =
true;
8498 if(bufferImageGranularityConflict)
8500 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Fit test: padding + allocation + end margin must fit in this free range.
8505 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8508 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8511 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: a conflicting following resource on the same
// page makes this placement invalid (failure branch elided).
8518 if(bufferImageGranularity > 1)
8520 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8522 while(nextSuballocItem != m_Suballocations.cend())
8524 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8525 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8527 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the free suballocation `item` with the free suballocation that
// immediately follows it: item absorbs the next item's size and the next
// node is erased. Both must already be FREE; the caller is responsible for
// unregistering the next item from the by-size vector first.
8546 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8548 VMA_ASSERT(item != m_Suballocations.end());
8549 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8551 VmaSuballocationList::iterator nextItem = item;
8553 VMA_ASSERT(nextItem != m_Suballocations.end());
8554 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
// Sizes are adjacent, so summing them yields one contiguous free range.
8556 item->size += nextItem->size;
8558 m_Suballocations.erase(nextItem);
// Converts the given suballocation to FREE, merges it with free neighbors
// on either side, updates m_SumFreeSize, and registers the resulting free
// range in the by-size vector. Returns an iterator to the (possibly merged)
// free suballocation.
// NOTE(review): braces / counter updates between the visible lines are
// elided in this excerpt.
8561 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8564 VmaSuballocation& suballoc = *suballocItem;
8565 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8566 suballoc.hAllocation = VK_NULL_HANDLE;
// The freed bytes rejoin the free pool.
8570 m_SumFreeSize += suballoc.size;
// Detect free neighbors to coalesce with.
8573 bool mergeWithNext =
false;
8574 bool mergeWithPrev =
false;
8576 VmaSuballocationList::iterator nextItem = suballocItem;
8578 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8580 mergeWithNext =
true;
8583 VmaSuballocationList::iterator prevItem = suballocItem;
8584 if(suballocItem != m_Suballocations.begin())
8587 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8589 mergeWithPrev =
true;
// Merge forward: the next free range is unregistered, then absorbed.
8595 UnregisterFreeSuballocation(nextItem);
8596 MergeFreeWithNext(suballocItem);
// Merge backward: prev absorbs this item and is re-registered with its
// new (larger) size.
8601 UnregisterFreeSuballocation(prevItem);
8602 MergeFreeWithNext(prevItem);
8603 RegisterFreeSuballocation(prevItem);
// No backward merge: register this item itself as the free range.
8608 RegisterFreeSuballocation(suballocItem);
8609 return suballocItem;
// Inserts a free suballocation into m_FreeSuballocationsBySize, keeping the
// vector sorted by size. Ranges smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are intentionally not tracked.
8613 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8615 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8616 VMA_ASSERT(item->size > 0);
8620 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8622 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Fast path: first registered entry needs no sorted insert.
8624 if(m_FreeSuballocationsBySize.empty())
8626 m_FreeSuballocationsBySize.push_back(item);
// Otherwise binary-insert to preserve ascending-size order.
8630 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from m_FreeSuballocationsBySize. Uses binary
// search to find the first entry of equal-or-greater size, then scans
// forward through same-size entries to find the exact iterator. Asserts if
// the item was never registered.
8638 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8640 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8641 VMA_ASSERT(item->size > 0);
8645 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Small ranges were never registered, so there is nothing to remove.
8647 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8649 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8650 m_FreeSuballocationsBySize.data(),
8651 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8653 VmaSuballocationItemSizeLess());
// Linear scan over the run of entries with the same size.
8654 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8655 index < m_FreeSuballocationsBySize.size();
8658 if(m_FreeSuballocationsBySize[index] == item)
8660 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Walking past the item's size without a match means it is missing.
8663 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8665 VMA_ASSERT(0 &&
"Not found.");
// Heuristic used by defragmentation: returns whether mixing resource types
// in this block could run into bufferImageGranularity conflicts. Scans all
// used suballocations tracking the minimum alignment and whether any two
// consecutive types conflict; inOutPrevSuballocType carries type state
// across blocks. Trivially no conflict when granularity is 1 or the block
// is empty (early-return branch elided in this excerpt).
8671 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8672 VkDeviceSize bufferImageGranularity,
8673 VmaSuballocationType& inOutPrevSuballocType)
const 8675 if(bufferImageGranularity == 1 || IsEmpty())
8680 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8681 bool typeConflictFound =
false;
8682 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8683 it != m_Suballocations.cend();
8686 const VmaSuballocationType suballocType = it->type;
8687 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8689 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8690 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8692 typeConflictFound =
true;
8694 inOutPrevSuballocType = suballocType;
// Conflict possible if types conflicted, or every alignment is already at
// least the granularity (so granularity adjustments could matter).
8698 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Linear (ring-buffer / stack) block metadata: constructor initializes the
// two suballocation vectors (double-buffered as "1st" and "2nd") with the
// allocator's callbacks, and starts with the 2nd vector empty and all
// null-item counters at zero.
8704 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8705 VmaBlockMetadata(hAllocator),
8707 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8708 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8709 m_1stVectorIndex(0),
8710 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8711 m_1stNullItemsBeginCount(0),
8712 m_1stNullItemsMiddleCount(0),
8713 m_2ndNullItemsCount(0)
// Destructor: vectors clean up via their allocator; nothing else to release.
8717 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes the metadata for a block of the given size; initially the
// whole block is free.
8721 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8723 VmaBlockMetadata::Init(size);
8724 m_SumFreeSize = size;
// Full consistency check of the linear metadata: vector/mode invariants,
// monotonically increasing offsets in 2nd (ring), 1st, and 2nd (double
// stack, iterated back-to-front) order, null-item counters, per-allocation
// offset/size agreement, and that m_SumFreeSize matches size minus the sum
// of used bytes. Returns false (via VMA_VALIDATE) on the first violation.
8727 bool VmaBlockMetadata_Linear::Validate()
const 8729 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8730 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is empty exactly when the mode says so; ring-buffer mode
// requires a non-empty 1st vector if 2nd is non-empty.
8732 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8733 VMA_VALIDATE(!suballocations1st.empty() ||
8734 suballocations2nd.empty() ||
8735 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8737 if(!suballocations1st.empty())
// First real (non-null) item and the last item must be actual allocations.
8740 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8742 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8744 if(!suballocations2nd.empty())
8747 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
// Null-item counters can never exceed vector sizes.
8750 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8751 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8753 VkDeviceSize sumUsedSize = 0;
8754 const size_t suballoc1stCount = suballocations1st.size();
8755 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// --- Pass 1: 2nd vector in ring-buffer mode (precedes 1st in memory) ----
8757 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8759 const size_t suballoc2ndCount = suballocations2nd.size();
8760 size_t nullItem2ndCount = 0;
8761 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8763 const VmaSuballocation& suballoc = suballocations2nd[i];
8764 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// FREE type and null handle must agree; offsets must be non-decreasing.
8766 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8767 VMA_VALIDATE(suballoc.offset >= offset);
8771 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8772 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8773 sumUsedSize += suballoc.size;
8780 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8783 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// --- Pass 2: leading null items of the 1st vector must be truly empty ----
8786 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8788 const VmaSuballocation& suballoc = suballocations1st[i];
8789 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8790 suballoc.hAllocation == VK_NULL_HANDLE);
8793 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// --- Pass 3: remaining 1st-vector items -----------------------------------
8795 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8797 const VmaSuballocation& suballoc = suballocations1st[i];
8798 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8800 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8801 VMA_VALIDATE(suballoc.offset >= offset);
8802 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8806 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8807 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8808 sumUsedSize += suballoc.size;
8815 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8817 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// --- Pass 4: 2nd vector in double-stack mode (grows down from block end) --
8819 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8821 const size_t suballoc2ndCount = suballocations2nd.size();
8822 size_t nullItem2ndCount = 0;
// Iterated back-to-front so offsets are visited in increasing order.
8823 for(
size_t i = suballoc2ndCount; i--; )
8825 const VmaSuballocation& suballoc = suballocations2nd[i];
8826 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8828 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8829 VMA_VALIDATE(suballoc.offset >= offset);
8833 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8834 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8835 sumUsedSize += suballoc.size;
8842 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8845 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Final aggregate checks.
8848 VMA_VALIDATE(offset <= GetSize());
8849 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Number of live allocations: total items in both vectors minus the null
// (already-freed placeholder) items tracked by the counters.
8854 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8856 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8857 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free region, which in a linear
// block is one of the gaps at the edges of the used regions, depending on
// the 2nd-vector mode. NOTE(review): the early-return for an empty block
// and parts of each case (e.g. the VMA_MAX wrapper at 8889-8890) are elided
// in this excerpt.
8860 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8862 const VkDeviceSize size = GetSize();
8874 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8876 switch(m_2ndVectorMode)
// Only 1st vector in use: free space is before the first and after the
// last allocation; the larger of the two is returned.
8878 case SECOND_VECTOR_EMPTY:
8884 const size_t suballocations1stCount = suballocations1st.size();
8885 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8886 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8887 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8889 firstSuballoc.offset,
8890 size - (lastSuballoc.offset + lastSuballoc.size));
// Ring buffer: the only gap is between the end of the 2nd vector's last
// allocation and the start of the 1st vector's first allocation.
8894 case SECOND_VECTOR_RING_BUFFER:
8899 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8900 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8901 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8902 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: the gap is between the top of the bottom stack (1st) and
// the bottom of the top stack (2nd).
8906 case SECOND_VECTOR_DOUBLE_STACK:
8911 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8912 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
8913 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
8914 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Computes detailed statistics (allocation/unused-range counts and sizes)
// for this linear block by walking the address space in increasing offset
// order: first the 2nd vector in ring-buffer mode, then the 1st vector,
// then the 2nd vector in double-stack mode (back-to-front). Gaps between
// consecutive allocations are reported as unused ranges.
// NOTE(review): the stat-accumulation statements between the visible lines
// (outInfo updates) are elided in this excerpt; only the traversal skeleton
// is shown.
8924 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 8926 const VkDeviceSize size = GetSize();
8927 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8928 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8929 const size_t suballoc1stCount = suballocations1st.size();
8930 const size_t suballoc2ndCount = suballocations2nd.size();
// lastOffset tracks the end of the previously visited allocation.
8941 VkDeviceSize lastOffset = 0;
// --- Phase 1: ring-buffer 2nd vector occupies [0, start of 1st vector) ---
8943 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8945 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8946 size_t nextAlloc2ndIndex = 0;
8947 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed placeholder) items.
8950 while(nextAlloc2ndIndex < suballoc2ndCount &&
8951 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8953 ++nextAlloc2ndIndex;
8957 if(nextAlloc2ndIndex < suballoc2ndCount)
8959 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
8962 if(lastOffset < suballoc.offset)
8965 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8979 lastOffset = suballoc.offset + suballoc.size;
8980 ++nextAlloc2ndIndex;
// No more allocations: trailing space up to the 1st vector is unused.
8986 if(lastOffset < freeSpace2ndTo1stEnd)
8988 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8996 lastOffset = freeSpace2ndTo1stEnd;
// --- Phase 2: 1st vector, up to the 2nd stack (or block end) -------------
9001 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9002 const VkDeviceSize freeSpace1stTo2ndEnd =
9003 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9004 while(lastOffset < freeSpace1stTo2ndEnd)
9007 while(nextAlloc1stIndex < suballoc1stCount &&
9008 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9010 ++nextAlloc1stIndex;
9014 if(nextAlloc1stIndex < suballoc1stCount)
9016 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9019 if(lastOffset < suballoc.offset)
9022 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9036 lastOffset = suballoc.offset + suballoc.size;
9037 ++nextAlloc1stIndex;
9043 if(lastOffset < freeSpace1stTo2ndEnd)
9045 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9053 lastOffset = freeSpace1stTo2ndEnd;
// --- Phase 3: double-stack 2nd vector, iterated from highest index down ---
9057 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9059 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9060 while(lastOffset < size)
9063 while(nextAlloc2ndIndex != SIZE_MAX &&
9064 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9066 --nextAlloc2ndIndex;
9070 if(nextAlloc2ndIndex != SIZE_MAX)
9072 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9075 if(lastOffset < suballoc.offset)
9078 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9092 lastOffset = suballoc.offset + suballoc.size;
9093 --nextAlloc2ndIndex;
9099 if(lastOffset < size)
9101 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's statistics into inoutStats (pool-level totals).
// Uses the same three-phase address-order traversal as
// CalcAllocationStatInfo: ring-buffer 2nd vector, then 1st vector, then
// double-stack 2nd vector back-to-front, counting gaps as unused ranges.
// NOTE(review): the accumulation statements into inoutStats between the
// visible lines are elided in this excerpt. Also note nextAlloc2ndIndex is
// initialized from m_1stNullItemsBeginCount here (line 9132), unlike the 0
// used in CalcAllocationStatInfo — TODO(review) confirm against upstream.
9117 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9119 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9120 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9121 const VkDeviceSize size = GetSize();
9122 const size_t suballoc1stCount = suballocations1st.size();
9123 const size_t suballoc2ndCount = suballocations2nd.size();
// The whole block contributes to the pool's total size.
9125 inoutStats.
size += size;
9127 VkDeviceSize lastOffset = 0;
// --- Phase 1: ring-buffer 2nd vector -------------------------------------
9129 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9131 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9132 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9133 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed placeholder) items.
9136 while(nextAlloc2ndIndex < suballoc2ndCount &&
9137 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9139 ++nextAlloc2ndIndex;
9143 if(nextAlloc2ndIndex < suballoc2ndCount)
9145 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation is an unused range.
9148 if(lastOffset < suballoc.offset)
9151 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9162 lastOffset = suballoc.offset + suballoc.size;
9163 ++nextAlloc2ndIndex;
9168 if(lastOffset < freeSpace2ndTo1stEnd)
9171 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9178 lastOffset = freeSpace2ndTo1stEnd;
// --- Phase 2: 1st vector --------------------------------------------------
9183 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9184 const VkDeviceSize freeSpace1stTo2ndEnd =
9185 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9186 while(lastOffset < freeSpace1stTo2ndEnd)
9189 while(nextAlloc1stIndex < suballoc1stCount &&
9190 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9192 ++nextAlloc1stIndex;
9196 if(nextAlloc1stIndex < suballoc1stCount)
9198 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9201 if(lastOffset < suballoc.offset)
9204 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9215 lastOffset = suballoc.offset + suballoc.size;
9216 ++nextAlloc1stIndex;
9221 if(lastOffset < freeSpace1stTo2ndEnd)
9224 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9231 lastOffset = freeSpace1stTo2ndEnd;
// --- Phase 3: double-stack 2nd vector, highest index first ----------------
9235 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9237 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9238 while(lastOffset < size)
9241 while(nextAlloc2ndIndex != SIZE_MAX &&
9242 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9244 --nextAlloc2ndIndex;
9248 if(nextAlloc2ndIndex != SIZE_MAX)
9250 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9253 if(lastOffset < suballoc.offset)
9256 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9267 lastOffset = suballoc.offset + suballoc.size;
9268 --nextAlloc2ndIndex;
9273 if(lastOffset < size)
9276 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Emits a detailed JSON map of this linear block: a first pass counts
// allocations/unused ranges and sums used bytes, a second pass prints each
// allocation and unused range in address order via the PrintDetailedMap_*
// helpers. Walks up to three regions: 2nd vector in ring-buffer mode (below
// the 1st vector), the 1st vector itself, and 2nd vector in double-stack
// mode (top of the block, iterated backwards).
// NOTE(review): this extract drops brace-only and some statement lines (the
// embedded original line numbers jump); do not treat this text as compilable.
9289 #if VMA_STATS_STRING_ENABLED 9290 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9292 const VkDeviceSize size = GetSize();
9293 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9294 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9295 const size_t suballoc1stCount = suballocations1st.size();
9296 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: count allocations, count unused ranges, sum used bytes.
9300 size_t unusedRangeCount = 0;
9301 VkDeviceSize usedBytes = 0;
9303 VkDeviceSize lastOffset = 0;
9305 size_t alloc2ndCount = 0;
// Region 1: 2nd vector used as ring buffer, lives below offset of the first
// live 1st-vector item.
9306 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9308 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9309 size_t nextAlloc2ndIndex = 0;
9310 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip over freed (null) items.
9313 while(nextAlloc2ndIndex < suballoc2ndCount &&
9314 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9316 ++nextAlloc2ndIndex;
9320 if(nextAlloc2ndIndex < suballoc2ndCount)
9322 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range (counting lines hidden here).
9325 if(lastOffset < suballoc.offset)
9334 usedBytes += suballoc.size;
9337 lastOffset = suballoc.offset + suballoc.size;
9338 ++nextAlloc2ndIndex;
// Trailing free space up to the start of the 1st vector.
9343 if(lastOffset < freeSpace2ndTo1stEnd)
9350 lastOffset = freeSpace2ndTo1stEnd;
// Region 2: the 1st vector; in double-stack mode it ends where the 2nd
// (top) stack begins, otherwise at the block size.
9355 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9356 size_t alloc1stCount = 0;
9357 const VkDeviceSize freeSpace1stTo2ndEnd =
9358 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9359 while(lastOffset < freeSpace1stTo2ndEnd)
9362 while(nextAlloc1stIndex < suballoc1stCount &&
9363 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9365 ++nextAlloc1stIndex;
9369 if(nextAlloc1stIndex < suballoc1stCount)
9371 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9374 if(lastOffset < suballoc.offset)
9383 usedBytes += suballoc.size;
9386 lastOffset = suballoc.offset + suballoc.size;
9387 ++nextAlloc1stIndex;
9392 if(lastOffset < size)
9399 lastOffset = freeSpace1stTo2ndEnd;
// Region 3: 2nd vector used as double stack — iterate back-to-front so
// offsets come out in increasing order.
9403 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9405 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9406 while(lastOffset < size)
9409 while(nextAlloc2ndIndex != SIZE_MAX &&
9410 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9412 --nextAlloc2ndIndex;
9416 if(nextAlloc2ndIndex != SIZE_MAX)
9418 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9421 if(lastOffset < suballoc.offset)
9430 usedBytes += suballoc.size;
9433 lastOffset = suballoc.offset + suballoc.size;
9434 --nextAlloc2ndIndex;
9439 if(lastOffset < size)
// Emit the JSON header using the totals gathered above.
9451 const VkDeviceSize unusedBytes = size - usedBytes;
9452 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// SECOND PASS: same traversal, but now printing every item.
9457 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9459 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9460 size_t nextAlloc2ndIndex = 0;
9461 while(lastOffset < freeSpace2ndTo1stEnd)
9464 while(nextAlloc2ndIndex < suballoc2ndCount &&
9465 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9467 ++nextAlloc2ndIndex;
9471 if(nextAlloc2ndIndex < suballoc2ndCount)
9473 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9476 if(lastOffset < suballoc.offset)
9479 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9480 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9485 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9488 lastOffset = suballoc.offset + suballoc.size;
9489 ++nextAlloc2ndIndex;
9494 if(lastOffset < freeSpace2ndTo1stEnd)
9497 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9498 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9502 lastOffset = freeSpace2ndTo1stEnd;
// NOTE(review): lastOffset is presumably reset to 0 between the passes in a
// line not present in this extract — confirm against the full file.
9507 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9508 while(lastOffset < freeSpace1stTo2ndEnd)
9511 while(nextAlloc1stIndex < suballoc1stCount &&
9512 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9514 ++nextAlloc1stIndex;
9518 if(nextAlloc1stIndex < suballoc1stCount)
9520 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9523 if(lastOffset < suballoc.offset)
9526 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9527 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9532 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9535 lastOffset = suballoc.offset + suballoc.size;
9536 ++nextAlloc1stIndex;
9541 if(lastOffset < freeSpace1stTo2ndEnd)
9544 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9545 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9549 lastOffset = freeSpace1stTo2ndEnd;
9553 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9555 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9556 while(lastOffset < size)
9559 while(nextAlloc2ndIndex != SIZE_MAX &&
9560 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9562 --nextAlloc2ndIndex;
9566 if(nextAlloc2ndIndex != SIZE_MAX)
9568 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9571 if(lastOffset < suballoc.offset)
9574 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9575 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9580 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9583 lastOffset = suballoc.offset + suballoc.size;
9584 --nextAlloc2ndIndex;
9589 if(lastOffset < size)
9592 const VkDeviceSize unusedRangeSize = size - lastOffset;
9593 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
// Close the JSON object for this block.
9602 PrintDetailedMap_End(json);
// Tries to find space for a new allocation of allocSize/allocAlignment in
// this linear block and fills *pAllocationRequest on success. Three cases:
// (1) upper-address allocation -> push onto the 2nd vector as a double
// stack, growing downward from the end of the block; (2) normal allocation
// with the 2nd vector empty or double-stack -> append to the end of the 1st
// vector; (3) normal allocation with the 2nd vector empty or ring-buffer ->
// wrap around and grow the 2nd vector upward from offset 0, optionally
// making existing lost-able allocations lost (canMakeOtherLost).
// NOTE(review): this extract drops brace-only lines and the `return
// true/false` statements (original line numbers jump); not compilable as-is.
9604 #endif // #if VMA_STATS_STRING_ENABLED 9606 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9607 uint32_t currentFrameIndex,
9608 uint32_t frameInUseCount,
9609 VkDeviceSize bufferImageGranularity,
9610 VkDeviceSize allocSize,
9611 VkDeviceSize allocAlignment,
9613 VmaSuballocationType allocType,
9614 bool canMakeOtherLost,
9616 VmaAllocationRequest* pAllocationRequest)
9618 VMA_ASSERT(allocSize > 0);
9619 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9620 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9621 VMA_HEAVY_ASSERT(Validate());
9623 const VkDeviceSize size = GetSize();
9624 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9625 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// CASE 1: upper-address request (double stack). The enclosing
// `if(upperAddress)` guard is among the lines missing from this extract.
9629 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9631 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9636 if(allocSize > size)
// Start from the end of free space (below the previous top-stack item, if any).
9640 VkDeviceSize resultBaseOffset = size - allocSize;
9641 if(!suballocations2nd.empty())
9643 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9644 resultBaseOffset = lastSuballoc.offset - allocSize;
9645 if(allocSize > lastSuballoc.offset)
9652 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin, then align DOWN (stack grows toward lower addresses).
9655 if(VMA_DEBUG_MARGIN > 0)
9657 if(resultOffset < VMA_DEBUG_MARGIN)
9661 resultOffset -= VMA_DEBUG_MARGIN;
9665 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Respect bufferImageGranularity against the next (higher-address) suballocations.
9669 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9671 bool bufferImageGranularityConflict =
false;
9672 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9674 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9675 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9677 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9679 bufferImageGranularityConflict =
true;
9687 if(bufferImageGranularityConflict)
9689 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Accept only if it does not collide with the end of the 1st (bottom) vector.
9694 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9695 suballocations1st.back().offset + suballocations1st.back().size :
9697 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Check granularity conflict with allocations ending just below resultOffset.
9701 if(bufferImageGranularity > 1)
9703 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9705 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9706 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9708 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request (the `return true` line is not in this extract).
9722 pAllocationRequest->offset = resultOffset;
9723 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9724 pAllocationRequest->sumItemSize = 0;
9726 pAllocationRequest->itemsToMakeLostCount = 0;
// CASE 2: normal allocation appended at the end of the 1st vector.
9732 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9736 VkDeviceSize resultBaseOffset = 0;
9737 if(!suballocations1st.empty())
9739 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9740 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9744 VkDeviceSize resultOffset = resultBaseOffset;
// Debug margin then align UP (growing toward higher addresses).
9747 if(VMA_DEBUG_MARGIN > 0)
9749 resultOffset += VMA_DEBUG_MARGIN;
9753 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9757 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9759 bool bufferImageGranularityConflict =
false;
9760 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9762 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9763 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9765 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9767 bufferImageGranularityConflict =
true;
9775 if(bufferImageGranularityConflict)
9777 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the top stack (double-stack) or block end.
9781 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9782 suballocations2nd.back().offset : size;
9785 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
9789 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9791 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9793 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9794 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9796 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9810 pAllocationRequest->offset = resultOffset;
9811 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9812 pAllocationRequest->sumItemSize = 0;
9814 pAllocationRequest->itemsToMakeLostCount = 0;
// CASE 3: wrap around — allocate at the bottom, in the 2nd vector used as a
// ring buffer, possibly making existing allocations lost.
9821 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9823 VMA_ASSERT(!suballocations1st.empty());
9825 VkDeviceSize resultBaseOffset = 0;
9826 if(!suballocations2nd.empty())
9828 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9829 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9833 VkDeviceSize resultOffset = resultBaseOffset;
9836 if(VMA_DEBUG_MARGIN > 0)
9838 resultOffset += VMA_DEBUG_MARGIN;
9842 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9846 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9848 bool bufferImageGranularityConflict =
false;
9849 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9851 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9852 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9854 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9856 bufferImageGranularityConflict =
true;
9864 if(bufferImageGranularityConflict)
9866 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
9870 pAllocationRequest->itemsToMakeLostCount = 0;
9871 pAllocationRequest->sumItemSize = 0;
9872 size_t index1st = m_1stNullItemsBeginCount;
// Count how many live 1st-vector items overlapping the new range can be made
// lost; bail out (lines hidden) if any of them cannot.
9874 if(canMakeOtherLost)
9876 while(index1st < suballocations1st.size() &&
9877 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
9880 const VmaSuballocation& suballoc = suballocations1st[index1st];
9881 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
9887 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9888 if(suballoc.hAllocation->CanBecomeLost() &&
9889 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9891 ++pAllocationRequest->itemsToMakeLostCount;
9892 pAllocationRequest->sumItemSize += suballoc.size;
// Also lose following items that share a page with the new allocation.
9904 if(bufferImageGranularity > 1)
9906 while(index1st < suballocations1st.size())
9908 const VmaSuballocation& suballoc = suballocations1st[index1st];
9909 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
9911 if(suballoc.hAllocation != VK_NULL_HANDLE)
9914 if(suballoc.hAllocation->CanBecomeLost() &&
9915 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9917 ++pAllocationRequest->itemsToMakeLostCount;
9918 pAllocationRequest->sumItemSize += suballoc.size;
// Final fit check: either all remaining 1st items were consumed, or the new
// range ends before the next surviving 1st item.
9937 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
9938 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
9942 if(bufferImageGranularity > 1)
9944 for(
size_t nextSuballocIndex = index1st;
9945 nextSuballocIndex < suballocations1st.size();
9946 nextSuballocIndex++)
9948 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9949 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9951 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9965 pAllocationRequest->offset = resultOffset;
9966 pAllocationRequest->sumFreeSize =
9967 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
9969 - pAllocationRequest->sumItemSize;
// Actually makes lost the allocations counted in
// pAllocationRequest->itemsToMakeLostCount by a prior CreateAllocationRequest
// with canMakeOtherLost. Walks the 1st vector from the first live item,
// turning each lost allocation into a FREE suballocation, then compacts via
// CleanupAfterFree. Returns true on success (return lines hidden in extract).
9979 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
9980 uint32_t currentFrameIndex,
9981 uint32_t frameInUseCount,
9982 VmaAllocationRequest* pAllocationRequest)
// Nothing to do — trivially succeeds.
9984 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Only meaningful for ring-buffer (or not-yet-used) 2nd vector mode.
9989 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
9991 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9992 size_t index1st = m_1stNullItemsBeginCount;
9993 size_t madeLostCount = 0;
9994 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
9996 VMA_ASSERT(index1st < suballocations1st.size());
9997 VmaSuballocation& suballoc = suballocations1st[index1st];
9998 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10000 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10001 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
// MakeLost can fail if the allocation was used again meanwhile; the failure
// path (returning false) is among the hidden lines.
10002 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10004 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10005 suballoc.hAllocation = VK_NULL_HANDLE;
10006 m_SumFreeSize += suballoc.size;
10007 ++m_1stNullItemsMiddleCount;
// Compact vectors after freeing.
10018 CleanupAfterFree();
// Makes lost every allocation in this block that can become lost given the
// current frame index and frameInUseCount, in both suballocation vectors.
// Returns the number of allocations made lost; compacts if any were freed.
10024 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10026 uint32_t lostAllocationCount = 0;
// 1st vector: only items from the first live one onward can be non-free.
10028 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10029 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10031 VmaSuballocation& suballoc = suballocations1st[i];
10032 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10033 suballoc.hAllocation->CanBecomeLost() &&
10034 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10036 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10037 suballoc.hAllocation = VK_NULL_HANDLE;
10038 ++m_1stNullItemsMiddleCount;
10039 m_SumFreeSize += suballoc.size;
10040 ++lostAllocationCount;
// 2nd vector: scanned in full.
10044 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10045 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10047 VmaSuballocation& suballoc = suballocations2nd[i];
10048 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10049 suballoc.hAllocation->CanBecomeLost() &&
10050 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10052 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10053 suballoc.hAllocation = VK_NULL_HANDLE;
10054 ++m_2ndNullItemsCount;
10055 ++lostAllocationCount;
// NOTE: m_SumFreeSize is presumably updated for 2nd-vector items in a line
// missing from this extract — confirm against the full file.
10059 if(lostAllocationCount)
10061 CleanupAfterFree();
10064 return lostAllocationCount;
// Validates the debug magic values written immediately before and after
// every live allocation in both suballocation vectors. pBlockData is the
// mapped memory of this block. Returns VK_ERROR_VALIDATION_FAILED_EXT on
// the first corrupted margin (the final `return VK_SUCCESS` line is not
// present in this extract).
10067 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10069 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10070 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10072 const VmaSuballocation& suballoc = suballocations1st[i];
10073 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Magic value sits in the VMA_DEBUG_MARGIN bytes preceding the allocation...
10075 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10077 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10078 return VK_ERROR_VALIDATION_FAILED_EXT;
// ...and in the margin immediately after it.
10080 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10082 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10083 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector.
10088 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10089 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10091 const VmaSuballocation& suballoc = suballocations2nd[i];
10092 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10094 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10096 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10097 return VK_ERROR_VALIDATION_FAILED_EXT;
10099 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10101 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10102 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits an allocation previously found by CreateAllocationRequest.
// Upper-address requests (the guarding `if(upperAddress)` and the
// hAllocation parameter line are missing from this extract) go onto the 2nd
// vector as a double stack; otherwise the item is appended to the 1st vector
// or, if it wraps below the 1st vector's start, pushed onto the 2nd vector
// in ring-buffer mode. Always subtracts the size from m_SumFreeSize.
10110 void VmaBlockMetadata_Linear::Alloc(
10111 const VmaAllocationRequest& request,
10112 VmaSuballocationType type,
10113 VkDeviceSize allocSize,
10117 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper address: becomes/extends the top (double) stack.
10121 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10122 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10123 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10124 suballocations2nd.push_back(newSuballoc);
10125 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Lower address path.
10129 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10132 if(suballocations1st.empty())
10134 suballocations1st.push_back(newSuballoc);
// New allocation lies at the end of the 1st vector.
10139 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
10142 VMA_ASSERT(request.offset + allocSize <= GetSize());
10143 suballocations1st.push_back(newSuballoc);
// New allocation wrapped around below the first live 1st-vector item:
// it belongs to the 2nd vector used as ring buffer.
10146 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
10148 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10150 switch(m_2ndVectorMode)
10152 case SECOND_VECTOR_EMPTY:
// First wrapped allocation switches the mode to ring buffer.
10154 VMA_ASSERT(suballocations2nd.empty());
10155 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10157 case SECOND_VECTOR_RING_BUFFER:
10159 VMA_ASSERT(!suballocations2nd.empty());
10161 case SECOND_VECTOR_DOUBLE_STACK:
10162 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10168 suballocations2nd.push_back(newSuballoc);
10172 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10177 m_SumFreeSize -= newSuballoc.size;
10180 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10182 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation starting at `offset`. Fast paths: the first live
// item of the 1st vector, the last item of the 2nd vector (ring buffer or
// double stack), or the last item of the 1st vector. Otherwise performs a
// binary search (by offset) in the 1st vector, then in the 2nd vector.
// Early `return` lines after each fast path are missing from this extract.
10185 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10187 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10188 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10190 if(!suballocations1st.empty())
// Fast path: freeing the oldest (first live) item of the 1st vector.
10193 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10194 if(firstSuballoc.offset == offset)
10196 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10197 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10198 m_SumFreeSize += firstSuballoc.size;
10199 ++m_1stNullItemsBeginCount;
10200 CleanupAfterFree();
// Fast path: freeing the newest item of the 2nd vector.
10206 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10207 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10209 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10210 if(lastSuballoc.offset == offset)
10212 m_SumFreeSize += lastSuballoc.size;
10213 suballocations2nd.pop_back();
10214 CleanupAfterFree();
// Fast path: freeing the newest item of the 1st vector (2nd unused).
10219 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10221 VmaSuballocation& lastSuballoc = suballocations1st.back();
10222 if(lastSuballoc.offset == offset)
10224 m_SumFreeSize += lastSuballoc.size;
10225 suballocations1st.pop_back();
10226 CleanupAfterFree();
// Slow path: binary search in the 1st vector (sorted ascending by offset).
10233 VmaSuballocation refSuballoc;
10234 refSuballoc.offset = offset;
10236 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
10237 suballocations1st.begin() + m_1stNullItemsBeginCount,
10238 suballocations1st.end(),
10240 if(it != suballocations1st.end())
10242 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10243 it->hAllocation = VK_NULL_HANDLE;
10244 ++m_1stNullItemsMiddleCount;
10245 m_SumFreeSize += it->size;
10246 CleanupAfterFree();
// Slow path: binary search in the 2nd vector — ascending order for ring
// buffer, descending for double stack, hence the two comparators.
10251 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
10254 VmaSuballocation refSuballoc;
10255 refSuballoc.offset = offset;
10257 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10258 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
10259 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
10260 if(it != suballocations2nd.end())
10262 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10263 it->hAllocation = VK_NULL_HANDLE;
10264 ++m_2ndNullItemsCount;
10265 m_SumFreeSize += it->size;
10266 CleanupAfterFree();
// Offset matched nothing — caller passed an invalid allocation.
10271 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
10274 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10276 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10277 const size_t suballocCount = AccessSuballocations1st().size();
10278 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping run after every free: drops trailing/leading null items,
// optionally compacts the 1st vector, and when the 1st vector empties while
// the 2nd holds a ring buffer, swaps the two vectors so the ring buffer
// becomes the new 1st vector. The guarding `if(IsEmpty())` before the
// full-reset branch is among the lines missing from this extract.
10281 void VmaBlockMetadata_Linear::CleanupAfterFree()
10283 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10284 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Whole block empty: reset everything to the initial state.
10288 suballocations1st.clear();
10289 suballocations2nd.clear();
10290 m_1stNullItemsBeginCount = 0;
10291 m_1stNullItemsMiddleCount = 0;
10292 m_2ndNullItemsCount = 0;
10293 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10297 const size_t suballoc1stCount = suballocations1st.size();
10298 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10299 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Advance the begin marker over any null items now at the front.
10302 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10303 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10305 ++m_1stNullItemsBeginCount;
10306 --m_1stNullItemsMiddleCount;
// Pop null items from the back of the 1st vector.
10310 while(m_1stNullItemsMiddleCount > 0 &&
10311 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10313 --m_1stNullItemsMiddleCount;
10314 suballocations1st.pop_back();
// Pop null items from the back of the 2nd vector.
10318 while(m_2ndNullItemsCount > 0 &&
10319 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10321 --m_2ndNullItemsCount;
10322 suballocations2nd.pop_back();
// Compact 1st vector in place when the null/live ratio justifies it.
10325 if(ShouldCompact1st())
10327 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10328 size_t srcIndex = m_1stNullItemsBeginCount;
10329 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
// Skip null items (srcIndex increments are among the hidden lines).
10331 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10335 if(dstIndex != srcIndex)
10337 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10341 suballocations1st.resize(nonNullItemCount);
10342 m_1stNullItemsBeginCount = 0;
10343 m_1stNullItemsMiddleCount = 0;
// 2nd vector emptied: drop back to single-vector mode.
10347 if(suballocations2nd.empty())
10349 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items left.
10353 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10355 suballocations1st.clear();
10356 m_1stNullItemsBeginCount = 0;
10358 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// Swap roles: the ring buffer becomes the new 1st vector.
10361 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10362 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10363 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10364 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10366 ++m_1stNullItemsBeginCount;
10367 --m_1stNullItemsMiddleCount;
10369 m_2ndNullItemsCount = 0;
// Flipping this index is what actually swaps which vector is "1st".
10370 m_1stVectorIndex ^= 1;
10375 VMA_HEAVY_ASSERT(Validate());
// Constructor: initializes counters and zeroes the per-level free lists.
// Several member initializers (e.g. m_Root, m_UsableSize, m_LevelCount) are
// among the lines missing from this extract.
10382 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10383 VmaBlockMetadata(hAllocator),
10385 m_AllocationCount(0),
// m_FreeList is a fixed-size array of {front, back} pairs — zero-fill it.
10389 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively frees the whole buddy tree from the root.
10392 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10394 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of `size` bytes. Usable size
// is rounded down to a power of two; the remainder is reported as unusable.
// The level count is derived by halving down to MIN_NODE_SIZE, then a
// single free root node covering the whole usable size is created.
// The `++m_LevelCount` body and the `m_Root = rootNode` assignment are among
// the lines missing from this extract.
10397 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10399 VmaBlockMetadata::Init(size);
// Buddy allocation requires a power-of-two arena.
10401 m_UsableSize = VmaPrevPow2(size);
10402 m_SumFreeSize = m_UsableSize;
10406 while(m_LevelCount < MAX_LEVELS &&
10407 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Root node: level 0, offset 0, free, no parent/buddy.
10412 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10413 rootNode->offset = 0;
10414 rootNode->type = Node::TYPE_FREE;
10415 rootNode->parent = VMA_NULL;
10416 rootNode->buddy = VMA_NULL;
10419 AddToFreeListFront(0, rootNode);
// Debug validation: recursively checks the node tree, then verifies that the
// counted allocations/free size match the cached members and that every
// free-list is a well-formed doubly linked list of FREE nodes within the
// valid level range. The final `return true` is not in this extract.
10422 bool VmaBlockMetadata_Buddy::Validate()
const 10425 ValidationContext ctx;
10426 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10428 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10430 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10431 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Check free-list linkage per level.
10434 for(uint32_t level = 0; level < m_LevelCount; ++level)
10436 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10437 m_FreeList[level].front->free.prev == VMA_NULL);
10439 for(Node* node = m_FreeList[level].front;
10441 node = node->free.next)
10443 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10445 if(node->free.next == VMA_NULL)
10447 VMA_VALIDATE(m_FreeList[level].back == node);
10451 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must be empty.
10457 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10459 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Returns the size of the largest free node: the node size of the shallowest
// level whose free list is non-empty (levels are ordered largest-first).
// The fallback `return 0` for a full block is not in this extract.
10465 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10467 for(uint32_t level = 0; level < m_LevelCount; ++level)
10469 if(m_FreeList[level].front != VMA_NULL)
10471 return LevelToNodeSize(level);
// Fills outInfo by recursively walking the node tree; the padding between
// the usable (power-of-two) size and the real block size is accounted as an
// unused range. The outInfo zero-initialization and the unusable-range
// bookkeeping lines are missing from this extract.
10477 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10479 const VkDeviceSize unusableSize = GetUnusableSize();
10490 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10492 if(unusableSize > 0)
// Accumulates this block's totals into pool-wide statistics. The unusable
// tail (block size minus power-of-two usable size) is counted as unused
// space; the lines updating range counts for it are not in this extract.
10501 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10503 const VkDeviceSize unusableSize = GetUnusableSize();
10505 inoutStats.
size += GetSize();
10506 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10511 if(unusableSize > 0)
// Emits a detailed JSON map of this buddy block: gathers stats first, prints
// the header, recursively prints every node, then reports the unusable tail
// (if any) as one unused range. PrintDetailedMap_Begin's argument lines are
// missing from this extract.
10518 #if VMA_STATS_STRING_ENABLED 10520 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10524 CalcAllocationStatInfo(stat);
10526 PrintDetailedMap_Begin(
10532 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10534 const VkDeviceSize unusableSize = GetUnusableSize();
10535 if(unusableSize > 0)
10537 PrintDetailedMap_UnusedRange(json,
10542 PrintDetailedMap_End(json);
// Finds a free node able to hold allocSize/allocAlignment. For allocation
// types whose granularity matters, both size and alignment are bumped to
// bufferImageGranularity (conservative; avoids per-neighbor checks). Scans
// levels from the target (smallest fitting) upward to level 0 and takes the
// first suitably aligned free node, storing its level in customData for
// Alloc. The `return true/false` lines are not in this extract.
10545 #endif // #if VMA_STATS_STRING_ENABLED 10547 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10548 uint32_t currentFrameIndex,
10549 uint32_t frameInUseCount,
10550 VkDeviceSize bufferImageGranularity,
10551 VkDeviceSize allocSize,
10552 VkDeviceSize allocAlignment,
10554 VmaSuballocationType allocType,
10555 bool canMakeOtherLost,
10557 VmaAllocationRequest* pAllocationRequest)
// Buddy algorithm allocates bottom-up only.
10559 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Unknown/optimal-image types: pad to granularity instead of checking neighbors.
10563 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10564 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10565 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10567 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10568 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10571 if(allocSize > m_UsableSize)
// Search free lists from the tightest fitting level up to the root level.
10576 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10577 for(uint32_t level = targetLevel + 1; level--; )
10579 for(Node* freeNode = m_FreeList[level].front;
10580 freeNode != VMA_NULL;
10581 freeNode = freeNode->free.next)
10583 if(freeNode->offset % allocAlignment == 0)
10585 pAllocationRequest->offset = freeNode->offset;
10586 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10587 pAllocationRequest->sumItemSize = 0;
10588 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember which level the chosen node came from; Alloc reads this back.
10589 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// Lost allocations are not supported by the buddy algorithm, so this only
// succeeds when the request required nothing to be made lost.
10598 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10599 uint32_t currentFrameIndex,
10600 uint32_t frameInUseCount,
10601 VmaAllocationRequest* pAllocationRequest)
10607 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost allocations are not supported by the buddy algorithm; the body
// (presumably returning 0 — not visible in this extract) is a no-op.
10610 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits an allocation found by CreateAllocationRequest: locates the chosen
// free node (level recovered from request.customData), splits it repeatedly
// until it reaches the target level, then marks it as an allocation and
// updates the counters. Some loop-body lines (e.g. ++currLevel) are missing
// from this extract.
10619 void VmaBlockMetadata_Buddy::Alloc(
10620 const VmaAllocationRequest& request,
10621 VmaSuballocationType type,
10622 VkDeviceSize allocSize,
10626 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10627 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the node with the requested offset in that level's free list.
10629 Node* currNode = m_FreeList[currLevel].front;
10630 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10631 while(currNode->offset != request.offset)
10633 currNode = currNode->free.next;
10634 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down until the node size matches the target level.
10638 while(currLevel < targetLevel)
10642 RemoveFromFreeList(currLevel, currNode);
10644 const uint32_t childrenLevel = currLevel + 1;
// Create the two buddy children covering the halves of currNode.
10647 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10648 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10650 leftChild->offset = currNode->offset;
10651 leftChild->type = Node::TYPE_FREE;
10652 leftChild->parent = currNode;
10653 leftChild->buddy = rightChild;
10655 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10656 rightChild->type = Node::TYPE_FREE;
10657 rightChild->parent = currNode;
10658 rightChild->buddy = leftChild;
// The current node becomes a SPLIT node referencing its left child.
10661 currNode->type = Node::TYPE_SPLIT;
10662 currNode->split.leftChild = leftChild;
// Left child is pushed last so it is taken first (keeps addresses low).
10665 AddToFreeListFront(childrenLevel, rightChild);
10666 AddToFreeListFront(childrenLevel, leftChild);
10671 currNode = m_FreeList[currLevel].front;
10680 VMA_ASSERT(currLevel == targetLevel &&
10681 currNode != VMA_NULL &&
10682 currNode->type == Node::TYPE_FREE);
10683 RemoveFromFreeList(currLevel, currNode);
10686 currNode->type = Node::TYPE_ALLOCATION;
10687 currNode->allocation.alloc = hAllocation;
10689 ++m_AllocationCount;
10691 m_SumFreeSize -= allocSize;
10694 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10696 if(node->type == Node::TYPE_SPLIT)
10698 DeleteNode(node->split.leftChild->buddy);
10699 DeleteNode(node->split.leftChild);
10702 vma_delete(GetAllocationCallbacks(), node);
// Recursive invariant check for one buddy-tree node: parent/buddy linkage,
// then per-type checks — FREE nodes add their size to the context totals,
// ALLOCATION nodes add to the allocation count, SPLIT nodes recurse into
// both children and verify their offsets. The `switch(curr->type)`, default
// case and final `return true` are among the lines missing from this extract.
10705 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10707 VMA_VALIDATE(level < m_LevelCount);
10708 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy; buddies must reference each other.
10709 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10710 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10713 case Node::TYPE_FREE:
10715 ctx.calculatedSumFreeSize += levelNodeSize;
10716 ++ctx.calculatedFreeCount;
10718 case Node::TYPE_ALLOCATION:
10719 ++ctx.calculatedAllocationCount;
// Slack between node size and allocation size counts as free.
10720 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10721 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10723 case Node::TYPE_SPLIT:
10725 const uint32_t childrenLevel = level + 1;
10726 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10727 const Node*
const leftChild = curr->split.leftChild;
10728 VMA_VALIDATE(leftChild != VMA_NULL);
10729 VMA_VALIDATE(leftChild->offset == curr->offset);
10730 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10732 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10734 const Node*
const rightChild = leftChild->buddy;
10735 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10736 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10738 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
10749 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10752 uint32_t level = 0;
10753 VkDeviceSize currLevelNodeSize = m_UsableSize;
10754 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
10755 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10758 currLevelNodeSize = nextLevelNodeSize;
10759 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation that lives at `offset`: walks the tree from the root
// down to the owning leaf, marks it free, then merges free buddies upward.
// NOTE(review): this excerpt is missing several structural lines of the
// original (braces, else branches, the loop body that walks `level`/`node`
// upward after merging); code left byte-identical.
10764 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10767 Node* node = m_Root;
10768 VkDeviceSize nodeOffset = 0;
10769 uint32_t level = 0;
10770 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend through split nodes choosing the child that covers `offset`.
10771 while(node->type == Node::TYPE_SPLIT)
10773 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10774 if(offset < nodeOffset + nextLevelSize)
10776 node = node->split.leftChild;
10780 node = node->split.leftChild->buddy;
10781 nodeOffset += nextLevelSize;
10784 levelNodeSize = nextLevelSize;
10787 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
10788 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
10791 --m_AllocationCount;
// NOTE(review): dereferences `alloc` although the assert above tolerates
// VK_NULL_HANDLE — presumably callers always pass a valid handle; verify.
10792 m_SumFreeSize += alloc->GetSize();
10794 node->type = Node::TYPE_FREE;
// Merge with the buddy while it is also free, collapsing pairs into the
// parent and releasing both child nodes.
10797 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
10799 RemoveFromFreeList(level, node->buddy);
10800 Node*
const parent = node->parent;
10802 vma_delete(GetAllocationCallbacks(), node->buddy);
10803 vma_delete(GetAllocationCallbacks(), node);
10804 parent->type = Node::TYPE_FREE;
// Finally publish the (possibly merged) node on its level's free list.
10812 AddToFreeListFront(level, node);
// Recursively accumulates per-node statistics (allocation sizes and unused
// ranges) into `outInfo`, walking the buddy tree.
// NOTE(review): this excerpt is missing the switch header, several statement
// lines inside the FREE/ALLOCATION cases, and braces; code left byte-identical.
10815 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
// Free node: the whole level-sized range is an unused range.
const 10819 case Node::TYPE_FREE:
10825 case Node::TYPE_ALLOCATION:
// Allocated node: record the allocation, plus any tail slack as unused.
10827 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
10833 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
10834 if(unusedRangeSize > 0)
// Split node: recurse into both children with half the node size.
10843 case Node::TYPE_SPLIT:
10845 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10846 const Node*
const leftChild = node->split.leftChild;
10847 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
10848 const Node*
const rightChild = leftChild->buddy;
10849 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
10857 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
10859 VMA_ASSERT(node->type == Node::TYPE_FREE);
10862 Node*
const frontNode = m_FreeList[level].front;
10863 if(frontNode == VMA_NULL)
10865 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
10866 node->free.prev = node->free.next = VMA_NULL;
10867 m_FreeList[level].front = m_FreeList[level].back = node;
10871 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
10872 node->free.prev = VMA_NULL;
10873 node->free.next = frontNode;
10874 frontNode->free.prev = node;
10875 m_FreeList[level].front = node;
10879 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
10881 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
10884 if(node->free.prev == VMA_NULL)
10886 VMA_ASSERT(m_FreeList[level].front == node);
10887 m_FreeList[level].front = node->free.next;
10891 Node*
const prevFreeNode = node->free.prev;
10892 VMA_ASSERT(prevFreeNode->free.next == node);
10893 prevFreeNode->free.next = node->free.next;
10897 if(node->free.next == VMA_NULL)
10899 VMA_ASSERT(m_FreeList[level].back == node);
10900 m_FreeList[level].back = node->free.prev;
10904 Node*
const nextFreeNode = node->free.next;
10905 VMA_ASSERT(nextFreeNode->free.prev == node);
10906 nextFreeNode->free.prev = node->free.prev;
// Recursively emits JSON entries for one buddy-tree node: an unused range for
// a free node, the allocation (plus any tail slack) for an allocated node,
// and both children for a split node.
// NOTE(review): this excerpt is missing the switch header, break statements
// and braces from the original; code left byte-identical.
10910 #if VMA_STATS_STRING_ENABLED 10911 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 10915 case Node::TYPE_FREE:
10916 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
10918 case Node::TYPE_ALLOCATION:
10920 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
// Any slack between the allocation and the node size is reported as unused.
10921 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
10922 if(allocSize < levelNodeSize)
10924 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
10928 case Node::TYPE_SPLIT:
// Recurse into both children with half the node size.
10930 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10931 const Node*
const leftChild = node->split.leftChild;
10932 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
10933 const Node*
const rightChild = leftChild->buddy;
10934 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor only zero-initializes members; real setup happens in Init().
// NOTE(review): some initializer lines of the original (between the ones
// shown) are missing from this excerpt; code left byte-identical.
10941 #endif // #if VMA_STATS_STRING_ENABLED 10947 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
10948 m_pMetadata(VMA_NULL),
10949 m_MemoryTypeIndex(UINT32_MAX),
10951 m_hMemory(VK_NULL_HANDLE),
10953 m_pMappedData(VMA_NULL)
// Takes ownership of an already-allocated VkDeviceMemory and creates the
// metadata object matching the requested sub-allocation algorithm
// (linear / buddy / generic fallback).
// NOTE(review): the switch/case lines selecting between the three metadata
// types are missing from this excerpt; code left byte-identical.
10957 void VmaDeviceMemoryBlock::Init(
10959 uint32_t newMemoryTypeIndex,
10960 VkDeviceMemory newMemory,
10961 VkDeviceSize newSize,
10963 uint32_t algorithm)
// Init must only be called once on a fresh block.
10965 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
10967 m_MemoryTypeIndex = newMemoryTypeIndex;
10969 m_hMemory = newMemory;
10974 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
10977 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
// Default algorithm: generic free-list metadata.
10983 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
10985 m_pMetadata->Init(newSize);
10988 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
10992 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
10994 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
10995 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
10996 m_hMemory = VK_NULL_HANDLE;
10998 vma_delete(allocator, m_pMetadata);
10999 m_pMetadata = VMA_NULL;
11002 bool VmaDeviceMemoryBlock::Validate()
const 11004 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11005 (m_pMetadata->GetSize() != 0));
11007 return m_pMetadata->Validate();
11010 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11012 void* pData =
nullptr;
11013 VkResult res = Map(hAllocator, 1, &pData);
11014 if(res != VK_SUCCESS)
11019 res = m_pMetadata->CheckCorruption(pData);
11021 Unmap(hAllocator, 1);
// Maps the whole block into host memory, reference-counted: if the block is
// already mapped only the counter is bumped and the cached pointer returned.
// `ppData` may be null when the caller only wants to pin the mapping.
// NOTE(review): the early-out for count==0, the remaining vkMapMemory
// arguments, and the return statements are missing from this excerpt;
// code left byte-identical.
11026 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
// Mutex guards m_MapCount / m_pMappedData against concurrent Map/Unmap.
11033 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11034 if(m_MapCount != 0)
// Already mapped: just add references and hand out the cached pointer.
11036 m_MapCount += count;
11037 VMA_ASSERT(m_pMappedData != VMA_NULL);
11038 if(ppData != VMA_NULL)
11040 *ppData = m_pMappedData;
// First mapping: call through the dispatch table.
11046 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11047 hAllocator->m_hDevice,
11053 if(result == VK_SUCCESS)
11055 if(ppData != VMA_NULL)
11057 *ppData = m_pMappedData;
11059 m_MapCount = count;
11065 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11072 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11073 if(m_MapCount >= count)
11075 m_MapCount -= count;
11076 if(m_MapCount == 0)
11078 m_pMappedData = VMA_NULL;
11079 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11084 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
11088 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11090 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11091 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11094 VkResult res = Map(hAllocator, 1, &pData);
11095 if(res != VK_SUCCESS)
11100 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11101 VmaWriteMagicValue(pData, allocOffset + allocSize);
11103 Unmap(hAllocator, 1);
11108 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11110 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11111 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11114 VkResult res = Map(hAllocator, 1, &pData);
11115 if(res != VK_SUCCESS)
11120 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11122 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11124 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11126 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11129 Unmap(hAllocator, 1);
// Binds a caller-supplied VkBuffer to this block's memory at the
// allocation's offset, under the block mutex so binds don't race with
// map/unmap bookkeeping.
// NOTE(review): the parameter list and the buffer argument of the
// vkBindBufferMemory call are missing from this excerpt; code left
// byte-identical.
11134 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
// Only block-type allocations that belong to this block may be bound here.
11139 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11140 hAllocation->GetBlock() ==
this);
// Multithreading: mutex must guard the bind together with map/unmap state.
11142 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11143 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11144 hAllocator->m_hDevice,
11147 hAllocation->GetOffset());
// Image counterpart of BindBufferMemory: binds a caller-supplied VkImage to
// this block's memory at the allocation's offset, under the block mutex.
// NOTE(review): the parameter list and the image argument of the
// vkBindImageMemory call are missing from this excerpt; code left
// byte-identical.
11150 VkResult VmaDeviceMemoryBlock::BindImageMemory(
// Only block-type allocations that belong to this block may be bound here.
11155 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11156 hAllocation->GetBlock() ==
this);
11158 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11159 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11160 hAllocator->m_hDevice,
11163 hAllocation->GetOffset());
11168 memset(&outInfo, 0,
sizeof(outInfo));
11187 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the user's VmaPoolCreateInfo into the embedded
// block vector. A zero blockSize means "use the allocator's preferred size",
// and in that case the block size is not treated as explicit.
// NOTE(review): several constructor arguments/initializers are missing from
// this excerpt; code left byte-identical.
11195 VmaPool_T::VmaPool_T(
11198 VkDeviceSize preferredBlockSize) :
11201 createInfo.memoryTypeIndex,
11202 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11203 createInfo.minBlockCount,
11204 createInfo.maxBlockCount,
11206 createInfo.frameInUseCount,
// explicitBlockSize: true only when the user pinned a specific block size.
11208 createInfo.blockSize != 0,
11214 VmaPool_T::~VmaPool_T()
// Block vector constructor: stores the configuration that governs how device
// memory blocks for one memory type are created, grown, and sub-allocated.
// NOTE(review): a few parameters/initializers (e.g. the custom-pool flag's
// parameter) are missing from this excerpt; code left byte-identical.
11218 #if VMA_STATS_STRING_ENABLED 11220 #endif // #if VMA_STATS_STRING_ENABLED 11222 VmaBlockVector::VmaBlockVector(
11224 uint32_t memoryTypeIndex,
11225 VkDeviceSize preferredBlockSize,
11226 size_t minBlockCount,
11227 size_t maxBlockCount,
11228 VkDeviceSize bufferImageGranularity,
11229 uint32_t frameInUseCount,
11231 bool explicitBlockSize,
11232 uint32_t algorithm) :
11233 m_hAllocator(hAllocator),
11234 m_MemoryTypeIndex(memoryTypeIndex),
11235 m_PreferredBlockSize(preferredBlockSize),
11236 m_MinBlockCount(minBlockCount),
11237 m_MaxBlockCount(maxBlockCount),
11238 m_BufferImageGranularity(bufferImageGranularity),
11239 m_FrameInUseCount(frameInUseCount),
11240 m_IsCustomPool(isCustomPool),
11241 m_ExplicitBlockSize(explicitBlockSize),
11242 m_Algorithm(algorithm),
11243 m_HasEmptyBlock(false),
// The blocks vector uses the allocator's own allocation callbacks.
11244 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
11249 VmaBlockVector::~VmaBlockVector()
11251 for(
size_t i = m_Blocks.size(); i--; )
11253 m_Blocks[i]->Destroy(m_hAllocator);
11254 vma_delete(m_hAllocator, m_Blocks[i]);
11258 VkResult VmaBlockVector::CreateMinBlocks()
11260 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11262 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11263 if(res != VK_SUCCESS)
// Fills `pStats` by summing statistics over all blocks, under a shared
// (read) lock.
// NOTE(review): the lines that zero-initialize the pStats fields before the
// accumulation loop are missing from this excerpt; code left byte-identical.
11271 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11273 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11275 const size_t blockCount = m_Blocks.size();
11284 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11286 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11287 VMA_ASSERT(pBlock);
11288 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each block's metadata contributes its own counts/sizes into *pStats.
11289 pBlock->m_pMetadata->AddPoolStats(*pStats);
11293 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11295 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11296 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11297 (VMA_DEBUG_MARGIN > 0) &&
11298 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
11301 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates `allocationCount` pages under one write lock by calling
// AllocatePage repeatedly. All-or-nothing: on any failure every page that
// was already produced is freed again and the output array is zeroed.
// NOTE(review): several parameters and AllocatePage arguments are missing
// from this excerpt; code left byte-identical.
11303 VkResult VmaBlockVector::Allocate(
11305 uint32_t currentFrameIndex,
11307 VkDeviceSize alignment,
11309 VmaSuballocationType suballocType,
11310 size_t allocationCount,
11314 VkResult res = VK_SUCCESS;
// One write lock covers the whole batch so pages come from a consistent view.
11317 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11318 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11320 res = AllocatePage(
11327 pAllocations + allocIndex);
11328 if(res != VK_SUCCESS)
// Roll back: free the pages created so far, newest first.
11335 if(res != VK_SUCCESS)
11338 while(allocIndex--)
11340 Free(pAllocations[allocIndex]);
11342 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single page (one VmaAllocation) from this block vector.
// Strategy, in order: (1) try the last block, (2) try existing blocks
// (forward or backward depending on strategy), (3) create a new block —
// shrinking the candidate size up to NEW_BLOCK_SIZE_SHIFT_MAX times when the
// preferred size is too large or device memory is exhausted — and finally
// (4) if allowed, retry up to VMA_ALLOCATION_TRY_COUNT times making other
// allocations "lost" to carve out space.
// NOTE(review): many lines of the original (parameters, argument lists,
// braces, else branches, returns) are missing from this excerpt; code left
// byte-identical.
11348 VkResult VmaBlockVector::AllocatePage(
11350 uint32_t currentFrameIndex,
11352 VkDeviceSize alignment,
11354 VmaSuballocationType suballocType,
// A new block may be created only while we are below the max block count.
11361 const bool canCreateNewBlock =
11363 (m_Blocks.size() < m_MaxBlockCount);
11370 canMakeOtherLost =
false;
// Upper-address allocation is only meaningful for the linear algorithm.
11374 if(isUpperAddress &&
11377 return VK_ERROR_FEATURE_NOT_PRESENT;
11391 return VK_ERROR_FEATURE_NOT_PRESENT;
// A request that cannot fit in a full preferred-size block (including debug
// margins) can never succeed.
11395 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11397 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11405 if(!canMakeOtherLost || canCreateNewBlock)
// 1. Fast path: the most recently used (last) block.
11414 if(!m_Blocks.empty())
11416 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11417 VMA_ASSERT(pCurrBlock);
11418 VkResult res = AllocateFromBlock(
11429 if(res == VK_SUCCESS)
11431 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 2a. Scan existing blocks front-to-back (best-fit order).
11441 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11443 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11444 VMA_ASSERT(pCurrBlock);
11445 VkResult res = AllocateFromBlock(
11456 if(res == VK_SUCCESS)
11458 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2b. Alternative strategy: scan back-to-front.
11466 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11468 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11469 VMA_ASSERT(pCurrBlock);
11470 VkResult res = AllocateFromBlock(
11481 if(res == VK_SUCCESS)
11483 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 3. No existing block worked: create a new one.
11491 if(canCreateNewBlock)
11494 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11495 uint32_t newBlockSizeShift = 0;
11496 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic pre-shrink: start small when existing blocks are small and the
// request would still comfortably fit in a half-size block.
11498 if(!m_ExplicitBlockSize)
11501 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11502 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11504 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11505 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11507 newBlockSize = smallerNewBlockSize;
11508 ++newBlockSizeShift;
11517 size_t newBlockIndex = 0;
11518 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On allocation failure keep halving the block size (down to `size`) and
// retrying, unless the user fixed an explicit block size.
11520 if(!m_ExplicitBlockSize)
11522 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11524 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11525 if(smallerNewBlockSize >= size)
11527 newBlockSize = smallerNewBlockSize;
11528 ++newBlockSizeShift;
11529 res = CreateBlock(newBlockSize, &newBlockIndex);
11538 if(res == VK_SUCCESS)
11540 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11541 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11543 res = AllocateFromBlock(
11554 if(res == VK_SUCCESS)
11556 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11562 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 4. Last resort: evict ("lose") other allocations to make room. Bounded
// retries because losing allocations can race with concurrent users.
11569 if(canMakeOtherLost)
11571 uint32_t tryIndex = 0;
11572 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11574 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11575 VmaAllocationRequest bestRequest = {};
11576 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Search all blocks for the cheapest request (fewest/cheapest losses).
11582 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11584 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11585 VMA_ASSERT(pCurrBlock);
11586 VmaAllocationRequest currRequest = {};
11587 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11590 m_BufferImageGranularity,
11599 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11600 if(pBestRequestBlock == VMA_NULL ||
11601 currRequestCost < bestRequestCost)
11603 pBestRequestBlock = pCurrBlock;
11604 bestRequest = currRequest;
11605 bestRequestCost = currRequestCost;
// Cost 0 means nothing has to be lost — cannot do better, stop searching.
11607 if(bestRequestCost == 0)
// Alternative strategy: same search, scanning back-to-front.
11618 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11620 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11621 VMA_ASSERT(pCurrBlock);
11622 VmaAllocationRequest currRequest = {};
11623 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11626 m_BufferImageGranularity,
11635 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11636 if(pBestRequestBlock == VMA_NULL ||
11637 currRequestCost < bestRequestCost ||
11640 pBestRequestBlock = pCurrBlock;
11641 bestRequest = currRequest;
11642 bestRequestCost = currRequestCost;
11644 if(bestRequestCost == 0 ||
11654 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations require the block mapped up front.
11658 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11659 if(res != VK_SUCCESS)
// If losing the required allocations succeeds, commit the allocation.
11665 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11671 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11673 m_HasEmptyBlock =
false;
11676 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
11677 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
11678 (*pAllocation)->InitBlockAllocation(
11681 bestRequest.offset,
11687 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11688 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
11689 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11690 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11692 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11694 if(IsCorruptionDetectionEnabled())
11696 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11697 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Retries exhausted: another thread kept winning the race for the space.
11712 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11714 return VK_ERROR_TOO_MANY_OBJECTS;
11718 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns one allocation to its block. Keeps at most one empty block alive
// (m_HasEmptyBlock); a second empty block is destroyed — but outside the
// mutex, since destruction calls into Vulkan.
// NOTE(review): several lines of the original (parameters, braces, else
// branches) are missing from this excerpt; code left byte-identical.
11721 void VmaBlockVector::Free(
11724 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the write lock; the actual block destruction happens after it.
11728 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11730 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Check debug margins around the allocation before releasing it.
11732 if(IsCorruptionDetectionEnabled())
11734 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11735 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Persistently-mapped allocations hold one map reference on the block.
11738 if(hAllocation->IsPersistentMap())
11740 pBlock->Unmap(m_hAllocator, 1);
11743 pBlock->m_pMetadata->Free(hAllocation);
11744 VMA_HEAVY_ASSERT(pBlock->Validate());
11746 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Keep one empty block around as a cache; schedule extras for deletion.
11749 if(pBlock->m_pMetadata->IsEmpty())
11752 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11754 pBlockToDelete = pBlock;
11760 m_HasEmptyBlock =
true;
// The block just freed-into is not empty, but the cached empty block (the
// last one, thanks to sorting) may now be redundant.
11765 else if(m_HasEmptyBlock)
11767 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11768 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11770 pBlockToDelete = pLastBlock;
11771 m_Blocks.pop_back();
11772 m_HasEmptyBlock =
false;
11776 IncrementallySortBlocks();
// Destruction of the VkDeviceMemory is deliberately done outside the lock.
11781 if(pBlockToDelete != VMA_NULL)
11783 VMA_DEBUG_LOG(
" Deleted empty allocation");
11784 pBlockToDelete->Destroy(m_hAllocator);
11785 vma_delete(m_hAllocator, pBlockToDelete);
11789 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 11791 VkDeviceSize result = 0;
11792 for(
size_t i = m_Blocks.size(); i--; )
11794 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
11795 if(result >= m_PreferredBlockSize)
11803 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
11805 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11807 if(m_Blocks[blockIndex] == pBlock)
11809 VmaVectorRemove(m_Blocks, blockIndex);
11816 void VmaBlockVector::IncrementallySortBlocks()
11821 for(
size_t i = 1; i < m_Blocks.size(); ++i)
11823 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
11825 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts one allocation from a specific block: builds an allocation
// request, maps the block for persistently-mapped allocations, commits the
// request into the metadata, and initializes the VmaAllocation object.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when the block has no suitable space.
// NOTE(review): parameters, argument lists and some braces/returns are
// missing from this excerpt; code left byte-identical.
11832 VkResult VmaBlockVector::AllocateFromBlock(
11833 VmaDeviceMemoryBlock* pBlock,
11835 uint32_t currentFrameIndex,
11837 VkDeviceSize alignment,
11840 VmaSuballocationType suballocType,
11849 VmaAllocationRequest currRequest = {};
11850 if(pBlock->m_pMetadata->CreateAllocationRequest(
11853 m_BufferImageGranularity,
// This path never evicts other allocations.
11863 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations pin one map reference on the block.
11867 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
11868 if(res != VK_SUCCESS)
// The block is about to receive an allocation, so it is no longer empty.
11875 if(pBlock->m_pMetadata->IsEmpty())
11877 m_HasEmptyBlock =
false;
11880 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
11881 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
11882 (*pAllocation)->InitBlockAllocation(
11885 currRequest.offset,
11891 VMA_HEAVY_ASSERT(pBlock->Validate());
11892 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
// Optional debug fill and corruption-margin write.
11893 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11895 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11897 if(IsCorruptionDetectionEnabled())
11899 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
11900 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
11904 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of `blockSize`, wraps it in a
// VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally reports its
// index via `pNewBlockIndex`.
// NOTE(review): the error-return after AllocateVulkanMemory and most of the
// pBlock->Init argument list are missing from this excerpt; code left
// byte-identical.
11907 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
11909 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
11910 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
11911 allocInfo.allocationSize = blockSize;
11912 VkDeviceMemory mem = VK_NULL_HANDLE;
11913 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Wrap the raw device memory in a block object and initialize its metadata.
11922 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
11927 allocInfo.allocationSize,
11931 m_Blocks.push_back(pBlock);
11932 if(pNewBlockIndex != VMA_NULL)
11934 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU with memcpy: maps every block
// that participates in a move, invalidates source ranges on non-coherent
// memory, copies each region, flushes destination ranges, then unmaps blocks
// that were mapped only for this operation.
// NOTE(review): struct/enum declarations, braces and some statements are
// missing from this excerpt; code left byte-identical.
11940 void VmaBlockVector::ApplyDefragmentationMovesCpu(
11941 class VmaBlockVectorDefragmentationContext* pDefragCtx,
11942 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
11944 const size_t blockCount = m_Blocks.size();
11945 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
11949 BLOCK_FLAG_USED = 0x00000001,
11950 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
// Per-block scratch info: flags plus the mapped pointer.
11958 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
11959 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
11960 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Mark every block touched by at least one move.
11963 const size_t moveCount = moves.size();
11964 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11966 const VmaDefragmentationMove& move = moves[moveIndex];
11967 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
11968 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
11971 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Map all used blocks; remember which ones we mapped so we can unmap them.
11974 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
11976 BlockInfo& currBlockInfo = blockInfo[blockIndex];
11977 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11978 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
11980 currBlockInfo.pMappedData = pBlock->GetMappedData();
// Map only blocks that are not already persistently mapped.
11982 if(currBlockInfo.pMappedData == VMA_NULL)
11984 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
11985 if(pDefragCtx->res == VK_SUCCESS)
11987 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Perform the actual copies.
11994 if(pDefragCtx->res == VK_SUCCESS)
11996 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
11997 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
11999 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12001 const VmaDefragmentationMove& move = moves[moveIndex];
12003 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12004 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12006 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Non-coherent memory: invalidate the source range (aligned to
// nonCoherentAtomSize and clamped to the block size) before reading.
12011 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12012 memRange.memory = pSrcBlock->GetDeviceMemory();
12013 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12014 memRange.size = VMA_MIN(
12015 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12016 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12017 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// The copy itself.
12022 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12023 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12024 static_cast<size_t>(move.size));
// Re-stamp the corruption margins around the moved allocation.
12026 if(IsCorruptionDetectionEnabled())
12028 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12029 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Non-coherent memory: flush the written destination range.
12035 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12036 memRange.memory = pDstBlock->GetDeviceMemory();
12037 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12038 memRange.size = VMA_MIN(
12039 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12040 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12041 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Unmap only the blocks this function mapped, in reverse order.
12048 for(
size_t blockIndex = blockCount; blockIndex--; )
12050 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12051 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12053 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12054 pBlock->Unmap(m_hAllocator, 1);
// Records defragmentation moves into a command buffer as buffer-to-buffer
// copies: creates a temporary VkBuffer spanning each participating block,
// binds it to the block's memory, and records one vkCmdCopyBuffer region per
// move. The context ends up VK_NOT_READY when copies still need to execute.
// NOTE(review): braces, the VkBufferCopy field initializers and some
// statements are missing from this excerpt; code left byte-identical.
12059 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12060 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12061 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12062 VkCommandBuffer commandBuffer)
12064 const size_t blockCount = m_Blocks.size();
12066 pDefragCtx->blockContexts.resize(blockCount);
12067 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Mark every block touched by at least one move.
12070 const size_t moveCount = moves.size();
12071 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12073 const VmaDefragmentationMove& move = moves[moveIndex];
12074 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12075 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12078 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Create one whole-block transfer buffer per used block and bind it.
12082 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
12083 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
12084 VK_BUFFER_USAGE_TRANSFER_DST_BIT;
12086 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12088 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12089 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12090 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12092 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12093 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12094 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12095 if(pDefragCtx->res == VK_SUCCESS)
12097 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12098 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Record one copy region per move.
12105 if(pDefragCtx->res == VK_SUCCESS)
12107 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12108 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12110 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12112 const VmaDefragmentationMove& move = moves[moveIndex];
12114 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12115 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12117 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12119 VkBufferCopy region = {
12123 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12124 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
// Copies were recorded but not yet executed by the GPU.
12129 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12131 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): the enclosing function signature is missing from this
// excerpt — presumably this is the interior of
// VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats*); verify against
// the full file. It destroys all empty blocks above the configured minimum
// (reporting freed bytes/blocks to the stats) and recomputes m_HasEmptyBlock.
12137 m_HasEmptyBlock =
false;
// Scan backwards so VmaVectorRemove does not disturb unvisited indices.
12138 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12140 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12141 if(pBlock->m_pMetadata->IsEmpty())
12143 if(m_Blocks.size() > m_MinBlockCount)
12145 if(pDefragmentationStats != VMA_NULL)
12148 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12151 VmaVectorRemove(m_Blocks, blockIndex)
12152 pBlock->Destroy(m_hAllocator);
12153 vma_delete(m_hAllocator, pBlock);
// An empty block retained (at or below the minimum count) is remembered.
12157 m_HasEmptyBlock =
true;
// Writes this block vector's configuration and every block's detailed map as
// a JSON object, under a shared (read) lock. Custom pools and default
// per-memory-type vectors emit slightly different field sets.
// NOTE(review): the if/else around the custom-pool branch, EndString/EndObject
// calls and braces are missing from this excerpt; code left byte-identical.
12163 #if VMA_STATS_STRING_ENABLED 12165 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12167 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12169 json.BeginObject();
// Custom-pool branch: report the pool's fixed configuration.
12173 json.WriteString(
"MemoryTypeIndex");
12174 json.WriteNumber(m_MemoryTypeIndex);
12176 json.WriteString(
"BlockSize");
12177 json.WriteNumber(m_PreferredBlockSize);
12179 json.WriteString(
"BlockCount");
12180 json.BeginObject(
true);
12181 if(m_MinBlockCount > 0)
12183 json.WriteString(
"Min");
12184 json.WriteNumber((uint64_t)m_MinBlockCount);
12186 if(m_MaxBlockCount < SIZE_MAX)
12188 json.WriteString(
"Max");
12189 json.WriteNumber((uint64_t)m_MaxBlockCount);
12191 json.WriteString(
"Cur");
12192 json.WriteNumber((uint64_t)m_Blocks.size());
12195 if(m_FrameInUseCount > 0)
12197 json.WriteString(
"FrameInUseCount");
12198 json.WriteNumber(m_FrameInUseCount);
12201 if(m_Algorithm != 0)
12203 json.WriteString(
"Algorithm");
12204 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default (non-custom) branch: only the preferred block size is reported.
12209 json.WriteString(
"PreferredBlockSize");
12210 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps, keyed by block id.
12213 json.WriteString(
"Blocks");
12214 json.BeginObject();
12215 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12217 json.BeginString();
12218 json.ContinueString(m_Blocks[i]->GetId());
12221 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation round: decides between the CPU path (memcpy on
// host-visible memory) and the GPU path (vkCmdCopyBuffer into the supplied
// command buffer), asks the algorithm for a list of moves within the given
// byte/allocation budgets, updates statistics and remaining budgets, and
// applies the moves. Takes the write mutex for the whole operation; it is
// released later in DefragmentationEnd.
// NOTE(review): several lines (budget-condition tails, braces) are missing
// from this excerpt; code left byte-identical.
12228 #endif // #if VMA_STATS_STRING_ENABLED 12230 void VmaBlockVector::Defragment(
12231 class VmaBlockVectorDefragmentationContext* pCtx,
12233 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12234 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12235 VkCommandBuffer commandBuffer)
12237 pCtx->res = VK_SUCCESS;
12239 const VkMemoryPropertyFlags memPropFlags =
12240 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12241 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12242 const bool isHostCoherent = (memPropFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;
12244 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
// GPU defragmentation is skipped under corruption detection on mappable
// memory, because the magic margins would have to be rewritten on the CPU.
12246 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12247 (VMA_DEBUG_DETECT_CORRUPTION == 0 || !(isHostVisible && isHostCoherent));
12250 if(canDefragmentOnCpu || canDefragmentOnGpu)
12252 bool defragmentOnGpu;
// Only one option available: take it.
12254 if(canDefragmentOnGpu != canDefragmentOnCpu)
12256 defragmentOnGpu = canDefragmentOnGpu;
// Both available: prefer GPU for device-local memory or integrated GPUs.
12261 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12262 m_hAllocator->IsIntegratedGpu();
12265 bool overlappingMoveSupported = !defragmentOnGpu;
// Lock is intentionally left held; DefragmentationEnd unlocks it.
12267 if(m_hAllocator->m_UseMutex)
12269 m_Mutex.LockWrite();
12270 pCtx->mutexLocked =
true;
12273 pCtx->Begin(overlappingMoveSupported);
12277 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12278 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12279 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12280 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
12281 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Accumulate statistics and shrink the remaining budgets.
12284 if(pStats != VMA_NULL)
12286 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12287 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12288 pStats->bytesMoved += bytesMoved;
12289 pStats->allocationsMoved += allocationsMoved;
12290 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12291 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12292 if(defragmentOnGpu)
12294 maxGpuBytesToMove -= bytesMoved;
12295 maxGpuAllocationsToMove -= allocationsMoved;
12299 maxCpuBytesToMove -= bytesMoved;
12300 maxCpuAllocationsToMove -= allocationsMoved;
12304 if(pCtx->res >= VK_SUCCESS)
12306 if(defragmentOnGpu)
12308 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12312 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finishes a defragmentation round started by Defragment(): destroys the
// temporary buffers created for the GPU path, frees blocks that became
// empty, and releases the write mutex if Defragment() acquired it.
// NOTE(review): a `pStats` parameter line and some braces are missing from
// this excerpt; code left byte-identical.
12318 void VmaBlockVector::DefragmentationEnd(
12319 class VmaBlockVectorDefragmentationContext* pCtx,
// Destroy temporary GPU-defragmentation buffers, in reverse order.
12323 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12325 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12326 if(blockCtx.hBuffer)
12328 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12329 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
// Only on success: release blocks that defragmentation emptied.
12333 if(pCtx->res >= VK_SUCCESS)
12335 FreeEmptyBlocks(pStats);
// Pair for the LockWrite done in Defragment().
12338 if(pCtx->mutexLocked)
12340 VMA_ASSERT(m_hAllocator->m_UseMutex);
12341 m_Mutex.UnlockWrite();
// Sums GetAllocationCount() over all blocks' metadata.
// (Accumulator declaration and return are elided in this extraction.)
12345 size_t VmaBlockVector::CalcAllocationCount()
const 12348 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12350 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Reports whether a buffer/image granularity conflict could occur in any
// block. Granularity of 1 means conflicts are impossible (early exit visible
// below; its return value is elided in this extraction).
12355 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12357 if(m_BufferImageGranularity == 1)
// lastSuballocType carries the suballocation type across block boundaries
// so adjacency between consecutive blocks' suballocations is considered.
12361 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12362 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12364 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
// Only valid for the generic algorithm — metadata is downcast below.
12365 VMA_ASSERT(m_Algorithm == 0);
12366 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12367 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks allocations in all blocks as lost for the given frame and optionally
// reports how many were lost via pLostAllocationCount.
12375 void VmaBlockVector::MakePoolAllocationsLost(
12376 uint32_t currentFrameIndex,
12377 size_t* pLostAllocationCount)
// Exclusive lock: MakeAllocationsLost mutates block metadata.
12379 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12380 size_t lostAllocationCount = 0;
12381 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12383 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12384 VMA_ASSERT(pBlock);
12385 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Output parameter is optional — only written when non-null.
12387 if(pLostAllocationCount != VMA_NULL)
12389 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection margins in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is disabled.
// (The per-block failure return and final success return are elided here.)
12393 VkResult VmaBlockVector::CheckCorruption()
12395 if(!IsCorruptionDetectionEnabled())
12397 return VK_ERROR_FEATURE_NOT_PRESENT;
// Shared lock suffices: checking does not mutate metadata.
12400 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12401 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12403 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12404 VMA_ASSERT(pBlock);
12405 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12406 if(res != VK_SUCCESS)
// Accumulates this block vector's per-block statistics into pStats,
// attributing them to the total, this memory type, and its heap.
12414 void VmaBlockVector::AddStats(
VmaStats* pStats)
12416 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12417 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
// Read lock: statistics gathering does not mutate the blocks.
12419 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12421 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12423 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12424 VMA_ASSERT(pBlock);
12425 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each block's stat info is folded into all three aggregation levels.
12427 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12428 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12429 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12430 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: snapshots every block of the block vector into heap-allocated
// BlockInfo records (freed in the destructor) and sorts them by pointer so
// AddAllocation can binary-search by block.
12437 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12439 VmaBlockVector* pBlockVector,
12440 uint32_t currentFrameIndex,
12441 bool overlappingMoveSupported) :
12442 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12443 m_AllAllocations(false),
12444 m_AllocationCount(0),
12446 m_AllocationsMoved(0),
12447 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12450 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12451 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12453 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12454 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12455 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12456 m_Blocks.push_back(pBlockInfo);
// Sorted by block pointer — required by the binary search in AddAllocation.
12460 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Destructor: releases the BlockInfo records allocated in the constructor.
12463 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12465 for(
size_t i = m_Blocks.size(); i--; )
12467 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a candidate for defragmentation. Lost
// allocations are skipped; the allocation is filed under its owning block,
// found via binary search over the pointer-sorted m_Blocks.
12471 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12474 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12476 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12477 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12478 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12480 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12481 (*it)->m_Allocations.push_back(allocInfo);
// NOTE(review): counter increments even on early-out paths visible above —
// surrounding elided lines may gate this; confirm against full source.
12488 ++m_AllocationCount;
// One round of the generic defragmentation: walks allocations from the last
// block backwards and tries to re-place each into an earlier block (or
// earlier offset), recording each successful relocation in `moves`, bounded
// by maxBytesToMove / maxAllocationsToMove.
// NOTE(review): several control-flow lines (braces, breaks, returns) are
// elided in this extraction; comments cover only what is visible.
12492 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12493 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12494 VkDeviceSize maxBytesToMove,
12495 uint32_t maxAllocationsToMove)
12497 if(m_Blocks.empty())
12510 size_t srcBlockMinIndex = 0;
// Start from the last block / last allocation; SIZE_MAX triggers the
// adjustment loop below to select the block's last allocation.
12523 size_t srcBlockIndex = m_Blocks.size() - 1;
12524 size_t srcAllocIndex = SIZE_MAX;
12530 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12532 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12535 if(srcBlockIndex == srcBlockMinIndex)
12542 srcAllocIndex = SIZE_MAX;
12547 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12551 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12552 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12554 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12555 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12556 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12557 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destinations from the first block up to (and including) the source.
12560 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12562 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12563 VmaAllocationRequest dstAllocRequest;
12564 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12565 m_CurrentFrameIndex,
12566 m_pBlockVector->GetFrameInUseCount(),
12567 m_pBlockVector->GetBufferImageGranularity(),
12574 &dstAllocRequest) &&
// MoveMakesSense rejects moves that would not improve compaction.
12576 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12578 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop when either budget (count or bytes) would be exceeded.
12581 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12582 (m_BytesMoved + size > maxBytesToMove))
12587 VmaDefragmentationMove move;
12588 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12589 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12590 move.srcOffset = srcOffset;
12591 move.dstOffset = dstAllocRequest.offset;
12593 moves.push_back(move);
// Commit the move in metadata: allocate at destination, free at source,
// and repoint the allocation at its new block/offset.
12595 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12600 allocInfo.m_hAllocation);
12601 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12603 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12605 if(allocInfo.m_pChanged != VMA_NULL)
12607 *allocInfo.m_pChanged = VK_TRUE;
12610 ++m_AllocationsMoved;
12611 m_BytesMoved += size;
12613 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous allocation / previous block.
12621 if(srcAllocIndex > 0)
12627 if(srcBlockIndex > 0)
12630 srcAllocIndex = SIZE_MAX;
// Counts blocks flagged as containing non-movable allocations.
// (Accumulator declaration and return are elided in this extraction.)
12640 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12643 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12645 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: prepares per-block allocation lists
// (all allocations when m_AllAllocations is set), sorts blocks by
// destination preference, then runs up to `roundCount` DefragmentRound
// passes within the byte/count budgets.
12653 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12654 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12655 VkDeviceSize maxBytesToMove,
12656 uint32_t maxAllocationsToMove)
// Nothing registered and not in "all allocations" mode: nothing to do.
12658 if(!m_AllAllocations && m_AllocationCount == 0)
12663 const size_t blockCount = m_Blocks.size();
12664 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12666 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
12668 if(m_AllAllocations)
// In all-allocations mode, enumerate every used suballocation of the block.
12670 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12671 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12672 it != pMetadata->m_Suballocations.end();
12675 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12677 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12678 pBlockInfo->m_Allocations.push_back(allocInfo);
12683 pBlockInfo->CalcHasNonMovableAllocations();
// Sources are consumed from the back, so sort descending by offset.
12687 pBlockInfo->SortAllocationsByOffsetDescending();
12693 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
12696 const uint32_t roundCount = 2;
12699 VkResult result = VK_SUCCESS;
12700 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12702 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
// Decides whether relocating an allocation improves compaction: moves to an
// earlier block, or within the same block to a lower offset, are worthwhile.
// (The return statements inside each branch are elided in this extraction.)
12708 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12709 size_t dstBlockIndex, VkDeviceSize dstOffset,
12710 size_t srcBlockIndex, VkDeviceSize srcOffset)
12712 if(dstBlockIndex < srcBlockIndex)
12716 if(dstBlockIndex > srcBlockIndex)
12720 if(dstOffset < srcOffset)
// Constructor of the fast (compacting) algorithm. The fast path assumes no
// debug margin between allocations — asserted below.
12730 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12732 VmaBlockVector* pBlockVector,
12733 uint32_t currentFrameIndex,
12734 bool overlappingMoveSupported) :
12735 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12736 m_OverlappingMoveSupported(overlappingMoveSupported),
12737 m_AllocationCount(0),
12738 m_AllAllocations(false),
12740 m_AllocationsMoved(0),
12741 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
// Fast algorithm is only valid without debug margins.
12743 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Trivial destructor — m_BlockInfos cleans itself up.
12747 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast compaction: sorts blocks by free size, then sweeps allocations
// front-to-back, sliding each to the lowest available offset (same block,
// an earlier block, or a hole recorded in the free-space database),
// recording every relocation in `moves`.
// NOTE(review): this extraction elides many structural lines (braces,
// continue/break, initializer tails such as the `end` flag declaration);
// comments describe only the visible statements.
12751 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12752 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12753 VkDeviceSize maxBytesToMove,
12754 uint32_t maxAllocationsToMove)
12756 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12758 const size_t blockCount = m_pBlockVector->GetBlockCount();
12759 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Strip free suballocations from metadata before the sweep.
12764 PreprocessMetadata();
12768 m_BlockInfos.resize(blockCount);
12769 for(
size_t i = 0; i < blockCount; ++i)
12771 m_BlockInfos[i].origBlockIndex = i;
// Blocks ordered by ascending free size: fullest blocks become destinations.
12774 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12775 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12776 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Records holes skipped during the sweep for later reuse.
12781 FreeSpaceDatabase freeSpaceDb;
12783 size_t dstBlockInfoIndex = 0;
12784 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12785 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12786 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12787 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12788 VkDeviceSize dstOffset = 0;
12791 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12793 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12794 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12795 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12796 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12797 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12799 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12800 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12801 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Budget check: stop the sweep once count or byte limit is reached.
12802 if(m_AllocationsMoved == maxAllocationsToMove ||
12803 m_BytesMoved + srcAllocSize > maxBytesToMove)
12808 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// Case 1: a previously-registered hole can host this allocation.
12811 size_t freeSpaceInfoIndex;
12812 VkDeviceSize dstAllocOffset;
12813 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
12814 freeSpaceInfoIndex, dstAllocOffset))
12816 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
12817 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
12818 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
12819 VkDeviceSize freeSpaceBlockSize = pFreeSpaceMetadata->GetSize();
// Case 1a: hole is in the same block — move within the block.
12822 if(freeSpaceInfoIndex == srcBlockInfoIndex)
12824 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12828 VmaSuballocation suballoc = *srcSuballocIt;
12829 suballoc.offset = dstAllocOffset;
12830 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
12831 m_BytesMoved += srcAllocSize;
12832 ++m_AllocationsMoved;
12834 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12836 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12837 srcSuballocIt = nextSuballocIt;
12839 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12841 VmaDefragmentationMove move = {
12842 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12843 srcAllocOffset, dstAllocOffset,
12845 moves.push_back(move);
// Case 1b: hole is in an earlier block — move across blocks.
12852 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
12854 VmaSuballocation suballoc = *srcSuballocIt;
12855 suballoc.offset = dstAllocOffset;
12856 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
12857 m_BytesMoved += srcAllocSize;
12858 ++m_AllocationsMoved;
12860 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12862 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12863 srcSuballocIt = nextSuballocIt;
12865 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12867 VmaDefragmentationMove move = {
12868 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12869 srcAllocOffset, dstAllocOffset,
12871 moves.push_back(move);
// Case 2: no stored hole — place at the current write cursor.
12876 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance the destination block while the allocation does not fit,
// registering the leftover tail of each skipped block as a hole.
12879 while(dstBlockInfoIndex < srcBlockInfoIndex &&
12880 dstAllocOffset + srcAllocSize > dstBlockSize)
12883 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
12885 ++dstBlockInfoIndex;
12886 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12887 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12888 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12889 dstBlockSize = pDstMetadata->GetSize();
12891 dstAllocOffset = 0;
// Case 2a: destination is the same block — possibly overlapping move.
12895 if(dstBlockInfoIndex == srcBlockInfoIndex)
12897 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12899 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
12901 bool skipOver = overlap;
12902 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: skip when the gain (offset delta) is under 1/64 of the size.
12906 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
12911 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
12913 dstOffset = srcAllocOffset + srcAllocSize;
12919 srcSuballocIt->offset = dstAllocOffset;
12920 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
12921 dstOffset = dstAllocOffset + srcAllocSize;
12922 m_BytesMoved += srcAllocSize;
12923 ++m_AllocationsMoved;
12925 VmaDefragmentationMove move = {
12926 srcOrigBlockIndex, dstOrigBlockIndex,
12927 srcAllocOffset, dstAllocOffset,
12929 moves.push_back(move);
// Case 2b: destination is an earlier block.
12937 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
12938 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
12940 VmaSuballocation suballoc = *srcSuballocIt;
12941 suballoc.offset = dstAllocOffset;
12942 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
12943 dstOffset = dstAllocOffset + srcAllocSize;
12944 m_BytesMoved += srcAllocSize;
12945 ++m_AllocationsMoved;
12947 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12949 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12950 srcSuballocIt = nextSuballocIt;
12952 pDstMetadata->m_Suballocations.push_back(suballoc);
12954 VmaDefragmentationMove move = {
12955 srcOrigBlockIndex, dstOrigBlockIndex,
12956 srcAllocOffset, dstAllocOffset,
12958 moves.push_back(move);
12964 m_BlockInfos.clear();
// Rebuild free lists / free suballocations stripped by PreprocessMetadata.
12966 PostprocessMetadata();
// Prepares every block's metadata for the fast sweep: resets free counters,
// clears the free-by-size index, and erases FREE suballocations from the
// list so only used entries remain (PostprocessMetadata rebuilds them).
12971 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
12973 const size_t blockCount = m_pBlockVector->GetBlockCount();
12974 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12976 VmaBlockMetadata_Generic*
const pMetadata =
12977 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
12978 pMetadata->m_FreeCount = 0;
12979 pMetadata->m_SumFreeSize = pMetadata->GetSize();
12980 pMetadata->m_FreeSuballocationsBySize.clear();
12981 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
12982 it != pMetadata->m_Suballocations.end(); )
12984 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
// Save the successor before erasing so iteration can continue.
12986 VmaSuballocationList::iterator nextIt = it;
12988 pMetadata->m_Suballocations.erase(it);
// Rebuilds each block's metadata after the sweep: reinserts FREE
// suballocations for every gap between used entries (and before/after
// them), recomputes free counters, and re-sorts the free-by-size index.
12999 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13001 const size_t blockCount = m_pBlockVector->GetBlockCount();
13002 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13004 VmaBlockMetadata_Generic*
const pMetadata =
13005 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13006 const VkDeviceSize blockSize = pMetadata->GetSize();
// Block emptied entirely: one FREE suballocation spanning the whole block.
13009 if(pMetadata->m_Suballocations.empty())
13011 pMetadata->m_FreeCount = 1;
13013 VmaSuballocation suballoc = {
13017 VMA_SUBALLOCATION_TYPE_FREE };
13018 pMetadata->m_Suballocations.push_back(suballoc);
13019 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Otherwise walk used suballocations in offset order, inserting a FREE
// entry for each gap found before the current entry.
13024 VkDeviceSize offset = 0;
13025 VmaSuballocationList::iterator it;
13026 for(it = pMetadata->m_Suballocations.begin();
13027 it != pMetadata->m_Suballocations.end();
// After PreprocessMetadata, only used entries remain, in ascending offset.
13030 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13031 VMA_ASSERT(it->offset >= offset);
13034 if(it->offset > offset)
13036 ++pMetadata->m_FreeCount;
13037 const VkDeviceSize freeSize = it->offset - offset;
13038 VmaSuballocation suballoc = {
13042 VMA_SUBALLOCATION_TYPE_FREE };
13043 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// Only "large enough" free ranges go into the by-size index.
13044 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13046 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13050 pMetadata->m_SumFreeSize -= it->size;
13051 offset = it->offset + it->size;
// Trailing gap at the end of the block.
13055 if(offset < blockSize)
13057 ++pMetadata->m_FreeCount;
13058 const VkDeviceSize freeSize = blockSize - offset;
13059 VmaSuballocation suballoc = {
13063 VMA_SUBALLOCATION_TYPE_FREE };
13064 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13065 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): strict '>' here vs '>=' for the preceding-gap case above —
// looks inconsistent; confirm against upstream before changing.
13066 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13068 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
13073 pMetadata->m_FreeSuballocationsBySize.begin(),
13074 pMetadata->m_FreeSuballocationsBySize.end(),
13075 VmaSuballocationItemSizeLess());
13078 VMA_HEAVY_ASSERT(pMetadata->Validate());
// Inserts `suballoc` into the metadata's suballocation list, keeping the
// list sorted by offset (linear scan for the insertion point).
13082 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13085 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13086 while(it != pMetadata->m_Suballocations.end())
13088 if(it->offset < suballoc.offset)
13093 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context. Holds the chosen algorithm
// (created lazily in Begin()), registered allocations, and per-block
// temporary state; m_hCustomPool is null for default-pool vectors.
13099 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13102 VmaBlockVector* pBlockVector,
13103 uint32_t currFrameIndex,
13104 uint32_t algorithmFlags) :
13106 mutexLocked(false),
13107 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13108 m_hAllocator(hAllocator),
13109 m_hCustomPool(hCustomPool),
13110 m_pBlockVector(pBlockVector),
13111 m_CurrFrameIndex(currFrameIndex),
13112 m_AlgorithmFlags(algorithmFlags),
13113 m_pAlgorithm(VMA_NULL),
13114 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13115 m_AllAllocations(false)
// Destroys the algorithm instance created in Begin() (safe on null).
13119 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13121 vma_delete(m_hAllocator, m_pAlgorithm);
// Queues one allocation (and its optional "changed" output flag) to be
// forwarded to the algorithm when Begin() runs.
13124 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13126 AllocInfo info = { hAlloc, pChanged };
13127 m_Allocations.push_back(info);
// Chooses and instantiates the defragmentation algorithm, then feeds it the
// registered allocations. The fast algorithm is used only when the debug
// margin is zero and no buffer/image granularity conflict is possible;
// otherwise the generic algorithm is used.
13130 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13132 const bool allAllocations = m_AllAllocations ||
13133 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13145 if(VMA_DEBUG_MARGIN == 0 &&
13147 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13149 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13150 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13154 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13155 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
// Either hand the algorithm everything, or only the explicit registrations.
13160 m_pAlgorithm->AddAll();
13164 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13166 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context spanning default pools (fixed array,
// one slot per memory type) and custom pools (growable vector).
13174 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13176 uint32_t currFrameIndex,
13179 m_hAllocator(hAllocator),
13180 m_CurrFrameIndex(currFrameIndex),
13183 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// Default-pool slots start null; entries are created lazily in AddAllocations.
13185 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Ends defragmentation on every per-block-vector context (custom pools
// first, then default pools) and frees the contexts.
13188 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13190 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13192 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13193 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13194 vma_delete(m_hAllocator, pBlockVectorCtx);
13196 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13198 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
// Default-pool slots may be null (created lazily) — skip empty ones.
13199 if(pBlockVectorCtx)
13201 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13202 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation: for each pool (only
// those using the default algorithm, GetAlgorithm() == 0), finds or creates
// its context and marks it to process all allocations.
13207 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13209 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13211 VmaPool pool = pPools[poolIndex];
// Pools with a non-default algorithm (e.g. linear) are not defragmented.
13214 if(pool->m_BlockVector.GetAlgorithm() == 0)
13216 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Reuse an existing context for this pool if one was already created.
13218 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13220 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13222 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13227 if(!pBlockVectorDefragCtx)
13229 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13232 &pool->m_BlockVector,
13235 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13238 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Each eligible
// allocation (block-type, not lost) is routed to the context of either its
// custom pool or the default pool of its memory type, creating that context
// on first use. pAllocationsChanged (optional) receives per-allocation
// "was moved" flags, parallel to the input array.
13243 void VmaDefragmentationContext_T::AddAllocations(
13244 uint32_t allocationCount,
13246 VkBool32* pAllocationsChanged)
13249 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13252 VMA_ASSERT(hAlloc);
// Only block allocations that are not lost can be defragmented.
13254 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13256 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13258 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13260 const VmaPool hAllocPool = hAlloc->GetPool();
// Allocation belongs to a custom pool.
13262 if(hAllocPool != VK_NULL_HANDLE)
// Non-default-algorithm pools are skipped, as in AddPools.
13265 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13267 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13269 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13271 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13275 if(!pBlockVectorDefragCtx)
13277 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13280 &hAllocPool->m_BlockVector,
13283 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to a default pool, keyed by memory type index.
13290 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13291 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13292 if(!pBlockVectorDefragCtx)
13294 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13297 m_hAllocator->m_pBlockVectors[memTypeIndex],
13300 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13304 if(pBlockVectorDefragCtx)
13306 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13307 &pAllocationsChanged[allocIndex] : VMA_NULL;
13308 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs defragmentation over all registered contexts: default pools first,
// then custom pools, each bounded by the CPU/GPU byte and count budgets.
// Without a command buffer, GPU budgets are zeroed so only CPU-side moves
// happen. Stops early when a context reports a hard failure.
13314 VkResult VmaDefragmentationContext_T::Defragment(
13315 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13316 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// No command buffer => GPU path unavailable; disable its budget.
13324 if(commandBuffer == VK_NULL_HANDLE)
13326 maxGpuBytesToMove = 0;
13327 maxGpuAllocationsToMove = 0;
13330 VkResult res = VK_SUCCESS;
// Process default pools (one context slot per memory type, lazily filled).
13333 for(uint32_t memTypeIndex = 0;
13334 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13337 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13338 if(pBlockVectorCtx)
13340 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13341 pBlockVectorCtx->GetBlockVector()->Defragment(
13344 maxCpuBytesToMove, maxCpuAllocationsToMove,
13345 maxGpuBytesToMove, maxGpuAllocationsToMove,
13347 if(pBlockVectorCtx->res != VK_SUCCESS)
13349 res = pBlockVectorCtx->res;
// Process custom-pool contexts.
13355 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13356 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13359 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13360 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13361 pBlockVectorCtx->GetBlockVector()->Defragment(
13364 maxCpuBytesToMove, maxCpuAllocationsToMove,
13365 maxGpuBytesToMove, maxGpuAllocationsToMove,
13367 if(pBlockVectorCtx->res != VK_SUCCESS)
13369 res = pBlockVectorCtx->res;
// VmaRecorder (Windows-only, VMA_RECORDING_ENABLED): writes a CSV log of
// allocator calls to a file.
// NOTE(review): this extraction fuses the recorder constructor with the
// body of a separate init function (member assignments below) — its
// signature is elided; confirm against the full source.
13379 #if VMA_RECORDING_ENABLED 13381 VmaRecorder::VmaRecorder() :
13386 m_StartCounter(INT64_MAX)
13392 m_UseMutex = useMutex;
13393 m_Flags = settings.
flags;
// High-resolution timer baseline for the per-call timestamps.
13395 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13396 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13399 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
13402 return VK_ERROR_INITIALIZATION_FAILED;
// CSV header: file type line, then format version "1,5".
13406 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13407 fprintf(m_File,
"%s\n",
"1,5");
// Destructor: closes the recording file if it was opened (close call elided
// in this extraction).
13412 VmaRecorder::~VmaRecorder()
13414 if(m_File != VMA_NULL)
// Record* functions share one pattern: capture thread id + timestamp
// (GetBasicParams), lock the file mutex, append one CSV line.
13420 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13422 CallParams callParams;
13423 GetBasicParams(callParams);
13425 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13426 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
13430 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13432 CallParams callParams;
13433 GetBasicParams(callParams);
13435 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13436 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// NOTE(review): the signature of the vmaCreatePool record function is
// elided here; the fragment below is its body.
13442 CallParams callParams;
13443 GetBasicParams(callParams);
13445 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13446 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
13457 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13459 CallParams callParams;
13460 GetBasicParams(callParams);
13462 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13463 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Allocation-recording family: each logs memory requirements, allocation
// create-info fields, and the user-data string to one CSV line.
13468 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13469 const VkMemoryRequirements& vkMemReq,
13473 CallParams callParams;
13474 GetBasicParams(callParams);
13476 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// UserDataString formats pUserData (string or pointer, per flags) for CSV.
13477 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13478 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13480 vkMemReq.alignment,
13481 vkMemReq.memoryTypeBits,
13489 userDataStr.GetString());
// Multi-allocation variant: the allocation handles are appended as a
// pointer list between the fixed fields and the user-data string.
13493 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13494 const VkMemoryRequirements& vkMemReq,
13496 uint64_t allocationCount,
13499 CallParams callParams;
13500 GetBasicParams(callParams);
13502 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13503 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13504 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13506 vkMemReq.alignment,
13507 vkMemReq.memoryTypeBits,
13514 PrintPointerList(allocationCount, pAllocations);
13515 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Buffer variant: also logs the dedicated-allocation hints.
13519 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13520 const VkMemoryRequirements& vkMemReq,
13521 bool requiresDedicatedAllocation,
13522 bool prefersDedicatedAllocation,
13526 CallParams callParams;
13527 GetBasicParams(callParams);
13529 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13530 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13531 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13533 vkMemReq.alignment,
13534 vkMemReq.memoryTypeBits,
13535 requiresDedicatedAllocation ? 1 : 0,
13536 prefersDedicatedAllocation ? 1 : 0,
13544 userDataStr.GetString());
// Image variant: same shape as the buffer variant.
13548 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13549 const VkMemoryRequirements& vkMemReq,
13550 bool requiresDedicatedAllocation,
13551 bool prefersDedicatedAllocation,
13555 CallParams callParams;
13556 GetBasicParams(callParams);
13558 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13559 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13560 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13562 vkMemReq.alignment,
13563 vkMemReq.memoryTypeBits,
13564 requiresDedicatedAllocation ? 1 : 0,
13565 prefersDedicatedAllocation ? 1 : 0,
13573 userDataStr.GetString());
// Free/resize record functions: log the allocation handle (and, for pages,
// the handle list; for resize, the new size) as one CSV line.
13577 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13580 CallParams callParams;
13581 GetBasicParams(callParams);
13583 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13584 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13589 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13590 uint64_t allocationCount,
13593 CallParams callParams;
13594 GetBasicParams(callParams);
13596 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13597 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13598 PrintPointerList(allocationCount, pAllocations);
13599 fprintf(m_File,
"\n");
13603 void VmaRecorder::RecordResizeAllocation(
13604 uint32_t frameIndex,
13606 VkDeviceSize newSize)
13608 CallParams callParams;
13609 GetBasicParams(callParams);
13611 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13612 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13613 allocation, newSize);
// Remaining per-allocation record functions: user-data update, lost
// allocation creation, map/unmap, and flush/invalidate ranges.
13617 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13619 const void* pUserData)
13621 CallParams callParams;
13622 GetBasicParams(callParams);
13624 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13625 UserDataString userDataStr(
13628 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13630 userDataStr.GetString());
13634 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13637 CallParams callParams;
13638 GetBasicParams(callParams);
13640 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13641 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13646 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13649 CallParams callParams;
13650 GetBasicParams(callParams);
13652 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13653 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13658 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13661 CallParams callParams;
13662 GetBasicParams(callParams);
13664 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13665 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13670 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13671 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13673 CallParams callParams;
13674 GetBasicParams(callParams);
13676 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13677 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
13684 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13685 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13687 CallParams callParams;
13688 GetBasicParams(callParams);
13690 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13691 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaCreateBuffer" entry: buffer-create parameters followed by
// allocation-create parameters and the user-data string.
// NOTE(review): interior lines are missing from this extraction (some fprintf
// arguments for the %u/%p fields are absent); code kept byte-identical.
13698 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13699 const VkBufferCreateInfo& bufCreateInfo,
13703 CallParams callParams;
13704 GetBasicParams(callParams);
// Serialize writes to the shared file when mutex use is enabled.
13706 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13707 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13708 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13709 bufCreateInfo.flags,
13710 bufCreateInfo.size,
13711 bufCreateInfo.usage,
13712 bufCreateInfo.sharingMode,
13713 allocCreateInfo.
flags,
13714 allocCreateInfo.
usage,
13718 allocCreateInfo.
pool,
13720 userDataStr.GetString());
// Appends a "vmaCreateImage" entry: full image-create parameters followed by
// allocation-create parameters and the user-data string.
// NOTE(review): interior lines are missing from this extraction (some fprintf
// arguments are absent); code kept byte-identical, comments only added.
13724 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13725 const VkImageCreateInfo& imageCreateInfo,
13729 CallParams callParams;
13730 GetBasicParams(callParams);
// Serialize writes to the shared file when mutex use is enabled.
13732 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13733 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13734 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13735 imageCreateInfo.flags,
13736 imageCreateInfo.imageType,
13737 imageCreateInfo.format,
13738 imageCreateInfo.extent.width,
13739 imageCreateInfo.extent.height,
13740 imageCreateInfo.extent.depth,
13741 imageCreateInfo.mipLevels,
13742 imageCreateInfo.arrayLayers,
13743 imageCreateInfo.samples,
13744 imageCreateInfo.tiling,
13745 imageCreateInfo.usage,
13746 imageCreateInfo.sharingMode,
13747 imageCreateInfo.initialLayout,
13748 allocCreateInfo.
flags,
13749 allocCreateInfo.
usage,
13753 allocCreateInfo.
pool,
13755 userDataStr.GetString());
13759 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13762 CallParams callParams;
13763 GetBasicParams(callParams);
13765 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13766 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
13771 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13774 CallParams callParams;
13775 GetBasicParams(callParams);
13777 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13778 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
13783 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13786 CallParams callParams;
13787 GetBasicParams(callParams);
13789 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13790 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13795 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13798 CallParams callParams;
13799 GetBasicParams(callParams);
13801 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13802 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
13807 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
13810 CallParams callParams;
13811 GetBasicParams(callParams);
13813 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13814 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDefragmentationBegin" entry. The record is written in three
// fprintf calls: the header fields, a pointer list separated by a comma, and
// the trailing VmaDefragmentationInfo2 fields.
// NOTE(review): interior lines (pointer-list arguments and trailing fprintf
// arguments) are missing from this extraction; code kept byte-identical.
13819 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
13823 CallParams callParams;
13824 GetBasicParams(callParams);
// Serialize writes to the shared file when mutex use is enabled.
13826 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13827 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
13830 fprintf(m_File,
",");
13832 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
13842 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
13845 CallParams callParams;
13846 GetBasicParams(callParams);
13848 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13849 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
13856 if(pUserData != VMA_NULL)
13860 m_Str = (
const char*)pUserData;
13864 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" ... "Config,End" preamble of the recording file:
// physical-device identity, relevant device limits, the full memory heap/type
// layout, enabled extensions, and the values of VMA_DEBUG_* compile macros.
// This lets a replay tool validate it runs in a comparable environment.
// NOTE(review): braces and a few lines are missing in this extraction; code
// kept byte-identical, comments only added.
13874 void VmaRecorder::WriteConfiguration(
13875 const VkPhysicalDeviceProperties& devProps,
13876 const VkPhysicalDeviceMemoryProperties& memProps,
13877 bool dedicatedAllocationExtensionEnabled)
13879 fprintf(m_File,
"Config,Begin\n");
// --- Physical device identity ---
13881 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
13882 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
13883 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
13884 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
13885 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
13886 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// --- Limits that affect allocation behavior ---
13888 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
13889 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
13890 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// --- Memory heaps and types ---
13892 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
13893 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
13895 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
13896 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
13898 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
13899 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
13901 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
13902 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
// --- Extensions and compile-time configuration macros ---
13905 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
13907 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
13908 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
13909 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
13910 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
13911 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
13912 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
13913 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
13914 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
13915 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
13917 fprintf(m_File,
"Config,End\n");
13920 void VmaRecorder::GetBasicParams(CallParams& outParams)
13922 outParams.threadId = GetCurrentThreadId();
13924 LARGE_INTEGER counter;
13925 QueryPerformanceCounter(&counter);
13926 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
13929 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
13933 fprintf(m_File,
"%p", pItems[0]);
13934 for(uint64_t i = 1; i < count; ++i)
13936 fprintf(m_File,
" %p", pItems[i]);
13941 void VmaRecorder::Flush()
13949 #endif // #if VMA_RECORDING_ENABLED 13957 m_hDevice(pCreateInfo->device),
13958 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
13959 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
13960 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
13961 m_PreferredLargeHeapBlockSize(0),
13962 m_PhysicalDevice(pCreateInfo->physicalDevice),
13963 m_CurrentFrameIndex(0),
13964 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
13967 ,m_pRecorder(VMA_NULL)
13970 if(VMA_DEBUG_DETECT_CORRUPTION)
13973 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
13978 #if !(VMA_DEDICATED_ALLOCATION) 13981 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
13985 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
13986 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
13987 memset(&m_MemProps, 0,
sizeof(m_MemProps));
13989 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
13990 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
13992 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
13994 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
14005 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14006 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14008 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14009 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14010 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14011 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
14018 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14020 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14021 if(limit != VK_WHOLE_SIZE)
14023 m_HeapSizeLimit[heapIndex] = limit;
14024 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14026 m_MemProps.memoryHeaps[heapIndex].size = limit;
14032 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14034 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14036 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14039 preferredBlockSize,
14042 GetBufferImageGranularity(),
14049 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14056 VkResult res = VK_SUCCESS;
14061 #if VMA_RECORDING_ENABLED 14062 m_pRecorder = vma_new(
this, VmaRecorder)();
14064 if(res != VK_SUCCESS)
14068 m_pRecorder->WriteConfiguration(
14069 m_PhysicalDeviceProperties,
14071 m_UseKhrDedicatedAllocation);
14072 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14074 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14075 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: finalizes the recorder (if any), checks that all user pools
// were destroyed, then frees the per-memory-type vectors.
// NOTE(review): braces are missing in this extraction; code kept
// byte-identical, comments only added.
14082 VmaAllocator_T::~VmaAllocator_T()
// Write the final record and destroy the recorder before closing down.
14084 #if VMA_RECORDING_ENABLED 14085 if(m_pRecorder != VMA_NULL)
14087 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14088 vma_delete(
this, m_pRecorder);
// All custom pools must have been destroyed by the user by now.
14092 VMA_ASSERT(m_Pools.empty());
// Free per-memory-type structures in reverse index order.
14094 for(
size_t i = GetMemoryTypeCount(); i--; )
14096 vma_delete(
this, m_pDedicatedAllocations[i]);
14097 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions in three stages:
//   1. When statically linking (VMA_STATIC_VULKAN_FUNCTIONS == 1), take the
//      addresses of the global Vulkan prototypes, plus the KHR dedicated-
//      allocation entry points via vkGetDeviceProcAddr when that extension
//      is in use.
//   2. Overwrite any entry the caller supplied in pVulkanFunctions (non-null
//      members only, via VMA_COPY_IF_NOT_NULL).
//   3. Assert that every required pointer ended up non-null.
// NOTE(review): braces are missing in this extraction; code kept
// byte-identical, comments only added.
14101 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
// Stage 1: static prototypes.
14103 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14104 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
14105 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
14106 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
14107 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
14108 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
14109 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
14110 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
14111 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
14112 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
14113 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
14114 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
14115 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
14116 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
14117 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
14118 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
14119 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
14120 m_VulkanFunctions.vkCmdCopyBuffer = &vkCmdCopyBuffer;
// Extension entry points are not exported as prototypes: fetch per-device.
14121 #if VMA_DEDICATED_ALLOCATION 14122 if(m_UseKhrDedicatedAllocation)
14124 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14125 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14126 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14127 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Stage 2: caller-supplied overrides.
14129 #endif // #if VMA_DEDICATED_ALLOCATION 14130 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14132 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14133 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14135 if(pVulkanFunctions != VMA_NULL)
14137 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14138 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14139 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14140 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14141 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14142 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14143 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14144 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14145 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14146 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14147 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14148 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14149 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14150 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14151 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14152 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14153 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14154 #if VMA_DEDICATED_ALLOCATION 14155 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14156 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Stage 3: every required pointer must be resolved by now.
14160 #undef VMA_COPY_IF_NOT_NULL 14164 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14165 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14166 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14167 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14168 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14169 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14170 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14171 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14172 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14173 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14174 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14175 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14176 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14177 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14178 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14179 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14180 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
// The 2KHR entry points are only required when the extension is enabled.
14181 #if VMA_DEDICATED_ALLOCATION 14182 if(m_UseKhrDedicatedAllocation)
14184 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14185 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14190 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14192 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14193 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14194 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14195 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates from a specific memory type: first decides whether a dedicated
// VkDeviceMemory is preferred (debug macro, caller request, or size larger
// than half the preferred block size), then tries the block vector, and
// finally falls back to a dedicated allocation if block allocation failed.
// NOTE(review): many interior lines (flag tests, argument lists) are missing
// from this extraction; code kept byte-identical, comments only added.
14198 VkResult VmaAllocator_T::AllocateMemoryOfType(
14200 VkDeviceSize alignment,
14201 bool dedicatedAllocation,
14202 VkBuffer dedicatedBuffer,
14203 VkImage dedicatedImage,
14205 uint32_t memTypeIndex,
14206 VmaSuballocationType suballocType,
14207 size_t allocationCount,
14210 VMA_ASSERT(pAllocations != VMA_NULL);
14211 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, vkMemReq.size);
// Mapping makes no sense on non-HOST_VISIBLE memory; flag handling follows.
14217 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14222 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14223 VMA_ASSERT(blockVector);
// Heuristic: dedicate when forced by debug macro, requested by caller, or
// the request is larger than half a block.
14225 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
14226 bool preferDedicatedMemory =
14227 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14228 dedicatedAllocation ||
14230 size > preferredBlockSize / 2;
14232 if(preferDedicatedMemory &&
14234 finalCreateInfo.
pool == VK_NULL_HANDLE)
14243 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14247 return AllocateDedicatedMemory(
// Primary path: sub-allocate from the memory type's block vector.
14262 VkResult res = blockVector->Allocate(
14264 m_CurrentFrameIndex.load(),
14271 if(res == VK_SUCCESS)
14279 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: dedicated allocation when block allocation failed.
14283 res = AllocateDedicatedMemory(
14289 finalCreateInfo.pUserData,
14294 if(res == VK_SUCCESS)
14297 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14303 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates `allocationCount` dedicated VkDeviceMemory pages. Fills in
// VkMemoryDedicatedAllocateInfoKHR when the KHR extension is active, then
// allocates page by page; on success all new allocations are registered in
// m_pDedicatedAllocations, on partial failure everything already allocated
// is rolled back and pAllocations is zeroed.
// NOTE(review): interior lines (loop setup, some arguments) are missing from
// this extraction; code kept byte-identical, comments only added.
14310 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14312 VmaSuballocationType suballocType,
14313 uint32_t memTypeIndex,
14315 bool isUserDataString,
14317 VkBuffer dedicatedBuffer,
14318 VkImage dedicatedImage,
14319 size_t allocationCount,
14322 VMA_ASSERT(allocationCount > 0 && pAllocations);
14324 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14325 allocInfo.memoryTypeIndex = memTypeIndex;
14326 allocInfo.allocationSize = size;
// Chain the dedicated-allocation struct for exactly one of buffer/image.
14328 #if VMA_DEDICATED_ALLOCATION 14329 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14330 if(m_UseKhrDedicatedAllocation)
14332 if(dedicatedBuffer != VK_NULL_HANDLE)
14334 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14335 dedicatedAllocInfo.buffer = dedicatedBuffer;
14336 allocInfo.pNext = &dedicatedAllocInfo;
14338 else if(dedicatedImage != VK_NULL_HANDLE)
14340 dedicatedAllocInfo.image = dedicatedImage;
14341 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate each page; stop at the first failure.
14344 #endif // #if VMA_DEDICATED_ALLOCATION 14348 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14350 res = AllocateDedicatedMemoryPage(
14358 pAllocations + allocIndex);
14359 if(res != VK_SUCCESS)
// Success: register all pages under the per-type dedicated-allocations lock.
14365 if(res == VK_SUCCESS)
14369 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14370 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14371 VMA_ASSERT(pDedicatedAllocations);
14372 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14374 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14378 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: roll back every page allocated so far, newest first.
14383 while(allocIndex--)
14386 VkDeviceMemory hMemory = currAlloc->GetMemory();
14398 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14400 currAlloc->SetUserData(
this, VMA_NULL);
14401 vma_delete(
this, currAlloc);
14404 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates one dedicated VkDeviceMemory page: vkAllocateMemory, optional
// persistent map, then wraps the result in a new VmaAllocation_T. On map
// failure the freshly allocated memory is released again.
// NOTE(review): interior lines (parameters, map-condition, some arguments)
// are missing from this extraction; code kept byte-identical.
14410 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14412 VmaSuballocationType suballocType,
14413 uint32_t memTypeIndex,
14414 const VkMemoryAllocateInfo& allocInfo,
14416 bool isUserDataString,
14420 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14421 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14424 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistently map the page when requested (condition line lost above).
14428 void* pMappedData = VMA_NULL;
14431 res = (*m_VulkanFunctions.vkMapMemory)(
14440 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Mapping failed: release the just-allocated memory before returning.
14441 FreeVulkanMemory(memTypeIndex, size, hMemory);
14446 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
14447 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14448 (*pAllocation)->SetUserData(
this, pUserData);
// Optionally fill new memory with a debug pattern.
14449 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14451 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// active it uses vkGetBufferMemoryRequirements2KHR and also reports whether a
// dedicated allocation is required/preferred; otherwise it falls back to the
// core function and reports false for both.
// NOTE(review): braces and the else-branch keyword are missing in this
// extraction; code kept byte-identical, comments only added.
14457 void VmaAllocator_T::GetBufferMemoryRequirements(
14459 VkMemoryRequirements& memReq,
14460 bool& requiresDedicatedAllocation,
14461 bool& prefersDedicatedAllocation)
const 14463 #if VMA_DEDICATED_ALLOCATION 14464 if(m_UseKhrDedicatedAllocation)
14466 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14467 memReqInfo.buffer = hBuffer;
// Chain VkMemoryDedicatedRequirementsKHR to receive the dedicated hints.
14469 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14471 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14472 memReq2.pNext = &memDedicatedReq;
14474 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14476 memReq = memReq2.memoryRequirements;
14477 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14478 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: core vkGetBufferMemoryRequirements, no dedicated hints.
14481 #endif // #if VMA_DEDICATED_ALLOCATION 14483 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14484 requiresDedicatedAllocation =
false;
14485 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR when the extension is active; otherwise
// the core query with both dedicated flags forced to false.
// NOTE(review): braces and the else-branch keyword are missing in this
// extraction; code kept byte-identical, comments only added.
14489 void VmaAllocator_T::GetImageMemoryRequirements(
14491 VkMemoryRequirements& memReq,
14492 bool& requiresDedicatedAllocation,
14493 bool& prefersDedicatedAllocation)
const 14495 #if VMA_DEDICATED_ALLOCATION 14496 if(m_UseKhrDedicatedAllocation)
14498 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14499 memReqInfo.image = hImage;
14501 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14503 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14504 memReq2.pNext = &memDedicatedReq;
14506 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14508 memReq = memReq2.memoryRequirements;
14509 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14510 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: core vkGetImageMemoryRequirements, no dedicated hints.
14513 #endif // #if VMA_DEDICATED_ALLOCATION 14515 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14516 requiresDedicatedAllocation =
false;
14517 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates the create-info flag combinations,
// routes pool allocations straight to the pool's block vector, and otherwise
// walks candidate memory types (vmaFindMemoryTypeIndex-style) calling
// AllocateMemoryOfType, masking out each failed type and retrying.
// NOTE(review): many interior lines (flag tests, FindMemoryTypeIndex calls,
// argument lists) are missing from this extraction; code kept byte-identical.
14521 VkResult VmaAllocator_T::AllocateMemory(
14522 const VkMemoryRequirements& vkMemReq,
14523 bool requiresDedicatedAllocation,
14524 bool prefersDedicatedAllocation,
14525 VkBuffer dedicatedBuffer,
14526 VkImage dedicatedImage,
14528 VmaSuballocationType suballocType,
14529 size_t allocationCount,
// Start from a clean slate so partial failures leave null handles.
14532 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
14534 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
14536 if(vkMemReq.size == 0)
14538 return VK_ERROR_VALIDATION_FAILED_EXT;
// Reject contradictory flag combinations up front.
14543 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14544 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14549 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14550 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14552 if(requiresDedicatedAllocation)
14556 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14557 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14559 if(createInfo.
pool != VK_NULL_HANDLE)
14561 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14562 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14565 if((createInfo.
pool != VK_NULL_HANDLE) &&
14568 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14569 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool path: allocate directly from the pool's block vector, honoring the
// memory type's minimum alignment.
14572 if(createInfo.
pool != VK_NULL_HANDLE)
14574 const VkDeviceSize alignmentForPool = VMA_MAX(
14575 vkMemReq.alignment,
14576 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14577 return createInfo.
pool->m_BlockVector.Allocate(
14579 m_CurrentFrameIndex.load(),
// General path: iterate candidate memory types from vkMemReq.memoryTypeBits.
14590 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14591 uint32_t memTypeIndex = UINT32_MAX;
14593 if(res == VK_SUCCESS)
14595 VkDeviceSize alignmentForMemType = VMA_MAX(
14596 vkMemReq.alignment,
14597 GetMemoryTypeMinAlignment(memTypeIndex));
14599 res = AllocateMemoryOfType(
14601 alignmentForMemType,
14602 requiresDedicatedAllocation || prefersDedicatedAllocation,
14611 if(res == VK_SUCCESS)
// This memory type failed: remove it from the candidate mask and retry.
14621 memoryTypeBits &= ~(1u << memTypeIndex);
14624 if(res == VK_SUCCESS)
14626 alignmentForMemType = VMA_MAX(
14627 vkMemReq.alignment,
14628 GetMemoryTypeMinAlignment(memTypeIndex));
14630 res = AllocateMemoryOfType(
14632 alignmentForMemType,
14633 requiresDedicatedAllocation || prefersDedicatedAllocation,
14642 if(res == VK_SUCCESS)
// No suitable memory type could satisfy the request.
14652 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations in reverse order. For each non-null, non-lost
// allocation: optionally fills it with the "destroyed" debug pattern, returns
// block allocations to their owning block vector (custom pool or default),
// releases dedicated allocations, then destroys the VmaAllocation_T object.
// NOTE(review): braces and a few lines are missing in this extraction; code
// kept byte-identical, comments only added.
14663 void VmaAllocator_T::FreeMemory(
14664 size_t allocationCount,
14667 VMA_ASSERT(pAllocations);
// Reverse iteration over the input array.
14669 for(
size_t allocIndex = allocationCount; allocIndex--; )
14673 if(allocation != VK_NULL_HANDLE)
// TouchAllocation returns false for lost allocations — those need no free.
14675 if(TouchAllocation(allocation))
14677 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14679 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14682 switch(allocation->GetType())
14684 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14686 VmaBlockVector* pBlockVector = VMA_NULL;
14687 VmaPool hPool = allocation->GetPool();
// Custom-pool allocations go back to the pool's vector, default
// allocations to the per-memory-type vector.
14688 if(hPool != VK_NULL_HANDLE)
14690 pBlockVector = &hPool->m_BlockVector;
14694 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14695 pBlockVector = m_pBlockVectors[memTypeIndex];
14697 pBlockVector->Free(allocation);
14700 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14701 FreeDedicatedMemory(allocation);
// Finally destroy the handle object itself.
14708 allocation->SetUserData(
this, VMA_NULL);
14709 vma_delete(
this, allocation);
14714 VkResult VmaAllocator_T::ResizeAllocation(
14716 VkDeviceSize newSize)
14718 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14720 return VK_ERROR_VALIDATION_FAILED_EXT;
14722 if(newSize == alloc->GetSize())
14727 switch(alloc->GetType())
14729 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14730 return VK_ERROR_FEATURE_NOT_PRESENT;
14731 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14732 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
14734 alloc->ChangeSize(newSize);
14735 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
14740 return VK_ERROR_OUT_OF_POOL_MEMORY;
14744 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates allocator-wide statistics into pStats: initializes all StatInfo
// entries, adds stats from every default block vector, every custom pool, and
// every dedicated allocation, then post-processes totals and per-type/heap
// entries (averages etc.).
// NOTE(review): braces and the loop bodies of the init loops are missing in
// this extraction; code kept byte-identical, comments only added.
14748 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Zero/seed every StatInfo before accumulation.
14751 InitStatInfo(pStats->
total);
14752 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14754 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default per-memory-type block vectors.
14758 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14760 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14761 VMA_ASSERT(pBlockVector);
14762 pBlockVector->AddStats(pStats);
// Custom pools, under the pools read lock.
14767 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14768 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14770 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type under its read lock.
14775 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14777 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14778 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14779 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
14780 VMA_ASSERT(pDedicatedAllocVector);
14781 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
14784 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
14785 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14786 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14787 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Final pass: compute derived values (averages) for each StatInfo.
14792 VmaPostprocessCalcStatInfo(pStats->
total);
14793 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
14794 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
14795 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
14796 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (0x1002), used to detect AMD GPUs.
14799 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Starts a defragmentation pass: creates a VmaDefragmentationContext_T,
// registers the requested allocations/pools, and runs Defragment(). The
// context stays alive only when Defragment() returns VK_NOT_READY (work
// pending); otherwise it is destroyed and *pContext is nulled.
// NOTE(review): parameters and several argument lines are missing in this
// extraction; code kept byte-identical, comments only added.
14801 VkResult VmaAllocator_T::DefragmentationBegin(
14811 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
14812 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
14815 (*pContext)->AddAllocations(
14818 VkResult res = (*pContext)->Defragment(
// Anything other than VK_NOT_READY means the pass is complete: release ctx.
14823 if(res != VK_NOT_READY)
14825 vma_delete(
this, *pContext);
14826 *pContext = VMA_NULL;
14832 VkResult VmaAllocator_T::DefragmentationEnd(
14835 vma_delete(
this, context);
14841 if(hAllocation->CanBecomeLost())
14847 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14848 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14851 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
14855 pAllocationInfo->
offset = 0;
14856 pAllocationInfo->
size = hAllocation->GetSize();
14858 pAllocationInfo->
pUserData = hAllocation->GetUserData();
14861 else if(localLastUseFrameIndex == localCurrFrameIndex)
14863 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14864 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14865 pAllocationInfo->
offset = hAllocation->GetOffset();
14866 pAllocationInfo->
size = hAllocation->GetSize();
14868 pAllocationInfo->
pUserData = hAllocation->GetUserData();
14873 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14875 localLastUseFrameIndex = localCurrFrameIndex;
14882 #if VMA_STATS_STRING_ENABLED 14883 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14884 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14887 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
14888 if(localLastUseFrameIndex == localCurrFrameIndex)
14894 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14896 localLastUseFrameIndex = localCurrFrameIndex;
14902 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14903 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14904 pAllocationInfo->
offset = hAllocation->GetOffset();
14905 pAllocationInfo->
size = hAllocation->GetSize();
14906 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
14907 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. For allocations that can
// become lost, atomically advances lastUseFrameIndex via CAS loop and returns
// false once the allocation is lost. For ordinary allocations it only updates
// the frame index (when stats-string support is compiled in) and returns true.
// NOTE(review): braces, return statements and loop keywords are missing in
// this extraction; code kept byte-identical, comments only added.
14911 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
14914 if(hAllocation->CanBecomeLost())
14916 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14917 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost allocations stay lost; already-current ones need no update.
14920 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
14924 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS loop: retry until the frame index is successfully advanced.
14930 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14932 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: keep the frame index current for statistics.
14939 #if VMA_STATS_STRING_ENABLED 14940 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14941 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14944 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
14945 if(localLastUseFrameIndex == localCurrFrameIndex)
14951 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14953 localLastUseFrameIndex = localCurrFrameIndex;
14965 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
14975 return VK_ERROR_INITIALIZATION_FAILED;
14978 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
14980 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
14982 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
14983 if(res != VK_SUCCESS)
14985 vma_delete(
this, *pPool);
14992 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
14993 (*pPool)->SetId(m_NextPoolId++);
14994 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
15000 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15004 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15005 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15006 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15009 vma_delete(
this, pool);
15014 pool->m_BlockVector.GetPoolStats(pPoolStats);
15017 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15019 m_CurrentFrameIndex.store(frameIndex);
15022 void VmaAllocator_T::MakePoolAllocationsLost(
15024 size_t* pLostAllocationCount)
15026 hPool->m_BlockVector.MakePoolAllocationsLost(
15027 m_CurrentFrameIndex.load(),
15028 pLostAllocationCount);
15031 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15033 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption checks over every default block vector and custom pool
// whose memory type is selected by memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT (nothing checkable) and upgrades to VK_SUCCESS
// as soon as any vector supports and passes the check.
// NOTE(review): braces, switch headers and default branches are missing in
// this extraction; code kept byte-identical, comments only added.
15036 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15038 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default per-memory-type block vectors.
15041 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15043 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15045 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15046 VMA_ASSERT(pBlockVector);
15047 VkResult localRes = pBlockVector->CheckCorruption();
// FEATURE_NOT_PRESENT is neutral; a passing check upgrades the result.
15050 case VK_ERROR_FEATURE_NOT_PRESENT:
15053 finalRes = VK_SUCCESS;
// Custom pools, under the pools read lock.
15063 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15064 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15066 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15068 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15071 case VK_ERROR_FEATURE_NOT_PRESENT:
15074 finalRes = VK_SUCCESS;
15086 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15088 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
15089 (*pAllocation)->InitLost();
15092 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15094 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
15097 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15099 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15100 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15102 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15103 if(res == VK_SUCCESS)
15105 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
15110 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
15115 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15118 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15120 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
15126 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15128 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15130 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15133 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15135 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15136 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15138 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15139 m_HeapSizeLimit[heapIndex] += size;
15143 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15145 if(hAllocation->CanBecomeLost())
15147 return VK_ERROR_MEMORY_MAP_FAILED;
15150 switch(hAllocation->GetType())
15152 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15154 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15155 char *pBytes = VMA_NULL;
15156 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15157 if(res == VK_SUCCESS)
15159 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15160 hAllocation->BlockAllocMap();
15164 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15165 return hAllocation->DedicatedAllocMap(
this, ppData);
15168 return VK_ERROR_MEMORY_MAP_FAILED;
15174 switch(hAllocation->GetType())
15176 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15178 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15179 hAllocation->BlockAllocUnmap();
15180 pBlock->Unmap(
this, 1);
15183 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15184 hAllocation->DedicatedAllocUnmap(
this);
15191 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
15193 VkResult res = VK_SUCCESS;
15194 switch(hAllocation->GetType())
15196 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15197 res = GetVulkanFunctions().vkBindBufferMemory(
15200 hAllocation->GetMemory(),
15203 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15205 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15206 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15207 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
15216 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
15218 VkResult res = VK_SUCCESS;
15219 switch(hAllocation->GetType())
15221 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15222 res = GetVulkanFunctions().vkBindImageMemory(
15225 hAllocation->GetMemory(),
15228 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15230 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15231 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15232 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
15241 void VmaAllocator_T::FlushOrInvalidateAllocation(
15243 VkDeviceSize offset, VkDeviceSize size,
15244 VMA_CACHE_OPERATION op)
15246 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
15247 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15249 const VkDeviceSize allocationSize = hAllocation->GetSize();
15250 VMA_ASSERT(offset <= allocationSize);
15252 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15254 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15255 memRange.memory = hAllocation->GetMemory();
15257 switch(hAllocation->GetType())
15259 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15260 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15261 if(size == VK_WHOLE_SIZE)
15263 memRange.size = allocationSize - memRange.offset;
15267 VMA_ASSERT(offset + size <= allocationSize);
15268 memRange.size = VMA_MIN(
15269 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15270 allocationSize - memRange.offset);
15274 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15277 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15278 if(size == VK_WHOLE_SIZE)
15280 size = allocationSize - offset;
15284 VMA_ASSERT(offset + size <= allocationSize);
15286 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
15289 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15290 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15291 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15292 memRange.offset += allocationOffset;
15293 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
15304 case VMA_CACHE_FLUSH:
15305 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15307 case VMA_CACHE_INVALIDATE:
15308 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
15317 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15319 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15321 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15323 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15324 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15325 VMA_ASSERT(pDedicatedAllocations);
15326 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15327 VMA_ASSERT(success);
15330 VkDeviceMemory hMemory = allocation->GetMemory();
15342 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15344 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
15347 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15349 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15350 !hAllocation->CanBecomeLost() &&
15351 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15353 void* pData = VMA_NULL;
15354 VkResult res = Map(hAllocation, &pData);
15355 if(res == VK_SUCCESS)
15357 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
15358 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15359 Unmap(hAllocation);
15363 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
15368 #if VMA_STATS_STRING_ENABLED 15370 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15372 bool dedicatedAllocationsStarted =
false;
15373 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15375 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15376 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15377 VMA_ASSERT(pDedicatedAllocVector);
15378 if(pDedicatedAllocVector->empty() ==
false)
15380 if(dedicatedAllocationsStarted ==
false)
15382 dedicatedAllocationsStarted =
true;
15383 json.WriteString(
"DedicatedAllocations");
15384 json.BeginObject();
15387 json.BeginString(
"Type ");
15388 json.ContinueString(memTypeIndex);
15393 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15395 json.BeginObject(
true);
15397 hAlloc->PrintParameters(json);
15404 if(dedicatedAllocationsStarted)
15410 bool allocationsStarted =
false;
15411 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15413 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15415 if(allocationsStarted ==
false)
15417 allocationsStarted =
true;
15418 json.WriteString(
"DefaultPools");
15419 json.BeginObject();
15422 json.BeginString(
"Type ");
15423 json.ContinueString(memTypeIndex);
15426 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15429 if(allocationsStarted)
15437 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15438 const size_t poolCount = m_Pools.size();
15441 json.WriteString(
"Pools");
15442 json.BeginObject();
15443 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15445 json.BeginString();
15446 json.ContinueString(m_Pools[poolIndex]->GetId());
15449 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
15456 #endif // #if VMA_STATS_STRING_ENABLED 15465 VMA_ASSERT(pCreateInfo && pAllocator);
15466 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15468 return (*pAllocator)->Init(pCreateInfo);
15474 if(allocator != VK_NULL_HANDLE)
15476 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15477 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15478 vma_delete(&allocationCallbacks, allocator);
15484 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15486 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15487 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
15492 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15494 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15495 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
15500 uint32_t memoryTypeIndex,
15501 VkMemoryPropertyFlags* pFlags)
15503 VMA_ASSERT(allocator && pFlags);
15504 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15505 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
15510 uint32_t frameIndex)
15512 VMA_ASSERT(allocator);
15513 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15515 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15517 allocator->SetCurrentFrameIndex(frameIndex);
15524 VMA_ASSERT(allocator && pStats);
15525 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15526 allocator->CalculateStats(pStats);
15529 #if VMA_STATS_STRING_ENABLED 15533 char** ppStatsString,
15534 VkBool32 detailedMap)
15536 VMA_ASSERT(allocator && ppStatsString);
15537 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15539 VmaStringBuilder sb(allocator);
15541 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15542 json.BeginObject();
15545 allocator->CalculateStats(&stats);
15547 json.WriteString(
"Total");
15548 VmaPrintStatInfo(json, stats.
total);
15550 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15552 json.BeginString(
"Heap ");
15553 json.ContinueString(heapIndex);
15555 json.BeginObject();
15557 json.WriteString(
"Size");
15558 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15560 json.WriteString(
"Flags");
15561 json.BeginArray(
true);
15562 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15564 json.WriteString(
"DEVICE_LOCAL");
15570 json.WriteString(
"Stats");
15571 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
15574 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15576 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15578 json.BeginString(
"Type ");
15579 json.ContinueString(typeIndex);
15582 json.BeginObject();
15584 json.WriteString(
"Flags");
15585 json.BeginArray(
true);
15586 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15587 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15589 json.WriteString(
"DEVICE_LOCAL");
15591 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15593 json.WriteString(
"HOST_VISIBLE");
15595 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15597 json.WriteString(
"HOST_COHERENT");
15599 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15601 json.WriteString(
"HOST_CACHED");
15603 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15605 json.WriteString(
"LAZILY_ALLOCATED");
15611 json.WriteString(
"Stats");
15612 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
15621 if(detailedMap == VK_TRUE)
15623 allocator->PrintDetailedMap(json);
15629 const size_t len = sb.GetLength();
15630 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15633 memcpy(pChars, sb.GetData(), len);
15635 pChars[len] =
'\0';
15636 *ppStatsString = pChars;
15641 char* pStatsString)
15643 if(pStatsString != VMA_NULL)
15645 VMA_ASSERT(allocator);
15646 size_t len = strlen(pStatsString);
15647 vma_delete_array(allocator, pStatsString, len + 1);
15651 #endif // #if VMA_STATS_STRING_ENABLED 15658 uint32_t memoryTypeBits,
15660 uint32_t* pMemoryTypeIndex)
15662 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15663 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15664 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15671 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15672 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15677 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15681 switch(pAllocationCreateInfo->
usage)
15686 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15688 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15692 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15695 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15696 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15698 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15702 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15703 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
15709 *pMemoryTypeIndex = UINT32_MAX;
15710 uint32_t minCost = UINT32_MAX;
15711 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15712 memTypeIndex < allocator->GetMemoryTypeCount();
15713 ++memTypeIndex, memTypeBit <<= 1)
15716 if((memTypeBit & memoryTypeBits) != 0)
15718 const VkMemoryPropertyFlags currFlags =
15719 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
15721 if((requiredFlags & ~currFlags) == 0)
15724 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
15726 if(currCost < minCost)
15728 *pMemoryTypeIndex = memTypeIndex;
15733 minCost = currCost;
15738 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
15743 const VkBufferCreateInfo* pBufferCreateInfo,
15745 uint32_t* pMemoryTypeIndex)
15747 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15748 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
15749 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15750 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15752 const VkDevice hDev = allocator->m_hDevice;
15753 VkBuffer hBuffer = VK_NULL_HANDLE;
15754 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
15755 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
15756 if(res == VK_SUCCESS)
15758 VkMemoryRequirements memReq = {};
15759 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
15760 hDev, hBuffer, &memReq);
15764 memReq.memoryTypeBits,
15765 pAllocationCreateInfo,
15768 allocator->GetVulkanFunctions().vkDestroyBuffer(
15769 hDev, hBuffer, allocator->GetAllocationCallbacks());
15776 const VkImageCreateInfo* pImageCreateInfo,
15778 uint32_t* pMemoryTypeIndex)
15780 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15781 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
15782 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15783 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15785 const VkDevice hDev = allocator->m_hDevice;
15786 VkImage hImage = VK_NULL_HANDLE;
15787 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
15788 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
15789 if(res == VK_SUCCESS)
15791 VkMemoryRequirements memReq = {};
15792 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
15793 hDev, hImage, &memReq);
15797 memReq.memoryTypeBits,
15798 pAllocationCreateInfo,
15801 allocator->GetVulkanFunctions().vkDestroyImage(
15802 hDev, hImage, allocator->GetAllocationCallbacks());
15812 VMA_ASSERT(allocator && pCreateInfo && pPool);
15814 VMA_DEBUG_LOG(
"vmaCreatePool");
15816 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15818 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
15820 #if VMA_RECORDING_ENABLED 15821 if(allocator->GetRecorder() != VMA_NULL)
15823 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
15834 VMA_ASSERT(allocator);
15836 if(pool == VK_NULL_HANDLE)
15841 VMA_DEBUG_LOG(
"vmaDestroyPool");
15843 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15845 #if VMA_RECORDING_ENABLED 15846 if(allocator->GetRecorder() != VMA_NULL)
15848 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
15852 allocator->DestroyPool(pool);
15860 VMA_ASSERT(allocator && pool && pPoolStats);
15862 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15864 allocator->GetPoolStats(pool, pPoolStats);
15870 size_t* pLostAllocationCount)
15872 VMA_ASSERT(allocator && pool);
15874 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15876 #if VMA_RECORDING_ENABLED 15877 if(allocator->GetRecorder() != VMA_NULL)
15879 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
15883 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
15888 VMA_ASSERT(allocator && pool);
15890 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15892 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
15894 return allocator->CheckPoolCorruption(pool);
15899 const VkMemoryRequirements* pVkMemoryRequirements,
15904 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
15906 VMA_DEBUG_LOG(
"vmaAllocateMemory");
15908 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15910 VkResult result = allocator->AllocateMemory(
15911 *pVkMemoryRequirements,
15917 VMA_SUBALLOCATION_TYPE_UNKNOWN,
15921 #if VMA_RECORDING_ENABLED 15922 if(allocator->GetRecorder() != VMA_NULL)
15924 allocator->GetRecorder()->RecordAllocateMemory(
15925 allocator->GetCurrentFrameIndex(),
15926 *pVkMemoryRequirements,
15932 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
15934 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
15942 const VkMemoryRequirements* pVkMemoryRequirements,
15944 size_t allocationCount,
15948 if(allocationCount == 0)
15953 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
15955 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
15957 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15959 VkResult result = allocator->AllocateMemory(
15960 *pVkMemoryRequirements,
15966 VMA_SUBALLOCATION_TYPE_UNKNOWN,
15970 #if VMA_RECORDING_ENABLED 15971 if(allocator->GetRecorder() != VMA_NULL)
15973 allocator->GetRecorder()->RecordAllocateMemoryPages(
15974 allocator->GetCurrentFrameIndex(),
15975 *pVkMemoryRequirements,
15977 (uint64_t)allocationCount,
15982 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
15984 for(
size_t i = 0; i < allocationCount; ++i)
15986 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16000 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16002 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16004 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16006 VkMemoryRequirements vkMemReq = {};
16007 bool requiresDedicatedAllocation =
false;
16008 bool prefersDedicatedAllocation =
false;
16009 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16010 requiresDedicatedAllocation,
16011 prefersDedicatedAllocation);
16013 VkResult result = allocator->AllocateMemory(
16015 requiresDedicatedAllocation,
16016 prefersDedicatedAllocation,
16020 VMA_SUBALLOCATION_TYPE_BUFFER,
16024 #if VMA_RECORDING_ENABLED 16025 if(allocator->GetRecorder() != VMA_NULL)
16027 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16028 allocator->GetCurrentFrameIndex(),
16030 requiresDedicatedAllocation,
16031 prefersDedicatedAllocation,
16037 if(pAllocationInfo && result == VK_SUCCESS)
16039 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16052 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16054 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16056 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16058 VkMemoryRequirements vkMemReq = {};
16059 bool requiresDedicatedAllocation =
false;
16060 bool prefersDedicatedAllocation =
false;
16061 allocator->GetImageMemoryRequirements(image, vkMemReq,
16062 requiresDedicatedAllocation, prefersDedicatedAllocation);
16064 VkResult result = allocator->AllocateMemory(
16066 requiresDedicatedAllocation,
16067 prefersDedicatedAllocation,
16071 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16075 #if VMA_RECORDING_ENABLED 16076 if(allocator->GetRecorder() != VMA_NULL)
16078 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16079 allocator->GetCurrentFrameIndex(),
16081 requiresDedicatedAllocation,
16082 prefersDedicatedAllocation,
16088 if(pAllocationInfo && result == VK_SUCCESS)
16090 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16100 VMA_ASSERT(allocator);
16102 if(allocation == VK_NULL_HANDLE)
16107 VMA_DEBUG_LOG(
"vmaFreeMemory");
16109 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16111 #if VMA_RECORDING_ENABLED 16112 if(allocator->GetRecorder() != VMA_NULL)
16114 allocator->GetRecorder()->RecordFreeMemory(
16115 allocator->GetCurrentFrameIndex(),
16120 allocator->FreeMemory(
16127 size_t allocationCount,
16130 if(allocationCount == 0)
16135 VMA_ASSERT(allocator);
16137 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16139 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16141 #if VMA_RECORDING_ENABLED 16142 if(allocator->GetRecorder() != VMA_NULL)
16144 allocator->GetRecorder()->RecordFreeMemoryPages(
16145 allocator->GetCurrentFrameIndex(),
16146 (uint64_t)allocationCount,
16151 allocator->FreeMemory(allocationCount, pAllocations);
16157 VkDeviceSize newSize)
16159 VMA_ASSERT(allocator && allocation);
16161 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16163 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16165 #if VMA_RECORDING_ENABLED 16166 if(allocator->GetRecorder() != VMA_NULL)
16168 allocator->GetRecorder()->RecordResizeAllocation(
16169 allocator->GetCurrentFrameIndex(),
16175 return allocator->ResizeAllocation(allocation, newSize);
16183 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16185 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16187 #if VMA_RECORDING_ENABLED 16188 if(allocator->GetRecorder() != VMA_NULL)
16190 allocator->GetRecorder()->RecordGetAllocationInfo(
16191 allocator->GetCurrentFrameIndex(),
16196 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16203 VMA_ASSERT(allocator && allocation);
16205 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16207 #if VMA_RECORDING_ENABLED 16208 if(allocator->GetRecorder() != VMA_NULL)
16210 allocator->GetRecorder()->RecordTouchAllocation(
16211 allocator->GetCurrentFrameIndex(),
16216 return allocator->TouchAllocation(allocation);
16224 VMA_ASSERT(allocator && allocation);
16226 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16228 allocation->SetUserData(allocator, pUserData);
16230 #if VMA_RECORDING_ENABLED 16231 if(allocator->GetRecorder() != VMA_NULL)
16233 allocator->GetRecorder()->RecordSetAllocationUserData(
16234 allocator->GetCurrentFrameIndex(),
16245 VMA_ASSERT(allocator && pAllocation);
16247 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16249 allocator->CreateLostAllocation(pAllocation);
16251 #if VMA_RECORDING_ENABLED 16252 if(allocator->GetRecorder() != VMA_NULL)
16254 allocator->GetRecorder()->RecordCreateLostAllocation(
16255 allocator->GetCurrentFrameIndex(),
16266 VMA_ASSERT(allocator && allocation && ppData);
16268 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16270 VkResult res = allocator->Map(allocation, ppData);
16272 #if VMA_RECORDING_ENABLED 16273 if(allocator->GetRecorder() != VMA_NULL)
16275 allocator->GetRecorder()->RecordMapMemory(
16276 allocator->GetCurrentFrameIndex(),
16288 VMA_ASSERT(allocator && allocation);
16290 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16292 #if VMA_RECORDING_ENABLED 16293 if(allocator->GetRecorder() != VMA_NULL)
16295 allocator->GetRecorder()->RecordUnmapMemory(
16296 allocator->GetCurrentFrameIndex(),
16301 allocator->Unmap(allocation);
16306 VMA_ASSERT(allocator && allocation);
16308 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16310 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16312 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16314 #if VMA_RECORDING_ENABLED 16315 if(allocator->GetRecorder() != VMA_NULL)
16317 allocator->GetRecorder()->RecordFlushAllocation(
16318 allocator->GetCurrentFrameIndex(),
16319 allocation, offset, size);
16326 VMA_ASSERT(allocator && allocation);
16328 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16330 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16332 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16334 #if VMA_RECORDING_ENABLED 16335 if(allocator->GetRecorder() != VMA_NULL)
16337 allocator->GetRecorder()->RecordInvalidateAllocation(
16338 allocator->GetCurrentFrameIndex(),
16339 allocation, offset, size);
16346 VMA_ASSERT(allocator);
16348 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16350 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16352 return allocator->CheckCorruption(memoryTypeBits);
16358 size_t allocationCount,
16359 VkBool32* pAllocationsChanged,
16369 if(pDefragmentationInfo != VMA_NULL)
16383 if(res == VK_NOT_READY)
16396 VMA_ASSERT(allocator && pInfo && pContext);
16407 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16409 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16411 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16413 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16415 #if VMA_RECORDING_ENABLED 16416 if(allocator->GetRecorder() != VMA_NULL)
16418 allocator->GetRecorder()->RecordDefragmentationBegin(
16419 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16430 VMA_ASSERT(allocator);
16432 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16434 if(context != VK_NULL_HANDLE)
16436 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16438 #if VMA_RECORDING_ENABLED 16439 if(allocator->GetRecorder() != VMA_NULL)
16441 allocator->GetRecorder()->RecordDefragmentationEnd(
16442 allocator->GetCurrentFrameIndex(), context);
16446 return allocator->DefragmentationEnd(context);
16459 VMA_ASSERT(allocator && allocation && buffer);
16461 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16463 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16465 return allocator->BindBufferMemory(allocation, buffer);
16473 VMA_ASSERT(allocator && allocation && image);
16475 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16477 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16479 return allocator->BindImageMemory(allocation, image);
16484 const VkBufferCreateInfo* pBufferCreateInfo,
16490 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16492 if(pBufferCreateInfo->size == 0)
16494 return VK_ERROR_VALIDATION_FAILED_EXT;
16497 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16499 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16501 *pBuffer = VK_NULL_HANDLE;
16502 *pAllocation = VK_NULL_HANDLE;
16505 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16506 allocator->m_hDevice,
16508 allocator->GetAllocationCallbacks(),
16513 VkMemoryRequirements vkMemReq = {};
16514 bool requiresDedicatedAllocation =
false;
16515 bool prefersDedicatedAllocation =
false;
16516 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16517 requiresDedicatedAllocation, prefersDedicatedAllocation);
16521 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16523 VMA_ASSERT(vkMemReq.alignment %
16524 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16526 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16528 VMA_ASSERT(vkMemReq.alignment %
16529 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16531 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16533 VMA_ASSERT(vkMemReq.alignment %
16534 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16538 res = allocator->AllocateMemory(
16540 requiresDedicatedAllocation,
16541 prefersDedicatedAllocation,
16544 *pAllocationCreateInfo,
16545 VMA_SUBALLOCATION_TYPE_BUFFER,
16549 #if VMA_RECORDING_ENABLED 16550 if(allocator->GetRecorder() != VMA_NULL)
16552 allocator->GetRecorder()->RecordCreateBuffer(
16553 allocator->GetCurrentFrameIndex(),
16554 *pBufferCreateInfo,
16555 *pAllocationCreateInfo,
16563 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16567 #if VMA_STATS_STRING_ENABLED 16568 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16570 if(pAllocationInfo != VMA_NULL)
16572 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16577 allocator->FreeMemory(
16580 *pAllocation = VK_NULL_HANDLE;
16581 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16582 *pBuffer = VK_NULL_HANDLE;
16585 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16586 *pBuffer = VK_NULL_HANDLE;
16597 VMA_ASSERT(allocator);
16599 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16604 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16606 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16608 #if VMA_RECORDING_ENABLED 16609 if(allocator->GetRecorder() != VMA_NULL)
16611 allocator->GetRecorder()->RecordDestroyBuffer(
16612 allocator->GetCurrentFrameIndex(),
16617 if(buffer != VK_NULL_HANDLE)
16619 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16622 if(allocation != VK_NULL_HANDLE)
16624 allocator->FreeMemory(
16632 const VkImageCreateInfo* pImageCreateInfo,
16638 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16640 if(pImageCreateInfo->extent.width == 0 ||
16641 pImageCreateInfo->extent.height == 0 ||
16642 pImageCreateInfo->extent.depth == 0 ||
16643 pImageCreateInfo->mipLevels == 0 ||
16644 pImageCreateInfo->arrayLayers == 0)
16646 return VK_ERROR_VALIDATION_FAILED_EXT;
16649 VMA_DEBUG_LOG(
"vmaCreateImage");
16651 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16653 *pImage = VK_NULL_HANDLE;
16654 *pAllocation = VK_NULL_HANDLE;
16657 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16658 allocator->m_hDevice,
16660 allocator->GetAllocationCallbacks(),
16664 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16665 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16666 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16669 VkMemoryRequirements vkMemReq = {};
16670 bool requiresDedicatedAllocation =
false;
16671 bool prefersDedicatedAllocation =
false;
16672 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16673 requiresDedicatedAllocation, prefersDedicatedAllocation);
16675 res = allocator->AllocateMemory(
16677 requiresDedicatedAllocation,
16678 prefersDedicatedAllocation,
16681 *pAllocationCreateInfo,
16686 #if VMA_RECORDING_ENABLED 16687 if(allocator->GetRecorder() != VMA_NULL)
16689 allocator->GetRecorder()->RecordCreateImage(
16690 allocator->GetCurrentFrameIndex(),
16692 *pAllocationCreateInfo,
16700 res = allocator->BindImageMemory(*pAllocation, *pImage);
16704 #if VMA_STATS_STRING_ENABLED 16705 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
16707 if(pAllocationInfo != VMA_NULL)
16709 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16714 allocator->FreeMemory(
16717 *pAllocation = VK_NULL_HANDLE;
16718 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16719 *pImage = VK_NULL_HANDLE;
16722 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16723 *pImage = VK_NULL_HANDLE;
16734 VMA_ASSERT(allocator);
16736 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16741 VMA_DEBUG_LOG(
"vmaDestroyImage");
16743 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16745 #if VMA_RECORDING_ENABLED 16746 if(allocator->GetRecorder() != VMA_NULL)
16748 allocator->GetRecorder()->RecordDestroyImage(
16749 allocator->GetCurrentFrameIndex(),
16754 if(image != VK_NULL_HANDLE)
16756 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
16758 if(allocation != VK_NULL_HANDLE)
16760 allocator->FreeMemory(
16766 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1723
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2026
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1785
-
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side...
Definition: vk_mem_alloc.h:2782
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1781
+
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side...
Definition: vk_mem_alloc.h:2823
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
Definition: vk_mem_alloc.h:1759
-
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2355
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1739
+
Definition: vk_mem_alloc.h:1755
+
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2351
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1735
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:1987
-
Definition: vk_mem_alloc.h:2090
-
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2735
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1731
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2455
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1782
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2818
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2244
-
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1626
+
Definition: vk_mem_alloc.h:1983
+
Definition: vk_mem_alloc.h:2086
+
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2776
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1727
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2451
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1778
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2859
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2240
+
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1622
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2336
-
Definition: vk_mem_alloc.h:2067
-
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2738
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1720
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2143
-
Definition: vk_mem_alloc.h:2014
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1794
-
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2272
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2332
+
Definition: vk_mem_alloc.h:2063
+
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2779
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1716
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2139
+
Definition: vk_mem_alloc.h:2010
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1790
+
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2268
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1848
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1779
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1844
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1775
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2018
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2014
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1920
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1736
-
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2772
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1919
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2822
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1916
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1732
+
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2813
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1915
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2863
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1811
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1929
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2830
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2127
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2813
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1737
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1662
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1807
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1925
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2871
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2123
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2854
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1733
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1658
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1788
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1784
+
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2286
-
Definition: vk_mem_alloc.h:2280
-
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1743
-
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1855
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2465
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2282
+
Definition: vk_mem_alloc.h:2276
+
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1739
+
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1851
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2461
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1732
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1728
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
-
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1757
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2164
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2306
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2342
+
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1753
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2160
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2302
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2338
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1718
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2289
+
Definition: vk_mem_alloc.h:1714
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2285
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2787
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:1965
+
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2828
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:1961
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2747
+
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2788
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2808
+
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2849
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2826
-
Definition: vk_mem_alloc.h:2004
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2151
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1735
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2867
+
Definition: vk_mem_alloc.h:2000
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2147
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1731
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1925
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1668
-
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2726
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1921
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1664
+
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2767
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
Definition: vk_mem_alloc.h:2724
-
Definition: vk_mem_alloc.h:2111
-
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2753
+
Definition: vk_mem_alloc.h:2765
+
Definition: vk_mem_alloc.h:2107
+
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2794
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1689
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1685
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1761
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1694
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2828
+
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1757
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1690
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2869
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2138
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2352
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2134
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2348
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1728
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1908
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2301
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1681
-
Definition: vk_mem_alloc.h:2276
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1724
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1904
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2297
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1677
+
Definition: vk_mem_alloc.h:2272
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2074
+
Definition: vk_mem_alloc.h:2070
Represents Opaque object that represents started defragmentation process.
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1921
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1685
-
Definition: vk_mem_alloc.h:2101
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2292
-
Definition: vk_mem_alloc.h:2013
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1734
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1917
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1681
+
Definition: vk_mem_alloc.h:2097
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2288
+
Definition: vk_mem_alloc.h:2009
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1730
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2133
-
Definition: vk_mem_alloc.h:2124
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2129
+
Definition: vk_mem_alloc.h:2120
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1911
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1730
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2314
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1797
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2345
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2122
-
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2777
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2157
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1907
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1726
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2310
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1793
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2341
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2118
+
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2818
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2153
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1836
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1927
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:2054
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1920
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1832
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1923
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:2050
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1916
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1741
-
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1767
-
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use...
Definition: vk_mem_alloc.h:2723
-
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2801
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1683
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1740
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1737
+
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1763
+
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use...
Definition: vk_mem_alloc.h:2764
+
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2842
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1679
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1736
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2328
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1733
-
Definition: vk_mem_alloc.h:2085
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2324
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1729
+
Definition: vk_mem_alloc.h:2081
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1775
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2479
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1791
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1920
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1917
+
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1771
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2475
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1787
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1916
+
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1913
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2333
-
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2732
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2329
+
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2773
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
-
Definition: vk_mem_alloc.h:2094
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2460
-
Definition: vk_mem_alloc.h:2108
-
Definition: vk_mem_alloc.h:2120
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2824
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1726
+
Definition: vk_mem_alloc.h:2090
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2456
+
Definition: vk_mem_alloc.h:2104
+
Definition: vk_mem_alloc.h:2116
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2865
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1722
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1915
-
Definition: vk_mem_alloc.h:1970
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2282
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1911
+
Definition: vk_mem_alloc.h:1966
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2278
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1764
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1913
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1738
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1742
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2041
-
Definition: vk_mem_alloc.h:2115
-
Definition: vk_mem_alloc.h:1997
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2474
+
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1760
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1909
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1734
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1738
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2037
+
Definition: vk_mem_alloc.h:2111
+
Definition: vk_mem_alloc.h:1993
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2470
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1716
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1712
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1729
-
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2261
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1725
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2257
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2441
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2437
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2105
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2226
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1921
+
Definition: vk_mem_alloc.h:2101
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2222
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1917
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
-
Definition: vk_mem_alloc.h:2080
-
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1751
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1928
+
Definition: vk_mem_alloc.h:2076
+
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1747
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1924
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2339
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1921
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2335
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1917
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
-
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side...
Definition: vk_mem_alloc.h:2792
+
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side...
Definition: vk_mem_alloc.h:2833
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2446
-
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:2756
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2442
+
uint32_t poolCount
Number of pools in pPools array.
Definition: vk_mem_alloc.h:2797