23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1651 #ifndef VMA_RECORDING_ENABLED 1653 #define VMA_RECORDING_ENABLED 1 1655 #define VMA_RECORDING_ENABLED 0 1660 #define NOMINMAX // For windows.h 1664 #include <vulkan/vulkan.h> 1667 #if VMA_RECORDING_ENABLED 1668 #include <windows.h> 1671 #if !defined(VMA_DEDICATED_ALLOCATION) 1672 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1673 #define VMA_DEDICATED_ALLOCATION 1 1675 #define VMA_DEDICATED_ALLOCATION 0 1693 uint32_t memoryType,
1694 VkDeviceMemory memory,
1699 uint32_t memoryType,
1700 VkDeviceMemory memory,
1773 #if VMA_DEDICATED_ALLOCATION 1774 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1775 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1902 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1910 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1920 uint32_t memoryTypeIndex,
1921 VkMemoryPropertyFlags* pFlags);
1933 uint32_t frameIndex);
1966 #ifndef VMA_STATS_STRING_ENABLED 1967 #define VMA_STATS_STRING_ENABLED 1 1970 #if VMA_STATS_STRING_ENABLED 1977 char** ppStatsString,
1978 VkBool32 detailedMap);
1982 char* pStatsString);
1984 #endif // #if VMA_STATS_STRING_ENABLED 2217 uint32_t memoryTypeBits,
2219 uint32_t* pMemoryTypeIndex);
2235 const VkBufferCreateInfo* pBufferCreateInfo,
2237 uint32_t* pMemoryTypeIndex);
2253 const VkImageCreateInfo* pImageCreateInfo,
2255 uint32_t* pMemoryTypeIndex);
2427 size_t* pLostAllocationCount);
2526 const VkMemoryRequirements* pVkMemoryRequirements,
2552 const VkMemoryRequirements* pVkMemoryRequirements,
2554 size_t allocationCount,
2599 size_t allocationCount,
2625 VkDeviceSize newSize);
3005 size_t allocationCount,
3006 VkBool32* pAllocationsChanged,
3072 const VkBufferCreateInfo* pBufferCreateInfo,
3097 const VkImageCreateInfo* pImageCreateInfo,
3123 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3126 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3127 #define VMA_IMPLEMENTATION 3130 #ifdef VMA_IMPLEMENTATION 3131 #undef VMA_IMPLEMENTATION 3153 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3154 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3166 #if VMA_USE_STL_CONTAINERS 3167 #define VMA_USE_STL_VECTOR 1 3168 #define VMA_USE_STL_UNORDERED_MAP 1 3169 #define VMA_USE_STL_LIST 1 3172 #ifndef VMA_USE_STL_SHARED_MUTEX 3174 #if __cplusplus >= 201703L 3175 #define VMA_USE_STL_SHARED_MUTEX 1 3179 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 3180 #define VMA_USE_STL_SHARED_MUTEX 1 3182 #define VMA_USE_STL_SHARED_MUTEX 0 3190 #if VMA_USE_STL_VECTOR 3194 #if VMA_USE_STL_UNORDERED_MAP 3195 #include <unordered_map> 3198 #if VMA_USE_STL_LIST 3207 #include <algorithm> 3213 #define VMA_NULL nullptr 3216 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3218 void *aligned_alloc(
// --- aligned_alloc() shims for platforms whose libc lacks it ---
// Variant 1 (old Android, __ANDROID_API__ < 16): emulated with memalign().
// The signature opener sits on the preceding line; alignment is first raised
// to at least sizeof(void*).
size_t alignment,
size_t size)
3221 if(alignment <
sizeof(
void*))
3223 alignment =
sizeof(
void*);
3226 return memalign(alignment, size);
// Variant 2 (Apple / newer Android): wraps posix_memalign(), which requires
// alignment >= sizeof(void*) and a power of two — hence the same clamp.
3228 #elif defined(__APPLE__) || defined(__ANDROID__) 3230 void *aligned_alloc(
size_t alignment,
size_t size)
3233 if(alignment <
sizeof(
void*))
3235 alignment =
sizeof(
void*);
// NOTE(review): the declaration of `pointer` and the success/failure return
// statements are elided in this extraction — confirm against the full file.
3239 if(posix_memalign(&pointer, alignment, size) == 0)
3253 #define VMA_ASSERT(expr) assert(expr) 3255 #define VMA_ASSERT(expr) 3261 #ifndef VMA_HEAVY_ASSERT 3263 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3265 #define VMA_HEAVY_ASSERT(expr) 3269 #ifndef VMA_ALIGN_OF 3270 #define VMA_ALIGN_OF(type) (__alignof(type)) 3273 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3275 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3277 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3281 #ifndef VMA_SYSTEM_FREE 3283 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3285 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3290 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3294 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3298 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3302 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3305 #ifndef VMA_DEBUG_LOG 3306 #define VMA_DEBUG_LOG(format, ...) 3316 #if VMA_STATS_STRING_ENABLED 3317 static inline void VmaUint32ToStr(
// --- Number-to-string helpers for the JSON stats-string writer ---
// (Compiled under VMA_STATS_STRING_ENABLED; the opener of VmaUint32ToStr is
// on the preceding line.) Each formats into a caller-provided buffer via
// snprintf, casting to the plain C type its format specifier requires.
char* outStr,
size_t strLen, uint32_t num)
3319 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// 64-bit variant: "%llu" matches the unsigned long long cast.
3321 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
3323 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
// Pointer variant: "%p" (implementation-defined textual representation).
3325 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
3327 snprintf(outStr, strLen,
"%p", ptr);
// VmaMutex forwarding methods (the class header is elided in this view):
// trivial wrappers over the underlying std::mutex member.
3335 void Lock() { m_Mutex.lock(); }
3336 void Unlock() { m_Mutex.unlock(); }
3340 #define VMA_MUTEX VmaMutex 3344 #ifndef VMA_RW_MUTEX 3345 #if VMA_USE_STL_SHARED_MUTEX 3347 #include <shared_mutex> 3351 void LockRead() { m_Mutex.lock_shared(); }
3352 void UnlockRead() { m_Mutex.unlock_shared(); }
3353 void LockWrite() { m_Mutex.lock(); }
3354 void UnlockWrite() { m_Mutex.unlock(); }
3356 std::shared_mutex m_Mutex;
3358 #define VMA_RW_MUTEX VmaRWMutex 3359 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 3365 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
// Win32 SRWLOCK-backed VmaRWMutex methods (WINVER >= 0x0600 branch; the
// constructor that initializes m_Lock sits on the preceding line).
3366 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3367 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3368 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3369 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3373 #define VMA_RW_MUTEX VmaRWMutex 3379 void LockRead() { m_Mutex.Lock(); }
// Fallback VmaRWMutex: no shared-lock primitive is available, so both the
// read and write paths take the same exclusive VMA_MUTEX (the LockRead()
// wrapper is on the preceding line).
3380 void UnlockRead() { m_Mutex.Unlock(); }
3381 void LockWrite() { m_Mutex.Lock(); }
3382 void UnlockWrite() { m_Mutex.Unlock(); }
3386 #define VMA_RW_MUTEX VmaRWMutex 3387 #endif // #if VMA_USE_STL_SHARED_MUTEX 3388 #endif // #ifndef VMA_RW_MUTEX 3398 #ifndef VMA_ATOMIC_UINT32 3399 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3402 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3407 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3410 #ifndef VMA_DEBUG_ALIGNMENT 3415 #define VMA_DEBUG_ALIGNMENT (1) 3418 #ifndef VMA_DEBUG_MARGIN 3423 #define VMA_DEBUG_MARGIN (0) 3426 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3431 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3434 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3440 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3443 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3448 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3451 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3456 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3459 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3460 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3464 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3465 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3469 #ifndef VMA_CLASS_NO_COPY 3470 #define VMA_CLASS_NO_COPY(className) \ 3472 className(const className&) = delete; \ 3473 className& operator=(const className&) = delete; 3476 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// Debug constants. The magic value below is what VmaWriteMagicValue() stamps
// into allocation debug margins and VmaValidateMagicValue() checks for.
3479 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
// Byte fill patterns — presumably applied to allocations on create/destroy
// when VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled; usage is not visible in
// this chunk, confirm against the full file.
3481 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3482 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3488 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
// All-null VkAllocationCallbacks table; apparently a placeholder for "no
// user callbacks" — its use site is not visible in this chunk.
3490 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3491 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
/*
Returns the number of bits set to 1 in `v` (population count).

Classic branch-free SWAR popcount: adjacent bits are reduced to 2-bit
partial counts, then 4-, 8-, 16- and finally 32-bit sums — a fixed number
of operations, no table lookups.

NOTE(review): the extracted span computed `c` but its return statement was
missing; `return c;` below delivers the count to the caller.
*/
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);        // 2-bit partial counts
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);  // 4-bit sums
    c = ((c >> 4) + c) & 0x0F0F0F0F;                 // 8-bit sums
    c = ((c >> 8) + c) & 0x00FF00FF;                 // 16-bit sums
    c = ((c >> 16) + c) & 0x0000FFFF;                // full 32-bit count
    return c;
}
// Rounds `val` up to the nearest multiple of `align`.
// Uses integer division, so it is correct for any positive `align`, not only
// powers of two; an already-aligned `val` is returned unchanged.
// Note: `val + align - 1` can wrap for values near the top of T's range.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Rounds `val` down to the nearest multiple of `align` (truncating integer
// division, then multiply back). Valid for any positive `align`, power of
// two or not.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}
// Integer division of `x` by `y`, rounded half-up to the nearest whole
// number (adds y/2 before dividing). Intended for non-negative operands.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true when `x` has at most one bit set, i.e. is a power of two.
// Note: x == 0 also yields true under this test ((0 & -1) == 0); callers
// that must treat zero specially have to screen it themselves.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Power-of-two rounding helpers, 32- and 64-bit overloads. By name they
// round up (Next) / down (Prev) to a power of two; their bodies are elided
// in this extraction — TODO confirm exact semantics against the full file.
3538 static inline uint32_t VmaNextPow2(uint32_t v)
3549 static inline uint64_t VmaNextPow2(uint64_t v)
3563 static inline uint32_t VmaPrevPow2(uint32_t v)
3573 static inline uint64_t VmaPrevPow2(uint64_t v)
3585 static inline bool VmaStrIsEmpty(
const char* pStr)
3587 return pStr == VMA_NULL || *pStr ==
'\0';
3590 #if VMA_STATS_STRING_ENABLED 3592 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Partition step of the in-house quicksort used when VMA_SORT is not
// overridden (the leading "#endif" closes the VMA_STATS_STRING_ENABLED
// section). Last element is the pivot; elements ordered before it by `cmp`
// are swapped toward the front of the range.
3608 #endif // #if VMA_STATS_STRING_ENABLED 3612 template<
typename Iterator,
typename Compare>
3613 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
// Pivot = last element of [beg, end).
3615 Iterator centerValue = end; --centerValue;
3616 Iterator insertIndex = beg;
3617 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3619 if(cmp(*memTypeIndex, *centerValue))
3621 if(insertIndex != memTypeIndex)
3623 VMA_SWAP(*memTypeIndex, *insertIndex);
// Finally the pivot is swapped into its sorted position.
// NOTE(review): the `++insertIndex;` advance and the trailing
// `return insertIndex;` are elided in this extraction — confirm against the
// full file.
3628 if(insertIndex != centerValue)
3630 VMA_SWAP(*insertIndex, *centerValue);
// Recursive quicksort driver over [beg, end), recursing on both sides of
// the pivot position returned by VmaQuickSortPartition.
// NOTE(review): the `if(beg < end)` recursion guard is elided in this
// extraction — without it the recursion would not terminate; confirm
// against the full file.
3635 template<
typename Iterator,
typename Compare>
3636 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3640 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3641 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3642 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3646 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3648 #endif // #ifndef VMA_SORT 3657 static inline bool VmaBlocksOnSamePage(
3658 VkDeviceSize resourceAOffset,
3659 VkDeviceSize resourceASize,
3660 VkDeviceSize resourceBOffset,
3661 VkDeviceSize pageSize)
3663 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3664 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3665 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3666 VkDeviceSize resourceBStart = resourceBOffset;
3667 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3668 return resourceAEndPage == resourceBStartPage;
// Classifies what occupies a suballocation; consumed by
// VmaIsBufferImageGranularityConflict below. The numeric order matters:
// that function normalizes pairs with a < comparison on these values.
// (Closing brace of the enum is elided in this extraction.)
3671 enum VmaSuballocationType
3673 VMA_SUBALLOCATION_TYPE_FREE = 0,
3674 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
3675 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
3676 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
3677 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
3678 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
// Forces a 32-bit underlying type, matching Vulkan enum convention.
3679 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
// Returns whether two suballocation types placed on the same
// bufferImageGranularity page would conflict under Vulkan's granularity
// rules. The pair is first normalized so suballocType1 <= suballocType2
// (enum order), halving the cases the switch must handle.
// NOTE(review): the single-line `return true/false;` statements of several
// cases (and the default branch) are elided in this extraction — only the
// multi-line return expressions survive; confirm against the full file.
3688 static inline bool VmaIsBufferImageGranularityConflict(
3689 VmaSuballocationType suballocType1,
3690 VmaSuballocationType suballocType2)
3692 if(suballocType1 > suballocType2)
3694 VMA_SWAP(suballocType1, suballocType2);
3697 switch(suballocType1)
3699 case VMA_SUBALLOCATION_TYPE_FREE:
3701 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
// Buffer conflicts with images that are (or may be) optimally tiled.
3703 case VMA_SUBALLOCATION_TYPE_BUFFER:
3705 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3706 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
// Unknown-tiling image conflicts with any image type.
3707 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3709 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3710 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3711 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
// Linear image conflicts only with optimal images.
3712 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3714 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3715 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3723 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3725 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3726 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3727 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3729 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
// Counterpart of VmaWriteMagicValue: scans the VMA_DEBUG_MARGIN bytes at
// (pData + offset) and reports whether every uint32_t still holds the magic
// value, i.e. the debug margin was not overwritten.
// NOTE(review): the `return false;` on mismatch and the final
// `return true;` are elided in this extraction — confirm against the full
// file.
3733 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3735 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3736 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3737 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3739 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
// RAII scope guard over a VMA_MUTEX. Passing useMutex == false turns the
// guard into a no-op (m_pMutex stays null) — used where locking is
// optional. Non-copyable. (The struct header and the destructor's name line
// are elided in this extraction.)
3750 VMA_CLASS_NO_COPY(VmaMutexLock)
3752 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3753 m_pMutex(useMutex ? &mutex : VMA_NULL)
3754 {
if(m_pMutex) { m_pMutex->Lock(); } }
// Destructor body: releases the lock if one was taken.
3756 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3758 VMA_MUTEX* m_pMutex;
// RAII guard acquiring the SHARED (read) side of a VMA_RW_MUTEX for the
// scope; useMutex == false makes it a no-op. Non-copyable.
3762 struct VmaMutexLockRead
3764 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3766 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3767 m_pMutex(useMutex ? &mutex : VMA_NULL)
3768 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3769 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3771 VMA_RW_MUTEX* m_pMutex;
// RAII guard acquiring the EXCLUSIVE (write) side of a VMA_RW_MUTEX for the
// scope; useMutex == false makes it a no-op. Non-copyable.
3775 struct VmaMutexLockWrite
3777 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3779 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3780 m_pMutex(useMutex ? &mutex : VMA_NULL)
3781 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3782 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3784 VMA_RW_MUTEX* m_pMutex;
3787 #if VMA_DEBUG_GLOBAL_MUTEX 3788 static VMA_MUTEX gDebugGlobalMutex;
3789 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3791 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3795 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search returning the first iterator in [beg, end) whose element is
// NOT less than `key` under `cmp` — a lower_bound over random-access
// iterators; used by the sorted-vector helpers below.
// NOTE(review): the `while(down < up)` loop header, the down/up updates and
// the final `return beg + down;` are elided in this extraction — confirm
// against the full file.
3806 template <
typename CmpLess,
typename IterT,
typename KeyT>
3807 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key, CmpLess cmp)
3809 size_t down = 0, up = (end - beg);
3812 const size_t mid = (down + up) / 2;
3813 if(cmp(*(beg+mid), key))
// Debug validation: checks that all `count` pointers in `arr` are non-null
// and pairwise distinct (the inner j-loop compares each entry against later
// ones).
// NOTE(review): the early `return false;` branches and the final
// `return true;` are elided in this extraction — confirm against the full
// file.
3830 template<
typename T>
3831 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
3833 for(uint32_t i = 0; i < count; ++i)
3835 const T iPtr = arr[i];
3836 if(iPtr == VMA_NULL)
3840 for(uint32_t j = i + 1; j < count; ++j)
// Central CPU-side allocation: routes through the user's
// VkAllocationCallbacks::pfnAllocation when one is supplied, otherwise
// falls back to VMA_SYSTEM_ALIGNED_MALLOC. Allocation scope is reported as
// VK_SYSTEM_ALLOCATION_SCOPE_OBJECT.
// NOTE(review): the `size` and `alignment` argument lines of the callback
// invocation are elided in this extraction — confirm against the full file.
3854 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3856 if((pAllocationCallbacks != VMA_NULL) &&
3857 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3859 return (*pAllocationCallbacks->pfnAllocation)(
3860 pAllocationCallbacks->pUserData,
3863 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3867 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
// Central CPU-side free, mirroring VmaMalloc: prefers the user's pfnFree
// callback, otherwise VMA_SYSTEM_FREE. Must only receive pointers obtained
// from VmaMalloc so the two paths stay paired.
// NOTE(review): the `else` between the two branches is elided in this
// extraction — confirm against the full file.
3871 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3873 if((pAllocationCallbacks != VMA_NULL) &&
3874 (pAllocationCallbacks->pfnFree != VMA_NULL))
3876 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3880 VMA_SYSTEM_FREE(ptr);
3884 template<
typename T>
3885 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3887 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3890 template<
typename T>
3891 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3893 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
// vma_new / vma_new_array: placement-new into storage obtained from
// VmaAllocate / VmaAllocateArray, so construction honors the user's
// allocation callbacks. vma_delete releases via VmaFree.
// NOTE(review): the explicit `ptr->~T();` destructor call expected before
// VmaFree is elided in this extraction — confirm against the full file.
3896 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3898 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3900 template<
typename T>
3901 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3904 VmaFree(pAllocationCallbacks, ptr);
// Array counterpart of vma_delete: iterates `i` downward from `count` (so
// elements are handled in reverse order), then releases the storage with
// VmaFree.
// NOTE(review): the null-pointer check and the per-element `(ptr+i)->~T();`
// destructor call are elided in this extraction — confirm against the full
// file.
3907 template<
typename T>
3908 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3912 for(
size_t i = count; i--; )
3916 VmaFree(pAllocationCallbacks, ptr);
// Minimal STL-compatible allocator that forwards to the user's
// VkAllocationCallbacks via VmaAllocateArray / VmaFree. Plugged into the
// in-house containers (and the std:: containers when the VMA_USE_STL_*
// switches are on) so every CPU allocation respects user callbacks.
3921 template<
typename T>
3922 class VmaStlAllocator
// Public and const: rebinding copies (and container internals) read it.
3925 const VkAllocationCallbacks*
const m_pCallbacks;
3926 typedef T value_type;
3928 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
// Rebinding converting constructor required of STL allocators.
3929 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3931 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
// `n` is unused: VmaFree does not need the element count.
3932 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
// Allocators compare equal iff they wrap the same callback table.
3934 template<
typename U>
3935 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3937 return m_pCallbacks == rhs.m_pCallbacks;
3939 template<
typename U>
3940 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3942 return m_pCallbacks != rhs.m_pCallbacks;
// m_pCallbacks is const, so assignment cannot be supported.
3945 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
// When VMA_USE_STL_VECTOR is on, VmaVector is std::vector and these two
// shims adapt the library's index-based insert/remove calls onto
// std::vector's iterator-based API.
3948 #if VMA_USE_STL_VECTOR 3950 #define VmaVector std::vector 3952 template<
typename T,
typename allocatorT>
3953 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3955 vec.insert(vec.begin() + index, item);
3958 template<
typename T,
typename allocatorT>
3959 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3961 vec.erase(vec.begin() + index);
3964 #else // #if VMA_USE_STL_VECTOR 3969 template<
typename T,
typename AllocatorT>
3973 typedef T value_type;
3975 VmaVector(
const AllocatorT& allocator) :
3976 m_Allocator(allocator),
3983 VmaVector(
size_t count,
const AllocatorT& allocator) :
3984 m_Allocator(allocator),
3985 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3991 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3992 m_Allocator(src.m_Allocator),
3993 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3994 m_Count(src.m_Count),
3995 m_Capacity(src.m_Count)
3999 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
4005 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4008 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
4012 resize(rhs.m_Count);
4015 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
4021 bool empty()
const {
return m_Count == 0; }
4022 size_t size()
const {
return m_Count; }
4023 T* data() {
return m_pArray; }
4024 const T* data()
const {
return m_pArray; }
4026 T& operator[](
size_t index)
4028 VMA_HEAVY_ASSERT(index < m_Count);
4029 return m_pArray[index];
4031 const T& operator[](
size_t index)
const 4033 VMA_HEAVY_ASSERT(index < m_Count);
4034 return m_pArray[index];
4039 VMA_HEAVY_ASSERT(m_Count > 0);
4042 const T& front()
const 4044 VMA_HEAVY_ASSERT(m_Count > 0);
4049 VMA_HEAVY_ASSERT(m_Count > 0);
4050 return m_pArray[m_Count - 1];
4052 const T& back()
const 4054 VMA_HEAVY_ASSERT(m_Count > 0);
4055 return m_pArray[m_Count - 1];
4058 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4060 newCapacity = VMA_MAX(newCapacity, m_Count);
4062 if((newCapacity < m_Capacity) && !freeMemory)
4064 newCapacity = m_Capacity;
4067 if(newCapacity != m_Capacity)
4069 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4072 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4074 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4075 m_Capacity = newCapacity;
4076 m_pArray = newArray;
4080 void resize(
size_t newCount,
bool freeMemory =
false)
4082 size_t newCapacity = m_Capacity;
4083 if(newCount > m_Capacity)
4085 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4089 newCapacity = newCount;
4092 if(newCapacity != m_Capacity)
4094 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4095 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4096 if(elementsToCopy != 0)
4098 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4100 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4101 m_Capacity = newCapacity;
4102 m_pArray = newArray;
4108 void clear(
bool freeMemory =
false)
4110 resize(0, freeMemory);
4113 void insert(
size_t index,
const T& src)
4115 VMA_HEAVY_ASSERT(index <= m_Count);
4116 const size_t oldCount = size();
4117 resize(oldCount + 1);
4118 if(index < oldCount)
4120 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4122 m_pArray[index] = src;
4125 void remove(
size_t index)
4127 VMA_HEAVY_ASSERT(index < m_Count);
4128 const size_t oldCount = size();
4129 if(index < oldCount - 1)
4131 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4133 resize(oldCount - 1);
4136 void push_back(
const T& src)
4138 const size_t newIndex = size();
4139 resize(newIndex + 1);
4140 m_pArray[newIndex] = src;
4145 VMA_HEAVY_ASSERT(m_Count > 0);
4149 void push_front(
const T& src)
4156 VMA_HEAVY_ASSERT(m_Count > 0);
4160 typedef T* iterator;
4162 iterator begin() {
return m_pArray; }
4163 iterator end() {
return m_pArray + m_Count; }
4166 AllocatorT m_Allocator;
4172 template<
typename T,
typename allocatorT>
4173 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4175 vec.insert(index, item);
4178 template<
typename T,
typename allocatorT>
4179 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
// Inserts `value` into a vector kept sorted under CmpLess, at the position
// located by VmaBinaryFindFirstNotLess; returns the insertion index. (The
// leading "#endif" closes the VMA_USE_STL_VECTOR section.)
// NOTE(review): the first search argument (vector.data()) and the `value`
// argument lines of the call are elided in this extraction — confirm
// against the full file.
4184 #endif // #if VMA_USE_STL_VECTOR 4186 template<
typename CmpLess,
typename VectorT>
4187 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4189 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4191 vector.data() + vector.size(),
4193 CmpLess()) - vector.data();
4194 VmaVectorInsert(vector, indexToInsert, value);
4195 return indexToInsert;
// Removes one element equal to `value` (equality = neither side orders
// before the other under the comparator) from a sorted vector; returns
// whether an element was removed.
// NOTE(review): the CmpLess comparator declaration, the search-call
// argument lines and the `return true/false;` statements are elided in this
// extraction — confirm against the full file.
4198 template<
typename CmpLess,
typename VectorT>
4199 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
4202 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
4207 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
4209 size_t indexToRemove = it - vector.begin();
4210 VmaVectorRemove(vector, indexToRemove);
4216 template<
typename CmpLess,
typename IterT,
typename KeyT>
4217 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
4220 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4221 beg, end, value, comparator);
4223 (!comparator(*it, value) && !comparator(value, *it)))
4238 template<
typename T>
4239 class VmaPoolAllocator
4241 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4243 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
4244 ~VmaPoolAllocator();
4252 uint32_t NextFreeIndex;
4260 uint32_t FirstFreeIndex;
4263 const VkAllocationCallbacks* m_pAllocationCallbacks;
4264 const uint32_t m_FirstBlockCapacity;
4265 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4267 ItemBlock& CreateNewBlock();
4270 template<
typename T>
4271 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
4272 m_pAllocationCallbacks(pAllocationCallbacks),
4273 m_FirstBlockCapacity(firstBlockCapacity),
4274 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4276 VMA_ASSERT(m_FirstBlockCapacity > 1);
4279 template<
typename T>
4280 VmaPoolAllocator<T>::~VmaPoolAllocator()
4285 template<
typename T>
4286 void VmaPoolAllocator<T>::Clear()
4288 for(
size_t i = m_ItemBlocks.size(); i--; )
4289 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4290 m_ItemBlocks.clear();
4293 template<
typename T>
4294 T* VmaPoolAllocator<T>::Alloc()
4296 for(
size_t i = m_ItemBlocks.size(); i--; )
4298 ItemBlock& block = m_ItemBlocks[i];
4300 if(block.FirstFreeIndex != UINT32_MAX)
4302 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4303 block.FirstFreeIndex = pItem->NextFreeIndex;
4304 return &pItem->Value;
4309 ItemBlock& newBlock = CreateNewBlock();
4310 Item*
const pItem = &newBlock.pItems[0];
4311 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4312 return &pItem->Value;
4315 template<
typename T>
4316 void VmaPoolAllocator<T>::Free(T* ptr)
4319 for(
size_t i = m_ItemBlocks.size(); i--; )
4321 ItemBlock& block = m_ItemBlocks[i];
4325 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4328 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4330 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
4331 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4332 block.FirstFreeIndex = index;
4336 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4339 template<
typename T>
4340 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4342 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4343 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4345 const ItemBlock newBlock = {
4346 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4350 m_ItemBlocks.push_back(newBlock);
4353 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4354 newBlock.pItems[i].NextFreeIndex = i + 1;
4355 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
4356 return m_ItemBlocks.back();
4362 #if VMA_USE_STL_LIST 4364 #define VmaList std::list 4366 #else // #if VMA_USE_STL_LIST 4368 template<
typename T>
4377 template<
typename T>
4380 VMA_CLASS_NO_COPY(VmaRawList)
4382 typedef VmaListItem<T> ItemType;
4384 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4388 size_t GetCount()
const {
return m_Count; }
4389 bool IsEmpty()
const {
return m_Count == 0; }
4391 ItemType* Front() {
return m_pFront; }
4392 const ItemType* Front()
const {
return m_pFront; }
4393 ItemType* Back() {
return m_pBack; }
4394 const ItemType* Back()
const {
return m_pBack; }
4396 ItemType* PushBack();
4397 ItemType* PushFront();
4398 ItemType* PushBack(
const T& value);
4399 ItemType* PushFront(
const T& value);
4404 ItemType* InsertBefore(ItemType* pItem);
4406 ItemType* InsertAfter(ItemType* pItem);
4408 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4409 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4411 void Remove(ItemType* pItem);
4414 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4415 VmaPoolAllocator<ItemType> m_ItemAllocator;
4421 template<
typename T>
4422 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4423 m_pAllocationCallbacks(pAllocationCallbacks),
4424 m_ItemAllocator(pAllocationCallbacks, 128),
4431 template<
typename T>
4432 VmaRawList<T>::~VmaRawList()
4438 template<
typename T>
4439 void VmaRawList<T>::Clear()
4441 if(IsEmpty() ==
false)
4443 ItemType* pItem = m_pBack;
4444 while(pItem != VMA_NULL)
4446 ItemType*
const pPrevItem = pItem->pPrev;
4447 m_ItemAllocator.Free(pItem);
4450 m_pFront = VMA_NULL;
4456 template<
typename T>
4457 VmaListItem<T>* VmaRawList<T>::PushBack()
4459 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4460 pNewItem->pNext = VMA_NULL;
4463 pNewItem->pPrev = VMA_NULL;
4464 m_pFront = pNewItem;
4470 pNewItem->pPrev = m_pBack;
4471 m_pBack->pNext = pNewItem;
4478 template<
typename T>
4479 VmaListItem<T>* VmaRawList<T>::PushFront()
4481 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4482 pNewItem->pPrev = VMA_NULL;
4485 pNewItem->pNext = VMA_NULL;
4486 m_pFront = pNewItem;
4492 pNewItem->pNext = m_pFront;
4493 m_pFront->pPrev = pNewItem;
4494 m_pFront = pNewItem;
4500 template<
typename T>
4501 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4503 ItemType*
const pNewItem = PushBack();
4504 pNewItem->Value = value;
4508 template<
typename T>
4509 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4511 ItemType*
const pNewItem = PushFront();
4512 pNewItem->Value = value;
4516 template<
typename T>
4517 void VmaRawList<T>::PopBack()
4519 VMA_HEAVY_ASSERT(m_Count > 0);
4520 ItemType*
const pBackItem = m_pBack;
4521 ItemType*
const pPrevItem = pBackItem->pPrev;
4522 if(pPrevItem != VMA_NULL)
4524 pPrevItem->pNext = VMA_NULL;
4526 m_pBack = pPrevItem;
4527 m_ItemAllocator.Free(pBackItem);
4531 template<
typename T>
4532 void VmaRawList<T>::PopFront()
4534 VMA_HEAVY_ASSERT(m_Count > 0);
4535 ItemType*
const pFrontItem = m_pFront;
4536 ItemType*
const pNextItem = pFrontItem->pNext;
4537 if(pNextItem != VMA_NULL)
4539 pNextItem->pPrev = VMA_NULL;
4541 m_pFront = pNextItem;
4542 m_ItemAllocator.Free(pFrontItem);
4546 template<
typename T>
4547 void VmaRawList<T>::Remove(ItemType* pItem)
4549 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4550 VMA_HEAVY_ASSERT(m_Count > 0);
4552 if(pItem->pPrev != VMA_NULL)
4554 pItem->pPrev->pNext = pItem->pNext;
4558 VMA_HEAVY_ASSERT(m_pFront == pItem);
4559 m_pFront = pItem->pNext;
4562 if(pItem->pNext != VMA_NULL)
4564 pItem->pNext->pPrev = pItem->pPrev;
4568 VMA_HEAVY_ASSERT(m_pBack == pItem);
4569 m_pBack = pItem->pPrev;
4572 m_ItemAllocator.Free(pItem);
4576 template<
typename T>
4577 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4579 if(pItem != VMA_NULL)
4581 ItemType*
const prevItem = pItem->pPrev;
4582 ItemType*
const newItem = m_ItemAllocator.Alloc();
4583 newItem->pPrev = prevItem;
4584 newItem->pNext = pItem;
4585 pItem->pPrev = newItem;
4586 if(prevItem != VMA_NULL)
4588 prevItem->pNext = newItem;
4592 VMA_HEAVY_ASSERT(m_pFront == pItem);
4602 template<
typename T>
4603 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4605 if(pItem != VMA_NULL)
4607 ItemType*
const nextItem = pItem->pNext;
4608 ItemType*
const newItem = m_ItemAllocator.Alloc();
4609 newItem->pNext = nextItem;
4610 newItem->pPrev = pItem;
4611 pItem->pNext = newItem;
4612 if(nextItem != VMA_NULL)
4614 nextItem->pPrev = newItem;
4618 VMA_HEAVY_ASSERT(m_pBack == pItem);
4628 template<
typename T>
4629 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4631 ItemType*
const newItem = InsertBefore(pItem);
4632 newItem->Value = value;
4636 template<
typename T>
4637 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4639 ItemType*
const newItem = InsertAfter(pItem);
4640 newItem->Value = value;
4644 template<
typename T,
typename AllocatorT>
4647 VMA_CLASS_NO_COPY(VmaList)
4658 T& operator*()
const 4660 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4661 return m_pItem->Value;
4663 T* operator->()
const 4665 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4666 return &m_pItem->Value;
4669 iterator& operator++()
4671 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4672 m_pItem = m_pItem->pNext;
4675 iterator& operator--()
4677 if(m_pItem != VMA_NULL)
4679 m_pItem = m_pItem->pPrev;
4683 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4684 m_pItem = m_pList->Back();
4689 iterator operator++(
int)
4691 iterator result = *
this;
4695 iterator operator--(
int)
4697 iterator result = *
this;
4702 bool operator==(
const iterator& rhs)
const 4704 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4705 return m_pItem == rhs.m_pItem;
4707 bool operator!=(
const iterator& rhs)
const 4709 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4710 return m_pItem != rhs.m_pItem;
4714 VmaRawList<T>* m_pList;
4715 VmaListItem<T>* m_pItem;
4717 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4723 friend class VmaList<T, AllocatorT>;
4726 class const_iterator
4735 const_iterator(
const iterator& src) :
4736 m_pList(src.m_pList),
4737 m_pItem(src.m_pItem)
4741 const T& operator*()
const 4743 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4744 return m_pItem->Value;
4746 const T* operator->()
const 4748 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4749 return &m_pItem->Value;
4752 const_iterator& operator++()
4754 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4755 m_pItem = m_pItem->pNext;
4758 const_iterator& operator--()
4760 if(m_pItem != VMA_NULL)
4762 m_pItem = m_pItem->pPrev;
4766 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4767 m_pItem = m_pList->Back();
4772 const_iterator operator++(
int)
4774 const_iterator result = *
this;
4778 const_iterator operator--(
int)
4780 const_iterator result = *
this;
4785 bool operator==(
const const_iterator& rhs)
const 4787 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4788 return m_pItem == rhs.m_pItem;
4790 bool operator!=(
const const_iterator& rhs)
const 4792 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4793 return m_pItem != rhs.m_pItem;
4797 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4803 const VmaRawList<T>* m_pList;
4804 const VmaListItem<T>* m_pItem;
4806 friend class VmaList<T, AllocatorT>;
4809 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4811 bool empty()
const {
return m_RawList.IsEmpty(); }
4812 size_t size()
const {
return m_RawList.GetCount(); }
4814 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4815 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4817 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4818 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4820 void clear() { m_RawList.Clear(); }
4821 void push_back(
const T& value) { m_RawList.PushBack(value); }
4822 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4823 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4826 VmaRawList<T> m_RawList;
4829 #endif // #if VMA_USE_STL_LIST 4837 #if VMA_USE_STL_UNORDERED_MAP 4839 #define VmaPair std::pair 4841 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4842 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4844 #else // #if VMA_USE_STL_UNORDERED_MAP 4846 template<
typename T1,
typename T2>
4852 VmaPair() : first(), second() { }
4853 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4859 template<
typename KeyT,
typename ValueT>
4863 typedef VmaPair<KeyT, ValueT> PairType;
4864 typedef PairType* iterator;
4866 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4868 iterator begin() {
return m_Vector.begin(); }
4869 iterator end() {
return m_Vector.end(); }
4871 void insert(
const PairType& pair);
4872 iterator find(
const KeyT& key);
4873 void erase(iterator it);
4876 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4879 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4881 template<
typename FirstT,
typename SecondT>
4882 struct VmaPairFirstLess
4884 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4886 return lhs.first < rhs.first;
4888 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4890 return lhs.first < rhsFirst;
4894 template<
typename KeyT,
typename ValueT>
4895 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4897 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4899 m_Vector.data() + m_Vector.size(),
4901 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4902 VmaVectorInsert(m_Vector, indexToInsert, pair);
4905 template<
typename KeyT,
typename ValueT>
4906 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4908 PairType* it = VmaBinaryFindFirstNotLess(
4910 m_Vector.data() + m_Vector.size(),
4912 VmaPairFirstLess<KeyT, ValueT>());
4913 if((it != m_Vector.end()) && (it->first == key))
4919 return m_Vector.end();
4923 template<
typename KeyT,
typename ValueT>
4924 void VmaMap<KeyT, ValueT>::erase(iterator it)
4926 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4929 #endif // #if VMA_USE_STL_UNORDERED_MAP 4935 class VmaDeviceMemoryBlock;
4937 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4939 struct VmaAllocation_T
4942 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4946 FLAG_USER_DATA_STRING = 0x01,
4950 enum ALLOCATION_TYPE
4952 ALLOCATION_TYPE_NONE,
4953 ALLOCATION_TYPE_BLOCK,
4954 ALLOCATION_TYPE_DEDICATED,
4962 void Ctor(uint32_t currentFrameIndex,
bool userDataString)
4966 m_pUserData = VMA_NULL;
4967 m_LastUseFrameIndex = currentFrameIndex;
4968 m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
4969 m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
4971 m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
4973 #if VMA_STATS_STRING_ENABLED 4974 m_CreationFrameIndex = currentFrameIndex;
4975 m_BufferImageUsage = 0;
4981 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4984 VMA_ASSERT(m_pUserData == VMA_NULL);
4987 void InitBlockAllocation(
4988 VmaDeviceMemoryBlock* block,
4989 VkDeviceSize offset,
4990 VkDeviceSize alignment,
4992 VmaSuballocationType suballocationType,
4996 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4997 VMA_ASSERT(block != VMA_NULL);
4998 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4999 m_Alignment = alignment;
5001 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5002 m_SuballocationType = (uint8_t)suballocationType;
5003 m_BlockAllocation.m_Block = block;
5004 m_BlockAllocation.m_Offset = offset;
5005 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
5010 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5011 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
5012 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5013 m_BlockAllocation.m_Block = VMA_NULL;
5014 m_BlockAllocation.m_Offset = 0;
5015 m_BlockAllocation.m_CanBecomeLost =
true;
5018 void ChangeBlockAllocation(
5020 VmaDeviceMemoryBlock* block,
5021 VkDeviceSize offset);
5023 void ChangeSize(VkDeviceSize newSize);
5024 void ChangeOffset(VkDeviceSize newOffset);
5027 void InitDedicatedAllocation(
5028 uint32_t memoryTypeIndex,
5029 VkDeviceMemory hMemory,
5030 VmaSuballocationType suballocationType,
5034 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5035 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
5036 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5039 m_SuballocationType = (uint8_t)suballocationType;
5040 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5041 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
5042 m_DedicatedAllocation.m_hMemory = hMemory;
5043 m_DedicatedAllocation.m_pMappedData = pMappedData;
5046 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5047 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5048 VkDeviceSize GetSize()
const {
return m_Size; }
5049 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5050 void* GetUserData()
const {
return m_pUserData; }
5051 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5052 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
5054 VmaDeviceMemoryBlock* GetBlock()
const 5056 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5057 return m_BlockAllocation.m_Block;
5059 VkDeviceSize GetOffset()
const;
5060 VkDeviceMemory GetMemory()
const;
5061 uint32_t GetMemoryTypeIndex()
const;
5062 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5063 void* GetMappedData()
const;
5064 bool CanBecomeLost()
const;
5066 uint32_t GetLastUseFrameIndex()
const 5068 return m_LastUseFrameIndex.load();
5070 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5072 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5082 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5084 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5086 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
5097 void BlockAllocMap();
5098 void BlockAllocUnmap();
5099 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
5102 #if VMA_STATS_STRING_ENABLED 5103 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5104 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5106 void InitBufferImageUsage(uint32_t bufferImageUsage)
5108 VMA_ASSERT(m_BufferImageUsage == 0);
5109 m_BufferImageUsage = bufferImageUsage;
5112 void PrintParameters(
class VmaJsonWriter& json)
const;
5116 VkDeviceSize m_Alignment;
5117 VkDeviceSize m_Size;
5119 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5121 uint8_t m_SuballocationType;
5128 struct BlockAllocation
5130 VmaDeviceMemoryBlock* m_Block;
5131 VkDeviceSize m_Offset;
5132 bool m_CanBecomeLost;
5136 struct DedicatedAllocation
5138 uint32_t m_MemoryTypeIndex;
5139 VkDeviceMemory m_hMemory;
5140 void* m_pMappedData;
5146 BlockAllocation m_BlockAllocation;
5148 DedicatedAllocation m_DedicatedAllocation;
5151 #if VMA_STATS_STRING_ENABLED 5152 uint32_t m_CreationFrameIndex;
5153 uint32_t m_BufferImageUsage;
5163 struct VmaSuballocation
5165 VkDeviceSize offset;
5168 VmaSuballocationType type;
5172 struct VmaSuballocationOffsetLess
5174 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5176 return lhs.offset < rhs.offset;
5179 struct VmaSuballocationOffsetGreater
5181 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5183 return lhs.offset > rhs.offset;
5187 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
5190 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5192 enum class VmaAllocationRequestType
5214 struct VmaAllocationRequest
5216 VkDeviceSize offset;
5217 VkDeviceSize sumFreeSize;
5218 VkDeviceSize sumItemSize;
5219 VmaSuballocationList::iterator item;
5220 size_t itemsToMakeLostCount;
5222 VmaAllocationRequestType type;
5224 VkDeviceSize CalcCost()
const 5226 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
5234 class VmaBlockMetadata
5238 virtual ~VmaBlockMetadata() { }
5239 virtual void Init(VkDeviceSize size) { m_Size = size; }
5242 virtual bool Validate()
const = 0;
5243 VkDeviceSize GetSize()
const {
return m_Size; }
5244 virtual size_t GetAllocationCount()
const = 0;
5245 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5246 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5248 virtual bool IsEmpty()
const = 0;
5250 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5252 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5254 #if VMA_STATS_STRING_ENABLED 5255 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
5261 virtual bool CreateAllocationRequest(
5262 uint32_t currentFrameIndex,
5263 uint32_t frameInUseCount,
5264 VkDeviceSize bufferImageGranularity,
5265 VkDeviceSize allocSize,
5266 VkDeviceSize allocAlignment,
5268 VmaSuballocationType allocType,
5269 bool canMakeOtherLost,
5272 VmaAllocationRequest* pAllocationRequest) = 0;
5274 virtual bool MakeRequestedAllocationsLost(
5275 uint32_t currentFrameIndex,
5276 uint32_t frameInUseCount,
5277 VmaAllocationRequest* pAllocationRequest) = 0;
5279 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5281 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
5285 const VmaAllocationRequest& request,
5286 VmaSuballocationType type,
5287 VkDeviceSize allocSize,
5292 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5295 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5298 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
5300 #if VMA_STATS_STRING_ENABLED 5301 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5302 VkDeviceSize unusedBytes,
5303 size_t allocationCount,
5304 size_t unusedRangeCount)
const;
5305 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5306 VkDeviceSize offset,
5308 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5309 VkDeviceSize offset,
5310 VkDeviceSize size)
const;
5311 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5315 VkDeviceSize m_Size;
5316 const VkAllocationCallbacks* m_pAllocationCallbacks;
5319 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5320 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5324 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5326 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5329 virtual ~VmaBlockMetadata_Generic();
5330 virtual void Init(VkDeviceSize size);
5332 virtual bool Validate()
const;
5333 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5334 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5335 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5336 virtual bool IsEmpty()
const;
5338 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5339 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5341 #if VMA_STATS_STRING_ENABLED 5342 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5345 virtual bool CreateAllocationRequest(
5346 uint32_t currentFrameIndex,
5347 uint32_t frameInUseCount,
5348 VkDeviceSize bufferImageGranularity,
5349 VkDeviceSize allocSize,
5350 VkDeviceSize allocAlignment,
5352 VmaSuballocationType allocType,
5353 bool canMakeOtherLost,
5355 VmaAllocationRequest* pAllocationRequest);
5357 virtual bool MakeRequestedAllocationsLost(
5358 uint32_t currentFrameIndex,
5359 uint32_t frameInUseCount,
5360 VmaAllocationRequest* pAllocationRequest);
5362 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5364 virtual VkResult CheckCorruption(
const void* pBlockData);
5367 const VmaAllocationRequest& request,
5368 VmaSuballocationType type,
5369 VkDeviceSize allocSize,
5373 virtual void FreeAtOffset(VkDeviceSize offset);
5375 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5380 bool IsBufferImageGranularityConflictPossible(
5381 VkDeviceSize bufferImageGranularity,
5382 VmaSuballocationType& inOutPrevSuballocType)
const;
5385 friend class VmaDefragmentationAlgorithm_Generic;
5386 friend class VmaDefragmentationAlgorithm_Fast;
5388 uint32_t m_FreeCount;
5389 VkDeviceSize m_SumFreeSize;
5390 VmaSuballocationList m_Suballocations;
5393 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5395 bool ValidateFreeSuballocationList()
const;
5399 bool CheckAllocation(
5400 uint32_t currentFrameIndex,
5401 uint32_t frameInUseCount,
5402 VkDeviceSize bufferImageGranularity,
5403 VkDeviceSize allocSize,
5404 VkDeviceSize allocAlignment,
5405 VmaSuballocationType allocType,
5406 VmaSuballocationList::const_iterator suballocItem,
5407 bool canMakeOtherLost,
5408 VkDeviceSize* pOffset,
5409 size_t* itemsToMakeLostCount,
5410 VkDeviceSize* pSumFreeSize,
5411 VkDeviceSize* pSumItemSize)
const;
5413 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5417 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5420 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5423 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5504 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5506 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5509 virtual ~VmaBlockMetadata_Linear();
5510 virtual void Init(VkDeviceSize size);
5512 virtual bool Validate()
const;
5513 virtual size_t GetAllocationCount()
const;
5514 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5515 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5516 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5518 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5519 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5521 #if VMA_STATS_STRING_ENABLED 5522 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5525 virtual bool CreateAllocationRequest(
5526 uint32_t currentFrameIndex,
5527 uint32_t frameInUseCount,
5528 VkDeviceSize bufferImageGranularity,
5529 VkDeviceSize allocSize,
5530 VkDeviceSize allocAlignment,
5532 VmaSuballocationType allocType,
5533 bool canMakeOtherLost,
5535 VmaAllocationRequest* pAllocationRequest);
5537 virtual bool MakeRequestedAllocationsLost(
5538 uint32_t currentFrameIndex,
5539 uint32_t frameInUseCount,
5540 VmaAllocationRequest* pAllocationRequest);
5542 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5544 virtual VkResult CheckCorruption(
const void* pBlockData);
5547 const VmaAllocationRequest& request,
5548 VmaSuballocationType type,
5549 VkDeviceSize allocSize,
5553 virtual void FreeAtOffset(VkDeviceSize offset);
5563 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5565 enum SECOND_VECTOR_MODE
5567 SECOND_VECTOR_EMPTY,
5572 SECOND_VECTOR_RING_BUFFER,
5578 SECOND_VECTOR_DOUBLE_STACK,
5581 VkDeviceSize m_SumFreeSize;
5582 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5583 uint32_t m_1stVectorIndex;
5584 SECOND_VECTOR_MODE m_2ndVectorMode;
5586 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5587 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5588 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5589 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5592 size_t m_1stNullItemsBeginCount;
5594 size_t m_1stNullItemsMiddleCount;
5596 size_t m_2ndNullItemsCount;
5598 bool ShouldCompact1st()
const;
5599 void CleanupAfterFree();
5601 bool CreateAllocationRequest_LowerAddress(
5602 uint32_t currentFrameIndex,
5603 uint32_t frameInUseCount,
5604 VkDeviceSize bufferImageGranularity,
5605 VkDeviceSize allocSize,
5606 VkDeviceSize allocAlignment,
5607 VmaSuballocationType allocType,
5608 bool canMakeOtherLost,
5610 VmaAllocationRequest* pAllocationRequest);
5611 bool CreateAllocationRequest_UpperAddress(
5612 uint32_t currentFrameIndex,
5613 uint32_t frameInUseCount,
5614 VkDeviceSize bufferImageGranularity,
5615 VkDeviceSize allocSize,
5616 VkDeviceSize allocAlignment,
5617 VmaSuballocationType allocType,
5618 bool canMakeOtherLost,
5620 VmaAllocationRequest* pAllocationRequest);
5634 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5636 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5639 virtual ~VmaBlockMetadata_Buddy();
5640 virtual void Init(VkDeviceSize size);
5642 virtual bool Validate()
const;
5643 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5644 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5645 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5646 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5648 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5649 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5651 #if VMA_STATS_STRING_ENABLED 5652 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5655 virtual bool CreateAllocationRequest(
5656 uint32_t currentFrameIndex,
5657 uint32_t frameInUseCount,
5658 VkDeviceSize bufferImageGranularity,
5659 VkDeviceSize allocSize,
5660 VkDeviceSize allocAlignment,
5662 VmaSuballocationType allocType,
5663 bool canMakeOtherLost,
5665 VmaAllocationRequest* pAllocationRequest);
5667 virtual bool MakeRequestedAllocationsLost(
5668 uint32_t currentFrameIndex,
5669 uint32_t frameInUseCount,
5670 VmaAllocationRequest* pAllocationRequest);
5672 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5674 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5677 const VmaAllocationRequest& request,
5678 VmaSuballocationType type,
5679 VkDeviceSize allocSize,
5682 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5683 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5686 static const VkDeviceSize MIN_NODE_SIZE = 32;
5687 static const size_t MAX_LEVELS = 30;
5689 struct ValidationContext
5691 size_t calculatedAllocationCount;
5692 size_t calculatedFreeCount;
5693 VkDeviceSize calculatedSumFreeSize;
5695 ValidationContext() :
5696 calculatedAllocationCount(0),
5697 calculatedFreeCount(0),
5698 calculatedSumFreeSize(0) { }
5703 VkDeviceSize offset;
5733 VkDeviceSize m_UsableSize;
5734 uint32_t m_LevelCount;
5740 } m_FreeList[MAX_LEVELS];
5742 size_t m_AllocationCount;
5746 VkDeviceSize m_SumFreeSize;
5748 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5749 void DeleteNode(Node* node);
5750 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5751 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5752 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5754 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5755 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5759 void AddToFreeListFront(uint32_t level, Node* node);
5763 void RemoveFromFreeList(uint32_t level, Node* node);
5765 #if VMA_STATS_STRING_ENABLED 5766 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5776 class VmaDeviceMemoryBlock
5778 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5780 VmaBlockMetadata* m_pMetadata;
5784 ~VmaDeviceMemoryBlock()
5786 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5787 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5794 uint32_t newMemoryTypeIndex,
5795 VkDeviceMemory newMemory,
5796 VkDeviceSize newSize,
5798 uint32_t algorithm);
5802 VmaPool GetParentPool()
const {
return m_hParentPool; }
5803 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5804 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5805 uint32_t GetId()
const {
return m_Id; }
5806 void* GetMappedData()
const {
return m_pMappedData; }
5809 bool Validate()
const;
5814 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5817 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5818 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5820 VkResult BindBufferMemory(
5824 VkResult BindImageMemory(
5831 uint32_t m_MemoryTypeIndex;
5833 VkDeviceMemory m_hMemory;
5841 uint32_t m_MapCount;
5842 void* m_pMappedData;
5845 struct VmaPointerLess
5847 bool operator()(
const void* lhs,
const void* rhs)
const 5853 struct VmaDefragmentationMove
5855 size_t srcBlockIndex;
5856 size_t dstBlockIndex;
5857 VkDeviceSize srcOffset;
5858 VkDeviceSize dstOffset;
5862 class VmaDefragmentationAlgorithm;
5870 struct VmaBlockVector
5872 VMA_CLASS_NO_COPY(VmaBlockVector)
5877 uint32_t memoryTypeIndex,
5878 VkDeviceSize preferredBlockSize,
5879 size_t minBlockCount,
5880 size_t maxBlockCount,
5881 VkDeviceSize bufferImageGranularity,
5882 uint32_t frameInUseCount,
5884 bool explicitBlockSize,
5885 uint32_t algorithm);
5888 VkResult CreateMinBlocks();
5890 VmaPool GetParentPool()
const {
return m_hParentPool; }
5891 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5892 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5893 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5894 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5895 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5899 bool IsEmpty()
const {
return m_Blocks.empty(); }
5900 bool IsCorruptionDetectionEnabled()
const;
5903 uint32_t currentFrameIndex,
5905 VkDeviceSize alignment,
5907 VmaSuballocationType suballocType,
5908 size_t allocationCount,
5917 #if VMA_STATS_STRING_ENABLED 5918 void PrintDetailedMap(
class VmaJsonWriter& json);
5921 void MakePoolAllocationsLost(
5922 uint32_t currentFrameIndex,
5923 size_t* pLostAllocationCount);
5924 VkResult CheckCorruption();
5928 class VmaBlockVectorDefragmentationContext* pCtx,
5930 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5931 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5932 VkCommandBuffer commandBuffer);
5933 void DefragmentationEnd(
5934 class VmaBlockVectorDefragmentationContext* pCtx,
5940 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5941 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5942 size_t CalcAllocationCount()
const;
5943 bool IsBufferImageGranularityConflictPossible()
const;
5946 friend class VmaDefragmentationAlgorithm_Generic;
5950 const uint32_t m_MemoryTypeIndex;
5951 const VkDeviceSize m_PreferredBlockSize;
5952 const size_t m_MinBlockCount;
5953 const size_t m_MaxBlockCount;
5954 const VkDeviceSize m_BufferImageGranularity;
5955 const uint32_t m_FrameInUseCount;
5956 const bool m_IsCustomPool;
5957 const bool m_ExplicitBlockSize;
5958 const uint32_t m_Algorithm;
5962 bool m_HasEmptyBlock;
5963 VMA_RW_MUTEX m_Mutex;
5965 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5966 uint32_t m_NextBlockId;
5968 VkDeviceSize CalcMaxBlockSize()
const;
5971 void Remove(VmaDeviceMemoryBlock* pBlock);
5975 void IncrementallySortBlocks();
5977 VkResult AllocatePage(
5978 uint32_t currentFrameIndex,
5980 VkDeviceSize alignment,
5982 VmaSuballocationType suballocType,
5986 VkResult AllocateFromBlock(
5987 VmaDeviceMemoryBlock* pBlock,
5988 uint32_t currentFrameIndex,
5990 VkDeviceSize alignment,
5993 VmaSuballocationType suballocType,
5997 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
6000 void ApplyDefragmentationMovesCpu(
6001 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6002 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
6004 void ApplyDefragmentationMovesGpu(
6005 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6006 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6007 VkCommandBuffer commandBuffer);
6018 VMA_CLASS_NO_COPY(VmaPool_T)
6020 VmaBlockVector m_BlockVector;
6025 VkDeviceSize preferredBlockSize);
6028 uint32_t GetId()
const {
return m_Id; }
6029 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
6031 #if VMA_STATS_STRING_ENABLED 6046 class VmaDefragmentationAlgorithm
6048 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
6050 VmaDefragmentationAlgorithm(
6052 VmaBlockVector* pBlockVector,
6053 uint32_t currentFrameIndex) :
6054 m_hAllocator(hAllocator),
6055 m_pBlockVector(pBlockVector),
6056 m_CurrentFrameIndex(currentFrameIndex)
6059 virtual ~VmaDefragmentationAlgorithm()
6063 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
6064 virtual void AddAll() = 0;
6066 virtual VkResult Defragment(
6067 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6068 VkDeviceSize maxBytesToMove,
6069 uint32_t maxAllocationsToMove) = 0;
6071 virtual VkDeviceSize GetBytesMoved()
const = 0;
6072 virtual uint32_t GetAllocationsMoved()
const = 0;
6076 VmaBlockVector*
const m_pBlockVector;
6077 const uint32_t m_CurrentFrameIndex;
6079 struct AllocationInfo
6082 VkBool32* m_pChanged;
6085 m_hAllocation(VK_NULL_HANDLE),
6086 m_pChanged(VMA_NULL)
6090 m_hAllocation(hAlloc),
6091 m_pChanged(pChanged)
6097 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6099 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6101 VmaDefragmentationAlgorithm_Generic(
6103 VmaBlockVector* pBlockVector,
6104 uint32_t currentFrameIndex,
6105 bool overlappingMoveSupported);
6106 virtual ~VmaDefragmentationAlgorithm_Generic();
6108 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6109 virtual void AddAll() { m_AllAllocations =
true; }
6111 virtual VkResult Defragment(
6112 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6113 VkDeviceSize maxBytesToMove,
6114 uint32_t maxAllocationsToMove);
6116 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6117 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6120 uint32_t m_AllocationCount;
6121 bool m_AllAllocations;
6123 VkDeviceSize m_BytesMoved;
6124 uint32_t m_AllocationsMoved;
6126 struct AllocationInfoSizeGreater
6128 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6130 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6134 struct AllocationInfoOffsetGreater
6136 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6138 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
6144 size_t m_OriginalBlockIndex;
6145 VmaDeviceMemoryBlock* m_pBlock;
6146 bool m_HasNonMovableAllocations;
6147 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6149 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6150 m_OriginalBlockIndex(SIZE_MAX),
6152 m_HasNonMovableAllocations(true),
6153 m_Allocations(pAllocationCallbacks)
6157 void CalcHasNonMovableAllocations()
6159 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6160 const size_t defragmentAllocCount = m_Allocations.size();
6161 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6164 void SortAllocationsBySizeDescending()
6166 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6169 void SortAllocationsByOffsetDescending()
6171 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
6175 struct BlockPointerLess
6177 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6179 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6181 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6183 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
6189 struct BlockInfoCompareMoveDestination
6191 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6193 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6197 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6201 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6209 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6210 BlockInfoVector m_Blocks;
6212 VkResult DefragmentRound(
6213 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6214 VkDeviceSize maxBytesToMove,
6215 uint32_t maxAllocationsToMove);
6217 size_t CalcBlocksWithNonMovableCount()
const;
6219 static bool MoveMakesSense(
6220 size_t dstBlockIndex, VkDeviceSize dstOffset,
6221 size_t srcBlockIndex, VkDeviceSize srcOffset);
6224 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6226 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6228 VmaDefragmentationAlgorithm_Fast(
6230 VmaBlockVector* pBlockVector,
6231 uint32_t currentFrameIndex,
6232 bool overlappingMoveSupported);
6233 virtual ~VmaDefragmentationAlgorithm_Fast();
6235 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6236 virtual void AddAll() { m_AllAllocations =
true; }
6238 virtual VkResult Defragment(
6239 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6240 VkDeviceSize maxBytesToMove,
6241 uint32_t maxAllocationsToMove);
6243 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6244 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6249 size_t origBlockIndex;
6252 class FreeSpaceDatabase
6258 s.blockInfoIndex = SIZE_MAX;
6259 for(
size_t i = 0; i < MAX_COUNT; ++i)
6261 m_FreeSpaces[i] = s;
6265 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6267 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6273 size_t bestIndex = SIZE_MAX;
6274 for(
size_t i = 0; i < MAX_COUNT; ++i)
6277 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6282 if(m_FreeSpaces[i].size < size &&
6283 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6289 if(bestIndex != SIZE_MAX)
6291 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6292 m_FreeSpaces[bestIndex].offset = offset;
6293 m_FreeSpaces[bestIndex].size = size;
6297 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6298 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6300 size_t bestIndex = SIZE_MAX;
6301 VkDeviceSize bestFreeSpaceAfter = 0;
6302 for(
size_t i = 0; i < MAX_COUNT; ++i)
6305 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6307 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6309 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6311 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6313 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6316 bestFreeSpaceAfter = freeSpaceAfter;
6322 if(bestIndex != SIZE_MAX)
6324 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6325 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6327 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6330 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6331 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6332 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6337 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6347 static const size_t MAX_COUNT = 4;
6351 size_t blockInfoIndex;
6352 VkDeviceSize offset;
6354 } m_FreeSpaces[MAX_COUNT];
6357 const bool m_OverlappingMoveSupported;
6359 uint32_t m_AllocationCount;
6360 bool m_AllAllocations;
6362 VkDeviceSize m_BytesMoved;
6363 uint32_t m_AllocationsMoved;
6365 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6367 void PreprocessMetadata();
6368 void PostprocessMetadata();
6369 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
6372 struct VmaBlockDefragmentationContext
6376 BLOCK_FLAG_USED = 0x00000001,
6381 VmaBlockDefragmentationContext() :
6383 hBuffer(VK_NULL_HANDLE)
6388 class VmaBlockVectorDefragmentationContext
6390 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6394 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6396 VmaBlockVectorDefragmentationContext(
6399 VmaBlockVector* pBlockVector,
6400 uint32_t currFrameIndex,
6402 ~VmaBlockVectorDefragmentationContext();
6404 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6405 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6406 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6408 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6409 void AddAll() { m_AllAllocations =
true; }
6411 void Begin(
bool overlappingMoveSupported);
6418 VmaBlockVector*
const m_pBlockVector;
6419 const uint32_t m_CurrFrameIndex;
6420 const uint32_t m_AlgorithmFlags;
6422 VmaDefragmentationAlgorithm* m_pAlgorithm;
6430 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6431 bool m_AllAllocations;
6434 struct VmaDefragmentationContext_T
6437 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6439 VmaDefragmentationContext_T(
6441 uint32_t currFrameIndex,
6444 ~VmaDefragmentationContext_T();
6446 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6447 void AddAllocations(
6448 uint32_t allocationCount,
6450 VkBool32* pAllocationsChanged);
6458 VkResult Defragment(
6459 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6460 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6465 const uint32_t m_CurrFrameIndex;
6466 const uint32_t m_Flags;
6469 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6471 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
6474 #if VMA_RECORDING_ENABLED 6481 void WriteConfiguration(
6482 const VkPhysicalDeviceProperties& devProps,
6483 const VkPhysicalDeviceMemoryProperties& memProps,
6484 bool dedicatedAllocationExtensionEnabled);
6487 void RecordCreateAllocator(uint32_t frameIndex);
6488 void RecordDestroyAllocator(uint32_t frameIndex);
6489 void RecordCreatePool(uint32_t frameIndex,
6492 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6493 void RecordAllocateMemory(uint32_t frameIndex,
6494 const VkMemoryRequirements& vkMemReq,
6497 void RecordAllocateMemoryPages(uint32_t frameIndex,
6498 const VkMemoryRequirements& vkMemReq,
6500 uint64_t allocationCount,
6502 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6503 const VkMemoryRequirements& vkMemReq,
6504 bool requiresDedicatedAllocation,
6505 bool prefersDedicatedAllocation,
6508 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6509 const VkMemoryRequirements& vkMemReq,
6510 bool requiresDedicatedAllocation,
6511 bool prefersDedicatedAllocation,
6514 void RecordFreeMemory(uint32_t frameIndex,
6516 void RecordFreeMemoryPages(uint32_t frameIndex,
6517 uint64_t allocationCount,
6519 void RecordResizeAllocation(
6520 uint32_t frameIndex,
6522 VkDeviceSize newSize);
6523 void RecordSetAllocationUserData(uint32_t frameIndex,
6525 const void* pUserData);
6526 void RecordCreateLostAllocation(uint32_t frameIndex,
6528 void RecordMapMemory(uint32_t frameIndex,
6530 void RecordUnmapMemory(uint32_t frameIndex,
6532 void RecordFlushAllocation(uint32_t frameIndex,
6533 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6534 void RecordInvalidateAllocation(uint32_t frameIndex,
6535 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6536 void RecordCreateBuffer(uint32_t frameIndex,
6537 const VkBufferCreateInfo& bufCreateInfo,
6540 void RecordCreateImage(uint32_t frameIndex,
6541 const VkImageCreateInfo& imageCreateInfo,
6544 void RecordDestroyBuffer(uint32_t frameIndex,
6546 void RecordDestroyImage(uint32_t frameIndex,
6548 void RecordTouchAllocation(uint32_t frameIndex,
6550 void RecordGetAllocationInfo(uint32_t frameIndex,
6552 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6554 void RecordDefragmentationBegin(uint32_t frameIndex,
6557 void RecordDefragmentationEnd(uint32_t frameIndex,
6567 class UserDataString
6571 const char* GetString()
const {
return m_Str; }
6581 VMA_MUTEX m_FileMutex;
6583 int64_t m_StartCounter;
6585 void GetBasicParams(CallParams& outParams);
6588 template<
typename T>
6589 void PrintPointerList(uint64_t count,
const T* pItems)
6593 fprintf(m_File,
"%p", pItems[0]);
6594 for(uint64_t i = 1; i < count; ++i)
6596 fprintf(m_File,
" %p", pItems[i]);
6601 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
6605 #endif // #if VMA_RECORDING_ENABLED 6610 class VmaAllocationObjectAllocator
6612 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
6614 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
6621 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
6625 struct VmaAllocator_T
6627 VMA_CLASS_NO_COPY(VmaAllocator_T)
6630 bool m_UseKhrDedicatedAllocation;
6632 bool m_AllocationCallbacksSpecified;
6633 VkAllocationCallbacks m_AllocationCallbacks;
6635 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
6638 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6639 VMA_MUTEX m_HeapSizeLimitMutex;
6641 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6642 VkPhysicalDeviceMemoryProperties m_MemProps;
6645 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
6648 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6649 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6650 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
6656 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6658 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6662 return m_VulkanFunctions;
6665 VkDeviceSize GetBufferImageGranularity()
const 6668 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6669 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6672 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6673 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6675 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6677 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6678 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6681 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6683 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6684 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6687 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6689 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6690 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6691 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6694 bool IsIntegratedGpu()
const 6696 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6699 #if VMA_RECORDING_ENABLED 6700 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6703 void GetBufferMemoryRequirements(
6705 VkMemoryRequirements& memReq,
6706 bool& requiresDedicatedAllocation,
6707 bool& prefersDedicatedAllocation)
const;
6708 void GetImageMemoryRequirements(
6710 VkMemoryRequirements& memReq,
6711 bool& requiresDedicatedAllocation,
6712 bool& prefersDedicatedAllocation)
const;
6715 VkResult AllocateMemory(
6716 const VkMemoryRequirements& vkMemReq,
6717 bool requiresDedicatedAllocation,
6718 bool prefersDedicatedAllocation,
6719 VkBuffer dedicatedBuffer,
6720 VkImage dedicatedImage,
6722 VmaSuballocationType suballocType,
6723 size_t allocationCount,
6728 size_t allocationCount,
6731 VkResult ResizeAllocation(
6733 VkDeviceSize newSize);
6735 void CalculateStats(
VmaStats* pStats);
6737 #if VMA_STATS_STRING_ENABLED 6738 void PrintDetailedMap(
class VmaJsonWriter& json);
6741 VkResult DefragmentationBegin(
6745 VkResult DefragmentationEnd(
6752 void DestroyPool(
VmaPool pool);
6755 void SetCurrentFrameIndex(uint32_t frameIndex);
6756 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6758 void MakePoolAllocationsLost(
6760 size_t* pLostAllocationCount);
6761 VkResult CheckPoolCorruption(
VmaPool hPool);
6762 VkResult CheckCorruption(uint32_t memoryTypeBits);
6766 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6767 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6772 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6773 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6775 void FlushOrInvalidateAllocation(
6777 VkDeviceSize offset, VkDeviceSize size,
6778 VMA_CACHE_OPERATION op);
6780 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6783 VkDeviceSize m_PreferredLargeHeapBlockSize;
6785 VkPhysicalDevice m_PhysicalDevice;
6786 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6788 VMA_RW_MUTEX m_PoolsMutex;
6790 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6791 uint32_t m_NextPoolId;
6795 #if VMA_RECORDING_ENABLED 6796 VmaRecorder* m_pRecorder;
6801 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
6803 VkResult AllocateMemoryOfType(
6805 VkDeviceSize alignment,
6806 bool dedicatedAllocation,
6807 VkBuffer dedicatedBuffer,
6808 VkImage dedicatedImage,
6810 uint32_t memTypeIndex,
6811 VmaSuballocationType suballocType,
6812 size_t allocationCount,
6816 VkResult AllocateDedicatedMemoryPage(
6818 VmaSuballocationType suballocType,
6819 uint32_t memTypeIndex,
6820 const VkMemoryAllocateInfo& allocInfo,
6822 bool isUserDataString,
6827 VkResult AllocateDedicatedMemory(
6829 VmaSuballocationType suballocType,
6830 uint32_t memTypeIndex,
6832 bool isUserDataString,
6834 VkBuffer dedicatedBuffer,
6835 VkImage dedicatedImage,
6836 size_t allocationCount,
// Allocation helpers that forward to the allocator's VkAllocationCallbacks.
// NOTE(review): braces and some statements (e.g. placement-new / explicit
// destructor calls in vma_new*/vma_delete*) are elided in this extraction.
6846 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
// Delegates to the callback-based VmaMalloc overload.
6848 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6851 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6853 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation sized and aligned for T.
6856 template<
typename T>
6859 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Typed array allocation: `count` elements of T.
6862 template<
typename T>
6863 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6865 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroys and frees a single object (destructor call elided here).
6868 template<
typename T>
6869 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6874 VmaFree(hAllocator, ptr);
// Destroys each element in reverse order, then frees the array storage.
6878 template<
typename T>
6879 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6883 for(
size_t i = count; i--; )
6885 VmaFree(hAllocator, ptr);
6892 #if VMA_STATS_STRING_ENABLED 6894 class VmaStringBuilder
6897 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6898 size_t GetLength()
const {
return m_Data.size(); }
6899 const char* GetData()
const {
return m_Data.data(); }
6901 void Add(
char ch) { m_Data.push_back(ch); }
6902 void Add(
const char* pStr);
6903 void AddNewLine() { Add(
'\n'); }
6904 void AddNumber(uint32_t num);
6905 void AddNumber(uint64_t num);
6906 void AddPointer(
const void* ptr);
6909 VmaVector< char, VmaStlAllocator<char> > m_Data;
6912 void VmaStringBuilder::Add(
const char* pStr)
6914 const size_t strLen = strlen(pStr);
6917 const size_t oldCount = m_Data.size();
6918 m_Data.resize(oldCount + strLen);
6919 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Numeric/pointer append helpers: format into a stack buffer, then Add() it.
// NOTE(review): the local `char buf[...]` declarations and the trailing
// Add(buf) calls are elided in this extraction.
6923 void VmaStringBuilder::AddNumber(uint32_t num)
6926 VmaUint32ToStr(buf,
sizeof(buf), num);
// 64-bit overload.
6930 void VmaStringBuilder::AddNumber(uint64_t num)
6933 VmaUint64ToStr(buf,
sizeof(buf), num);
// Appends a pointer formatted as text (e.g. for JSON dumps).
6937 void VmaStringBuilder::AddPointer(
const void* ptr)
6940 VmaPtrToStr(buf,
sizeof(buf), ptr);
6944 #endif // #if VMA_STATS_STRING_ENABLED 6949 #if VMA_STATS_STRING_ENABLED 6953 VMA_CLASS_NO_COPY(VmaJsonWriter)
6955 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
6958 void BeginObject(
bool singleLine =
false);
6961 void BeginArray(
bool singleLine =
false);
6964 void WriteString(
const char* pStr);
6965 void BeginString(
const char* pStr = VMA_NULL);
6966 void ContinueString(
const char* pStr);
6967 void ContinueString(uint32_t n);
6968 void ContinueString(uint64_t n);
6969 void ContinueString_Pointer(
const void* ptr);
6970 void EndString(
const char* pStr = VMA_NULL);
6972 void WriteNumber(uint32_t n);
6973 void WriteNumber(uint64_t n);
6974 void WriteBool(
bool b);
6978 static const char*
const INDENT;
6980 enum COLLECTION_TYPE
6982 COLLECTION_TYPE_OBJECT,
6983 COLLECTION_TYPE_ARRAY,
6987 COLLECTION_TYPE type;
6988 uint32_t valueCount;
6989 bool singleLineMode;
6992 VmaStringBuilder& m_SB;
6993 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6994 bool m_InsideString;
6996 void BeginValue(
bool isString);
6997 void WriteIndent(
bool oneLess =
false);
7000 const char*
const VmaJsonWriter::INDENT =
" ";
// Constructor: binds the writer to an external VmaStringBuilder (the m_SB(sb)
// initializer appears elided in this extraction) and starts outside any string.
7002 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
7004 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
7005 m_InsideString(false)
// Destructor: a writer must not be destroyed mid-string or with unclosed
// objects/arrays still on the stack.
7009 VmaJsonWriter::~VmaJsonWriter()
7011 VMA_ASSERT(!m_InsideString);
7012 VMA_ASSERT(m_Stack.empty());
// Begin/End for JSON objects and arrays. Each Begin pushes a StackItem
// describing the open collection; End pops it and checks the type matches.
// NOTE(review): the BeginValue()/m_SB.Add('{'...) emission lines and the
// StackItem declarations are elided in this extraction.
7015 void VmaJsonWriter::BeginObject(
bool singleLine)
7017 VMA_ASSERT(!m_InsideString);
7023 item.type = COLLECTION_TYPE_OBJECT;
7024 item.valueCount = 0;
// singleLineMode suppresses indentation/newlines inside this collection.
7025 item.singleLineMode = singleLine;
7026 m_Stack.push_back(item);
7029 void VmaJsonWriter::EndObject()
7031 VMA_ASSERT(!m_InsideString);
// The innermost open collection must actually be an object.
7036 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
7040 void VmaJsonWriter::BeginArray(
bool singleLine)
7042 VMA_ASSERT(!m_InsideString);
7048 item.type = COLLECTION_TYPE_ARRAY;
7049 item.valueCount = 0;
7050 item.singleLineMode = singleLine;
7051 m_Stack.push_back(item);
7054 void VmaJsonWriter::EndArray()
7056 VMA_ASSERT(!m_InsideString);
7061 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// String emission. WriteString = BeginString + EndString in one call.
// A string may be built incrementally with ContinueString between the two.
7065 void VmaJsonWriter::WriteString(
const char* pStr)
// Opens a JSON string (emits the opening quote — elided here) and optionally
// writes initial content.
7071 void VmaJsonWriter::BeginString(
const char* pStr)
7073 VMA_ASSERT(!m_InsideString);
7077 m_InsideString =
true;
7078 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7080 ContinueString(pStr);
// Appends raw characters to the open string, escaping per JSON rules.
// NOTE(review): the per-character escape switch (original lines ~7090-7121)
// is elided in this extraction; only the unsupported-character assert remains.
7084 void VmaJsonWriter::ContinueString(
const char* pStr)
7086 VMA_ASSERT(m_InsideString);
7088 const size_t strLen = strlen(pStr);
7089 for(
size_t i = 0; i < strLen; ++i)
7122 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric/pointer variants append formatted text inside the open string.
7128 void VmaJsonWriter::ContinueString(uint32_t n)
7130 VMA_ASSERT(m_InsideString);
7134 void VmaJsonWriter::ContinueString(uint64_t n)
7136 VMA_ASSERT(m_InsideString);
7140 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7142 VMA_ASSERT(m_InsideString);
7143 m_SB.AddPointer(ptr);
// Closes the open string (closing quote emission elided), optionally writing
// trailing content first.
7146 void VmaJsonWriter::EndString(
const char* pStr)
7148 VMA_ASSERT(m_InsideString);
7149 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7151 ContinueString(pStr);
7154 m_InsideString =
false;
// Scalar value writers — must be called between strings, never inside one.
// NOTE(review): the BeginValue(false)/m_SB.AddNumber(...) emission lines are
// elided in this extraction.
7157 void VmaJsonWriter::WriteNumber(uint32_t n)
7159 VMA_ASSERT(!m_InsideString);
7164 void VmaJsonWriter::WriteNumber(uint64_t n)
7166 VMA_ASSERT(!m_InsideString);
// Emits the literal tokens "true"/"false" (unquoted JSON booleans).
7171 void VmaJsonWriter::WriteBool(
bool b)
7173 VMA_ASSERT(!m_InsideString);
7175 m_SB.Add(b ?
"true" :
"false");
7178 void VmaJsonWriter::WriteNull()
7180 VMA_ASSERT(!m_InsideString);
// Called before every emitted value: inserts the separator required by the
// enclosing collection (":" after an object key, "," between siblings — the
// actual emission lines are elided here) and enforces that object keys are
// strings (even valueCount == key position, odd == value position).
7185 void VmaJsonWriter::BeginValue(
bool isString)
7187 if(!m_Stack.empty())
7189 StackItem& currItem = m_Stack.back();
7190 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7191 currItem.valueCount % 2 == 0)
// At a key position inside an object, the value must be a string.
7193 VMA_ASSERT(isString);
7196 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7197 currItem.valueCount % 2 != 0)
7201 else if(currItem.valueCount > 0)
7210 ++currItem.valueCount;
// Writes a newline plus one INDENT per open (non-single-line) collection.
// oneLess reduces depth by one — used when closing a collection.
7214 void VmaJsonWriter::WriteIndent(
bool oneLess)
7216 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7220 size_t count = m_Stack.size();
7221 if(count > 0 && oneLess)
7225 for(
size_t i = 0; i < count; ++i)
// Attaches opaque user data to the allocation. In "user data is string" mode
// the incoming pointer is treated as a NUL-terminated char* and deep-copied;
// otherwise the raw pointer is stored as-is.
7232 #endif // #if VMA_STATS_STRING_ENABLED 7236 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7238 if(IsUserDataString())
// Passing the currently-stored string back in would be freed below — forbid it.
7240 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
// Release any previously owned copy before storing the new one.
7242 FreeUserDataString(hAllocator);
7244 if(pUserData != VMA_NULL)
7246 const char*
const newStrSrc = (
char*)pUserData;
7247 const size_t newStrLen = strlen(newStrSrc);
// +1 copies the NUL terminator along with the characters.
7248 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7249 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7250 m_pUserData = newStrDst;
// Non-string mode: store the caller's pointer without taking ownership.
7255 m_pUserData = pUserData;
// Moves a block allocation to a (possibly different) memory block/offset.
// If the allocation is persistently mapped and the block changes, the mapping
// reference count is transferred: unmap the old block, map the new one.
7259 void VmaAllocation_T::ChangeBlockAllocation(
7261 VmaDeviceMemoryBlock* block,
7262 VkDeviceSize offset)
7264 VMA_ASSERT(block != VMA_NULL);
7265 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7268 if(block != m_BlockAllocation.m_Block)
// Strip the persistent-map flag to get the plain map reference count.
7270 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7271 if(IsPersistentMap())
7273 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7274 block->Map(hAllocator, mapRefCount, VMA_NULL);
7277 m_BlockAllocation.m_Block = block;
7278 m_BlockAllocation.m_Offset = offset;
// Updates the recorded size (the assignment line is elided in this extraction).
7281 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7283 VMA_ASSERT(newSize > 0);
7287 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7289 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7290 m_BlockAllocation.m_Offset = newOffset;
// Accessors dispatching on allocation type (block vs. dedicated).
// NOTE(review): the switch scaffolding and default branches are partially
// elided in this extraction.
7293 VkDeviceSize VmaAllocation_T::GetOffset()
const 7297 case ALLOCATION_TYPE_BLOCK:
7298 return m_BlockAllocation.m_Offset;
// Dedicated allocations start at offset 0 of their own VkDeviceMemory.
7299 case ALLOCATION_TYPE_DEDICATED:
7307 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7311 case ALLOCATION_TYPE_BLOCK:
7312 return m_BlockAllocation.m_Block->GetDeviceMemory();
7313 case ALLOCATION_TYPE_DEDICATED:
7314 return m_DedicatedAllocation.m_hMemory;
7317 return VK_NULL_HANDLE;
7321 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7325 case ALLOCATION_TYPE_BLOCK:
7326 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7327 case ALLOCATION_TYPE_DEDICATED:
7328 return m_DedicatedAllocation.m_MemoryTypeIndex;
// Returns the host-visible pointer for this allocation, or (in elided
// branches) null when it is not mapped.
7335 void* VmaAllocation_T::GetMappedData()
const 7339 case ALLOCATION_TYPE_BLOCK:
7342 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7343 VMA_ASSERT(pBlockData != VMA_NULL);
// Block mapping covers the whole block; offset into it for this allocation.
7344 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7351 case ALLOCATION_TYPE_DEDICATED:
7352 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7353 return m_DedicatedAllocation.m_pMappedData;
7360 bool VmaAllocation_T::CanBecomeLost()
const 7364 case ALLOCATION_TYPE_BLOCK:
7365 return m_BlockAllocation.m_CanBecomeLost;
// Dedicated allocations can never become lost.
7366 case ALLOCATION_TYPE_DEDICATED:
// Atomically marks the allocation lost via compare-exchange on its
// last-use frame index; fails if it is still in use or already lost.
7374 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7376 VMA_ASSERT(CanBecomeLost());
7382 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7385 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still within the in-use window — cannot be reclaimed yet.
7390 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7396 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
7418 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7420 json.WriteString(
"Type");
7421 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7423 json.WriteString(
"Size");
7424 json.WriteNumber(m_Size);
7426 if(m_pUserData != VMA_NULL)
7428 json.WriteString(
"UserData");
7429 if(IsUserDataString())
7431 json.WriteString((
const char*)m_pUserData);
7436 json.ContinueString_Pointer(m_pUserData);
7441 json.WriteString(
"CreationFrameIndex");
7442 json.WriteNumber(m_CreationFrameIndex);
7444 json.WriteString(
"LastUseFrameIndex");
7445 json.WriteNumber(GetLastUseFrameIndex());
7447 if(m_BufferImageUsage != 0)
7449 json.WriteString(
"Usage");
7450 json.WriteNumber(m_BufferImageUsage);
// Frees the deep-copied user-data string owned by this allocation (the
// counterpart of the copy made in SetUserData) and clears the pointer.
7456 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7458 VMA_ASSERT(IsUserDataString());
7459 if(m_pUserData != VMA_NULL)
7461 char*
const oldStr = (
char*)m_pUserData;
7462 const size_t oldStrLen = strlen(oldStr);
// +1 matches the NUL terminator included when the string was allocated.
7463 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7464 m_pUserData = VMA_NULL;
// Map/unmap reference counting. The low 7 bits of m_MapCount hold the map
// reference count (max 0x7F); MAP_COUNT_FLAG_PERSISTENT_MAP is a flag bit.
7468 void VmaAllocation_T::BlockAllocMap()
7470 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
// Increment (elided) is only allowed while the counter has headroom.
7472 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7478 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
7482 void VmaAllocation_T::BlockAllocUnmap()
7484 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
// Unmapping requires a prior successful map.
7486 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7492 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Dedicated variant: first map calls vkMapMemory; later maps just return the
// cached pointer and bump the reference count.
7496 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7498 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
// Already mapped (count > 0, branch scaffolding elided): reuse cached pointer.
7502 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7504 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7505 *ppData = m_DedicatedAllocation.m_pMappedData;
7511 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7512 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: call through the allocator's Vulkan function table.
7517 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7518 hAllocator->m_hDevice,
7519 m_DedicatedAllocation.m_hMemory,
7524 if(result == VK_SUCCESS)
// Cache the pointer so later maps avoid another vkMapMemory call.
7526 m_DedicatedAllocation.m_pMappedData = *ppData;
// Decrements the map count; when it reaches zero (elided branch), the cached
// pointer is cleared and vkUnmapMemory is called.
7533 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7535 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7537 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7542 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7543 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7544 hAllocator->m_hDevice,
7545 m_DedicatedAllocation.m_hMemory);
7550 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo record as a JSON object.
// NOTE(review): the json.WriteNumber(stat.<field>) argument lines are elided
// in this extraction — only the key-name writes remain visible.
7554 #if VMA_STATS_STRING_ENABLED 7556 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7560 json.WriteString(
"Blocks");
7563 json.WriteString(
"Allocations");
7566 json.WriteString(
"UnusedRanges");
7569 json.WriteString(
"UsedBytes");
7572 json.WriteString(
"UnusedBytes");
// Allocation sizes are summarized as a nested single-line {Min, Avg, Max}.
7577 json.WriteString(
"AllocationSize");
7578 json.BeginObject(
true);
7579 json.WriteString(
"Min");
7581 json.WriteString(
"Avg");
7583 json.WriteString(
"Max");
// Same Min/Avg/Max summary for unused (free) range sizes.
7590 json.WriteString(
"UnusedRangeSize");
7591 json.BeginObject(
true);
7592 json.WriteString(
"Min");
7594 json.WriteString(
"Avg");
7596 json.WriteString(
"Max");
// Ordering functor comparing free suballocations by size; the second overload
// enables binary search against a bare size value (see VmaBinaryFindFirstNotLess
// usage in CreateAllocationRequest).
7604 #endif // #if VMA_STATS_STRING_ENABLED 7606 struct VmaSuballocationItemSizeLess
7609 const VmaSuballocationList::iterator lhs,
7610 const VmaSuballocationList::iterator rhs)
const 7612 return lhs->size < rhs->size;
7615 const VmaSuballocationList::iterator lhs,
7616 VkDeviceSize rhsSize)
const 7618 return lhs->size < rhsSize;
// Base metadata constructor: captures the allocator's allocation callbacks.
7626 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7628 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
// JSON helpers shared by all metadata implementations: emit the common header
// (totals) and per-entry records for allocations and unused ranges.
7632 #if VMA_STATS_STRING_ENABLED 7634 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7635 VkDeviceSize unusedBytes,
7636 size_t allocationCount,
7637 size_t unusedRangeCount)
const 7641 json.WriteString(
"TotalBytes");
7642 json.WriteNumber(GetSize());
7644 json.WriteString(
"UnusedBytes");
7645 json.WriteNumber(unusedBytes);
7647 json.WriteString(
"Allocations");
7648 json.WriteNumber((uint64_t)allocationCount);
7650 json.WriteString(
"UnusedRanges");
7651 json.WriteNumber((uint64_t)unusedRangeCount);
// Opens the "Suballocations" array filled by the _Allocation/_UnusedRange
// helpers below (the BeginArray call is elided in this extraction).
7653 json.WriteString(
"Suballocations");
// One entry per live allocation: offset plus the allocation's own parameters.
7657 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7658 VkDeviceSize offset,
7661 json.BeginObject(
true);
7663 json.WriteString(
"Offset");
7664 json.WriteNumber(offset);
7666 hAllocation->PrintParameters(json);
// One entry per free range, typed as FREE with its offset and size.
7671 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7672 VkDeviceSize offset,
7673 VkDeviceSize size)
const 7675 json.BeginObject(
true);
7677 json.WriteString(
"Offset");
7678 json.WriteNumber(offset);
7680 json.WriteString(
"Type");
7681 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7683 json.WriteString(
"Size");
7684 json.WriteNumber(size);
// Closes the array/object opened by _Begin (emission lines elided).
7689 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
// Generic (free-list based) metadata: constructor wires up the two containers
// with the allocator's callbacks.
const 7695 #endif // #if VMA_STATS_STRING_ENABLED 7700 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7701 VmaBlockMetadata(hAllocator),
7704 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7705 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7709 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Init: the whole block starts as a single FREE suballocation covering
// [0, size), registered in the by-size list.
7713 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7715 VmaBlockMetadata::Init(size);
7718 m_SumFreeSize = size;
7720 VmaSuballocation suballoc = {};
7721 suballoc.offset = 0;
7722 suballoc.size = size;
7723 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7724 suballoc.hAllocation = VK_NULL_HANDLE;
7726 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7727 m_Suballocations.push_back(suballoc);
7728 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7730 m_FreeSuballocationsBySize.push_back(suballocItem);
// Consistency check over the whole metadata structure. Walks the suballocation
// list recomputing offsets/counts/sizes and validates them against the cached
// members; also checks the by-size free list is sorted and well-formed.
7733 bool VmaBlockMetadata_Generic::Validate()
const 7735 VMA_VALIDATE(!m_Suballocations.empty());
// Running expected start offset of the next suballocation.
7738 VkDeviceSize calculatedOffset = 0;
7740 uint32_t calculatedFreeCount = 0;
7742 VkDeviceSize calculatedSumFreeSize = 0;
// Free ranges large enough to appear in m_FreeSuballocationsBySize.
7745 size_t freeSuballocationsToRegister = 0;
7747 bool prevFree =
false;
7749 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7750 suballocItem != m_Suballocations.cend();
7753 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must be contiguous: each starts where the previous ended.
7756 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7758 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free ranges should have been merged — never allowed.
7760 VMA_VALIDATE(!prevFree || !currFree);
7762 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7766 calculatedSumFreeSize += subAlloc.size;
7767 ++calculatedFreeCount;
7768 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7770 ++freeSuballocationsToRegister;
// Free ranges must at least cover the debug margin.
7774 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
// Used entries must agree with their allocation object's view.
7778 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7779 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With a debug margin, every used range must be preceded by a free one.
7782 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7785 calculatedOffset += subAlloc.size;
7786 prevFree = currFree;
7791 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
// The by-size list must be sorted ascending by range size.
7793 VkDeviceSize lastSize = 0;
7794 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7796 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7799 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7801 VMA_VALIDATE(suballocItem->size >= lastSize);
7803 lastSize = suballocItem->size;
// Final cross-checks against the cached aggregate members.
7807 VMA_VALIDATE(ValidateFreeSuballocationList());
7808 VMA_VALIDATE(calculatedOffset == GetSize());
7809 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7810 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range = last entry of the size-sorted free list (or 0 if none —
// the else branch is elided in this extraction).
7815 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7817 if(!m_FreeSuballocationsBySize.empty())
7819 return m_FreeSuballocationsBySize.back()->size;
// Empty block = exactly one suballocation and it is free.
7827 bool VmaBlockMetadata_Generic::IsEmpty()
const 7829 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo by walking all suballocations (the per-entry accumulation
// lines are elided in this extraction).
7832 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7836 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7848 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7849 suballocItem != m_Suballocations.cend();
7852 const VmaSuballocation& suballoc = *suballocItem;
7853 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into a VmaPoolStats.
7866 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7868 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7870 inoutStats.
size += GetSize();
// Detailed JSON dump: header via _Begin, then one record per suballocation.
7877 #if VMA_STATS_STRING_ENABLED 7879 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7881 PrintDetailedMap_Begin(json,
7883 m_Suballocations.size() - (size_t)m_FreeCount,
7887 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7888 suballocItem != m_Suballocations.cend();
7889 ++suballocItem, ++i)
7891 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7893 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7897 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7901 PrintDetailedMap_End(json);
// Searches this block for a place to put an allocation of allocSize/allocAlignment.
// Strategy branches (best-fit binary search, MIN_OFFSET linear scan, worst-fit
// reverse scan) pick among registered free ranges; with canMakeOtherLost, a
// second pass also considers evicting lost-capable allocations.
// NOTE(review): several condition/argument lines (CheckAllocation parameters,
// strategy checks, early returns) are elided in this extraction.
7904 #endif // #if VMA_STATS_STRING_ENABLED 7906 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7907 uint32_t currentFrameIndex,
7908 uint32_t frameInUseCount,
7909 VkDeviceSize bufferImageGranularity,
7910 VkDeviceSize allocSize,
7911 VkDeviceSize allocAlignment,
7913 VmaSuballocationType allocType,
7914 bool canMakeOtherLost,
7916 VmaAllocationRequest* pAllocationRequest)
7918 VMA_ASSERT(allocSize > 0);
// This metadata type only allocates from the lower address side.
7919 VMA_ASSERT(!upperAddress);
7920 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7921 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7922 VMA_HEAVY_ASSERT(Validate());
7924 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Fast reject: without eviction, total free space (with margins) must suffice.
7927 if(canMakeOtherLost ==
false &&
7928 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7934 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7935 if(freeSuballocCount > 0)
// Best-fit path: binary-search the size-sorted free list for the first
// range large enough, then probe forward until CheckAllocation succeeds.
7940 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7941 m_FreeSuballocationsBySize.data(),
7942 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7943 allocSize + 2 * VMA_DEBUG_MARGIN,
7944 VmaSuballocationItemSizeLess());
7945 size_t index = it - m_FreeSuballocationsBySize.data();
7946 for(; index < freeSuballocCount; ++index)
7951 bufferImageGranularity,
7955 m_FreeSuballocationsBySize[index],
7957 &pAllocationRequest->offset,
7958 &pAllocationRequest->itemsToMakeLostCount,
7959 &pAllocationRequest->sumFreeSize,
7960 &pAllocationRequest->sumItemSize))
7962 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// MIN_OFFSET strategy: scan suballocations in address order, take the
// first free range that fits (lowest offset wins).
7967 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7969 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7970 it != m_Suballocations.end();
7973 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
7976 bufferImageGranularity,
7982 &pAllocationRequest->offset,
7983 &pAllocationRequest->itemsToMakeLostCount,
7984 &pAllocationRequest->sumFreeSize,
7985 &pAllocationRequest->sumItemSize))
7987 pAllocationRequest->item = it;
// Worst-fit fallback: walk the size-sorted list from largest to smallest.
7995 for(
size_t index = freeSuballocCount; index--; )
8000 bufferImageGranularity,
8004 m_FreeSuballocationsBySize[index],
8006 &pAllocationRequest->offset,
8007 &pAllocationRequest->itemsToMakeLostCount,
8008 &pAllocationRequest->sumFreeSize,
8009 &pAllocationRequest->sumItemSize))
8011 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction pass: brute-force every position, allowing lost-capable
// allocations to be sacrificed; keep the cheapest request (CalcCost).
8018 if(canMakeOtherLost)
8023 VmaAllocationRequest tmpAllocRequest = {};
8024 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
8025 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
8026 suballocIt != m_Suballocations.end();
8029 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
8030 suballocIt->hAllocation->CanBecomeLost())
8035 bufferImageGranularity,
8041 &tmpAllocRequest.offset,
8042 &tmpAllocRequest.itemsToMakeLostCount,
8043 &tmpAllocRequest.sumFreeSize,
8044 &tmpAllocRequest.sumItemSize))
8048 *pAllocationRequest = tmpAllocRequest;
8049 pAllocationRequest->item = suballocIt;
8052 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
8054 *pAllocationRequest = tmpAllocRequest;
8055 pAllocationRequest->item = suballocIt;
// Evicts the allocations a previously-built request marked for sacrifice.
// Each successful MakeLost frees the suballocation (merging it with neighbors
// via FreeSuballocation) until the request's quota reaches zero.
8068 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
8069 uint32_t currentFrameIndex,
8070 uint32_t frameInUseCount,
8071 VmaAllocationRequest* pAllocationRequest)
8073 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
8075 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over already-free entries to reach the next victim.
8077 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
8079 ++pAllocationRequest->item;
8081 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8082 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
8083 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8084 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge ranges; it returns the surviving iterator.
8086 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
8087 --pAllocationRequest->itemsToMakeLostCount;
8095 VMA_HEAVY_ASSERT(Validate());
8096 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8097 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes every lost-capable allocation in this block lost; returns how many.
8102 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8104 uint32_t lostAllocationCount = 0;
8105 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8106 it != m_Suballocations.end();
8109 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8110 it->hAllocation->CanBecomeLost() &&
8111 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8113 it = FreeSuballocation(it);
8114 ++lostAllocationCount;
8117 return lostAllocationCount;
// Corruption detection: verify the magic-value guard bytes written in the
// debug margins before and after every used suballocation.
8120 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8122 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8123 it != m_Suballocations.end();
8126 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
8128 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8130 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8131 return VK_ERROR_VALIDATION_FAILED_EXT;
8133 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8135 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8136 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: the targeted free
// suballocation is converted into a used one, and free "padding" items are
// inserted before/after it when the request did not consume the whole region.
// NOTE(review): the hAllocation parameter line and the free-count increment
// lines are elided in this rendering (gaps in the embedded numbering).
8144 void VmaBlockMetadata_Generic::Alloc(
8145 const VmaAllocationRequest& request,
8146 VmaSuballocationType type,
8147 VkDeviceSize allocSize,
8150 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
8151 VMA_ASSERT(request.item != m_Suballocations.end());
8152 VmaSuballocation& suballoc = *request.item;
// The target must be a free region that fully contains [offset, offset+size).
8154 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8156 VMA_ASSERT(request.offset >= suballoc.offset);
8157 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8158 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8159 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Item will stop being free, so remove it from the size-sorted registry
// before rewriting it in place as the used suballocation.
8163 UnregisterFreeSuballocation(request.item);
8165 suballoc.offset = request.offset;
8166 suballoc.size = allocSize;
8167 suballoc.type = type;
8168 suballoc.hAllocation = hAllocation;
// Leftover space after the allocation becomes a new free item inserted
// right after request.item.
8173 VmaSuballocation paddingSuballoc = {};
8174 paddingSuballoc.offset = request.offset + allocSize;
8175 paddingSuballoc.size = paddingEnd;
8176 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8177 VmaSuballocationList::iterator next = request.item;
8179 const VmaSuballocationList::iterator paddingEndItem =
8180 m_Suballocations.insert(next, paddingSuballoc);
8181 RegisterFreeSuballocation(paddingEndItem);
// Leftover space before the allocation becomes a free item inserted
// right before request.item.
8187 VmaSuballocation paddingSuballoc = {};
8188 paddingSuballoc.offset = request.offset - paddingBegin;
8189 paddingSuballoc.size = paddingBegin;
8190 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8191 const VmaSuballocationList::iterator paddingBeginItem =
8192 m_Suballocations.insert(request.item, paddingSuballoc);
8193 RegisterFreeSuballocation(paddingBeginItem);
// Update free statistics: one free region was consumed; each non-zero
// padding re-adds one (increments elided in this rendering).
8197 m_FreeCount = m_FreeCount - 1;
8198 if(paddingBegin > 0)
8206 m_SumFreeSize -= allocSize;
8209 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8211 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8212 suballocItem != m_Suballocations.end();
8215 VmaSuballocation& suballoc = *suballocItem;
8216 if(suballoc.hAllocation == allocation)
8218 FreeSuballocation(suballocItem);
8219 VMA_HEAVY_ASSERT(Validate());
8223 VMA_ASSERT(0 &&
"Not found!");
8226 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8228 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8229 suballocItem != m_Suballocations.end();
8232 VmaSuballocation& suballoc = *suballocItem;
8233 if(suballoc.offset == offset)
8235 FreeSuballocation(suballocItem);
8239 VMA_ASSERT(0 &&
"Not found!");
// Attempts to shrink or grow `alloc` in place to `newSize`, adjusting or
// creating the free suballocation that follows it. Returns whether the resize
// succeeded. NOTE(review): many structural lines (else branches, early
// returns, end-of-list handling) are elided in this rendering — the visible
// tokens show the shrink path (grow/shift the follower or insert a new free
// item) and the grow path (consume part or all of a free follower).
8242 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
8244 typedef VmaSuballocationList::iterator iter_type;
8245 for(iter_type suballocItem = m_Suballocations.begin();
8246 suballocItem != m_Suballocations.end();
8249 VmaSuballocation& suballoc = *suballocItem;
8250 if(suballoc.hAllocation == alloc)
8252 iter_type nextItem = suballocItem;
// Resizing to the same size or to zero is a caller error.
8256 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// --- Shrink path: the freed tail is given to the follower or becomes a
// new free suballocation. ---
8259 if(newSize < alloc->GetSize())
8261 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8264 if(nextItem != m_Suballocations.end())
// Follower is free: extend it backwards by sizeDiff and re-register it
// at its new size in the size-sorted vector.
8267 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8270 UnregisterFreeSuballocation(nextItem);
8271 nextItem->offset -= sizeDiff;
8272 nextItem->size += sizeDiff;
8273 RegisterFreeSuballocation(nextItem);
// Follower is used: insert a brand-new free item between this
// allocation and the follower.
8279 VmaSuballocation newFreeSuballoc;
8280 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8281 newFreeSuballoc.offset = suballoc.offset + newSize;
8282 newFreeSuballoc.size = sizeDiff;
8283 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8284 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8285 RegisterFreeSuballocation(newFreeSuballocIt);
// This allocation is the last item: append the freed tail at the end
// of the list.
8294 VmaSuballocation newFreeSuballoc;
8295 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8296 newFreeSuballoc.offset = suballoc.offset + newSize;
8297 newFreeSuballoc.size = sizeDiff;
8298 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8299 m_Suballocations.push_back(newFreeSuballoc);
8301 iter_type newFreeSuballocIt = m_Suballocations.end();
8302 RegisterFreeSuballocation(--newFreeSuballocIt);
8307 suballoc.size = newSize;
8308 m_SumFreeSize += sizeDiff;
// --- Grow path: can only succeed by consuming a free follower. ---
8313 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8316 if(nextItem != m_Suballocations.end())
8319 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Follower too small to supply sizeDiff plus the debug margin: fail.
8322 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
// Follower larger than needed: shift/shrink it in place.
8328 if(nextItem->size > sizeDiff)
8331 UnregisterFreeSuballocation(nextItem);
8332 nextItem->offset += sizeDiff;
8333 nextItem->size -= sizeDiff;
8334 RegisterFreeSuballocation(nextItem);
// Follower exactly consumed: remove it entirely.
8340 UnregisterFreeSuballocation(nextItem);
8341 m_Suballocations.erase(nextItem);
8357 suballoc.size = newSize;
8358 m_SumFreeSize -= sizeDiff;
8365 VMA_ASSERT(0 &&
"Not found!");
8369 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8371 VkDeviceSize lastSize = 0;
8372 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8374 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8376 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8377 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8378 VMA_VALIDATE(it->size >= lastSize);
8379 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at `suballocItem`, writing the chosen *pOffset on success.
// With canMakeOtherLost it may span used suballocations whose allocations can
// become lost, accumulating *itemsToMakeLostCount / *pSumItemSize /
// *pSumFreeSize so the caller can cost-compare candidate placements.
// NOTE(review): early-return lines (the "return false/true" exits) are elided
// in this rendering of the file.
8384 bool VmaBlockMetadata_Generic::CheckAllocation(
8385 uint32_t currentFrameIndex,
8386 uint32_t frameInUseCount,
8387 VkDeviceSize bufferImageGranularity,
8388 VkDeviceSize allocSize,
8389 VkDeviceSize allocAlignment,
8390 VmaSuballocationType allocType,
8391 VmaSuballocationList::const_iterator suballocItem,
8392 bool canMakeOtherLost,
8393 VkDeviceSize* pOffset,
8394 size_t* itemsToMakeLostCount,
8395 VkDeviceSize* pSumFreeSize,
8396 VkDeviceSize* pSumItemSize)
const 8398 VMA_ASSERT(allocSize > 0);
8399 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8400 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8401 VMA_ASSERT(pOffset != VMA_NULL);
8403 *itemsToMakeLostCount = 0;
// --- Branch 1: placement may make other allocations lost. ---
8407 if(canMakeOtherLost)
// Seed the free/used tallies from the starting item itself.
8409 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8411 *pSumFreeSize = suballocItem->size;
// A used starting item only works if its allocation is stale enough to
// be sacrificed.
8415 if(suballocItem->hAllocation->CanBecomeLost() &&
8416 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8418 ++*itemsToMakeLostCount;
8419 *pSumItemSize = suballocItem->size;
// Remaining space in the whole block must at least fit the allocation.
8428 if(GetSize() - suballocItem->offset < allocSize)
// Start of candidate range, then push forward for margin and alignment.
8434 *pOffset = suballocItem->offset;
8437 if(VMA_DEBUG_MARGIN > 0)
8439 *pOffset += VMA_DEBUG_MARGIN;
8443 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Scan backwards: any previous suballocation sharing the same
// bufferImageGranularity page with a conflicting type forces extra
// alignment up to the granularity.
8447 if(bufferImageGranularity > 1)
8449 bool bufferImageGranularityConflict =
false;
8450 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8451 while(prevSuballocItem != m_Suballocations.cbegin())
8454 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8455 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8457 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8459 bufferImageGranularityConflict =
true;
8467 if(bufferImageGranularityConflict)
8469 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed the offset past the starting item entirely.
8475 if(*pOffset >= suballocItem->offset + suballocItem->size)
8481 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8484 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8486 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8488 if(suballocItem->offset + totalSize > GetSize())
// Walk forward over as many following suballocations as needed to cover
// totalSize, tallying free space and lost-candidate sizes as we go.
8495 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8496 if(totalSize > suballocItem->size)
8498 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8499 while(remainingSize > 0)
8502 if(lastSuballocItem == m_Suballocations.cend())
8506 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8508 *pSumFreeSize += lastSuballocItem->size;
8512 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8513 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8514 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8516 ++*itemsToMakeLostCount;
8517 *pSumItemSize += lastSuballocItem->size;
8524 remainingSize = (lastSuballocItem->size < remainingSize) ?
8525 remainingSize - lastSuballocItem->size : 0;
// Forward granularity check: later suballocations on the same page with
// a conflicting type must also be sacrificable, else the spot is invalid.
8531 if(bufferImageGranularity > 1)
8533 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8535 while(nextSuballocItem != m_Suballocations.cend())
8537 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8538 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8540 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8542 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8543 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8544 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8546 ++*itemsToMakeLostCount;
// --- Branch 2: simple case, placement entirely inside one free item. ---
8565 const VmaSuballocation& suballoc = *suballocItem;
8566 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8568 *pSumFreeSize = suballoc.size;
8571 if(suballoc.size < allocSize)
8577 *pOffset = suballoc.offset;
8580 if(VMA_DEBUG_MARGIN > 0)
8582 *pOffset += VMA_DEBUG_MARGIN;
8586 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Backward granularity conflict check, same idea as in branch 1.
8590 if(bufferImageGranularity > 1)
8592 bool bufferImageGranularityConflict =
false;
8593 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8594 while(prevSuballocItem != m_Suballocations.cbegin())
8597 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8598 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8600 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8602 bufferImageGranularityConflict =
true;
8610 if(bufferImageGranularityConflict)
8612 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// The aligned allocation plus margins must still fit in this free item.
8617 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8620 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8623 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity check: here a conflict with a later used
// suballocation cannot be resolved (nothing may be made lost).
8630 if(bufferImageGranularity > 1)
8632 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8634 while(nextSuballocItem != m_Suballocations.cend())
8636 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8637 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8639 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8658 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8660 VMA_ASSERT(item != m_Suballocations.end());
8661 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8663 VmaSuballocationList::iterator nextItem = item;
8665 VMA_ASSERT(nextItem != m_Suballocations.end());
8666 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8668 item->size += nextItem->size;
8670 m_Suballocations.erase(nextItem);
8673 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8676 VmaSuballocation& suballoc = *suballocItem;
8677 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8678 suballoc.hAllocation = VK_NULL_HANDLE;
8682 m_SumFreeSize += suballoc.size;
8685 bool mergeWithNext =
false;
8686 bool mergeWithPrev =
false;
8688 VmaSuballocationList::iterator nextItem = suballocItem;
8690 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8692 mergeWithNext =
true;
8695 VmaSuballocationList::iterator prevItem = suballocItem;
8696 if(suballocItem != m_Suballocations.begin())
8699 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8701 mergeWithPrev =
true;
8707 UnregisterFreeSuballocation(nextItem);
8708 MergeFreeWithNext(suballocItem);
8713 UnregisterFreeSuballocation(prevItem);
8714 MergeFreeWithNext(prevItem);
8715 RegisterFreeSuballocation(prevItem);
8720 RegisterFreeSuballocation(suballocItem);
8721 return suballocItem;
8725 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8727 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8728 VMA_ASSERT(item->size > 0);
8732 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8734 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8736 if(m_FreeSuballocationsBySize.empty())
8738 m_FreeSuballocationsBySize.push_back(item);
8742 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
8750 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8752 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8753 VMA_ASSERT(item->size > 0);
8757 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8759 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8761 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8762 m_FreeSuballocationsBySize.data(),
8763 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8765 VmaSuballocationItemSizeLess());
8766 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8767 index < m_FreeSuballocationsBySize.size();
8770 if(m_FreeSuballocationsBySize[index] == item)
8772 VmaVectorRemove(m_FreeSuballocationsBySize, index);
8775 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8777 VMA_ASSERT(0 &&
"Not found.");
8783 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8784 VkDeviceSize bufferImageGranularity,
8785 VmaSuballocationType& inOutPrevSuballocType)
const 8787 if(bufferImageGranularity == 1 || IsEmpty())
8792 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8793 bool typeConflictFound =
false;
8794 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8795 it != m_Suballocations.cend();
8798 const VmaSuballocationType suballocType = it->type;
8799 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8801 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8802 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8804 typeConflictFound =
true;
8806 inOutPrevSuballocType = suballocType;
8810 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Constructor: starts with both suballocation vectors empty, the 1st vector
// being index 0, no 2nd-vector mode, and all null-item counters at zero.
// NOTE(review): one initializer line is elided in this rendering (gap between
// 8817 and 8819) — presumably m_SumFreeSize(0); confirm against upstream.
8816 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8817 VmaBlockMetadata(hAllocator),
// Both vectors share the allocator's allocation callbacks.
8819 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8820 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8821 m_1stVectorIndex(0),
8822 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8823 m_1stNullItemsBeginCount(0),
8824 m_1stNullItemsMiddleCount(0),
8825 m_2ndNullItemsCount(0)
// Destructor — no explicit cleanup is visible here; member vectors release
// their own storage.
8829 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes the metadata for a block of the given size: delegates to the
// base class, then records the whole block as free.
8833 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8835 VmaBlockMetadata::Init(size);
8836 m_SumFreeSize = size;
// Full consistency check of the linear metadata: vector/mode agreement,
// null-item counters, per-suballocation offset/size agreement with the owning
// VmaAllocation, monotonically increasing offsets, and the free-size total.
// NOTE(review): brace lines, null-counter increments and the final
// "return true" are elided in this rendering of the file.
8839 bool VmaBlockMetadata_Linear::Validate()
const 8841 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8842 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is non-empty exactly when a 2nd-vector mode is active, and a
// ring buffer requires a non-empty 1st vector.
8844 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8845 VMA_VALIDATE(!suballocations1st.empty() ||
8846 suballocations2nd.empty() ||
8847 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
// The first non-null item and the last item of each vector must be real
// allocations (nulls are only allowed at the front/middle per the counters).
8849 if(!suballocations1st.empty())
8852 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8854 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8856 if(!suballocations2nd.empty())
8859 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
8862 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8863 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8865 VkDeviceSize sumUsedSize = 0;
8866 const size_t suballoc1stCount = suballocations1st.size();
8867 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: the 2nd vector occupies the low end of the block and is
// scanned first, in increasing offset order.
8869 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8871 const size_t suballoc2ndCount = suballocations2nd.size();
8872 size_t nullItem2ndCount = 0;
8873 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8875 const VmaSuballocation& suballoc = suballocations2nd[i];
8876 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Free items carry a null handle; used items must agree with their
// VmaAllocation's offset/size.
8878 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8879 VMA_VALIDATE(suballoc.offset >= offset);
8883 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8884 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8885 sumUsedSize += suballoc.size;
8892 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8895 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// The leading null items of the 1st vector must all be free placeholders.
8898 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8900 const VmaSuballocation& suballoc = suballocations1st[i];
8901 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8902 suballoc.hAllocation == VK_NULL_HANDLE);
8905 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Scan the live portion of the 1st vector in increasing offset order.
8907 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8909 const VmaSuballocation& suballoc = suballocations1st[i];
8910 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8912 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8913 VMA_VALIDATE(suballoc.offset >= offset);
8914 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8918 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8919 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8920 sumUsedSize += suballoc.size;
8927 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8929 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: the 2nd vector grows down from the top of the block,
// so it is iterated in reverse index order to keep offsets increasing.
8931 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8933 const size_t suballoc2ndCount = suballocations2nd.size();
8934 size_t nullItem2ndCount = 0;
8935 for(
size_t i = suballoc2ndCount; i--; )
8937 const VmaSuballocation& suballoc = suballocations2nd[i];
8938 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8940 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8941 VMA_VALIDATE(suballoc.offset >= offset);
8945 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8946 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8947 sumUsedSize += suballoc.size;
8954 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8957 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Global totals: content fits in the block and free size is consistent.
8960 VMA_VALIDATE(offset <= GetSize());
8961 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
8966 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8968 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8969 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free range in this linear block,
// computed per 2nd-vector mode from the gaps at the ends of the vectors.
// NOTE(review): several lines are elided in this rendering (gaps 8974->8986,
// 8999->9001, case-local early exits) — e.g. line 9001-9002 reads like the
// argument list of a max-of-two helper whose call line is missing.
8972 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8974 const VkDeviceSize size = GetSize();
8986 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8988 switch(m_2ndVectorMode)
// Only the 1st vector is in use: candidates are the gap before the first
// live item and the gap after the last item, up to the end of the block.
8990 case SECOND_VECTOR_EMPTY:
8996 const size_t suballocations1stCount = suballocations1st.size();
8997 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8998 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8999 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
9001 firstSuballoc.offset,
9002 size - (lastSuballoc.offset + lastSuballoc.size));
// Ring buffer: free range lies between the end of the 2nd vector (low end)
// and the start of the 1st vector's first live item.
9006 case SECOND_VECTOR_RING_BUFFER:
9011 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9012 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
9013 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
9014 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: free range lies between the top of the 1st (bottom-up)
// stack and the lowest item of the 2nd (top-down) stack.
9018 case SECOND_VECTOR_DOUBLE_STACK:
9023 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9024 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9025 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9026 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Accumulates per-block statistics into outInfo by walking all suballocations
// of this linear block in increasing offset order: first the 2nd vector when
// in ring-buffer mode (low end of the block), then the 1st vector, then the
// 2nd vector in reverse when in double-stack mode (top of the block).
// NOTE(review): the lines that actually fold each allocation / unused range
// into outInfo are elided in this rendering (gaps in the embedded numbering);
// only the traversal skeleton is visible.
9036 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9038 const VkDeviceSize size = GetSize();
9039 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9040 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9041 const size_t suballoc1stCount = suballocations1st.size();
9042 const size_t suballoc2ndCount = suballocations2nd.size();
// lastOffset tracks the end of the last region processed so the gap up to
// the next allocation can be reported as an unused range.
9053 VkDeviceSize lastOffset = 0;
// --- Phase 1: ring-buffer 2nd vector, from offset 0 up to the start of the
// 1st vector's first live item. ---
9055 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9057 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9058 size_t nextAlloc2ndIndex = 0;
9059 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null placeholder items.
9062 while(nextAlloc2ndIndex < suballoc2ndCount &&
9063 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9065 ++nextAlloc2ndIndex;
9069 if(nextAlloc2ndIndex < suballoc2ndCount)
9071 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
9074 if(lastOffset < suballoc.offset)
9077 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9091 lastOffset = suballoc.offset + suballoc.size;
9092 ++nextAlloc2ndIndex;
// Trailing free space in this region.
9098 if(lastOffset < freeSpace2ndTo1stEnd)
9100 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9108 lastOffset = freeSpace2ndTo1stEnd;
// --- Phase 2: 1st vector, up to the bottom of the 2nd stack (double-stack
// mode) or the end of the block. ---
9113 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9114 const VkDeviceSize freeSpace1stTo2ndEnd =
9115 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9116 while(lastOffset < freeSpace1stTo2ndEnd)
9119 while(nextAlloc1stIndex < suballoc1stCount &&
9120 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9122 ++nextAlloc1stIndex;
9126 if(nextAlloc1stIndex < suballoc1stCount)
9128 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9131 if(lastOffset < suballoc.offset)
9134 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9148 lastOffset = suballoc.offset + suballoc.size;
9149 ++nextAlloc1stIndex;
9155 if(lastOffset < freeSpace1stTo2ndEnd)
9157 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9165 lastOffset = freeSpace1stTo2ndEnd;
// --- Phase 3: double-stack 2nd vector, iterated from the highest index
// (lowest offset of the top-down stack) to the top of the block. ---
9169 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9171 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9172 while(lastOffset < size)
// SIZE_MAX marks wrap-around of the reverse index, i.e. vector exhausted.
9175 while(nextAlloc2ndIndex != SIZE_MAX &&
9176 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9178 --nextAlloc2ndIndex;
9182 if(nextAlloc2ndIndex != SIZE_MAX)
9184 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9187 if(lastOffset < suballoc.offset)
9190 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9204 lastOffset = suballoc.offset + suballoc.size;
9205 --nextAlloc2ndIndex;
9211 if(lastOffset < size)
9213 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Adds this block's statistics into inoutStats (pool-level aggregate), using
// the same three-phase offset-ordered traversal as CalcAllocationStatInfo:
// ring-buffer 2nd vector, then 1st vector, then double-stack 2nd vector.
// NOTE(review): the accumulation lines into inoutStats are elided in this
// rendering; only the traversal skeleton is visible.
9229 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9231 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9232 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9233 const VkDeviceSize size = GetSize();
9234 const size_t suballoc1stCount = suballocations1st.size();
9235 const size_t suballoc2ndCount = suballocations2nd.size();
9237 inoutStats.
size += size;
9239 VkDeviceSize lastOffset = 0;
9241 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9243 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): the parallel loop in CalcAllocationStatInfo starts this
// 2nd-vector scan at index 0; starting at m_1stNullItemsBeginCount (a
// 1st-vector counter) here looks like a copy-paste bug — confirm against
// upstream VMA before relying on these ring-buffer pool stats.
9244 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9245 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null placeholder items.
9248 while(nextAlloc2ndIndex < suballoc2ndCount &&
9249 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9251 ++nextAlloc2ndIndex;
9255 if(nextAlloc2ndIndex < suballoc2ndCount)
9257 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation is an unused range.
9260 if(lastOffset < suballoc.offset)
9263 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9274 lastOffset = suballoc.offset + suballoc.size;
9275 ++nextAlloc2ndIndex;
9280 if(lastOffset < freeSpace2ndTo1stEnd)
9283 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9290 lastOffset = freeSpace2ndTo1stEnd;
// --- 1st vector, up to the bottom of the 2nd stack or end of block. ---
9295 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9296 const VkDeviceSize freeSpace1stTo2ndEnd =
9297 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9298 while(lastOffset < freeSpace1stTo2ndEnd)
9301 while(nextAlloc1stIndex < suballoc1stCount &&
9302 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9304 ++nextAlloc1stIndex;
9308 if(nextAlloc1stIndex < suballoc1stCount)
9310 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9313 if(lastOffset < suballoc.offset)
9316 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9327 lastOffset = suballoc.offset + suballoc.size;
9328 ++nextAlloc1stIndex;
9333 if(lastOffset < freeSpace1stTo2ndEnd)
9336 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9343 lastOffset = freeSpace1stTo2ndEnd;
// --- Double-stack 2nd vector, reverse index order (increasing offset). ---
9347 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9349 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9350 while(lastOffset < size)
// SIZE_MAX marks wrap-around of the reverse index.
9353 while(nextAlloc2ndIndex != SIZE_MAX &&
9354 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9356 --nextAlloc2ndIndex;
9360 if(nextAlloc2ndIndex != SIZE_MAX)
9362 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9365 if(lastOffset < suballoc.offset)
9368 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9379 lastOffset = suballoc.offset + suballoc.size;
9380 --nextAlloc2ndIndex;
9385 if(lastOffset < size)
9388 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Writes a detailed JSON description of this linear block's layout.
// Structured as two passes over the same address-order traversal:
//   Pass 1: count allocations and unused ranges and sum used bytes, because
//           PrintDetailedMap_Begin needs the totals before items are emitted.
//   Pass 2: re-walk the suballocation vectors and emit each allocation and
//           each unused gap via PrintDetailedMap_Allocation/_UnusedRange.
// Traversal order depends on m_2ndVectorMode:
//   RING_BUFFER:  2nd vector occupies the space below the 1st vector's first
//                 item, so it is walked first, then the 1st vector.
//   DOUBLE_STACK: 1st vector grows up from offset 0; 2nd vector grows down
//                 from the block end and is walked backwards (SIZE_MAX is the
//                 "before index 0" sentinel for the descending size_t index).
// Items with hAllocation == VK_NULL_HANDLE are free placeholders and are
// skipped by the inner while loops.
// NOTE(review): braces and some statements (counter increments, resets of
// lastOffset between passes) are elided in this excerpt — comments describe
// only the visible lines.
9401 #if VMA_STATS_STRING_ENABLED 9402 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9404 const VkDeviceSize size = GetSize();
9405 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9406 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9407 const size_t suballoc1stCount = suballocations1st.size();
9408 const size_t suballoc2ndCount = suballocations2nd.size();
// --- Pass 1: gather counts and byte totals ---
9412 size_t unusedRangeCount = 0;
9413 VkDeviceSize usedBytes = 0;
9415 VkDeviceSize lastOffset = 0;
9417 size_t alloc2ndCount = 0;
9418 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// Ring buffer: 2nd vector lives in [0, offset of first live 1st-vector item).
9420 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9421 size_t nextAlloc2ndIndex = 0;
9422 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip free (null) placeholder items.
9425 while(nextAlloc2ndIndex < suballoc2ndCount &&
9426 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9428 ++nextAlloc2ndIndex;
9432 if(nextAlloc2ndIndex < suballoc2ndCount)
9434 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
9437 if(lastOffset < suballoc.offset)
9446 usedBytes += suballoc.size;
9449 lastOffset = suballoc.offset + suballoc.size;
9450 ++nextAlloc2ndIndex;
// Trailing gap up to the start of the 1st vector.
9455 if(lastOffset < freeSpace2ndTo1stEnd)
9462 lastOffset = freeSpace2ndTo1stEnd;
// Walk the 1st vector; its usable end is either the block size or the
// bottom of the top-down 2nd stack in DOUBLE_STACK mode.
9467 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9468 size_t alloc1stCount = 0;
9469 const VkDeviceSize freeSpace1stTo2ndEnd =
9470 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9471 while(lastOffset < freeSpace1stTo2ndEnd)
9474 while(nextAlloc1stIndex < suballoc1stCount &&
9475 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9477 ++nextAlloc1stIndex;
9481 if(nextAlloc1stIndex < suballoc1stCount)
9483 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9486 if(lastOffset < suballoc.offset)
9495 usedBytes += suballoc.size;
9498 lastOffset = suballoc.offset + suballoc.size;
9499 ++nextAlloc1stIndex;
9504 if(lastOffset < size)
9511 lastOffset = freeSpace1stTo2ndEnd;
// Double stack: walk the 2nd vector backwards (descending offsets).
9515 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9517 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9518 while(lastOffset < size)
9521 while(nextAlloc2ndIndex != SIZE_MAX &&
9522 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9524 --nextAlloc2ndIndex;
9528 if(nextAlloc2ndIndex != SIZE_MAX)
9530 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9533 if(lastOffset < suballoc.offset)
9542 usedBytes += suballoc.size;
9545 lastOffset = suballoc.offset + suballoc.size;
9546 --nextAlloc2ndIndex;
9551 if(lastOffset < size)
// --- Pass 2: emit JSON using the totals gathered above ---
9563 const VkDeviceSize unusedBytes = size - usedBytes;
9564 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
9569 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9571 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9572 size_t nextAlloc2ndIndex = 0;
9573 while(lastOffset < freeSpace2ndTo1stEnd)
9576 while(nextAlloc2ndIndex < suballoc2ndCount &&
9577 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9579 ++nextAlloc2ndIndex;
9583 if(nextAlloc2ndIndex < suballoc2ndCount)
9585 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9588 if(lastOffset < suballoc.offset)
9591 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9592 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9597 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9600 lastOffset = suballoc.offset + suballoc.size;
9601 ++nextAlloc2ndIndex;
9606 if(lastOffset < freeSpace2ndTo1stEnd)
9609 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9610 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9614 lastOffset = freeSpace2ndTo1stEnd;
9619 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9620 while(lastOffset < freeSpace1stTo2ndEnd)
9623 while(nextAlloc1stIndex < suballoc1stCount &&
9624 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9626 ++nextAlloc1stIndex;
9630 if(nextAlloc1stIndex < suballoc1stCount)
9632 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9635 if(lastOffset < suballoc.offset)
9638 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9639 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9644 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9647 lastOffset = suballoc.offset + suballoc.size;
9648 ++nextAlloc1stIndex;
9653 if(lastOffset < freeSpace1stTo2ndEnd)
9656 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9657 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9661 lastOffset = freeSpace1stTo2ndEnd;
9665 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9667 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9668 while(lastOffset < size)
9671 while(nextAlloc2ndIndex != SIZE_MAX &&
9672 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9674 --nextAlloc2ndIndex;
9678 if(nextAlloc2ndIndex != SIZE_MAX)
9680 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9683 if(lastOffset < suballoc.offset)
9686 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9687 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9692 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9695 lastOffset = suballoc.offset + suballoc.size;
9696 --nextAlloc2ndIndex;
9701 if(lastOffset < size)
9704 const VkDeviceSize unusedRangeSize = size - lastOffset;
9705 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9714 PrintDetailedMap_End(json);
// Entry point for finding a place for a new allocation in a linear block.
// Validates preconditions, then dispatches to the upper-address (top-down
// double stack) or lower-address (bottom-up / ring buffer) strategy.
// Returns true and fills *pAllocationRequest on success; the request is
// later committed by Alloc().
// NOTE(review): the `upperAddress` and `strategy` parameters are referenced
// below but their declarations are among the elided lines of this excerpt.
9716 #endif // #if VMA_STATS_STRING_ENABLED 9718 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9719 uint32_t currentFrameIndex,
9720 uint32_t frameInUseCount,
9721 VkDeviceSize bufferImageGranularity,
9722 VkDeviceSize allocSize,
9723 VkDeviceSize allocAlignment,
9725 VmaSuballocationType allocType,
9726 bool canMakeOtherLost,
9728 VmaAllocationRequest* pAllocationRequest)
9730 VMA_ASSERT(allocSize > 0);
9731 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9732 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9733 VMA_HEAVY_ASSERT(Validate());
// Ternary dispatch keeps both strategies behind one public entry point.
9734 return upperAddress ?
9735 CreateAllocationRequest_UpperAddress(
9736 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9737 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
9738 CreateAllocationRequest_LowerAddress(
9739 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9740 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block (double-stack mode):
// the 2nd suballocation vector grows downward from the block end.
// Fails (returns false, per the visible control flow) if the block is in
// ring-buffer mode, if the allocation doesn't fit below the current top
// stack, or if it would collide with the end of the 1st vector.
// NOTE(review): several `return false;` lines and braces are elided here.
9743 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9744 uint32_t currentFrameIndex,
9745 uint32_t frameInUseCount,
9746 VkDeviceSize bufferImageGranularity,
9747 VkDeviceSize allocSize,
9748 VkDeviceSize allocAlignment,
9749 VmaSuballocationType allocType,
9750 bool canMakeOtherLost,
9752 VmaAllocationRequest* pAllocationRequest)
9754 const VkDeviceSize size = GetSize();
9755 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9756 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocation is incompatible with ring-buffer usage.
9758 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9760 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9765 if(allocSize > size)
// Start from the block end (or just below the lowest existing top-stack item)
// and carve the new allocation downward.
9769 VkDeviceSize resultBaseOffset = size - allocSize;
9770 if(!suballocations2nd.empty())
9772 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9773 resultBaseOffset = lastSuballoc.offset - allocSize;
9774 if(allocSize > lastSuballoc.offset)
9781 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve the debug margin below the allocation when it is enabled.
9784 if(VMA_DEBUG_MARGIN > 0)
9786 if(resultOffset < VMA_DEBUG_MARGIN)
9790 resultOffset -= VMA_DEBUG_MARGIN;
// Align DOWN because we are allocating from the top.
9794 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Check bufferImageGranularity conflicts with the allocation above us
// (the previous top-stack items, iterated from the back).
9798 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9800 bool bufferImageGranularityConflict =
false;
9801 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9803 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9804 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9806 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9808 bufferImageGranularityConflict =
true;
9816 if(bufferImageGranularityConflict)
// Push further down to a granularity boundary to resolve the conflict.
9818 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// There must be free space (plus margin) between the end of the bottom
// stack (1st vector) and the proposed offset.
9823 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9824 suballocations1st.back().offset + suballocations1st.back().size :
9826 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity conflicts against the bottom-stack allocations.
9830 if(bufferImageGranularity > 1)
9832 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9834 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9835 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9837 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request. Upper-address requests never make other
// allocations lost, hence the zeroed loss-related fields.
9851 pAllocationRequest->offset = resultOffset;
9852 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9853 pAllocationRequest->sumItemSize = 0;
9855 pAllocationRequest->itemsToMakeLostCount = 0;
9856 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place an allocation at the lowest feasible address.
// Two cases, by m_2ndVectorMode:
//   EMPTY / DOUBLE_STACK: append to the end of the 1st vector, growing up
//     toward either the block end or the bottom of the top-down 2nd stack.
//   EMPTY / RING_BUFFER: wrap around and append to the end of the 2nd
//     vector, growing toward the first live item of the 1st vector;
//     optionally making lost allocations in the way (canMakeOtherLost).
// Returns true and fills *pAllocationRequest on success.
// NOTE(review): braces and some `return` statements are elided in this
// excerpt; comments describe only the visible lines.
9863 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
9864 uint32_t currentFrameIndex,
9865 uint32_t frameInUseCount,
9866 VkDeviceSize bufferImageGranularity,
9867 VkDeviceSize allocSize,
9868 VkDeviceSize allocAlignment,
9869 VmaSuballocationType allocType,
9870 bool canMakeOtherLost,
9872 VmaAllocationRequest* pAllocationRequest)
9874 const VkDeviceSize size = GetSize();
9875 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9876 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Case 1: try to allocate at the end of the 1st (bottom-up) vector.
9878 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9882 VkDeviceSize resultBaseOffset = 0;
9883 if(!suballocations1st.empty())
9885 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9886 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9890 VkDeviceSize resultOffset = resultBaseOffset;
// Leave the debug margin before the new allocation when enabled.
9893 if(VMA_DEBUG_MARGIN > 0)
9895 resultOffset += VMA_DEBUG_MARGIN;
9899 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity check against preceding 1st-vector allocations (from back).
9903 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9905 bool bufferImageGranularityConflict =
false;
9906 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9908 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9909 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9911 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9913 bufferImageGranularityConflict =
true;
9921 if(bufferImageGranularityConflict)
9923 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Usable end is the bottom of the top-down stack in DOUBLE_STACK mode,
// otherwise the block end.
9927 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9928 suballocations2nd.back().offset : size;
9931 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Granularity check against the top-down stack items above us.
9935 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9937 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9939 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9940 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9942 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success at end of 1st vector.
9956 pAllocationRequest->offset = resultOffset;
9957 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9958 pAllocationRequest->sumItemSize = 0;
9960 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
9961 pAllocationRequest->itemsToMakeLostCount = 0;
// Case 2: wrap around — allocate at the end of the 2nd vector (ring buffer).
9968 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9970 VMA_ASSERT(!suballocations1st.empty());
9972 VkDeviceSize resultBaseOffset = 0;
9973 if(!suballocations2nd.empty())
9975 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9976 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9980 VkDeviceSize resultOffset = resultBaseOffset;
9983 if(VMA_DEBUG_MARGIN > 0)
9985 resultOffset += VMA_DEBUG_MARGIN;
9989 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9993 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9995 bool bufferImageGranularityConflict =
false;
9996 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9998 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9999 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10001 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10003 bufferImageGranularityConflict =
true;
10011 if(bufferImageGranularityConflict)
10013 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10017 pAllocationRequest->itemsToMakeLostCount = 0;
10018 pAllocationRequest->sumItemSize = 0;
10019 size_t index1st = m_1stNullItemsBeginCount;
// Optionally consume (make lost) 1st-vector allocations that overlap the
// range the new allocation would occupy.
10021 if(canMakeOtherLost)
10023 while(index1st < suballocations1st.size() &&
10024 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10027 const VmaSuballocation& suballoc = suballocations1st[index1st];
10028 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10034 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
// Only allocations old enough (outside the frame-in-use window) and
// flagged as canBecomeLost may be sacrificed.
10035 if(suballoc.hAllocation->CanBecomeLost() &&
10036 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10038 ++pAllocationRequest->itemsToMakeLostCount;
10039 pAllocationRequest->sumItemSize += suballoc.size;
// Also make lost any further items that share a granularity page with us.
10051 if(bufferImageGranularity > 1)
10053 while(index1st < suballocations1st.size())
10055 const VmaSuballocation& suballoc = suballocations1st[index1st];
10056 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10058 if(suballoc.hAllocation != VK_NULL_HANDLE)
10061 if(suballoc.hAllocation->CanBecomeLost() &&
10062 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10064 ++pAllocationRequest->itemsToMakeLostCount;
10065 pAllocationRequest->sumItemSize += suballoc.size;
// Wrapping past the end of the whole 1st vector is an unsupported corner
// case in this mode — bail out with a debug log.
10083 if(index1st == suballocations1st.size() &&
10084 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10087 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// Fits either in the tail of the block or before the next surviving
// 1st-vector item.
10092 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10093 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
10097 if(bufferImageGranularity > 1)
10099 for(
size_t nextSuballocIndex = index1st;
10100 nextSuballocIndex < suballocations1st.size();
10101 nextSuballocIndex++)
10103 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10104 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10106 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success at end of 2nd vector; free size excludes the bytes of the
// items that will be made lost (sumItemSize).
10120 pAllocationRequest->offset = resultOffset;
10121 pAllocationRequest->sumFreeSize =
10122 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10124 - pAllocationRequest->sumItemSize;
10125 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Commits the "make lost" part of a previously created allocation request:
// walks the 1st vector from its first live item (wrapping into the 2nd
// vector in ring-buffer mode) and marks allocations lost until
// itemsToMakeLostCount of them have been converted to free items.
// Finishes with CleanupAfterFree() to compact the vectors.
10134 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10135 uint32_t currentFrameIndex,
10136 uint32_t frameInUseCount,
10137 VmaAllocationRequest* pAllocationRequest)
// Nothing to do when the request didn't require sacrificing allocations.
10139 if(pAllocationRequest->itemsToMakeLostCount == 0)
10144 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10147 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10148 size_t index = m_1stNullItemsBeginCount;
10149 size_t madeLostCount = 0;
10150 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// Exhausted the 1st vector: in ring-buffer mode continue in the 2nd one.
10152 if(index == suballocations->size())
10156 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10158 suballocations = &AccessSuballocations2nd();
10162 VMA_ASSERT(!suballocations->empty());
10164 VmaSuballocation& suballoc = (*suballocations)[index];
10165 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10167 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10168 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10169 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Turn the item into a free placeholder and track the null-item counters
// used by CleanupAfterFree/ShouldCompact1st.
10171 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10172 suballoc.hAllocation = VK_NULL_HANDLE;
10173 m_SumFreeSize += suballoc.size;
10174 if(suballocations == &AccessSuballocations1st())
10176 ++m_1stNullItemsMiddleCount;
10180 ++m_2ndNullItemsCount;
10192 CleanupAfterFree();
// Scans both suballocation vectors and makes lost every allocation that is
// allowed to become lost and is old enough (checked by MakeLost against
// currentFrameIndex/frameInUseCount). Returns the number of allocations
// turned into free items; compacts the metadata if any were lost.
10198 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10200 uint32_t lostAllocationCount = 0;
// 1st vector: skip the leading run of already-null items.
10202 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10203 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10205 VmaSuballocation& suballoc = suballocations1st[i];
10206 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10207 suballoc.hAllocation->CanBecomeLost() &&
10208 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10210 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10211 suballoc.hAllocation = VK_NULL_HANDLE;
10212 ++m_1stNullItemsMiddleCount;
10213 m_SumFreeSize += suballoc.size;
10214 ++lostAllocationCount;
// 2nd vector: full scan; null items are tracked by a single counter.
10218 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10219 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10221 VmaSuballocation& suballoc = suballocations2nd[i];
10222 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10223 suballoc.hAllocation->CanBecomeLost() &&
10224 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10226 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10227 suballoc.hAllocation = VK_NULL_HANDLE;
10228 ++m_2ndNullItemsCount;
10229 m_SumFreeSize += suballoc.size;
10230 ++lostAllocationCount;
10234 if(lostAllocationCount)
10236 CleanupAfterFree();
10239 return lostAllocationCount;
// Validates the magic-number guard bytes written around every live
// allocation (VMA_DEBUG_MARGIN before it, and right after it) in the
// mapped block memory pointed to by pBlockData. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard found.
10242 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10244 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10245 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10247 const VmaSuballocation& suballoc = suballocations1st[i];
10248 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10250 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10252 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10253 return VK_ERROR_VALIDATION_FAILED_EXT;
10255 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10257 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10258 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same guard check for the 2nd vector's allocations.
10263 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10264 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10266 const VmaSuballocation& suballoc = suballocations2nd[i];
10267 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10269 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10271 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10272 return VK_ERROR_VALIDATION_FAILED_EXT;
10274 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10276 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10277 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously created allocation request: appends the new
// suballocation to the vector dictated by request.type and updates
// m_2ndVectorMode and m_SumFreeSize accordingly.
//   UpperAddress -> push to 2nd vector, switch to DOUBLE_STACK.
//   EndOf1st     -> push to 1st vector.
//   EndOf2nd     -> push to 2nd vector, switching EMPTY -> RING_BUFFER.
10285 void VmaBlockMetadata_Linear::Alloc(
10286 const VmaAllocationRequest& request,
10287 VmaSuballocationType type,
10288 VkDeviceSize allocSize,
10291 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10293 switch(request.type)
10295 case VmaAllocationRequestType::UpperAddress:
10297 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10298 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10299 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10300 suballocations2nd.push_back(newSuballoc);
10301 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
10304 case VmaAllocationRequestType::EndOf1st:
10306 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// Sanity: new item must come after everything already in the 1st vector
// and fit inside the block.
10308 VMA_ASSERT(suballocations1st.empty() ||
10309 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10311 VMA_ASSERT(request.offset + allocSize <= GetSize());
10313 suballocations1st.push_back(newSuballoc);
10316 case VmaAllocationRequestType::EndOf2nd:
10318 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// Ring-buffer wrap: the new allocation must end before the first live
// item of the 1st vector.
10320 VMA_ASSERT(!suballocations1st.empty() &&
10321 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10322 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10324 switch(m_2ndVectorMode)
10326 case SECOND_VECTOR_EMPTY:
// First wrap-around item switches the block into ring-buffer mode.
10328 VMA_ASSERT(suballocations2nd.empty());
10329 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10331 case SECOND_VECTOR_RING_BUFFER:
10333 VMA_ASSERT(!suballocations2nd.empty());
10335 case SECOND_VECTOR_DOUBLE_STACK:
10336 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10342 suballocations2nd.push_back(newSuballoc);
10346 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10349 m_SumFreeSize -= newSuballoc.size;
// Frees an allocation by delegating to FreeAtOffset with its block-relative
// offset; the offset uniquely identifies the suballocation.
10352 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10354 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at the given block-relative offset.
// Fast paths first (these are the common cases for a linear allocator):
//   1. the oldest item (front of the 1st vector) — mark null, bump the
//      begin-null counter;
//   2. the newest item (back of the 2nd vector, or back of the 1st vector
//      when the 2nd is empty) — pop it.
// Otherwise fall back to binary search in the middle of either vector
// (1st vector and ring-buffer 2nd vector are sorted ascending by offset;
// double-stack 2nd vector is sorted descending, hence the two comparators).
// Each successful path updates m_SumFreeSize and calls CleanupAfterFree().
10357 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10359 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10360 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10362 if(!suballocations1st.empty())
// Fast path: freeing the oldest live allocation.
10365 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10366 if(firstSuballoc.offset == offset)
10368 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10369 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10370 m_SumFreeSize += firstSuballoc.size;
10371 ++m_1stNullItemsBeginCount;
10372 CleanupAfterFree();
// Fast path: freeing the most recently pushed item of the 2nd vector.
10378 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10379 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10381 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10382 if(lastSuballoc.offset == offset)
10384 m_SumFreeSize += lastSuballoc.size;
10385 suballocations2nd.pop_back();
10386 CleanupAfterFree();
10391 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
// Fast path: freeing the most recently pushed item of the 1st vector.
10393 VmaSuballocation& lastSuballoc = suballocations1st.back();
10394 if(lastSuballoc.offset == offset)
10396 m_SumFreeSize += lastSuballoc.size;
10397 suballocations1st.pop_back();
10398 CleanupAfterFree();
// Slow path: binary search inside the 1st vector (live range only).
10405 VmaSuballocation refSuballoc;
10406 refSuballoc.offset = offset;
10408 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
10409 suballocations1st.begin() + m_1stNullItemsBeginCount,
10410 suballocations1st.end(),
10412 if(it != suballocations1st.end())
10414 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10415 it->hAllocation = VK_NULL_HANDLE;
10416 ++m_1stNullItemsMiddleCount;
10417 m_SumFreeSize += it->size;
10418 CleanupAfterFree();
10423 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
// Slow path: binary search in the 2nd vector; sort order depends on mode.
10426 VmaSuballocation refSuballoc;
10427 refSuballoc.offset = offset;
10429 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10430 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
10431 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
10432 if(it != suballocations2nd.end())
10434 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10435 it->hAllocation = VK_NULL_HANDLE;
10436 ++m_2ndNullItemsCount;
10437 m_SumFreeSize += it->size;
10438 CleanupAfterFree();
// Reaching here means the offset matched no suballocation — caller bug.
10443 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic: compact the 1st vector when it is non-trivially sized (> 32
// items) and null placeholders make up a large fraction of it
// (nullItems * 2 >= liveItems * 3, i.e. nulls outnumber live items 3:2).
10446 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10448 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10449 const size_t suballocCount = AccessSuballocations1st().size();
10450 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Normalizes the metadata after any free/make-lost operation:
//  - resets everything when the block became empty;
//  - grows the leading null run of the 1st vector and pops trailing null
//    items from both vectors;
//  - compacts the 1st vector in place when ShouldCompact1st() says so;
//  - when the 1st vector has no live items left, promotes a ring-buffer
//    2nd vector to become the new 1st vector (m_1stVectorIndex ^= 1).
// NOTE(review): the emptiness test guarding the initial full reset is on
// elided lines; comments describe only the visible code.
10453 void VmaBlockMetadata_Linear::CleanupAfterFree()
10455 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10456 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Block fully empty: drop all bookkeeping and return to EMPTY mode.
10460 suballocations1st.clear();
10461 suballocations2nd.clear();
10462 m_1stNullItemsBeginCount = 0;
10463 m_1stNullItemsMiddleCount = 0;
10464 m_2ndNullItemsCount = 0;
10465 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10469 const size_t suballoc1stCount = suballocations1st.size();
10470 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10471 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Extend the leading null run: middle nulls adjacent to the front become
// begin nulls.
10474 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10475 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10477 ++m_1stNullItemsBeginCount;
10478 --m_1stNullItemsMiddleCount;
// Pop trailing null items of the 1st vector.
10482 while(m_1stNullItemsMiddleCount > 0 &&
10483 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10485 --m_1stNullItemsMiddleCount;
10486 suballocations1st.pop_back();
// Pop trailing null items of the 2nd vector.
10490 while(m_2ndNullItemsCount > 0 &&
10491 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10493 --m_2ndNullItemsCount;
10494 suballocations2nd.pop_back();
// Remove leading null items of the 2nd vector.
10498 while(m_2ndNullItemsCount > 0 &&
10499 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10501 --m_2ndNullItemsCount;
10502 suballocations2nd.remove(0);
10505 if(ShouldCompact1st())
// In-place compaction: shift live items down over the null placeholders.
10507 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10508 size_t srcIndex = m_1stNullItemsBeginCount;
10509 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10511 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10515 if(dstIndex != srcIndex)
10517 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10521 suballocations1st.resize(nonNullItemCount)
10522 m_1stNullItemsBeginCount = 0;
10523 m_1stNullItemsMiddleCount = 0;
10527 if(suballocations2nd.empty())
10529 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items: it can be discarded entirely.
10533 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10535 suballocations1st.clear();
10536 m_1stNullItemsBeginCount = 0;
10538 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// Swap roles: the ring-buffer 2nd vector becomes the new 1st vector,
// carrying its null-item count over and re-deriving the leading null run.
10541 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10542 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10543 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10544 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10546 ++m_1stNullItemsBeginCount;
10547 --m_1stNullItemsMiddleCount;
10549 m_2ndNullItemsCount = 0;
10550 m_1stVectorIndex ^= 1;
10555 VMA_HEAVY_ASSERT(Validate());
// Constructor: forwards to the base class and zero-initializes the per-level
// free lists; real setup (root node, level count) happens in Init().
10562 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10563 VmaBlockMetadata(hAllocator),
10565 m_AllocationCount(0),
10569 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively deletes the whole buddy tree starting at the root.
10572 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10574 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of the given size.
// Usable size is rounded DOWN to a power of two (the remainder is tracked
// elsewhere as "unusable"); the level count is derived by halving until
// MIN_NODE_SIZE or MAX_LEVELS is reached; a single free root node covering
// the whole usable range seeds level 0's free list.
10577 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10579 VmaBlockMetadata::Init(size);
10581 m_UsableSize = VmaPrevPow2(size);
10582 m_SumFreeSize = m_UsableSize;
10586 while(m_LevelCount < MAX_LEVELS &&
10587 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
10592 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10593 rootNode->offset = 0;
10594 rootNode->type = Node::TYPE_FREE;
10595 rootNode->parent = VMA_NULL;
10596 rootNode->buddy = VMA_NULL;
10599 AddToFreeListFront(0, rootNode);
// Debug validation: recursively checks the buddy tree (ValidateNode), then
// cross-checks the allocation count and free-size totals against the
// recomputed ones, and finally verifies every per-level free list is a
// well-formed doubly linked list of TYPE_FREE nodes (unused levels empty).
10602 bool VmaBlockMetadata_Buddy::Validate()
const 10605 ValidationContext ctx;
10606 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10608 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10610 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10611 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Check linkage of each level's free list.
10614 for(uint32_t level = 0; level < m_LevelCount; ++level)
10616 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10617 m_FreeList[level].front->free.prev == VMA_NULL);
10619 for(Node* node = m_FreeList[level].front;
10621 node = node->free.next)
10623 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10625 if(node->free.next == VMA_NULL)
10627 VMA_VALIDATE(m_FreeList[level].back == node);
10631 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must stay empty.
10637 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10639 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Largest contiguous free range = node size of the shallowest level that
// has any free node (levels are scanned from largest node size down).
10645 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10647 for(uint32_t level = 0; level < m_LevelCount; ++level)
10649 if(m_FreeList[level].front != VMA_NULL)
10651 return LevelToNodeSize(level);
// Fills outInfo by recursing over the buddy tree; the rounded-off tail of
// the block (GetUnusableSize) is accounted separately when nonzero.
10657 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10659 const VkDeviceSize unusableSize = GetUnusableSize();
10670 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10672 if(unusableSize > 0)
// Accumulates this block's totals into pool-wide statistics. The unusable
// tail (size rounded off to the previous power of two) is reported as
// unused space.
10681 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10683 const VkDeviceSize unusableSize = GetUnusableSize();
10685 inoutStats.
size += GetSize();
10686 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10691 if(unusableSize > 0)
// Emits a JSON map of the buddy block: header from freshly computed stats,
// then a recursive dump of the tree, then the unusable tail (if any) as an
// unused range at the end of the block.
10698 #if VMA_STATS_STRING_ENABLED 10700 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10704 CalcAllocationStatInfo(stat);
10706 PrintDetailedMap_Begin(
10712 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10714 const VkDeviceSize unusableSize = GetUnusableSize();
10715 if(unusableSize > 0)
10717 PrintDetailedMap_UnusedRange(json,
10722 PrintDetailedMap_End(json);
// Finds a free buddy node for the requested size/alignment.
// Image/unknown allocation types are padded up to bufferImageGranularity so
// buddy nodes never mix conflicting types within one granularity page.
// Searches from the target level upward (larger nodes) — `level--` from
// targetLevel+1 iterates targetLevel, targetLevel-1, ..., 0 — taking the
// first free node whose offset satisfies the alignment; the chosen level is
// smuggled to Alloc() via the request's customData.
// Lost-allocation support is not implemented for the buddy algorithm.
10725 #endif // #if VMA_STATS_STRING_ENABLED 10727 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10728 uint32_t currentFrameIndex,
10729 uint32_t frameInUseCount,
10730 VkDeviceSize bufferImageGranularity,
10731 VkDeviceSize allocSize,
10732 VkDeviceSize allocAlignment,
10734 VmaSuballocationType allocType,
10735 bool canMakeOtherLost,
10737 VmaAllocationRequest* pAllocationRequest)
10739 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Pad unknown/image allocations to the granularity to avoid conflicts.
10743 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10744 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10745 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10747 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10748 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10751 if(allocSize > m_UsableSize)
10756 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10757 for(uint32_t level = targetLevel + 1; level--; )
10759 for(Node* freeNode = m_FreeList[level].front;
10760 freeNode != VMA_NULL;
10761 freeNode = freeNode->free.next)
10763 if(freeNode->offset % allocAlignment == 0)
10765 pAllocationRequest->type = VmaAllocationRequestType::Normal;
10766 pAllocationRequest->offset = freeNode->offset;
10767 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10768 pAllocationRequest->sumItemSize = 0;
10769 pAllocationRequest->itemsToMakeLostCount = 0;
// Stash the source level for Alloc() to find the node again.
10770 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm never produces requests that sacrifice other
// allocations, so this only succeeds when nothing was asked to be lost.
10779 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10780 uint32_t currentFrameIndex,
10781 uint32_t frameInUseCount,
10782 VmaAllocationRequest* pAllocationRequest)
10788 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost allocations are not supported by the buddy algorithm; body elided
// in this excerpt (presumably returns 0 — TODO confirm against full source).
10791 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation request: locates the chosen free node at the
// level recorded in request.customData (matching by offset), then splits it
// repeatedly — creating left/right buddy children — until a node of the
// target level is reached. That node is removed from its free list, marked
// as an allocation, and the counters are updated.
10800 void VmaBlockMetadata_Buddy::Alloc(
10801 const VmaAllocationRequest& request,
10802 VmaSuballocationType type,
10803 VkDeviceSize allocSize,
10806 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
10808 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10809 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the free node chosen by CreateAllocationRequest by its offset.
10811 Node* currNode = m_FreeList[currLevel].front;
10812 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10813 while(currNode->offset != request.offset)
10815 currNode = currNode->free.next;
10816 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down until the node size matches the target level.
10820 while(currLevel < targetLevel)
10824 RemoveFromFreeList(currLevel, currNode);
10826 const uint32_t childrenLevel = currLevel + 1;
10829 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10830 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10832 leftChild->offset = currNode->offset;
10833 leftChild->type = Node::TYPE_FREE;
10834 leftChild->parent = currNode;
10835 leftChild->buddy = rightChild;
10837 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10838 rightChild->type = Node::TYPE_FREE;
10839 rightChild->parent = currNode;
10840 rightChild->buddy = leftChild;
10843 currNode->type = Node::TYPE_SPLIT;
10844 currNode->split.leftChild = leftChild;
// Left child is pushed last so it is at the list front and gets picked
// next iteration (keeps carving the same region).
10847 AddToFreeListFront(childrenLevel, rightChild);
10848 AddToFreeListFront(childrenLevel, leftChild);
10853 currNode = m_FreeList[currLevel].front;
10862 VMA_ASSERT(currLevel == targetLevel &&
10863 currNode != VMA_NULL &&
10864 currNode->type == Node::TYPE_FREE);
10865 RemoveFromFreeList(currLevel, currNode);
10868 currNode->type = Node::TYPE_ALLOCATION;
10869 currNode->allocation.alloc = hAllocation;
10871 ++m_AllocationCount;
// Note: free size decreases by the requested size, not the node size; the
// node-internal slack is accounted for in Validate via GetSize().
10873 m_SumFreeSize -= allocSize;
// Recursively frees a subtree: for split nodes delete both children (the
// right child is reached via leftChild->buddy), then the node itself.
10876 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10878 if(node->type == Node::TYPE_SPLIT)
10880 DeleteNode(node->split.leftChild->buddy);
10881 DeleteNode(node->split.leftChild);
10884 vma_delete(GetAllocationCallbacks(), node);
// Recursive structural check of one buddy-tree node:
//  - parent/buddy linkage is mutually consistent (root has neither);
//  - FREE nodes add their full node size to the recomputed free total;
//  - ALLOCATION nodes add their internal slack (node size minus allocation
//    size) and bump the recomputed allocation count;
//  - SPLIT nodes recurse into both children at half the node size, checking
//    the children's offsets.
10887 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10889 VMA_VALIDATE(level < m_LevelCount);
10890 VMA_VALIDATE(curr->parent == parent);
10891 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10892 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10895 case Node::TYPE_FREE:
10897 ctx.calculatedSumFreeSize += levelNodeSize;
10898 ++ctx.calculatedFreeCount;
10900 case Node::TYPE_ALLOCATION:
10901 ++ctx.calculatedAllocationCount;
// Slack inside the node (node size minus actual allocation size) still
// counts as free space in the total.
10902 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10903 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10905 case Node::TYPE_SPLIT:
10907 const uint32_t childrenLevel = level + 1;
10908 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10909 const Node*
const leftChild = curr->split.leftChild;
10910 VMA_VALIDATE(leftChild != VMA_NULL);
10911 VMA_VALIDATE(leftChild->offset == curr->offset);
10912 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10914 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10916 const Node*
const rightChild = leftChild->buddy;
10917 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10918 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10920 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest buddy-tree level whose node size
// still fits it: descends while the next (half-sized) level can still hold
// allocSize and the level limit is not exceeded. Level 0 = whole usable size.
// (The ++level inside the loop and the final return were lost in extraction.)
10931 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10934 uint32_t level = 0;
10935 VkDeviceSize currLevelNodeSize = m_UsableSize;
10936 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
10937 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10940 currLevelNodeSize = nextLevelNodeSize;
10941 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at the given offset: walks the tree from the root,
// choosing the left or right child depending on which half contains the
// offset, until an ALLOCATION leaf is reached. Then marks it FREE and merges
// it with its buddy upward while both buddies are free.
10946 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10949 Node* node = m_Root;
10950 VkDeviceSize nodeOffset = 0;
10951 uint32_t level = 0;
10952 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
10953 while(node->type == Node::TYPE_SPLIT)
10955 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10956 if(offset < nodeOffset + nextLevelSize)
10958 node = node->split.leftChild;
// Offset falls into the upper half: go to right child and advance offset.
10962 node = node->split.leftChild->buddy;
10963 nodeOffset += nextLevelSize;
10966 levelNodeSize = nextLevelSize;
10969 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
// alloc may be VK_NULL_HANDLE when freeing by offset only.
10970 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
10973 --m_AllocationCount;
10974 m_SumFreeSize += alloc->GetSize();
10976 node->type = Node::TYPE_FREE;
// Merge free buddies upward. The buddy is removed from its free list, both
// children are destroyed and the parent becomes the new FREE node.
// NOTE(review): the lines updating `node = parent` / `--level` were lost in
// extraction — presumably present in the real file; confirm there.
10979 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
10981 RemoveFromFreeList(level, node->buddy);
10982 Node*
const parent = node->parent;
10984 vma_delete(GetAllocationCallbacks(), node->buddy);
10985 vma_delete(GetAllocationCallbacks(), node);
10986 parent->type = Node::TYPE_FREE;
10994 AddToFreeListFront(level, node);
// Recursively accumulates allocation statistics (VmaStatInfo) over the buddy
// tree: FREE nodes contribute unused ranges, ALLOCATION nodes contribute an
// allocation plus any internal-fragmentation remainder, SPLIT nodes recurse
// into both half-sized children. (The switch header was lost in extraction.)
10997 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 11001 case Node::TYPE_FREE:
11007 case Node::TYPE_ALLOCATION:
11009 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Remainder of the node beyond the allocation counts as an unused range.
11015 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11016 if(unusedRangeSize > 0)
11025 case Node::TYPE_SPLIT:
11027 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11028 const Node*
const leftChild = node->split.leftChild;
11029 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11030 const Node*
const rightChild = leftChild->buddy;
11031 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a FREE node onto the front of the doubly-linked free list for the
// given level, handling both the empty-list and non-empty-list cases.
11039 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11041 VMA_ASSERT(node->type == Node::TYPE_FREE);
11044 Node*
const frontNode = m_FreeList[level].front;
11045 if(frontNode == VMA_NULL)
// List empty: node becomes both front and back.
11047 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11048 node->free.prev = node->free.next = VMA_NULL;
11049 m_FreeList[level].front = m_FreeList[level].back = node;
// Non-empty: link node before the current front.
11053 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11054 node->free.prev = VMA_NULL;
11055 node->free.next = frontNode;
11056 frontNode->free.prev = node;
11057 m_FreeList[level].front = node;
// Unlinks a node from the doubly-linked free list of the given level,
// updating the list's front/back pointers when the node is at either end.
11061 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11063 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Fix the link coming into the node from the front side.
11066 if(node->free.prev == VMA_NULL)
11068 VMA_ASSERT(m_FreeList[level].front == node);
11069 m_FreeList[level].front = node->free.next;
11073 Node*
const prevFreeNode = node->free.prev;
11074 VMA_ASSERT(prevFreeNode->free.next == node);
11075 prevFreeNode->free.next = node->free.next;
// Fix the link coming into the node from the back side.
11079 if(node->free.next == VMA_NULL)
11081 VMA_ASSERT(m_FreeList[level].back == node);
11082 m_FreeList[level].back = node->free.prev;
11086 Node*
const nextFreeNode = node->free.next;
11087 VMA_ASSERT(nextFreeNode->free.prev == node);
11088 nextFreeNode->free.prev = node->free.prev;
// Recursively emits the JSON detailed map for one buddy-tree node: unused
// ranges for FREE nodes and fragmentation tails, allocations for ALLOCATION
// nodes, recursion into both children for SPLIT nodes.
// (The switch header was lost in extraction.)
#if VMA_STATS_STRING_ENABLED 11093 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 11097 case Node::TYPE_FREE:
11098 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11100 case Node::TYPE_ALLOCATION:
11102 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11103 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report internal fragmentation after the allocation, if any.
11104 if(allocSize < levelNodeSize)
11106 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11110 case Node::TYPE_SPLIT:
11112 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11113 const Node*
const leftChild = node->split.leftChild;
11114 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11115 const Node*
const rightChild = leftChild->buddy;
11116 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: initializes the block to an empty, uninitialized state.
// Real initialization (memory handle, metadata) happens later in Init().
#endif // #if VMA_STATS_STRING_ENABLED 11129 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11130 m_pMetadata(VMA_NULL),
11131 m_MemoryTypeIndex(UINT32_MAX),
11133 m_hMemory(VK_NULL_HANDLE),
11135 m_pMappedData(VMA_NULL)
// Takes ownership of an already-allocated VkDeviceMemory and creates the
// metadata object matching the requested algorithm (Linear, Buddy, or the
// default Generic). Must be called exactly once on a fresh block.
11139 void VmaDeviceMemoryBlock::Init(
11142 uint32_t newMemoryTypeIndex,
11143 VkDeviceMemory newMemory,
11144 VkDeviceSize newSize,
11146 uint32_t algorithm)
11148 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11150 m_hParentPool = hParentPool;
11151 m_MemoryTypeIndex = newMemoryTypeIndex;
11153 m_hMemory = newMemory;
// Algorithm dispatch (the switch/case lines were lost in extraction).
11158 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11161 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11167 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11169 m_pMetadata->Init(newSize);
// Releases the underlying VkDeviceMemory and deletes the metadata object.
// All suballocations must have been freed first (asserted via IsEmpty()).
11172 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11176 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11178 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11179 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11180 m_hMemory = VK_NULL_HANDLE;
11182 vma_delete(allocator, m_pMetadata);
11183 m_pMetadata = VMA_NULL;
// Sanity-checks the block (valid memory handle, non-zero size) and then
// delegates to the metadata's own Validate().
11186 bool VmaDeviceMemoryBlock::Validate()
const 11188 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11189 (m_pMetadata->GetSize() != 0));
11191 return m_pMetadata->Validate();
// Temporarily maps the block, runs the metadata's magic-value corruption
// check over the mapped data, then unmaps. Returns the Map() error if the
// block could not be mapped.
11194 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11196 void* pData =
nullptr;
11197 VkResult res = Map(hAllocator, 1, &pData);
11198 if(res != VK_SUCCESS)
11203 res = m_pMetadata->CheckCorruption(pData);
11205 Unmap(hAllocator, 1);
// Reference-counted mapping of the whole block. If already mapped, just
// bumps m_MapCount by `count` and returns the cached pointer; otherwise
// calls vkMapMemory under the block's mutex. ppData may be NULL when the
// caller only needs the mapping to exist, not the pointer.
11210 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
11217 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11218 if(m_MapCount != 0)
11220 m_MapCount += count;
11221 VMA_ASSERT(m_pMappedData != VMA_NULL);
11222 if(ppData != VMA_NULL)
11224 *ppData = m_pMappedData;
// First mapping: call into Vulkan. (The argument lines and the assignment
// of m_pMappedData from vkMapMemory were lost in extraction.)
11230 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11231 hAllocator->m_hDevice,
11237 if(result == VK_SUCCESS)
11239 if(ppData != VMA_NULL)
11241 *ppData = m_pMappedData;
11243 m_MapCount = count;
// Decrements the map reference count by `count`; when it reaches zero the
// block is actually unmapped via vkUnmapMemory. Asserts on underflow
// (unmapping more times than mapped).
11249 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11256 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11257 if(m_MapCount >= count)
11259 m_MapCount -= count;
11260 if(m_MapCount == 0)
11262 m_pMappedData = VMA_NULL;
11263 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11268 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Corruption-detection helper: maps the block and writes the magic marker
// into the debug margins immediately before and after the allocation.
// Only meaningful when VMA_DEBUG_MARGIN / VMA_DEBUG_DETECT_CORRUPTION are on.
11272 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11274 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11275 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11278 VkResult res = Map(hAllocator, 1, &pData);
11279 if(res != VK_SUCCESS)
11284 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11285 VmaWriteMagicValue(pData, allocOffset + allocSize);
11287 Unmap(hAllocator, 1);
// Counterpart of WriteMagicValueAroundAllocation: maps the block and checks
// that the magic markers around the allocation are intact, asserting with a
// descriptive message when either margin has been overwritten.
11292 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11294 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11295 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11298 VkResult res = Map(hAllocator, 1, &pData);
11299 if(res != VK_SUCCESS)
11304 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11306 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11308 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11310 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11313 Unmap(hAllocator, 1);
// Binds a buffer to this block's memory at the allocation's offset.
// The mutex guards against a concurrent vkMapMemory on the same
// VkDeviceMemory, per Vulkan external-synchronization rules.
11318 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11323 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11324 hAllocation->GetBlock() ==
this);
11326 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11327 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11328 hAllocator->m_hDevice,
11331 hAllocation->GetOffset());
// Image counterpart of BindBufferMemory: binds an image to this block's
// memory at the allocation's offset, under the same mutex.
11334 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11339 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11340 hAllocation->GetBlock() ==
this);
11342 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11343 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11344 hAllocator->m_hDevice,
11347 hAllocation->GetOffset());
// NOTE(review): fragments of two stat-info helper functions whose signatures
// (and most bodies) were lost in this lossy extraction — the memset zeroes an
// output VmaStatInfo; the second declaration is a post-processing routine.
11352 memset(&outInfo, 0,
sizeof(outInfo));
11371 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the pool create-info into the member
// VmaBlockVector. blockSize == 0 means "use the allocator's preferred block
// size"; a non-zero blockSize also marks the block size as explicit.
11379 VmaPool_T::VmaPool_T(
11382 VkDeviceSize preferredBlockSize) :
11386 createInfo.memoryTypeIndex,
11387 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11388 createInfo.minBlockCount,
11389 createInfo.maxBlockCount,
11391 createInfo.frameInUseCount,
11393 createInfo.blockSize != 0,
11399 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores configuration (memory type, block size
// limits, granularity, algorithm, ...) and creates an empty block list using
// the allocator's allocation callbacks. No Vulkan memory is allocated here.
#if VMA_STATS_STRING_ENABLED 11405 #endif // #if VMA_STATS_STRING_ENABLED 11407 VmaBlockVector::VmaBlockVector(
11410 uint32_t memoryTypeIndex,
11411 VkDeviceSize preferredBlockSize,
11412 size_t minBlockCount,
11413 size_t maxBlockCount,
11414 VkDeviceSize bufferImageGranularity,
11415 uint32_t frameInUseCount,
11417 bool explicitBlockSize,
11418 uint32_t algorithm) :
11419 m_hAllocator(hAllocator),
11420 m_hParentPool(hParentPool),
11421 m_MemoryTypeIndex(memoryTypeIndex),
11422 m_PreferredBlockSize(preferredBlockSize),
11423 m_MinBlockCount(minBlockCount),
11424 m_MaxBlockCount(maxBlockCount),
11425 m_BufferImageGranularity(bufferImageGranularity),
11426 m_FrameInUseCount(frameInUseCount),
11427 m_IsCustomPool(isCustomPool),
11428 m_ExplicitBlockSize(explicitBlockSize),
11429 m_Algorithm(algorithm),
11430 m_HasEmptyBlock(false),
11431 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
// Destructor: destroys and deletes every remaining block, iterating
// backwards so removal order does not matter.
11436 VmaBlockVector::~VmaBlockVector()
11438 for(
size_t i = m_Blocks.size(); i--; )
11440 m_Blocks[i]->Destroy(m_hAllocator);
11441 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, stopping (and
// returning the error) on the first failure.
11445 VkResult VmaBlockVector::CreateMinBlocks()
11447 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11449 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11450 if(res != VK_SUCCESS)
// Aggregates pool statistics over all blocks under a shared (read) lock;
// each block's metadata adds its contribution to *pStats.
11458 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11460 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11462 const size_t blockCount = m_Blocks.size();
11471 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11473 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11474 VMA_ASSERT(pBlock);
11475 VMA_HEAVY_ASSERT(pBlock->Validate());
11476 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption detection needs the debug macros enabled AND a memory type that
// is both HOST_VISIBLE and HOST_COHERENT (so margins can be written/read
// from the CPU without explicit flushes).
11480 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11482 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11483 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11484 (VMA_DEBUG_MARGIN > 0) &&
11486 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
11489 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates `allocationCount` pages via AllocatePage under a write lock.
// On any failure, already-created allocations are freed and the output
// array is zeroed, so the call is all-or-nothing.
11491 VkResult VmaBlockVector::Allocate(
11492 uint32_t currentFrameIndex,
11494 VkDeviceSize alignment,
11496 VmaSuballocationType suballocType,
11497 size_t allocationCount,
11501 VkResult res = VK_SUCCESS;
// With corruption detection, round size/alignment up so margins stay
// aligned to the 4-byte magic-value unit.
11503 if(IsCorruptionDetectionEnabled())
11505 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11506 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11510 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11511 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11513 res = AllocatePage(
11519 pAllocations + allocIndex);
11520 if(res != VK_SUCCESS)
// Rollback: free the allocations made so far and clear the output array.
11527 if(res != VK_SUCCESS)
11530 while(allocIndex--)
11532 Free(pAllocations[allocIndex]);
11534 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Core single-allocation routine. Strategy, in order:
//   1. Try existing blocks (last block first, then forward or backward scan
//      depending on the allocation strategy).
//   2. If allowed, create a new block — shrinking the preferred size by up
//      to 3 halvings heuristically, and again on out-of-memory.
//   3. If canMakeOtherLost, search all blocks for the cheapest request that
//      evicts ("makes lost") other allocations, retrying up to
//      VMA_ALLOCATION_TRY_COUNT times.
// Returns VK_SUCCESS, VK_ERROR_OUT_OF_DEVICE_MEMORY, VK_ERROR_TOO_MANY_OBJECTS,
// or VK_ERROR_FEATURE_NOT_PRESENT for unsupported flag combinations.
11540 VkResult VmaBlockVector::AllocatePage(
11541 uint32_t currentFrameIndex,
11543 VkDeviceSize alignment,
11545 VmaSuballocationType suballocType,
11552 const bool canCreateNewBlock =
11554 (m_Blocks.size() < m_MaxBlockCount);
11561 canMakeOtherLost =
false;
// Upper-address allocation is only valid for certain algorithms.
11565 if(isUpperAddress &&
11568 return VK_ERROR_FEATURE_NOT_PRESENT;
11582 return VK_ERROR_FEATURE_NOT_PRESENT;
// Early out: request (plus both debug margins) can never fit in one block.
11586 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11588 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11596 if(!canMakeOtherLost || canCreateNewBlock)
// 1a. Fast path: try the last (most recently used) block first.
11605 if(!m_Blocks.empty())
11607 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11608 VMA_ASSERT(pCurrBlock);
11609 VkResult res = AllocateFromBlock(
11619 if(res == VK_SUCCESS)
11621 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 1b. Forward scan over existing blocks.
11631 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11633 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11634 VMA_ASSERT(pCurrBlock);
11635 VkResult res = AllocateFromBlock(
11645 if(res == VK_SUCCESS)
11647 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 1c. Backward scan (alternate strategy).
11655 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11657 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11658 VMA_ASSERT(pCurrBlock);
11659 VkResult res = AllocateFromBlock(
11669 if(res == VK_SUCCESS)
11671 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. Create a new block.
11679 if(canCreateNewBlock)
11682 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11683 uint32_t newBlockSizeShift = 0;
11684 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic: start small if existing blocks are small and the request
// would still fit with room to spare (2x).
11686 if(!m_ExplicitBlockSize)
11689 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11690 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11692 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11693 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11695 newBlockSize = smallerNewBlockSize;
11696 ++newBlockSizeShift;
11705 size_t newBlockIndex = 0;
11706 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device-memory failure, retry with progressively halved block sizes
// as long as the request still fits.
11708 if(!m_ExplicitBlockSize)
11710 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11712 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11713 if(smallerNewBlockSize >= size)
11715 newBlockSize = smallerNewBlockSize;
11716 ++newBlockSizeShift;
11717 res = CreateBlock(newBlockSize, &newBlockIndex);
11726 if(res == VK_SUCCESS)
11728 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11729 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11731 res = AllocateFromBlock(
11741 if(res == VK_SUCCESS)
11743 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11749 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Evict other allocations ("make lost") to satisfy the request.
11756 if(canMakeOtherLost)
11758 uint32_t tryIndex = 0;
11759 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11761 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11762 VmaAllocationRequest bestRequest = {};
11763 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward search for the lowest-cost candidate request.
11769 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11771 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11772 VMA_ASSERT(pCurrBlock);
11773 VmaAllocationRequest currRequest = {};
11774 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11777 m_BufferImageGranularity,
11786 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11787 if(pBestRequestBlock == VMA_NULL ||
11788 currRequestCost < bestRequestCost)
11790 pBestRequestBlock = pCurrBlock;
11791 bestRequest = currRequest;
11792 bestRequestCost = currRequestCost;
// Cost 0 means nothing would be lost — cannot do better.
11794 if(bestRequestCost == 0)
// Backward search variant (alternate strategy).
11805 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11807 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11808 VMA_ASSERT(pCurrBlock);
11809 VmaAllocationRequest currRequest = {};
11810 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11813 m_BufferImageGranularity,
11822 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11823 if(pBestRequestBlock == VMA_NULL ||
11824 currRequestCost < bestRequestCost ||
11827 pBestRequestBlock = pCurrBlock;
11828 bestRequest = currRequest;
11829 bestRequestCost = currRequestCost;
11831 if(bestRequestCost == 0 ||
// Commit the best request: map if persistently-mapped allocations are
// requested, evict the blocking allocations, then perform the allocation.
11841 if(pBestRequestBlock != VMA_NULL)
11845 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11846 if(res != VK_SUCCESS)
11852 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11858 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11860 m_HasEmptyBlock =
false;
11863 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
11864 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
11865 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
11866 (*pAllocation)->InitBlockAllocation(
11868 bestRequest.offset,
11874 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11875 VMA_DEBUG_LOG(
" Returned from existing block");
11876 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11877 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11879 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11881 if(IsCorruptionDetectionEnabled())
11883 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11884 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Retry budget exhausted: another thread kept winning the race.
11899 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11901 return VK_ERROR_TOO_MANY_OBJECTS;
11905 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a block-suballocation: validates corruption margins, drops any
// persistent mapping, frees in metadata, then lazily retires at most one
// empty block (keeping one empty block cached, never dropping below
// m_MinBlockCount). The actual Vulkan free happens outside the lock.
11908 void VmaBlockVector::Free(
11911 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the write lock; deletion is deferred to after unlock.
11915 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11917 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11919 if(IsCorruptionDetectionEnabled())
11921 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11922 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
11925 if(hAllocation->IsPersistentMap())
11927 pBlock->Unmap(m_hAllocator, 1);
11930 pBlock->m_pMetadata->Free(hAllocation);
11931 VMA_HEAVY_ASSERT(pBlock->Validate());
11933 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
11936 if(pBlock->m_pMetadata->IsEmpty())
// Already have one empty block cached -> delete this one.
11939 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11941 pBlockToDelete = pBlock;
11947 m_HasEmptyBlock =
true;
// This block isn't empty, but an older empty block may now be deletable.
11952 else if(m_HasEmptyBlock)
11954 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11955 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11957 pBlockToDelete = pLastBlock;
11958 m_Blocks.pop_back();
11959 m_HasEmptyBlock =
false;
11963 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is done outside the mutex on purpose
// (it can be slow and must not block other threads).
11968 if(pBlockToDelete != VMA_NULL)
11970 VMA_DEBUG_LOG(
" Deleted empty allocation");
11971 pBlockToDelete->Destroy(m_hAllocator);
11972 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, scanning backwards and
// stopping early once the preferred block size is reached (can't grow past
// it for the heuristic's purpose).
11976 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 11978 VkDeviceSize result = 0;
11979 for(
size_t i = m_Blocks.size(); i--; )
11981 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
11982 if(result >= m_PreferredBlockSize)
// Removes the given block pointer from the vector (linear search; the
// block itself is not destroyed here).
11990 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
11992 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11994 if(m_Blocks[blockIndex] == pBlock)
11996 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass ordering blocks by ascending free size, so fuller
// blocks migrate toward the front over repeated calls (amortized sorting).
12003 void VmaBlockVector::IncrementallySortBlocks()
12008 for(
size_t i = 1; i < m_Blocks.size(); ++i)
12010 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
12012 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts the allocation inside one specific block: asks the block's
// metadata for a request (no evictions allowed here — itemsToMakeLostCount
// must be 0), maps if mapping is requested, commits the allocation, and
// applies debug fill / corruption margins. Returns
// VK_ERROR_OUT_OF_DEVICE_MEMORY when the block cannot satisfy the request.
12019 VkResult VmaBlockVector::AllocateFromBlock(
12020 VmaDeviceMemoryBlock* pBlock,
12021 uint32_t currentFrameIndex,
12023 VkDeviceSize alignment,
12026 VmaSuballocationType suballocType,
12035 VmaAllocationRequest currRequest = {};
12036 if(pBlock->m_pMetadata->CreateAllocationRequest(
12039 m_BufferImageGranularity,
// This path never evicts other allocations.
12049 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
12053 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12054 if(res != VK_SUCCESS)
// Block is about to gain an allocation, so it is no longer the empty one.
12061 if(pBlock->m_pMetadata->IsEmpty())
12063 m_HasEmptyBlock =
false;
12066 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12067 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12068 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12069 (*pAllocation)->InitBlockAllocation(
12071 currRequest.offset,
12077 VMA_HEAVY_ASSERT(pBlock->Validate());
12078 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12079 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12081 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12083 if(IsCorruptionDetectionEnabled())
12085 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12086 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
12090 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a fresh VkDeviceMemory of `blockSize` for this vector's memory
// type, wraps it in a new VmaDeviceMemoryBlock, appends it to m_Blocks, and
// optionally reports its index through pNewBlockIndex.
12093 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12095 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12096 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12097 allocInfo.allocationSize = blockSize;
12098 VkDeviceMemory mem = VK_NULL_HANDLE;
12099 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12108 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
// pBlock->Init(...) arguments were partially lost in extraction.
12114 allocInfo.allocationSize,
12118 m_Blocks.push_back(pBlock);
12119 if(pNewBlockIndex != VMA_NULL)
12121 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves with CPU memcpy: marks the involved blocks,
// maps any that aren't already mapped, for each move invalidates the source
// range (non-coherent memory), copies bytes, rewrites corruption margins,
// flushes the destination range, and finally unmaps blocks this function
// mapped. Invalidate/flush ranges are aligned to nonCoherentAtomSize and
// clamped to the block size, per Vulkan mapped-memory-range rules.
12127 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12128 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12129 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12131 const size_t blockCount = m_Blocks.size();
12132 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12136 BLOCK_FLAG_USED = 0x00000001,
12137 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12145 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12146 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12147 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Pass 1: flag every block that participates in at least one move.
12150 const size_t moveCount = moves.size();
12151 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12153 const VmaDefragmentationMove& move = moves[moveIndex];
12154 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12155 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12158 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: ensure every used block is mapped, remembering which mappings
// this function created so it can undo them at the end.
12161 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12163 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12164 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12165 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12167 currBlockInfo.pMappedData = pBlock->GetMappedData();
12169 if(currBlockInfo.pMappedData == VMA_NULL)
12171 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12172 if(pDefragCtx->res == VK_SUCCESS)
12174 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Pass 3: perform the actual copies.
12181 if(pDefragCtx->res == VK_SUCCESS)
12183 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12184 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12186 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12188 const VmaDefragmentationMove& move = moves[moveIndex];
12190 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12191 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12193 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate source range so CPU reads see device writes (non-coherent).
12198 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12199 memRange.memory = pSrcBlock->GetDeviceMemory();
12200 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12201 memRange.size = VMA_MIN(
12202 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12203 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12204 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12209 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12210 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12211 static_cast<size_t>(move.size));
12213 if(IsCorruptionDetectionEnabled())
12215 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12216 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush destination range so device reads see the CPU writes.
12222 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12223 memRange.memory = pDstBlock->GetDeviceMemory();
12224 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12225 memRange.size = VMA_MIN(
12226 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12227 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12228 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Pass 4: unmap only the blocks this function mapped (reverse order).
12235 for(
size_t blockIndex = blockCount; blockIndex--; )
12237 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12238 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12240 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12241 pBlock->Unmap(m_hAllocator, 1);
// GPU variant of move application: for each participating block, creates a
// temporary whole-block VkBuffer (TRANSFER_SRC|DST) bound at offset 0, then
// records vkCmdCopyBuffer commands for every move into the supplied command
// buffer. Sets res = VK_NOT_READY when copies were recorded, signaling the
// caller that the command buffer must be submitted and completed.
12246 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12247 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12248 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12249 VkCommandBuffer commandBuffer)
12251 const size_t blockCount = m_Blocks.size();
12253 pDefragCtx->blockContexts.resize(blockCount);
12254 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Pass 1: mark blocks used by any move.
12257 const size_t moveCount = moves.size();
12258 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12260 const VmaDefragmentationMove& move = moves[moveIndex];
12261 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12262 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12265 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: create and bind a temporary buffer covering each used block.
12269 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
12270 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
12271 VK_BUFFER_USAGE_TRANSFER_DST_BIT;
12273 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12275 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12276 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12277 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12279 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12280 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12281 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12282 if(pDefragCtx->res == VK_SUCCESS)
12284 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12285 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Pass 3: record buffer-to-buffer copies for all moves.
12292 if(pDefragCtx->res == VK_SUCCESS)
12294 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12296 const VmaDefragmentationMove& move = moves[moveIndex];
12298 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12299 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12301 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12303 VkBufferCopy region = {
12307 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12308 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
// VK_NOT_READY tells the caller the command buffer still needs to execute.
12313 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12315 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): headerless fragment — the enclosing function's signature was
// lost in extraction. The body scans blocks backwards, destroys empty blocks
// above m_MinBlockCount (crediting bytesFreed to the defragmentation stats),
// and otherwise re-records that one empty block remains. Presumably this is
// VmaBlockVector::FreeEmptyBlocks — confirm against the real file.
12321 m_HasEmptyBlock =
false;
12322 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12324 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12325 if(pBlock->m_pMetadata->IsEmpty())
12327 if(m_Blocks.size() > m_MinBlockCount)
12329 if(pDefragmentationStats != VMA_NULL)
12332 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12335 VmaVectorRemove(m_Blocks, blockIndex);
12336 pBlock->Destroy(m_hAllocator);
12337 vma_delete(m_hAllocator, pBlock);
12341 m_HasEmptyBlock =
true;
// Serializes this block vector as JSON under a shared (read) lock: pool
// configuration (custom pools get MemoryTypeIndex/BlockSize/BlockCount/...,
// default vectors get PreferredBlockSize), then a "Blocks" object keyed by
// block id with each block's detailed metadata map.
#if VMA_STATS_STRING_ENABLED 12349 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12351 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12353 json.BeginObject();
// Custom-pool branch (the if(m_IsCustomPool)-style guard was lost in
// extraction; this section writes pool-specific configuration).
12357 json.WriteString(
"MemoryTypeIndex");
12358 json.WriteNumber(m_MemoryTypeIndex);
12360 json.WriteString(
"BlockSize");
12361 json.WriteNumber(m_PreferredBlockSize);
12363 json.WriteString(
"BlockCount");
12364 json.BeginObject(
true);
12365 if(m_MinBlockCount > 0)
12367 json.WriteString(
"Min");
12368 json.WriteNumber((uint64_t)m_MinBlockCount);
12370 if(m_MaxBlockCount < SIZE_MAX)
12372 json.WriteString(
"Max");
12373 json.WriteNumber((uint64_t)m_MaxBlockCount);
12375 json.WriteString(
"Cur");
12376 json.WriteNumber((uint64_t)m_Blocks.size());
12379 if(m_FrameInUseCount > 0)
12381 json.WriteString(
"FrameInUseCount");
12382 json.WriteNumber(m_FrameInUseCount);
12385 if(m_Algorithm != 0)
12387 json.WriteString(
"Algorithm");
12388 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Non-custom (default) vector branch.
12393 json.WriteString(
"PreferredBlockSize");
12394 json.WriteNumber(m_PreferredBlockSize);
12397 json.WriteString(
"Blocks");
12398 json.BeginObject();
12399 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12401 json.BeginString();
12402 json.ContinueString(m_Blocks[i]->GetId());
12405 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation pass over this block vector.
// Chooses CPU (memcpy through mapped memory) or GPU (vkCmdCopyBuffer recorded into
// commandBuffer) moves, runs the algorithm held by pCtx, and decrements the caller's
// remaining byte/allocation budgets. Result is reported via pCtx->res.
// NOTE(review): extraction dropped braces and some condition continuation lines
// (e.g. the isHostVisible/isHostCoherent use at original line 12429); numeric
// prefixes are original file line numbers.
12412 #endif // #if VMA_STATS_STRING_ENABLED 12414 void VmaBlockVector::Defragment(
12415 class VmaBlockVectorDefragmentationContext* pCtx,
12417 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12418 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12419 VkCommandBuffer commandBuffer)
12421 pCtx->res = VK_SUCCESS;
12423 const VkMemoryPropertyFlags memPropFlags =
12424 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12425 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12426 const bool isHostCoherent = (memPropFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;
// CPU path requires a non-zero budget (plus host visibility -- continuation lost).
12428 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
// GPU path requires a non-zero budget and no corruption-detection margins,
// since GPU copies would bypass the margin validation.
12430 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12431 !IsCorruptionDetectionEnabled();
12434 if(canDefragmentOnCpu || canDefragmentOnGpu)
12436 bool defragmentOnGpu;
// Exactly one path available: take it.
12438 if(canDefragmentOnGpu != canDefragmentOnCpu)
12440 defragmentOnGpu = canDefragmentOnGpu;
// Both available: prefer GPU for device-local memory or on integrated GPUs.
12445 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12446 m_hAllocator->IsIntegratedGpu();
// Overlapping src/dst ranges are only safe with CPU memmove-style copies.
12449 bool overlappingMoveSupported = !defragmentOnGpu;
// Lock is taken here and released later in DefragmentationEnd().
12451 if(m_hAllocator->m_UseMutex)
12453 m_Mutex.LockWrite();
12454 pCtx->mutexLocked =
true;
12457 pCtx->Begin(overlappingMoveSupported);
12461 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12462 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12463 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12464 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()))
12465 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Charge what was actually moved against the caller's remaining budgets.
12468 if(pStats != VMA_NULL)
12470 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12471 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12474 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12475 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12476 if(defragmentOnGpu)
12478 maxGpuBytesToMove -= bytesMoved;
12479 maxGpuAllocationsToMove -= allocationsMoved;
12483 maxCpuBytesToMove -= bytesMoved;
12484 maxCpuAllocationsToMove -= allocationsMoved;
// Apply the planned moves: record copy commands (GPU) or memcpy now (CPU).
12488 if(pCtx->res >= VK_SUCCESS)
12490 if(defragmentOnGpu)
12492 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12496 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finishes a defragmentation pass started by Defragment():
// destroys temporary per-block VkBuffers (GPU path), frees now-empty blocks on
// success, and releases the write lock acquired in Defragment().
// NOTE(review): the pStats parameter line was dropped by extraction -- presumably
// VmaDefragmentationStats* pStats; TODO confirm.
12502 void VmaBlockVector::DefragmentationEnd(
12503 class VmaBlockVectorDefragmentationContext* pCtx,
// Destroy temporary buffers in reverse order of creation.
12507 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12509 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12510 if(blockCtx.hBuffer)
12512 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12513 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12517 if(pCtx->res >= VK_SUCCESS)
12519 FreeEmptyBlocks(pStats);
// Release the mutex taken in Defragment(); mutexLocked guards against double-unlock.
12522 if(pCtx->mutexLocked)
12524 VMA_ASSERT(m_hAllocator->m_UseMutex);
12525 m_Mutex.UnlockWrite();
12529 size_t VmaBlockVector::CalcAllocationCount()
const 12532 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12534 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Conservatively decides whether mixing linear and optimal resources in these blocks
// could violate bufferImageGranularity. Granularity of 1 can never conflict.
// NOTE(review): the early "return false"/final return lines were dropped by
// extraction; numeric prefixes are original file line numbers.
12539 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12541 if(m_BufferImageGranularity == 1)
// Threads the last suballocation type across consecutive blocks while scanning.
12545 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12546 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12548 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
// This query is only implemented for the generic (default) algorithm's metadata.
12549 VMA_ASSERT(m_Algorithm == 0);
12550 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12551 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
12559 void VmaBlockVector::MakePoolAllocationsLost(
12560 uint32_t currentFrameIndex,
12561 size_t* pLostAllocationCount)
12563 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12564 size_t lostAllocationCount = 0;
12565 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12567 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12568 VMA_ASSERT(pBlock);
12569 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
12571 if(pLostAllocationCount != VMA_NULL)
12573 *pLostAllocationCount = lostAllocationCount;
12577 VkResult VmaBlockVector::CheckCorruption()
12579 if(!IsCorruptionDetectionEnabled())
12581 return VK_ERROR_FEATURE_NOT_PRESENT;
12584 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12585 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12587 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12588 VMA_ASSERT(pBlock);
12589 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12590 if(res != VK_SUCCESS)
// Accumulates this vector's per-block statistics into pStats: the global total,
// the per-memory-type bucket, and the per-memory-heap bucket.
// NOTE(review): the local VmaStatInfo declaration (original line ~12610) was
// dropped by extraction; numeric prefixes are original file line numbers.
12598 void VmaBlockVector::AddStats(
VmaStats* pStats)
12600 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12601 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
// Shared (read) lock: statistics collection does not mutate the vector.
12603 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12605 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12607 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12608 VMA_ASSERT(pBlock);
12609 VMA_HEAVY_ASSERT(pBlock->Validate());
12611 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
// Each block contributes to three aggregates.
12612 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12613 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12614 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Builds the generic defragmentation algorithm over pBlockVector: one BlockInfo per
// device memory block, then sorts them by block pointer so AddAllocation() can use
// binary search. NOTE(review): the m_BytesMoved(0) initializer (original line
// ~12629) and the hAllocator parameter line were dropped by extraction.
12621 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12623 VmaBlockVector* pBlockVector,
12624 uint32_t currentFrameIndex,
12625 bool overlappingMoveSupported) :
12626 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12627 m_AllocationCount(0),
12628 m_AllAllocations(false),
12630 m_AllocationsMoved(0),
12631 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12634 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12635 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
// BlockInfos are heap-allocated; released in the destructor via vma_delete.
12637 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12638 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12639 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12640 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer to enable VmaBinaryFindFirstNotLess in AddAllocation().
12644 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
12647 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12649 for(
size_t i = m_Blocks.size(); i--; )
12651 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as movable for this defragmentation run. Lost
// allocations are skipped. The owning BlockInfo is located by binary search over
// the pointer-sorted m_Blocks (see constructor). NOTE(review): the else branch
// after the block lookup (original lines ~12666-12670) was dropped by extraction.
12655 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
// Only process allocations that are not lost.
12658 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12660 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12661 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12662 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
// pChanged is stored alongside the allocation so the caller can be told if it moved.
12664 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12665 (*it)->m_Allocations.push_back(allocInfo);
12672 ++m_AllocationCount;
// One round of the generic algorithm: repeatedly takes the "last" allocation
// (from the last block, highest offset) and tries to re-place it in an earlier
// block / lower offset, recording a VmaDefragmentationMove for each success and
// respecting the byte/count budgets. Stops when budgets are exhausted or no more
// sensible moves exist. NOTE(review): heavily garbled -- numeric prefixes are
// original file line numbers; many control-flow lines (returns, else branches,
// the enclosing while(true), index decrements) were dropped by extraction. Do not
// restyle without recovering the complete upstream text.
12676 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12677 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12678 VkDeviceSize maxBytesToMove,
12679 uint32_t maxAllocationsToMove)
12681 if(m_Blocks.empty())
// Blocks below this index contain non-movable allocations and are skipped as
// destinations shrink -- presumably advanced in dropped lines; TODO confirm.
12694 size_t srcBlockMinIndex = 0;
// Scan sources from the end: last block, last (highest-offset) allocation first.
12707 size_t srcBlockIndex = m_Blocks.size() - 1;
12708 size_t srcAllocIndex = SIZE_MAX;
// Walk backwards to the nearest block that still has allocations to consider.
12714 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12716 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// Reached the lower bound with nothing left to move: round is finished.
12719 if(srcBlockIndex == srcBlockMinIndex)
12726 srcAllocIndex = SIZE_MAX;
12731 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12735 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12736 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12738 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12739 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12740 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12741 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to and including the source block.
12744 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12746 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12747 VmaAllocationRequest dstAllocRequest;
// Ask the destination metadata for a placement; several argument lines
// (size, alignment, suballocType, strategy) were dropped by extraction.
12748 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12749 m_CurrentFrameIndex,
12750 m_pBlockVector->GetFrameInUseCount(),
12751 m_pBlockVector->GetBufferImageGranularity(),
12758 &dstAllocRequest) &&
// Only accept placements that actually move the allocation "to the left".
12760 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12762 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check before committing the move.
12765 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12766 (m_BytesMoved + size > maxBytesToMove))
12771 VmaDefragmentationMove move;
12772 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12773 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12774 move.srcOffset = srcOffset;
12775 move.dstOffset = dstAllocRequest.offset;
12777 moves.push_back(move);
// Commit: allocate in destination metadata, free in source, retarget the handle.
12779 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12783 allocInfo.m_hAllocation);
12784 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12786 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12788 if(allocInfo.m_pChanged != VMA_NULL)
12790 *allocInfo.m_pChanged = VK_TRUE;
12793 ++m_AllocationsMoved;
12794 m_BytesMoved += size;
12796 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous source allocation / previous block.
12804 if(srcAllocIndex > 0)
12810 if(srcBlockIndex > 0)
12813 srcAllocIndex = SIZE_MAX;
12823 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12826 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12828 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: collects allocations (all of them when
// m_AllAllocations), sorts blocks by "move destination" priority, then runs up to
// roundCount rounds of DefragmentRound(). NOTE(review): braces, an early return,
// and the ++it of the suballocation loop were dropped by extraction; numeric
// prefixes are original file line numbers.
12836 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12837 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12838 VkDeviceSize maxBytesToMove,
12839 uint32_t maxAllocationsToMove)
// Nothing registered and not in "defragment everything" mode: nothing to do.
12841 if(!m_AllAllocations && m_AllocationCount == 0)
12846 const size_t blockCount = m_Blocks.size();
12847 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12849 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
// In "all allocations" mode, harvest every non-free suballocation from metadata.
12851 if(m_AllAllocations)
12853 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12854 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12855 it != pMetadata->m_Suballocations.end();
12858 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12860 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12861 pBlockInfo->m_Allocations.push_back(allocInfo);
12866 pBlockInfo->CalcHasNonMovableAllocations();
// Sources are consumed from the back, so sort each block's allocations by
// descending offset.
12870 pBlockInfo->SortAllocationsByOffsetDescending();
12876 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Two passes are enough in practice; a second round catches holes opened by the first.
12879 const uint32_t roundCount = 2;
12882 VkResult result = VK_SUCCESS;
12883 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12885 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
12891 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12892 size_t dstBlockIndex, VkDeviceSize dstOffset,
12893 size_t srcBlockIndex, VkDeviceSize srcOffset)
12895 if(dstBlockIndex < srcBlockIndex)
12899 if(dstBlockIndex > srcBlockIndex)
12903 if(dstOffset < srcOffset)
// Constructs the fast (linear compaction) algorithm. It assumes no debug margins
// between suballocations, hence the assert. NOTE(review): the hAllocator parameter
// line and the m_BytesMoved(0) initializer (original line ~12922) were dropped by
// extraction; numeric prefixes are original file line numbers.
12913 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12915 VmaBlockVector* pBlockVector,
12916 uint32_t currentFrameIndex,
12917 bool overlappingMoveSupported) :
12918 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12919 m_OverlappingMoveSupported(overlappingMoveSupported),
12920 m_AllocationCount(0),
12921 m_AllAllocations(false),
12923 m_AllocationsMoved(0),
12924 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
// Fast algorithm packs suballocations back-to-back; debug margins would break it.
12926 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
12930 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast algorithm: compacts all suballocations front-to-back across blocks sorted by
// free space, reusing skipped gaps via a FreeSpaceDatabase. Four cases per
// allocation: (1) fits a recorded gap in the same block, (2) fits a gap in an
// earlier block, (3) stays in the same block (possibly shifted left), (4) moves to
// an earlier destination block. NOTE(review): heavily garbled -- numeric prefixes
// are original file line numbers; braces, `bool end = false;`, budget-`end` sets,
// loop increments, and move-struct closers were dropped by extraction. Do not
// restyle without recovering the complete upstream text.
12934 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12935 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12936 VkDeviceSize maxBytesToMove,
12937 uint32_t maxAllocationsToMove)
12939 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12941 const size_t blockCount = m_pBlockVector->GetBlockCount();
// Trivial cases: nothing to compact or zero budget.
12942 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Strips FREE suballocations out of every block's metadata (restored later by
// PostprocessMetadata).
12947 PreprocessMetadata();
// Sort block infos ascending by free space so fuller blocks become destinations first.
12951 m_BlockInfos.resize(blockCount);
12952 for(
size_t i = 0; i < blockCount; ++i)
12954 m_BlockInfos[i].origBlockIndex = i;
12957 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12958 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12959 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Records gaps that had to be skipped so later, smaller allocations can fill them.
12964 FreeSpaceDatabase freeSpaceDb;
// Rolling destination cursor: block + offset where the next allocation is packed.
12966 size_t dstBlockInfoIndex = 0;
12967 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12968 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12969 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12970 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12971 VkDeviceSize dstOffset = 0;
// `end` (declaration lost in extraction) aborts both loops when budget runs out.
12974 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12976 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12977 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12978 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12979 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12980 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12982 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12983 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12984 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Budget exhausted: stop (the `end = true; break;` lines were dropped).
12985 if(m_AllocationsMoved == maxAllocationsToMove ||
12986 m_BytesMoved + srcAllocSize > maxBytesToMove)
12991 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// Case 1/2: the allocation fits into a previously skipped gap.
12994 size_t freeSpaceInfoIndex;
12995 VkDeviceSize dstAllocOffset;
12996 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
12997 freeSpaceInfoIndex, dstAllocOffset))
12999 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
13000 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
13001 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Same block: only the offset changes.
13004 if(freeSpaceInfoIndex == srcBlockInfoIndex)
13006 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13010 VmaSuballocation suballoc = *srcSuballocIt;
13011 suballoc.offset = dstAllocOffset;
13012 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
13013 m_BytesMoved += srcAllocSize;
13014 ++m_AllocationsMoved;
// Re-link the suballocation node at its new sorted position (++nextSuballocIt lost).
13016 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13018 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13019 srcSuballocIt = nextSuballocIt;
13021 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13023 VmaDefragmentationMove move = {
13024 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13025 srcAllocOffset, dstAllocOffset,
13027 moves.push_back(move);
// Different (earlier) block: retarget the allocation handle too.
13034 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13036 VmaSuballocation suballoc = *srcSuballocIt;
13037 suballoc.offset = dstAllocOffset;
13038 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13039 m_BytesMoved += srcAllocSize;
13040 ++m_AllocationsMoved;
13042 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13044 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13045 srcSuballocIt = nextSuballocIt;
13047 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13049 VmaDefragmentationMove move = {
13050 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13051 srcAllocOffset, dstAllocOffset,
13053 moves.push_back(move);
// No gap reuse: pack at the rolling destination cursor, aligned as required.
13058 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance the destination block while the allocation does not fit; remember the
// tail of each abandoned block as reusable free space.
13061 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13062 dstAllocOffset + srcAllocSize > dstBlockSize)
13065 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13067 ++dstBlockInfoIndex;
13068 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13069 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13070 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13071 dstBlockSize = pDstMetadata->GetSize();
13073 dstAllocOffset = 0;
// Case 3: destination is the same block the allocation already lives in.
13077 if(dstBlockInfoIndex == srcBlockInfoIndex)
13079 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13081 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
13083 bool skipOver = overlap;
13084 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: moving by less than 1/64 of the allocation size is not worth it.
13088 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Skip over: keep the allocation in place, record the gap before it.
13093 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13095 dstOffset = srcAllocOffset + srcAllocSize;
// Shift left inside the same block.
13101 srcSuballocIt->offset = dstAllocOffset;
13102 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
13103 dstOffset = dstAllocOffset + srcAllocSize;
13104 m_BytesMoved += srcAllocSize;
13105 ++m_AllocationsMoved;
13107 VmaDefragmentationMove move = {
13108 srcOrigBlockIndex, dstOrigBlockIndex,
13109 srcAllocOffset, dstAllocOffset,
13111 moves.push_back(move);
// Case 4: move to an earlier destination block; append keeps dst list sorted
// because dstOffset only grows.
13119 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13120 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13122 VmaSuballocation suballoc = *srcSuballocIt;
13123 suballoc.offset = dstAllocOffset;
13124 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13125 dstOffset = dstAllocOffset + srcAllocSize;
13126 m_BytesMoved += srcAllocSize;
13127 ++m_AllocationsMoved;
13129 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13131 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13132 srcSuballocIt = nextSuballocIt;
13134 pDstMetadata->m_Suballocations.push_back(suballoc);
13136 VmaDefragmentationMove move = {
13137 srcOrigBlockIndex, dstOrigBlockIndex,
13138 srcAllocOffset, dstAllocOffset,
13140 moves.push_back(move);
13146 m_BlockInfos.clear();
// Rebuild FREE suballocations and free-size indices invalidated by the pass.
13148 PostprocessMetadata();
// Prepares every block's metadata for the fast pass: removes all FREE
// suballocations and clears the by-size index, leaving only the occupied nodes
// (PostprocessMetadata() rebuilds the free structures afterwards).
// NOTE(review): the ++nextIt / else / ++it lines of the inner loop were dropped
// by extraction; numeric prefixes are original file line numbers.
13153 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13155 const size_t blockCount = m_pBlockVector->GetBlockCount();
13156 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13158 VmaBlockMetadata_Generic*
const pMetadata =
13159 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
// Temporarily pretend the whole block is free; fixed up in PostprocessMetadata().
13160 pMetadata->m_FreeCount = 0;
13161 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13162 pMetadata->m_FreeSuballocationsBySize.clear();
13163 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13164 it != pMetadata->m_Suballocations.end(); )
// Erase FREE nodes, advancing via a saved next iterator.
13166 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
13168 VmaSuballocationList::iterator nextIt = it;
13170 pMetadata->m_Suballocations.erase(it);
// Rebuilds each block's free-space bookkeeping after the fast pass: re-inserts a
// FREE suballocation for every gap between (and after) the compacted allocations,
// recomputes m_FreeCount / m_SumFreeSize, and re-sorts m_FreeSuballocationsBySize.
// NOTE(review): braces, the initial m_FreeCount/m_SumFreeSize resets, and the
// suballoc field initializers (offset/size lines) were dropped by extraction;
// numeric prefixes are original file line numbers.
13181 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13183 const size_t blockCount = m_pBlockVector->GetBlockCount();
13184 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13186 VmaBlockMetadata_Generic*
const pMetadata =
13187 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13188 const VkDeviceSize blockSize = pMetadata->GetSize();
// Block emptied completely: one FREE suballocation spanning the whole block.
13191 if(pMetadata->m_Suballocations.empty())
13193 pMetadata->m_FreeCount = 1;
13195 VmaSuballocation suballoc = {
13199 VMA_SUBALLOCATION_TYPE_FREE };
13200 pMetadata->m_Suballocations.push_back(suballoc);
13201 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// General case: walk occupied nodes in offset order, inserting FREE gaps.
13206 VkDeviceSize offset = 0;
13207 VmaSuballocationList::iterator it;
13208 for(it = pMetadata->m_Suballocations.begin();
13209 it != pMetadata->m_Suballocations.end();
// After Preprocess/compaction the list must contain only occupied, ordered nodes.
13212 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13213 VMA_ASSERT(it->offset >= offset);
// Gap before this node: materialize it as a FREE suballocation.
13216 if(it->offset > offset)
13218 ++pMetadata->m_FreeCount;
13219 const VkDeviceSize freeSize = it->offset - offset;
13220 VmaSuballocation suballoc = {
13224 VMA_SUBALLOCATION_TYPE_FREE };
13225 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// Only gaps above the registration threshold are indexed by size.
13226 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13228 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13232 pMetadata->m_SumFreeSize -= it->size;
13233 offset = it->offset + it->size;
// Trailing gap at the end of the block.
13237 if(offset < blockSize)
13239 ++pMetadata->m_FreeCount;
13240 const VkDeviceSize freeSize = blockSize - offset;
13241 VmaSuballocation suballoc = {
13245 VMA_SUBALLOCATION_TYPE_FREE };
13246 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13247 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): `>` here vs `>=` for the preceding gap above -- looks
// inconsistent but matches the visible text; confirm against upstream.
13248 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13250 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
// Restore the sorted-by-size invariant of the free index (VMA_SORT call header lost).
13255 pMetadata->m_FreeSuballocationsBySize.begin(),
13256 pMetadata->m_FreeSuballocationsBySize.end(),
13257 VmaSuballocationItemSizeLess());
13260 VMA_HEAVY_ASSERT(pMetadata->Validate());
13264 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13267 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13268 while(it != pMetadata->m_Suballocations.end())
13270 if(it->offset < suballoc.offset)
13275 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context: stores the pool (null for default
// pools), the queued allocations, and later (in Begin()) the chosen algorithm.
// NOTE(review): the hAllocator/hCustomPool parameter lines and the res(VK_SUCCESS)
// initializer (original line ~13287) were dropped by extraction.
13281 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13284 VmaBlockVector* pBlockVector,
13285 uint32_t currFrameIndex,
13286 uint32_t algorithmFlags) :
13288 mutexLocked(false),
13289 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13290 m_hAllocator(hAllocator),
13291 m_hCustomPool(hCustomPool),
13292 m_pBlockVector(pBlockVector),
13293 m_CurrFrameIndex(currFrameIndex),
13294 m_AlgorithmFlags(algorithmFlags),
// The concrete algorithm object is created lazily in Begin().
13295 m_pAlgorithm(VMA_NULL),
13296 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13297 m_AllAllocations(false)
13301 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13303 vma_delete(m_hAllocator, m_pAlgorithm);
13306 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13308 AllocInfo info = { hAlloc, pChanged };
13309 m_Allocations.push_back(info);
// Chooses and instantiates the defragmentation algorithm for this block vector,
// then feeds it either "all allocations" or the explicitly queued list.
// NOTE(review): several condition lines of the algorithm-selection if (original
// lines ~13316-13328, incl. the allAllocations and algorithm-flags checks) were
// dropped by extraction; numeric prefixes are original file line numbers.
13312 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13314 const bool allAllocations = m_AllAllocations ||
13315 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
// Fast algorithm is only valid without debug margins and without possible
// buffer/image granularity conflicts (additional conditions lost in extraction).
13327 if(VMA_DEBUG_MARGIN == 0 &&
13329 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13331 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13332 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
// Fallback: generic algorithm, which supports margins and granularity checks.
13336 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13337 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13342 m_pAlgorithm->AddAll();
13346 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13348 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context: holds one sub-context per default memory type
// (fixed array, null-initialized here) plus a growable list for custom pools.
// NOTE(review): the hAllocator/flags/pStats parameter lines and their member
// initializers were dropped by extraction.
13356 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13358 uint32_t currFrameIndex,
13361 m_hAllocator(hAllocator),
13362 m_CurrFrameIndex(currFrameIndex),
13365 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// Per-memory-type contexts are created lazily in AddAllocations(); start all null.
13367 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Tears down all sub-contexts: each block vector gets DefragmentationEnd() (which
// destroys temp buffers, frees empty blocks, and unlocks its mutex) before the
// context object itself is deleted. Custom-pool contexts first, then per-type ones.
13370 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13372 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13374 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13375 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13376 vma_delete(m_hAllocator, pBlockVectorCtx);
13378 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
// Default-pool slots are lazily created and may still be null.
13380 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13381 if(pBlockVectorCtx)
13383 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13384 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation: for each pool (default
// algorithm only), finds or creates its VmaBlockVectorDefragmentationContext and
// marks it to process all allocations. NOTE(review): ctor argument lines
// (hAllocator, pool, frame index, flags) were dropped by extraction.
13389 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13391 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13393 VmaPool pool = pPools[poolIndex];
// Pools using a non-default algorithm (linear/buddy) are not defragmentable here.
13396 if(pool->m_BlockVector.GetAlgorithm() == 0)
13398 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Reuse an existing context if this pool was already added.
13400 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13402 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13404 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13409 if(!pBlockVectorDefragCtx)
13411 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13414 &pool->m_BlockVector,
13417 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Whole-pool registration means: defragment every allocation in the pool.
13420 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. For each allocation that is
// block-based and not lost, routes it to the context of its custom pool or, for
// default pools, to the per-memory-type context (created lazily).
// NOTE(review): the pAllocations parameter line, the hAlloc local, and several
// ctor argument lines were dropped by extraction; numeric prefixes are original
// file line numbers.
13425 void VmaDefragmentationContext_T::AddAllocations(
13426 uint32_t allocationCount,
13428 VkBool32* pAllocationsChanged)
// Route each allocation to the proper per-pool / per-memory-type context.
13431 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13434 VMA_ASSERT(hAlloc);
// Only block-based (not dedicated) and not-lost allocations can be moved.
13436 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13438 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13440 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13442 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation belongs to a custom pool.
13444 if(hAllocPool != VK_NULL_HANDLE)
// Non-default algorithms (linear/buddy) are not supported for defragmentation.
13447 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13449 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13451 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13453 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13457 if(!pBlockVectorDefragCtx)
13459 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13462 &hAllocPool->m_BlockVector,
13465 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to a default pool: use the per-memory-type slot.
13472 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13473 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13474 if(!pBlockVectorDefragCtx)
13476 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13479 m_hAllocator->m_pBlockVectors[memTypeIndex],
13482 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13486 if(pBlockVectorDefragCtx)
// Per-allocation "changed" flag is optional and addressed by the same index.
13488 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13489 &pAllocationsChanged[allocIndex] : VMA_NULL;
13490 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Executes defragmentation over all registered block-vector contexts:
// first the default pools (one per memory type), then the custom pools.
// CPU/GPU move budgets are passed through to each block vector; without a
// command buffer the GPU budget is forced to zero (no GPU-side copies).
// Iteration stops early when a block vector reports a hard failure
// (res < VK_SUCCESS); the first failure code is returned.
13496 VkResult VmaDefragmentationContext_T::Defragment(
13497 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13498 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// No command buffer => GPU defragmentation is disabled for this run.
13506 if(commandBuffer == VK_NULL_HANDLE)
13508 maxGpuBytesToMove = 0;
13509 maxGpuAllocationsToMove = 0;
13512 VkResult res = VK_SUCCESS;
// Process default pools, one block vector per memory type.
13515 for(uint32_t memTypeIndex = 0;
13516 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13519 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13520 if(pBlockVectorCtx)
13522 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13523 pBlockVectorCtx->GetBlockVector()->Defragment(
13526 maxCpuBytesToMove, maxCpuAllocationsToMove,
13527 maxGpuBytesToMove, maxGpuAllocationsToMove,
13529 if(pBlockVectorCtx->res != VK_SUCCESS)
13531 res = pBlockVectorCtx->res;
// Process custom pools registered via AddPools/AddAllocations.
13537 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13538 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13541 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13542 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13543 pBlockVectorCtx->GetBlockVector()->Defragment(
13546 maxCpuBytesToMove, maxCpuAllocationsToMove,
13547 maxGpuBytesToMove, maxGpuAllocationsToMove,
13549 if(pBlockVectorCtx->res != VK_SUCCESS)
13551 res = pBlockVectorCtx->res;
13561 #if VMA_RECORDING_ENABLED 13563 VmaRecorder::VmaRecorder() :
13568 m_StartCounter(INT64_MAX)
// NOTE(review): function header elided in this extract — presumably
// VmaRecorder::Init(const VmaRecordSettings&, bool useMutex). Captures the
// QueryPerformanceCounter frequency and start timestamp, opens the recording
// file for binary write via fopen_s, and writes the CSV header lines
// (format name and version "1,5"). Fails with
// VK_ERROR_INITIALIZATION_FAILED when the file cannot be opened.
13574 m_UseMutex = useMutex;
13575 m_Flags = settings.
flags;
// Baseline for the relative timestamps written by GetBasicParams().
13577 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13578 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13581 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
13584 return VK_ERROR_INITIALIZATION_FAILED;
// File header: format magic line and recording-format version.
13588 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13589 fprintf(m_File,
"%s\n",
"1,5");
// Destructor: closes the recording file if it was opened (close call elided
// in this extract).
13594 VmaRecorder::~VmaRecorder()
13596 if(m_File != VMA_NULL)
// Records a vmaCreateAllocator call as one CSV line:
// threadId,time,frameIndex,functionName.
13602 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13604 CallParams callParams;
13605 GetBasicParams(callParams);
// Serialize file access when the recorder was initialized with useMutex.
13607 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13608 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Records a vmaDestroyAllocator call (same CSV shape as RecordCreateAllocator).
13612 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13614 CallParams callParams;
13615 GetBasicParams(callParams);
13617 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13618 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// NOTE(review): signature elided in this extract — this is the body of
// RecordCreatePool(frameIndex, createInfo, pool). Writes the pool-creation
// parameters (flags, memoryTypeIndex, block sizes/counts, frameInUseCount,
// pool handle — argument lines elided) as one CSV line.
13624 CallParams callParams;
13625 GetBasicParams(callParams);
13627 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13628 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaDestroyPool call; the pool handle is logged as a pointer.
13639 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13641 CallParams callParams;
13642 GetBasicParams(callParams);
13644 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13645 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaAllocateMemory call: memory requirements, allocation create
// info (some argument lines elided) and the user-data string.
13650 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13651 const VkMemoryRequirements& vkMemReq,
13655 CallParams callParams;
13656 GetBasicParams(callParams);
13657 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// UserDataString formats pUserData either as a string or as a pointer,
// depending on createInfo.flags.
13659 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13660 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13662 vkMemReq.alignment,
13663 vkMemReq.memoryTypeBits,
13671 userDataStr.GetString());
// Records a vmaAllocateMemoryPages call. The fixed parameters are written
// first (note the trailing "%p," — the format line ends mid-record), then
// the allocation handles are appended by PrintPointerList, then the
// user-data string closes the line.
13675 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13676 const VkMemoryRequirements& vkMemReq,
13678 uint64_t allocationCount,
13681 CallParams callParams;
13682 GetBasicParams(callParams);
13684 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13685 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13686 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13688 vkMemReq.alignment,
13689 vkMemReq.memoryTypeBits,
// Space-separated list of the returned VmaAllocation handles.
13696 PrintPointerList(allocationCount, pAllocations);
13697 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Records a vmaAllocateMemoryForBuffer call, including the dedicated-
// allocation hints (required/preferred) as 0/1 flags.
13701 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13702 const VkMemoryRequirements& vkMemReq,
13703 bool requiresDedicatedAllocation,
13704 bool prefersDedicatedAllocation,
13708 CallParams callParams;
13709 GetBasicParams(callParams);
13711 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13712 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13713 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13715 vkMemReq.alignment,
13716 vkMemReq.memoryTypeBits,
13717 requiresDedicatedAllocation ? 1 : 0,
13718 prefersDedicatedAllocation ? 1 : 0,
13726 userDataStr.GetString());
// Records a vmaAllocateMemoryForImage call — same shape as the
// buffer variant above.
13730 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13731 const VkMemoryRequirements& vkMemReq,
13732 bool requiresDedicatedAllocation,
13733 bool prefersDedicatedAllocation,
13737 CallParams callParams;
13738 GetBasicParams(callParams);
13740 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13741 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13742 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13744 vkMemReq.alignment,
13745 vkMemReq.memoryTypeBits,
13746 requiresDedicatedAllocation ? 1 : 0,
13747 prefersDedicatedAllocation ? 1 : 0,
13755 userDataStr.GetString());
// Records a vmaFreeMemory call; the allocation handle is logged as a pointer.
13759 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13762 CallParams callParams;
13763 GetBasicParams(callParams);
13765 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13766 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaFreeMemoryPages call: the fixed prefix, then the list of
// allocation handles, then the newline terminating the record.
13771 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13772 uint64_t allocationCount,
13775 CallParams callParams;
13776 GetBasicParams(callParams);
13778 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13779 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13780 PrintPointerList(allocationCount, pAllocations);
13781 fprintf(m_File,
"\n");
// Records a vmaResizeAllocation call with the allocation handle and the
// requested new size.
13785 void VmaRecorder::RecordResizeAllocation(
13786 uint32_t frameIndex,
13788 VkDeviceSize newSize)
13790 CallParams callParams;
13791 GetBasicParams(callParams);
13793 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13794 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13795 allocation, newSize);
// Records a vmaSetAllocationUserData call; the new user data is formatted
// through UserDataString (constructor argument lines elided in this extract).
13799 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13801 const void* pUserData)
13803 CallParams callParams;
13804 GetBasicParams(callParams);
13806 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13807 UserDataString userDataStr(
13810 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13812 userDataStr.GetString());
// Records a vmaCreateLostAllocation call.
13816 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13819 CallParams callParams;
13820 GetBasicParams(callParams);
13822 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13823 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaMapMemory call.
13828 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13831 CallParams callParams;
13832 GetBasicParams(callParams);
13834 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13835 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaUnmapMemory call.
13840 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13843 CallParams callParams;
13844 GetBasicParams(callParams);
13846 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13847 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaFlushAllocation call with the flushed offset and size.
13852 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13853 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13855 CallParams callParams;
13856 GetBasicParams(callParams);
13858 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13859 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaInvalidateAllocation call with the invalidated offset and size.
13866 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13867 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13869 CallParams callParams;
13870 GetBasicParams(callParams);
13872 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13873 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaCreateBuffer call: the buffer create-info fields, the
// allocation create-info fields (some argument lines elided), the pool
// handle and the user-data string.
13880 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13881 const VkBufferCreateInfo& bufCreateInfo,
13885 CallParams callParams;
13886 GetBasicParams(callParams);
13888 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13889 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13890 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13891 bufCreateInfo.flags,
13892 bufCreateInfo.size,
13893 bufCreateInfo.usage,
13894 bufCreateInfo.sharingMode,
13895 allocCreateInfo.
flags,
13896 allocCreateInfo.
usage,
13900 allocCreateInfo.
pool,
13902 userDataStr.GetString());
// Records a vmaCreateImage call: the full image create-info (type, format,
// extent, mips, layers, samples, tiling, usage, sharing, initial layout),
// the allocation create-info (some argument lines elided), the pool handle
// and the user-data string.
13906 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13907 const VkImageCreateInfo& imageCreateInfo,
13911 CallParams callParams;
13912 GetBasicParams(callParams);
13914 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13915 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13916 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13917 imageCreateInfo.flags,
13918 imageCreateInfo.imageType,
13919 imageCreateInfo.format,
13920 imageCreateInfo.extent.width,
13921 imageCreateInfo.extent.height,
13922 imageCreateInfo.extent.depth,
13923 imageCreateInfo.mipLevels,
13924 imageCreateInfo.arrayLayers,
13925 imageCreateInfo.samples,
13926 imageCreateInfo.tiling,
13927 imageCreateInfo.usage,
13928 imageCreateInfo.sharingMode,
13929 imageCreateInfo.initialLayout,
13930 allocCreateInfo.
flags,
13931 allocCreateInfo.
usage,
13935 allocCreateInfo.
pool,
13937 userDataStr.GetString());
// Records a vmaDestroyBuffer call.
13941 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13944 CallParams callParams;
13945 GetBasicParams(callParams);
13947 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13948 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaDestroyImage call.
13953 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13956 CallParams callParams;
13957 GetBasicParams(callParams);
13959 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13960 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaTouchAllocation call.
13965 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13968 CallParams callParams;
13969 GetBasicParams(callParams);
13971 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13972 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaGetAllocationInfo call.
13977 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13980 CallParams callParams;
13981 GetBasicParams(callParams);
13983 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13984 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaMakePoolAllocationsLost call.
13989 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
13992 CallParams callParams;
13993 GetBasicParams(callParams);
13995 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13996 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaDefragmentationBegin call. The record is emitted in pieces:
// prefix with the flags, then (elided here) the list of allocations, a
// separator, then the CPU/GPU move budgets and the pool/context handles.
14001 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
14005 CallParams callParams;
14006 GetBasicParams(callParams);
14008 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14009 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
14012 fprintf(m_File,
",");
14014 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Records a vmaDefragmentationEnd call.
14024 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
14027 CallParams callParams;
14028 GetBasicParams(callParams);
14030 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14031 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// NOTE(review): constructor header elided in this extract — presumably
// VmaRecorder::UserDataString::UserDataString(flags, pUserData). When user
// data is present it is either interpreted as a C string (the string-flag
// branch, condition line elided) or rendered as a "%p" pointer into the
// fixed-size m_PtrStr buffer.
14038 if(pUserData != VMA_NULL)
14042 m_Str = (
const char*)pUserData;
14046 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin"/"Config,End" section of the recording file:
// physical-device identity and limits, all memory heaps and types, whether
// VK_KHR_dedicated_allocation is enabled, and the values of the VMA debug
// macros the library was compiled with.
14056 void VmaRecorder::WriteConfiguration(
14057 const VkPhysicalDeviceProperties& devProps,
14058 const VkPhysicalDeviceMemoryProperties& memProps,
14059 bool dedicatedAllocationExtensionEnabled)
14061 fprintf(m_File,
"Config,Begin\n");
// Device identity.
14063 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14064 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14065 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14066 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14067 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14068 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits that affect allocation behavior.
14070 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14071 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14072 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps and memory types.
14074 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14075 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
14077 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
14078 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
14080 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14081 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
14083 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
14084 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
14087 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time configuration of this VMA build.
14089 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14090 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14091 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14092 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14093 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14094 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14095 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14096 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14097 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14099 fprintf(m_File,
"Config,End\n");
14102 void VmaRecorder::GetBasicParams(CallParams& outParams)
14104 outParams.threadId = GetCurrentThreadId();
14106 LARGE_INTEGER counter;
14107 QueryPerformanceCounter(&counter);
14108 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Writes a space-separated list of allocation handles ("%p %p ...") to the
// recording file, with no separator before the first element.
// NOTE(review): a guard for count == 0 appears to be elided in this extract —
// as shown, pItems[0] would be read unconditionally; verify against the
// surrounding lines before editing.
14111 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
14115 fprintf(m_File,
"%p", pItems[0]);
14116 for(uint64_t i = 1; i < count; ++i)
14118 fprintf(m_File,
" %p", pItems[i]);
// Flushes buffered recording output to disk (body elided in this extract).
14123 void VmaRecorder::Flush()
// Constructor: initializes the underlying pool allocator with the given
// allocation callbacks and a block capacity of 1024 objects.
14131 #endif // #if VMA_RECORDING_ENABLED 14136 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14137 m_Allocator(pAllocationCallbacks, 1024)
// NOTE(review): signature elided in this extract — this is the body of
// VmaAllocationObjectAllocator::Allocate(). Thread-safe allocation of a
// VmaAllocation_T object from the internal pool, guarded by m_Mutex.
14143 VmaMutexLock mutexLock(m_Mutex);
14144 return m_Allocator.Alloc();
// Thread-safe return of a VmaAllocation_T object to the internal pool.
14147 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14149 VmaMutexLock mutexLock(m_Mutex);
14150 m_Allocator.Free(hAlloc);
// NOTE(review): constructor header elided in this extract — this is the
// member-initializer list and body of VmaAllocator_T::VmaAllocator_T(
// const VmaAllocatorCreateInfo* pCreateInfo). Initializes device handles,
// allocation callbacks, per-memory-type block vectors and dedicated-
// allocation lists, applies optional heap size limits, and (when recording
// is compiled in and requested) creates and initializes the VmaRecorder.
14159 m_hDevice(pCreateInfo->device),
14160 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
14161 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14162 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14163 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14164 m_PreferredLargeHeapBlockSize(0),
14165 m_PhysicalDevice(pCreateInfo->physicalDevice),
14166 m_CurrentFrameIndex(0),
14167 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14170 ,m_pRecorder(VMA_NULL)
// Corruption detection stores uint32_t markers in the debug margin, so the
// margin must be a multiple of sizeof(uint32_t).
14173 if(VMA_DEBUG_DETECT_CORRUPTION)
14176 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
14181 #if !(VMA_DEDICATED_ALLOCATION) 14184 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
// Zero all POD members before they are filled in below.
14188 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14189 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14190 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14192 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14193 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE means "no limit" for a heap until pHeapSizeLimit overrides it.
14195 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14197 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
14208 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14209 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Alignment-related quantities must be powers of two.
14211 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14212 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14213 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14214 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply user-provided heap size limits, also clamping the reported heap size.
14221 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14223 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14224 if(limit != VK_WHOLE_SIZE)
14226 m_HeapSizeLimit[heapIndex] = limit;
14227 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14229 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create one default block vector and one dedicated-allocation list per
// memory type.
14235 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14237 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14239 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14243 preferredBlockSize,
14246 GetBufferImageGranularity(),
14253 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14260 VkResult res = VK_SUCCESS;
// Recording support: only available when compiled with
// VMA_RECORDING_ENABLED; otherwise requesting it is a hard error.
14265 #if VMA_RECORDING_ENABLED 14266 m_pRecorder = vma_new(
this, VmaRecorder)();
14268 if(res != VK_SUCCESS)
14272 m_pRecorder->WriteConfiguration(
14273 m_PhysicalDeviceProperties,
14275 m_UseKhrDedicatedAllocation);
14276 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14278 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14279 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records allocator destruction and deletes the recorder (when
// recording is enabled), then asserts that all custom pools and dedicated
// allocations were freed by the user before destroying the per-memory-type
// structures in reverse order.
14286 VmaAllocator_T::~VmaAllocator_T()
14288 #if VMA_RECORDING_ENABLED 14289 if(m_pRecorder != VMA_NULL)
14291 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14292 vma_delete(
this, m_pRecorder);
14296 VMA_ASSERT(m_Pools.empty());
// Reverse iteration over memory types.
14298 for(
size_t i = GetMemoryTypeCount(); i--; )
14300 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14302 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14305 vma_delete(
this, m_pDedicatedAllocations[i]);
14306 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions in three stages:
// 1. When statically linked (VMA_STATIC_VULKAN_FUNCTIONS == 1), take the
//    global Vulkan entry points; the *2KHR functions are fetched via
//    vkGetDeviceProcAddr when the dedicated-allocation extension is in use.
// 2. Overwrite with any non-null pointers the user supplied.
// 3. Assert that every required function pointer ended up non-null.
14310 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14312 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14313 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14314 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14315 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14316 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14317 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14318 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14319 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14320 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14321 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14322 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14323 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14324 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14325 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14326 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14327 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14328 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14329 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// Extension functions have no static prototypes; load them per-device.
14330 #if VMA_DEDICATED_ALLOCATION 14331 if(m_UseKhrDedicatedAllocation)
14333 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14334 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14335 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14336 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Stage 2: user-supplied pointers take precedence over static ones.
14338 #endif // #if VMA_DEDICATED_ALLOCATION 14339 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14341 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14342 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14344 if(pVulkanFunctions != VMA_NULL)
14346 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14347 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14348 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14349 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14350 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14351 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14352 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14353 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14354 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14355 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14356 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14357 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14358 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14359 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14360 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14361 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14362 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14363 #if VMA_DEDICATED_ALLOCATION 14364 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14365 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Stage 3: validate that every required entry point is set.
14369 #undef VMA_COPY_IF_NOT_NULL 14373 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14374 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14375 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14376 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14377 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14378 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14379 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14380 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14381 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14382 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14383 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14384 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14385 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14386 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14387 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14388 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14389 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14390 #if VMA_DEDICATED_ALLOCATION 14391 if(m_UseKhrDedicatedAllocation)
14393 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14394 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14399 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14401 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14402 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14403 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14404 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates one or more allocations from a specific memory type. Strategy:
// prefer dedicated memory when forced by debug macro, requested by the
// caller, or when the size exceeds half the preferred block size; otherwise
// try the default block vector first and fall back to dedicated memory.
// Several condition/argument lines are elided in this extract.
14407 VkResult VmaAllocator_T::AllocateMemoryOfType(
14409 VkDeviceSize alignment,
14410 bool dedicatedAllocation,
14411 VkBuffer dedicatedBuffer,
14412 VkImage dedicatedImage,
14414 uint32_t memTypeIndex,
14415 VmaSuballocationType suballocType,
14416 size_t allocationCount,
14419 VMA_ASSERT(pAllocations != VMA_NULL);
14420 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Mapping requires a HOST_VISIBLE memory type (handling elided here).
14426 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14431 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14432 VMA_ASSERT(blockVector);
14434 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: allocations larger than half a block go dedicated.
14435 bool preferDedicatedMemory =
14436 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14437 dedicatedAllocation ||
14439 size > preferredBlockSize / 2;
// Dedicated path is only valid for the default pool.
14441 if(preferDedicatedMemory &&
14443 finalCreateInfo.
pool == VK_NULL_HANDLE)
14452 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14456 return AllocateDedicatedMemory(
// Otherwise: try sub-allocating from the block vector first.
14471 VkResult res = blockVector->Allocate(
14472 m_CurrentFrameIndex.load(),
14479 if(res == VK_SUCCESS)
14487 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block allocation failed: fall back to a dedicated allocation.
14491 res = AllocateDedicatedMemory(
14497 finalCreateInfo.pUserData,
14502 if(res == VK_SUCCESS)
14505 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14511 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates allocationCount dedicated VkDeviceMemory blocks of the given
// size. When VK_KHR_dedicated_allocation is in use, chains a
// VkMemoryDedicatedAllocateInfoKHR naming the buffer or image. On success,
// all new allocations are registered (sorted) in the per-memory-type
// dedicated list; on partial failure, already-created pages are rolled back
// in reverse order and the output array is zeroed.
14518 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14520 VmaSuballocationType suballocType,
14521 uint32_t memTypeIndex,
14523 bool isUserDataString,
14525 VkBuffer dedicatedBuffer,
14526 VkImage dedicatedImage,
14527 size_t allocationCount,
14530 VMA_ASSERT(allocationCount > 0 && pAllocations);
14532 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14533 allocInfo.memoryTypeIndex = memTypeIndex;
14534 allocInfo.allocationSize = size;
// At most one of dedicatedBuffer / dedicatedImage may be set.
14536 #if VMA_DEDICATED_ALLOCATION 14537 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14538 if(m_UseKhrDedicatedAllocation)
14540 if(dedicatedBuffer != VK_NULL_HANDLE)
14542 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14543 dedicatedAllocInfo.buffer = dedicatedBuffer;
14544 allocInfo.pNext = &dedicatedAllocInfo;
14546 else if(dedicatedImage != VK_NULL_HANDLE)
14548 dedicatedAllocInfo.image = dedicatedImage;
14549 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate the pages one by one; stop at the first failure.
14552 #endif // #if VMA_DEDICATED_ALLOCATION 14555 VkResult res = VK_SUCCESS;
14556 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14558 res = AllocateDedicatedMemoryPage(
14566 pAllocations + allocIndex);
14567 if(res != VK_SUCCESS)
// Success: register all new allocations under the per-type mutex.
14573 if(res == VK_SUCCESS)
14577 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14578 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14579 VMA_ASSERT(pDedicatedAllocations);
14580 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14582 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14586 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: destroy the pages created so far, in reverse order.
14591 while(allocIndex--)
14594 VkDeviceMemory hMemory = currAlloc->GetMemory();
14606 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14608 currAlloc->SetUserData(
this, VMA_NULL);
14610 m_AllocationObjectAllocator.Free(currAlloc);
// Leave the caller's array in a defined (all-null) state on failure.
14613 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory block, optionally maps it
// persistently (map branch condition elided in this extract), constructs
// the VmaAllocation_T object and applies user data and the debug fill
// pattern. On map failure the freshly allocated memory is released.
14619 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14621 VmaSuballocationType suballocType,
14622 uint32_t memTypeIndex,
14623 const VkMemoryAllocateInfo& allocInfo,
14625 bool isUserDataString,
14629 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14630 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14633 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
14637 void* pMappedData = VMA_NULL;
14640 res = (*m_VulkanFunctions.vkMapMemory)(
// Mapping failed: roll back the device memory before returning.
14649 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
14650 FreeVulkanMemory(memTypeIndex, size, hMemory);
14655 *pAllocation = m_AllocationObjectAllocator.Allocate();
14656 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
14657 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14658 (*pAllocation)->SetUserData(
this, pUserData);
// Optionally fill new memory with a recognizable pattern for debugging.
14659 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14661 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled, uses vkGetBufferMemoryRequirements2KHR and also reports whether a
// dedicated allocation is required/preferred; otherwise falls back to the
// core function and reports both flags as false.
14667 void VmaAllocator_T::GetBufferMemoryRequirements(
14669 VkMemoryRequirements& memReq,
14670 bool& requiresDedicatedAllocation,
14671 bool& prefersDedicatedAllocation)
const 14673 #if VMA_DEDICATED_ALLOCATION 14674 if(m_UseKhrDedicatedAllocation)
14676 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14677 memReqInfo.buffer = hBuffer;
14679 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
// Chain the dedicated-requirements struct to receive the extra flags.
14681 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14682 memReq2.pNext = &memDedicatedReq;
14684 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14686 memReq = memReq2.memoryRequirements;
14687 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14688 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query, no dedicated-allocation information.
14691 #endif // #if VMA_DEDICATED_ALLOCATION 14693 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14694 requiresDedicatedAllocation =
false;
14695 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR when the extension is enabled, otherwise
// the core query with both dedicated flags reported as false.
14699 void VmaAllocator_T::GetImageMemoryRequirements(
14701 VkMemoryRequirements& memReq,
14702 bool& requiresDedicatedAllocation,
14703 bool& prefersDedicatedAllocation)
const 14705 #if VMA_DEDICATED_ALLOCATION 14706 if(m_UseKhrDedicatedAllocation)
14708 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14709 memReqInfo.image = hImage;
14711 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14713 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14714 memReq2.pNext = &memDedicatedReq;
14716 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14718 memReq = memReq2.memoryRequirements;
14719 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14720 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
14723 #endif // #if VMA_DEDICATED_ALLOCATION 14725 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14726 requiresDedicatedAllocation =
false;
14727 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates the request, dispatches to a custom
// pool if one is given, otherwise tries memory types from vkMemReq's
// memoryTypeBits in order of increasing "cost" until one succeeds.
// NOTE(review): many lines are elided in this extraction (the createInfo
// parameter, several if-conditions, AllocateMemoryOfType argument lists,
// the FindMemoryTypeIndex calls); verify against upstream before editing.
14731 VkResult VmaAllocator_T::AllocateMemory(
14732 const VkMemoryRequirements& vkMemReq,
14733 bool requiresDedicatedAllocation,
14734 bool prefersDedicatedAllocation,
14735 VkBuffer dedicatedBuffer,
14736 VkImage dedicatedImage,
14738 VmaSuballocationType suballocType,
14739 size_t allocationCount,
// Pre-zero the output array so callers see null handles on any failure path.
14742 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Vulkan guarantees alignment is a power of 2; assert the invariant here.
14744 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
14746 if(vkMemReq.size == 0)
14748 return VK_ERROR_VALIDATION_FAILED_EXT;
// Reject mutually-exclusive flag combinations up front.
14753 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14754 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14759 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14760 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A driver-required dedicated allocation is incompatible with
// NEVER_ALLOCATE and with custom pools.
14762 if(requiresDedicatedAllocation)
14766 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14767 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14769 if(createInfo.
pool != VK_NULL_HANDLE)
14771 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14772 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14775 if((createInfo.
pool != VK_NULL_HANDLE) &&
14778 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14779 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: delegate entirely to the pool's block vector, honoring
// the per-memory-type minimum alignment.
14782 if(createInfo.
pool != VK_NULL_HANDLE)
14784 const VkDeviceSize alignmentForPool = VMA_MAX(
14785 vkMemReq.alignment,
14786 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14787 return createInfo.
pool->m_BlockVector.Allocate(
14788 m_CurrentFrameIndex.load(),
// Default-pool path: pick the best memory type, try it, then mask it out of
// memoryTypeBits and retry with the next-best type until the mask is empty.
14799 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14800 uint32_t memTypeIndex = UINT32_MAX;
14802 if(res == VK_SUCCESS)
14804 VkDeviceSize alignmentForMemType = VMA_MAX(
14805 vkMemReq.alignment,
14806 GetMemoryTypeMinAlignment(memTypeIndex));
14808 res = AllocateMemoryOfType(
14810 alignmentForMemType,
14811 requiresDedicatedAllocation || prefersDedicatedAllocation,
14820 if(res == VK_SUCCESS)
// Remove the failed memory type from the candidate set before retrying.
14830 memoryTypeBits &= ~(1u << memTypeIndex);
14833 if(res == VK_SUCCESS)
14835 alignmentForMemType = VMA_MAX(
14836 vkMemReq.alignment,
14837 GetMemoryTypeMinAlignment(memTypeIndex));
14839 res = AllocateMemoryOfType(
14841 alignmentForMemType,
14842 requiresDedicatedAllocation || prefersDedicatedAllocation,
14851 if(res == VK_SUCCESS)
// All candidate memory types exhausted.
14861 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations (in reverse order), routing each to its
// owning block vector or to the dedicated-memory path, then destroys the
// VmaAllocation_T object itself.
14872 void VmaAllocator_T::FreeMemory(
14873 size_t allocationCount,
14876 VMA_ASSERT(pAllocations);
// Iterate backwards: allocIndex goes allocationCount-1 .. 0.
14878 for(
size_t allocIndex = allocationCount; allocIndex--; )
14882 if(allocation != VK_NULL_HANDLE)
// TouchAllocation marks it used this frame; presumably a lost allocation
// (returns false) needs no memory release — TODO confirm elided else-path.
14884 if(TouchAllocation(allocation))
// Optionally overwrite freed memory with a debug pattern.
14886 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14888 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14891 switch(allocation->GetType())
14893 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block suballocation: free from the custom pool's block vector if it has
// a parent pool, otherwise from the default per-memory-type vector.
14895 VmaBlockVector* pBlockVector = VMA_NULL;
14896 VmaPool hPool = allocation->GetBlock()->GetParentPool();
14897 if(hPool != VK_NULL_HANDLE)
14899 pBlockVector = &hPool->m_BlockVector;
14903 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14904 pBlockVector = m_pBlockVectors[memTypeIndex];
14906 pBlockVector->Free(allocation);
14909 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14910 FreeDedicatedMemory(allocation);
// Release user data and return the allocation object to its pool allocator.
14917 allocation->SetUserData(
this, VMA_NULL);
14918 allocation->Dtor();
14919 m_AllocationObjectAllocator.Free(allocation);
// Attempts to grow/shrink an existing allocation in place. Only block
// suballocations can be resized, and only if the block's metadata accepts
// the new size; dedicated allocations always fail with FEATURE_NOT_PRESENT.
14924 VkResult VmaAllocator_T::ResizeAllocation(
14926 VkDeviceSize newSize)
// Size 0 and lost allocations are invalid requests.
14928 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14930 return VK_ERROR_VALIDATION_FAILED_EXT;
// No-op if the size is unchanged (success return elided by extraction).
14932 if(newSize == alloc->GetSize())
14937 switch(alloc->GetType())
14939 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14940 return VK_ERROR_FEATURE_NOT_PRESENT;
14941 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14942 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
14944 alloc->ChangeSize(newSize);
14945 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
// Metadata refused the resize: no room in the block.
14950 return VK_ERROR_OUT_OF_POOL_MEMORY;
14954 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates statistics from all default block vectors, all custom pools and
// all dedicated allocations into pStats (total / per-memory-type / per-heap),
// then post-processes each StatInfo (averages etc.).
14958 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Zero-initialize every accumulator first.
14961 InitStatInfo(pStats->
total);
14962 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14964 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default pools: one block vector per memory type.
14968 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14970 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14971 VMA_ASSERT(pBlockVector);
14972 pBlockVector->AddStats(pStats);
// Custom pools, under a shared (read) lock on the pool list.
14977 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14978 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14980 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type, each under its own read lock.
14985 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14987 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14988 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14989 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
14990 VMA_ASSERT(pDedicatedAllocVector);
14991 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
14994 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
14995 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14996 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14997 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/min/max after all sums are accumulated.
15002 VmaPostprocessCalcStatInfo(pStats->
total);
15003 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
15004 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
15005 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
15006 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, the PCI vendor ID assigned to AMD.
15009 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Begins a defragmentation pass: creates a context object, registers the
// allocations/pools to move, and runs the first Defragment step. The context
// is destroyed immediately unless Defragment reports VK_NOT_READY (i.e. more
// work remains for the caller to drive).
// NOTE(review): parameter list and AddAllocations/Defragment arguments are
// elided in this extraction; verify against upstream.
15011 VkResult VmaAllocator_T::DefragmentationBegin(
15021 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
15022 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
15025 (*pContext)->AddAllocations(
15028 VkResult res = (*pContext)->Defragment(
// Anything other than VK_NOT_READY means the pass is complete (or failed):
// free the context and null the out-pointer.
15033 if(res != VK_NOT_READY)
15035 vma_delete(
this, *pContext);
15036 *pContext = VMA_NULL;
// Ends a defragmentation pass by destroying its context object.
15042 VkResult VmaAllocator_T::DefragmentationEnd(
15045 vma_delete(
this, context);
// NOTE(review): the enclosing function signature is elided by this
// extraction; from the field writes this is presumably the body of
// VmaAllocator_T::GetAllocationInfo(hAllocation, pAllocationInfo) — confirm.
// For lost-capable allocations it atomically bumps the last-use frame index
// (making the allocation "touched" this frame) before reporting info; a lost
// allocation reports null memory with only size/userData preserved.
15051 if(hAllocation->CanBecomeLost())
15057 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15058 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: report placeholder info (memory/mappedData fields elided).
15061 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15065 pAllocationInfo->
offset = 0;
15066 pAllocationInfo->
size = hAllocation->GetSize();
15068 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report real info without another CAS.
15071 else if(localLastUseFrameIndex == localCurrFrameIndex)
15073 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15074 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15075 pAllocationInfo->
offset = hAllocation->GetOffset();
15076 pAllocationInfo->
size = hAllocation->GetSize();
15078 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise CAS-loop until the frame index is advanced to the current frame.
15083 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15085 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable path. In stats-enabled builds, still advance the
// last-use frame index (for usage statistics) via the same CAS loop.
15092 #if VMA_STATS_STRING_ENABLED 15093 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15094 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15097 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15098 if(localLastUseFrameIndex == localCurrFrameIndex)
15104 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15106 localLastUseFrameIndex = localCurrFrameIndex;
// Report full, current allocation parameters.
15112 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15113 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15114 pAllocationInfo->
offset = hAllocation->GetOffset();
15115 pAllocationInfo->
size = hAllocation->GetSize();
15116 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15117 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks hAllocation as used in the current frame. Returns whether the
// allocation is still valid (false once it has become lost). Mirrors the
// frame-index CAS logic of GetAllocationInfo without filling any info struct.
15121 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
15124 if(hAllocation->CanBecomeLost())
15126 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15127 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: returns (return statements elided by extraction).
15130 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15134 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS loop: advance last-use frame index to the current frame.
15140 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15142 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocations: only track usage in stats-enabled builds.
15149 #if VMA_STATS_STRING_ENABLED 15150 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15151 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15154 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15155 if(localLastUseFrameIndex == localCurrFrameIndex)
15161 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15163 localLastUseFrameIndex = localCurrFrameIndex;
// NOTE(review): the CreatePool signature and createInfo validation above this
// point are elided by the extraction — this is presumably the body of
// VmaAllocator_T::CreatePool(pCreateInfo, pPool); confirm against upstream.
// Creates a custom pool, pre-creates its minimum block count, assigns it a
// unique id and inserts it (sorted) into the allocator's pool list.
15175 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
15185 return VK_ERROR_INITIALIZATION_FAILED;
15188 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15190 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Eagerly allocate minBlockCount blocks; on failure destroy the half-built
// pool (error return elided by extraction).
15192 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15193 if(res != VK_SUCCESS)
15195 vma_delete(
this, *pPool);
// Registration is guarded by a write lock on the pool list.
15202 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15203 (*pPool)->SetId(m_NextPoolId++);
15204 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Removes a custom pool from the allocator's sorted pool list (under a write
// lock) and destroys it.
15210 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15214 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15215 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15216 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15219 vma_delete(
this, pool);
// NOTE(review): line below is the body of GetPoolStats (its signature is
// elided by the extraction): delegates to the pool's block vector.
15224 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Publishes the application's current frame index (atomic store) — used by
// the lost-allocation bookkeeping throughout this file.
15227 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15229 m_CurrentFrameIndex.store(frameIndex);
// Forces allocations in hPool that are too old to become lost; reports how
// many were lost via pLostAllocationCount.
15232 void VmaAllocator_T::MakePoolAllocationsLost(
15234 size_t* pLostAllocationCount)
15236 hPool->m_BlockVector.MakePoolAllocationsLost(
15237 m_CurrentFrameIndex.load(),
15238 pLostAllocationCount);
// Validates corruption-detection margins for a single custom pool.
15241 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15243 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption checks over every default block vector and custom pool
// whose memory type is selected by memoryTypeBits. Starts from
// FEATURE_NOT_PRESENT (nothing checkable) and upgrades to VK_SUCCESS once at
// least one vector was actually checked; other error codes are elided here.
15246 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15248 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default pools, one per selected memory type.
15251 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15253 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15255 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15256 VMA_ASSERT(pBlockVector);
15257 VkResult localRes = pBlockVector->CheckCorruption();
15260 case VK_ERROR_FEATURE_NOT_PRESENT:
15263 finalRes = VK_SUCCESS;
// Custom pools, under a read lock on the pool list.
15273 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15274 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15276 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15278 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15281 case VK_ERROR_FEATURE_NOT_PRESENT:
15284 finalRes = VK_SUCCESS;
// Creates a placeholder allocation that is permanently in the "lost" state
// (frame index VMA_FRAME_INDEX_LOST, no user-data string).
15296 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15298 *pAllocation = m_AllocationObjectAllocator.Allocate();
15299 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15300 (*pAllocation)->InitLost();
// Calls vkAllocateMemory, enforcing the optional per-heap size limit
// (m_HeapSizeLimit) and invoking the user's pfnAllocate device-memory
// callback on success.
15303 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15305 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Limited heap: check and decrement the remaining budget under a mutex so
// concurrent allocations cannot overshoot the limit.
15308 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15310 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15311 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15313 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15314 if(res == VK_SUCCESS)
15316 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exhausted: fail without calling the driver.
15321 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited heap: allocate directly.
15326 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user's allocation callback, if installed.
15329 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15331 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Frees a VkDeviceMemory block: fires the user's pfnFree callback first
// (while the memory is still valid), calls vkFreeMemory, then returns the
// freed size to the heap's budget if a size limit is active.
15337 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15339 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15341 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15344 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Restore budget under the same mutex AllocateVulkanMemory decrements it.
15346 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15347 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15349 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15350 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation into host memory. Lost-capable allocations cannot be
// mapped. Block suballocations map the whole owning block (ref-counted) and
// offset the returned pointer; dedicated allocations map directly.
15354 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15356 if(hAllocation->CanBecomeLost())
15358 return VK_ERROR_MEMORY_MAP_FAILED;
15361 switch(hAllocation->GetType())
15363 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15365 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15366 char *pBytes = VMA_NULL;
// Map the block once (count = 1); block-level mapping is reference counted.
15367 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15368 if(res == VK_SUCCESS)
// Adjust to this suballocation's offset within the block and record the
// per-allocation map count.
15370 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15371 hAllocation->BlockAllocMap();
15375 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15376 return hAllocation->DedicatedAllocMap(
this, ppData);
15379 return VK_ERROR_MEMORY_MAP_FAILED;
// NOTE(review): the Unmap(hAllocation) signature is elided by the extraction;
// this body undoes Map — decrements the per-allocation map count and the
// owning block's (or dedicated memory's) mapping reference.
15385 switch(hAllocation->GetType())
15387 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15389 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
// Order matters: drop the allocation's map count before unmapping the block.
15390 hAllocation->BlockAllocUnmap();
15391 pBlock->Unmap(
this, 1);
15394 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15395 hAllocation->DedicatedAllocUnmap(
this);
// Binds hBuffer to the allocation's memory. Dedicated allocations bind at
// offset (elided here — presumably 0); block suballocations go through the
// block, which serializes vkBindBufferMemory calls on the shared memory.
15402 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
15404 VkResult res = VK_SUCCESS;
15405 switch(hAllocation->GetType())
15407 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15408 res = GetVulkanFunctions().vkBindBufferMemory(
15411 hAllocation->GetMemory(),
15414 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15416 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15417 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15418 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: binds hImage to the allocation's
// memory, via the owning block for suballocations.
15427 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
15429 VkResult res = VK_SUCCESS;
15430 switch(hAllocation->GetType())
15432 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15433 res = GetVulkanFunctions().vkBindImageMemory(
15436 hAllocation->GetMemory(),
15439 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15441 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15442 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15443 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a sub-range of an allocation on non-coherent memory
// types. Builds a VkMappedMemoryRange whose offset/size are expanded to
// nonCoherentAtomSize boundaries as required by the Vulkan spec, then clamps
// so the range never exceeds the allocation (dedicated) or block (sub-alloc).
15452 void VmaAllocator_T::FlushOrInvalidateAllocation(
15454 VkDeviceSize offset, VkDeviceSize size,
15455 VMA_CACHE_OPERATION op)
15457 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
// Coherent memory needs no explicit flush/invalidate; size 0 is a no-op.
15458 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15460 const VkDeviceSize allocationSize = hAllocation->GetSize();
15461 VMA_ASSERT(offset <= allocationSize);
15463 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15465 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15466 memRange.memory = hAllocation->GetMemory();
15468 switch(hAllocation->GetType())
15470 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Align offset down; size either covers the rest (VK_WHOLE_SIZE) or is
// aligned up and clamped to the allocation end.
15471 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15472 if(size == VK_WHOLE_SIZE)
15474 memRange.size = allocationSize - memRange.offset;
15478 VMA_ASSERT(offset + size <= allocationSize);
15479 memRange.size = VMA_MIN(
15480 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15481 allocationSize - memRange.offset);
15485 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// First compute the aligned range relative to the suballocation...
15488 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15489 if(size == VK_WHOLE_SIZE)
15491 size = allocationSize - offset;
15495 VMA_ASSERT(offset + size <= allocationSize);
15497 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// ...then translate by the suballocation's offset inside the block (which
// is itself atom-aligned) and clamp to the block size.
15500 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15501 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15502 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15503 memRange.offset += allocationOffset;
15504 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch the actual cache operation.
15515 case VMA_CACHE_FLUSH:
15516 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15518 case VMA_CACHE_INVALIDATE:
15519 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated allocation: unregisters it from the per-memory-type
// dedicated-allocation list (write lock), then releases the VkDeviceMemory.
// (Unmap-before-free handling at lines 15542-15552 is elided by extraction.)
15528 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15530 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15532 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15534 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15535 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15536 VMA_ASSERT(pDedicatedAllocations);
15537 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15538 VMA_ASSERT(success);
15541 VkDeviceMemory hMemory = allocation->GetMemory();
15553 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15555 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills an allocation's memory with a byte pattern (created /
// destroyed markers). Only applies to host-visible, non-lost-capable
// allocations, and only when VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled.
15558 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15560 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15561 !hAllocation->CanBecomeLost() &&
15562 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15564 void* pData = VMA_NULL;
15565 VkResult res = Map(hAllocation, &pData);
15566 if(res == VK_SUCCESS)
// Flush after writing so the pattern is visible on non-coherent memory.
15568 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
15569 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15570 Unmap(hAllocation);
// Mapping must succeed here; failing to fill defeats the debug feature.
15574 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Writes the detailed JSON memory map: three top-level sections —
// "DedicatedAllocations" (per memory type), "DefaultPools" (per memory
// type), and "Pools" (custom pools, keyed by pool id). Sections are opened
// lazily only if they have content.
15579 #if VMA_STATS_STRING_ENABLED 15581 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15583 bool dedicatedAllocationsStarted =
false;
15584 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15586 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15587 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15588 VMA_ASSERT(pDedicatedAllocVector);
15589 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object on first non-empty type.
15591 if(dedicatedAllocationsStarted ==
false)
15593 dedicatedAllocationsStarted =
true;
15594 json.WriteString(
"DedicatedAllocations");
15595 json.BeginObject();
// Key is "Type <memTypeIndex>".
15598 json.BeginString(
"Type ");
15599 json.ContinueString(memTypeIndex);
15604 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15606 json.BeginObject(
true);
15608 hAlloc->PrintParameters(json);
15615 if(dedicatedAllocationsStarted)
// Default per-memory-type block vectors, same lazy-section pattern.
15621 bool allocationsStarted =
false;
15622 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15624 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15626 if(allocationsStarted ==
false)
15628 allocationsStarted =
true;
15629 json.WriteString(
"DefaultPools");
15630 json.BeginObject();
15633 json.BeginString(
"Type ");
15634 json.ContinueString(memTypeIndex);
15637 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15640 if(allocationsStarted)
// Custom pools, keyed by pool id, under a read lock.
15648 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15649 const size_t poolCount = m_Pools.size();
15652 json.WriteString(
"Pools");
15653 json.BeginObject();
15654 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15656 json.BeginString();
15657 json.ContinueString(m_Pools[poolIndex]->GetId());
15660 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Public C API: creates the allocator object (construction elided by the
// extraction) and runs its Init.
15667 #endif // #if VMA_STATS_STRING_ENABLED 15676 VMA_ASSERT(pCreateInfo && pAllocator);
15677 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15679 return (*pAllocator)->Init(pCreateInfo);
// Public C API: destroys the allocator. The allocation callbacks are copied
// to a local first because vma_delete destroys the object that owns them.
15685 if(allocator != VK_NULL_HANDLE)
15687 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15688 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15689 vma_delete(&allocationCallbacks, allocator);
// Public C API getters: expose pointers into the allocator's cached physical
// device / memory properties (no copies are made).
15695 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15697 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15698 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
15703 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15705 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15706 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// Returns the property flags of one memory type, after bounds-checking the
// index against the device's memory type count.
15711 uint32_t memoryTypeIndex,
15712 VkMemoryPropertyFlags* pFlags)
15714 VMA_ASSERT(allocator && pFlags);
15715 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15716 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// Public C API: sets the current frame index. VMA_FRAME_INDEX_LOST is a
// reserved sentinel and must never be passed by the application.
15721 uint32_t frameIndex)
15723 VMA_ASSERT(allocator);
15724 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15726 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15728 allocator->SetCurrentFrameIndex(frameIndex);
// Public C API: fills pStats via VmaAllocator_T::CalculateStats.
15735 VMA_ASSERT(allocator && pStats);
15736 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15737 allocator->CalculateStats(pStats);
// Public C API: builds a JSON statistics string. Emits "Total" stats, then
// one object per memory heap ("Heap N": size, flags, stats, and nested
// "Type N" objects for the heap's memory types with their property flags),
// and — when detailedMap is VK_TRUE — the full detailed memory map.
// The returned string is heap-allocated; pair with vmaFreeStatsString.
15740 #if VMA_STATS_STRING_ENABLED 15744 char** ppStatsString,
15745 VkBool32 detailedMap)
15747 VMA_ASSERT(allocator && ppStatsString);
15748 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15750 VmaStringBuilder sb(allocator);
15752 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15753 json.BeginObject();
15756 allocator->CalculateStats(&stats);
15758 json.WriteString(
"Total");
15759 VmaPrintStatInfo(json, stats.
total);
// Per-heap section.
15761 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15763 json.BeginString(
"Heap ");
15764 json.ContinueString(heapIndex);
15766 json.BeginObject();
15768 json.WriteString(
"Size");
15769 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15771 json.WriteString(
"Flags");
15772 json.BeginArray(
true);
15773 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15775 json.WriteString(
"DEVICE_LOCAL");
15781 json.WriteString(
"Stats");
15782 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Memory types belonging to this heap.
15785 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15787 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15789 json.BeginString(
"Type ");
15790 json.ContinueString(typeIndex);
15793 json.BeginObject();
15795 json.WriteString(
"Flags");
15796 json.BeginArray(
true);
// Spell out each Vulkan memory property flag as a JSON string.
15797 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15798 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15800 json.WriteString(
"DEVICE_LOCAL");
15802 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15804 json.WriteString(
"HOST_VISIBLE");
15806 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15808 json.WriteString(
"HOST_COHERENT");
15810 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15812 json.WriteString(
"HOST_CACHED");
15814 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15816 json.WriteString(
"LAZILY_ALLOCATED");
15822 json.WriteString(
"Stats");
15823 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional full detailed map.
15832 if(detailedMap == VK_TRUE)
15834 allocator->PrintDetailedMap(json);
// Copy the builder's buffer into a freshly-allocated, NUL-terminated string
// owned by the caller.
15840 const size_t len = sb.GetLength();
15841 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15844 memcpy(pChars, sb.GetData(), len);
15846 pChars[len] =
'\0';
15847 *ppStatsString = pChars;
15852 char* pStatsString)
15854 if(pStatsString != VMA_NULL)
15856 VMA_ASSERT(allocator);
15857 size_t len = strlen(pStatsString);
15858 vma_delete_array(allocator, pStatsString, len + 1);
15862 #endif // #if VMA_STATS_STRING_ENABLED 15869 uint32_t memoryTypeBits,
15871 uint32_t* pMemoryTypeIndex)
15873 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15874 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15875 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15882 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15883 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15886 switch(pAllocationCreateInfo->
usage)
15891 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15893 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15897 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15900 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15901 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15903 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15907 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15908 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
15914 *pMemoryTypeIndex = UINT32_MAX;
15915 uint32_t minCost = UINT32_MAX;
15916 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15917 memTypeIndex < allocator->GetMemoryTypeCount();
15918 ++memTypeIndex, memTypeBit <<= 1)
15921 if((memTypeBit & memoryTypeBits) != 0)
15923 const VkMemoryPropertyFlags currFlags =
15924 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
15926 if((requiredFlags & ~currFlags) == 0)
15929 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
15931 if(currCost < minCost)
15933 *pMemoryTypeIndex = memTypeIndex;
15938 minCost = currCost;
15943 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
15948 const VkBufferCreateInfo* pBufferCreateInfo,
15950 uint32_t* pMemoryTypeIndex)
15952 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15953 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
15954 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15955 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15957 const VkDevice hDev = allocator->m_hDevice;
15958 VkBuffer hBuffer = VK_NULL_HANDLE;
15959 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
15960 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
15961 if(res == VK_SUCCESS)
15963 VkMemoryRequirements memReq = {};
15964 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
15965 hDev, hBuffer, &memReq);
15969 memReq.memoryTypeBits,
15970 pAllocationCreateInfo,
15973 allocator->GetVulkanFunctions().vkDestroyBuffer(
15974 hDev, hBuffer, allocator->GetAllocationCallbacks());
15981 const VkImageCreateInfo* pImageCreateInfo,
15983 uint32_t* pMemoryTypeIndex)
15985 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15986 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
15987 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15988 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15990 const VkDevice hDev = allocator->m_hDevice;
15991 VkImage hImage = VK_NULL_HANDLE;
15992 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
15993 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
15994 if(res == VK_SUCCESS)
15996 VkMemoryRequirements memReq = {};
15997 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
15998 hDev, hImage, &memReq);
16002 memReq.memoryTypeBits,
16003 pAllocationCreateInfo,
16006 allocator->GetVulkanFunctions().vkDestroyImage(
16007 hDev, hImage, allocator->GetAllocationCallbacks());
16017 VMA_ASSERT(allocator && pCreateInfo && pPool);
16019 VMA_DEBUG_LOG(
"vmaCreatePool");
16021 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16023 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
16025 #if VMA_RECORDING_ENABLED 16026 if(allocator->GetRecorder() != VMA_NULL)
16028 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
16039 VMA_ASSERT(allocator);
16041 if(pool == VK_NULL_HANDLE)
16046 VMA_DEBUG_LOG(
"vmaDestroyPool");
16048 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16050 #if VMA_RECORDING_ENABLED 16051 if(allocator->GetRecorder() != VMA_NULL)
16053 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
16057 allocator->DestroyPool(pool);
16065 VMA_ASSERT(allocator && pool && pPoolStats);
16067 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16069 allocator->GetPoolStats(pool, pPoolStats);
16075 size_t* pLostAllocationCount)
16077 VMA_ASSERT(allocator && pool);
16079 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16081 #if VMA_RECORDING_ENABLED 16082 if(allocator->GetRecorder() != VMA_NULL)
16084 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
16088 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
16093 VMA_ASSERT(allocator && pool);
16095 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16097 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
16099 return allocator->CheckPoolCorruption(pool);
16104 const VkMemoryRequirements* pVkMemoryRequirements,
16109 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16111 VMA_DEBUG_LOG(
"vmaAllocateMemory");
16113 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16115 VkResult result = allocator->AllocateMemory(
16116 *pVkMemoryRequirements,
16122 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16126 #if VMA_RECORDING_ENABLED 16127 if(allocator->GetRecorder() != VMA_NULL)
16129 allocator->GetRecorder()->RecordAllocateMemory(
16130 allocator->GetCurrentFrameIndex(),
16131 *pVkMemoryRequirements,
16137 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16139 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16147 const VkMemoryRequirements* pVkMemoryRequirements,
16149 size_t allocationCount,
16153 if(allocationCount == 0)
16158 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16160 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
16162 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16164 VkResult result = allocator->AllocateMemory(
16165 *pVkMemoryRequirements,
16171 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16175 #if VMA_RECORDING_ENABLED 16176 if(allocator->GetRecorder() != VMA_NULL)
16178 allocator->GetRecorder()->RecordAllocateMemoryPages(
16179 allocator->GetCurrentFrameIndex(),
16180 *pVkMemoryRequirements,
16182 (uint64_t)allocationCount,
16187 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16189 for(
size_t i = 0; i < allocationCount; ++i)
16191 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16205 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16207 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16209 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16211 VkMemoryRequirements vkMemReq = {};
16212 bool requiresDedicatedAllocation =
false;
16213 bool prefersDedicatedAllocation =
false;
16214 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16215 requiresDedicatedAllocation,
16216 prefersDedicatedAllocation);
16218 VkResult result = allocator->AllocateMemory(
16220 requiresDedicatedAllocation,
16221 prefersDedicatedAllocation,
16225 VMA_SUBALLOCATION_TYPE_BUFFER,
16229 #if VMA_RECORDING_ENABLED 16230 if(allocator->GetRecorder() != VMA_NULL)
16232 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16233 allocator->GetCurrentFrameIndex(),
16235 requiresDedicatedAllocation,
16236 prefersDedicatedAllocation,
16242 if(pAllocationInfo && result == VK_SUCCESS)
16244 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16257 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16259 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16261 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16263 VkMemoryRequirements vkMemReq = {};
16264 bool requiresDedicatedAllocation =
false;
16265 bool prefersDedicatedAllocation =
false;
16266 allocator->GetImageMemoryRequirements(image, vkMemReq,
16267 requiresDedicatedAllocation, prefersDedicatedAllocation);
16269 VkResult result = allocator->AllocateMemory(
16271 requiresDedicatedAllocation,
16272 prefersDedicatedAllocation,
16276 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16280 #if VMA_RECORDING_ENABLED 16281 if(allocator->GetRecorder() != VMA_NULL)
16283 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16284 allocator->GetCurrentFrameIndex(),
16286 requiresDedicatedAllocation,
16287 prefersDedicatedAllocation,
16293 if(pAllocationInfo && result == VK_SUCCESS)
16295 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16305 VMA_ASSERT(allocator);
16307 if(allocation == VK_NULL_HANDLE)
16312 VMA_DEBUG_LOG(
"vmaFreeMemory");
16314 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16316 #if VMA_RECORDING_ENABLED 16317 if(allocator->GetRecorder() != VMA_NULL)
16319 allocator->GetRecorder()->RecordFreeMemory(
16320 allocator->GetCurrentFrameIndex(),
16325 allocator->FreeMemory(
16332 size_t allocationCount,
16335 if(allocationCount == 0)
16340 VMA_ASSERT(allocator);
16342 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16344 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16346 #if VMA_RECORDING_ENABLED 16347 if(allocator->GetRecorder() != VMA_NULL)
16349 allocator->GetRecorder()->RecordFreeMemoryPages(
16350 allocator->GetCurrentFrameIndex(),
16351 (uint64_t)allocationCount,
16356 allocator->FreeMemory(allocationCount, pAllocations);
16362 VkDeviceSize newSize)
16364 VMA_ASSERT(allocator && allocation);
16366 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16368 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16370 #if VMA_RECORDING_ENABLED 16371 if(allocator->GetRecorder() != VMA_NULL)
16373 allocator->GetRecorder()->RecordResizeAllocation(
16374 allocator->GetCurrentFrameIndex(),
16380 return allocator->ResizeAllocation(allocation, newSize);
16388 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16390 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16392 #if VMA_RECORDING_ENABLED 16393 if(allocator->GetRecorder() != VMA_NULL)
16395 allocator->GetRecorder()->RecordGetAllocationInfo(
16396 allocator->GetCurrentFrameIndex(),
16401 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16408 VMA_ASSERT(allocator && allocation);
16410 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16412 #if VMA_RECORDING_ENABLED 16413 if(allocator->GetRecorder() != VMA_NULL)
16415 allocator->GetRecorder()->RecordTouchAllocation(
16416 allocator->GetCurrentFrameIndex(),
16421 return allocator->TouchAllocation(allocation);
16429 VMA_ASSERT(allocator && allocation);
16431 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16433 allocation->SetUserData(allocator, pUserData);
16435 #if VMA_RECORDING_ENABLED 16436 if(allocator->GetRecorder() != VMA_NULL)
16438 allocator->GetRecorder()->RecordSetAllocationUserData(
16439 allocator->GetCurrentFrameIndex(),
16450 VMA_ASSERT(allocator && pAllocation);
16452 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16454 allocator->CreateLostAllocation(pAllocation);
16456 #if VMA_RECORDING_ENABLED 16457 if(allocator->GetRecorder() != VMA_NULL)
16459 allocator->GetRecorder()->RecordCreateLostAllocation(
16460 allocator->GetCurrentFrameIndex(),
16471 VMA_ASSERT(allocator && allocation && ppData);
16473 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16475 VkResult res = allocator->Map(allocation, ppData);
16477 #if VMA_RECORDING_ENABLED 16478 if(allocator->GetRecorder() != VMA_NULL)
16480 allocator->GetRecorder()->RecordMapMemory(
16481 allocator->GetCurrentFrameIndex(),
16493 VMA_ASSERT(allocator && allocation);
16495 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16497 #if VMA_RECORDING_ENABLED 16498 if(allocator->GetRecorder() != VMA_NULL)
16500 allocator->GetRecorder()->RecordUnmapMemory(
16501 allocator->GetCurrentFrameIndex(),
16506 allocator->Unmap(allocation);
16511 VMA_ASSERT(allocator && allocation);
16513 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16515 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16517 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16519 #if VMA_RECORDING_ENABLED 16520 if(allocator->GetRecorder() != VMA_NULL)
16522 allocator->GetRecorder()->RecordFlushAllocation(
16523 allocator->GetCurrentFrameIndex(),
16524 allocation, offset, size);
16531 VMA_ASSERT(allocator && allocation);
16533 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16535 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16537 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16539 #if VMA_RECORDING_ENABLED 16540 if(allocator->GetRecorder() != VMA_NULL)
16542 allocator->GetRecorder()->RecordInvalidateAllocation(
16543 allocator->GetCurrentFrameIndex(),
16544 allocation, offset, size);
16551 VMA_ASSERT(allocator);
16553 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16555 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16557 return allocator->CheckCorruption(memoryTypeBits);
16563 size_t allocationCount,
16564 VkBool32* pAllocationsChanged,
16574 if(pDefragmentationInfo != VMA_NULL)
16588 if(res == VK_NOT_READY)
16601 VMA_ASSERT(allocator && pInfo && pContext);
16612 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16614 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16616 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16618 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16620 #if VMA_RECORDING_ENABLED 16621 if(allocator->GetRecorder() != VMA_NULL)
16623 allocator->GetRecorder()->RecordDefragmentationBegin(
16624 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16635 VMA_ASSERT(allocator);
16637 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16639 if(context != VK_NULL_HANDLE)
16641 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16643 #if VMA_RECORDING_ENABLED 16644 if(allocator->GetRecorder() != VMA_NULL)
16646 allocator->GetRecorder()->RecordDefragmentationEnd(
16647 allocator->GetCurrentFrameIndex(), context);
16651 return allocator->DefragmentationEnd(context);
16664 VMA_ASSERT(allocator && allocation && buffer);
16666 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16668 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16670 return allocator->BindBufferMemory(allocation, buffer);
16678 VMA_ASSERT(allocator && allocation && image);
16680 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16682 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16684 return allocator->BindImageMemory(allocation, image);
16689 const VkBufferCreateInfo* pBufferCreateInfo,
16695 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16697 if(pBufferCreateInfo->size == 0)
16699 return VK_ERROR_VALIDATION_FAILED_EXT;
16702 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16704 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16706 *pBuffer = VK_NULL_HANDLE;
16707 *pAllocation = VK_NULL_HANDLE;
16710 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16711 allocator->m_hDevice,
16713 allocator->GetAllocationCallbacks(),
16718 VkMemoryRequirements vkMemReq = {};
16719 bool requiresDedicatedAllocation =
false;
16720 bool prefersDedicatedAllocation =
false;
16721 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16722 requiresDedicatedAllocation, prefersDedicatedAllocation);
16726 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16728 VMA_ASSERT(vkMemReq.alignment %
16729 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16731 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16733 VMA_ASSERT(vkMemReq.alignment %
16734 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16736 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16738 VMA_ASSERT(vkMemReq.alignment %
16739 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16743 res = allocator->AllocateMemory(
16745 requiresDedicatedAllocation,
16746 prefersDedicatedAllocation,
16749 *pAllocationCreateInfo,
16750 VMA_SUBALLOCATION_TYPE_BUFFER,
16754 #if VMA_RECORDING_ENABLED 16755 if(allocator->GetRecorder() != VMA_NULL)
16757 allocator->GetRecorder()->RecordCreateBuffer(
16758 allocator->GetCurrentFrameIndex(),
16759 *pBufferCreateInfo,
16760 *pAllocationCreateInfo,
16770 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16775 #if VMA_STATS_STRING_ENABLED 16776 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16778 if(pAllocationInfo != VMA_NULL)
16780 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16785 allocator->FreeMemory(
16788 *pAllocation = VK_NULL_HANDLE;
16789 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16790 *pBuffer = VK_NULL_HANDLE;
16793 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16794 *pBuffer = VK_NULL_HANDLE;
16805 VMA_ASSERT(allocator);
16807 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16812 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16814 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16816 #if VMA_RECORDING_ENABLED 16817 if(allocator->GetRecorder() != VMA_NULL)
16819 allocator->GetRecorder()->RecordDestroyBuffer(
16820 allocator->GetCurrentFrameIndex(),
16825 if(buffer != VK_NULL_HANDLE)
16827 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16830 if(allocation != VK_NULL_HANDLE)
16832 allocator->FreeMemory(
16840 const VkImageCreateInfo* pImageCreateInfo,
16846 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16848 if(pImageCreateInfo->extent.width == 0 ||
16849 pImageCreateInfo->extent.height == 0 ||
16850 pImageCreateInfo->extent.depth == 0 ||
16851 pImageCreateInfo->mipLevels == 0 ||
16852 pImageCreateInfo->arrayLayers == 0)
16854 return VK_ERROR_VALIDATION_FAILED_EXT;
16857 VMA_DEBUG_LOG(
"vmaCreateImage");
16859 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16861 *pImage = VK_NULL_HANDLE;
16862 *pAllocation = VK_NULL_HANDLE;
16865 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16866 allocator->m_hDevice,
16868 allocator->GetAllocationCallbacks(),
16872 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16873 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16874 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16877 VkMemoryRequirements vkMemReq = {};
16878 bool requiresDedicatedAllocation =
false;
16879 bool prefersDedicatedAllocation =
false;
16880 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16881 requiresDedicatedAllocation, prefersDedicatedAllocation);
16883 res = allocator->AllocateMemory(
16885 requiresDedicatedAllocation,
16886 prefersDedicatedAllocation,
16889 *pAllocationCreateInfo,
16894 #if VMA_RECORDING_ENABLED 16895 if(allocator->GetRecorder() != VMA_NULL)
16897 allocator->GetRecorder()->RecordCreateImage(
16898 allocator->GetCurrentFrameIndex(),
16900 *pAllocationCreateInfo,
16910 res = allocator->BindImageMemory(*pAllocation, *pImage);
16915 #if VMA_STATS_STRING_ENABLED 16916 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
16918 if(pAllocationInfo != VMA_NULL)
16920 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16925 allocator->FreeMemory(
16928 *pAllocation = VK_NULL_HANDLE;
16929 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16930 *pImage = VK_NULL_HANDLE;
16933 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16934 *pImage = VK_NULL_HANDLE;
16945 VMA_ASSERT(allocator);
16947 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16952 VMA_DEBUG_LOG(
"vmaDestroyImage");
16954 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16956 #if VMA_RECORDING_ENABLED 16957 if(allocator->GetRecorder() != VMA_NULL)
16959 allocator->GetRecorder()->RecordDestroyImage(
16960 allocator->GetCurrentFrameIndex(),
16965 if(image != VK_NULL_HANDLE)
16967 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
16969 if(allocation != VK_NULL_HANDLE)
16971 allocator->FreeMemory(
16977 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1756
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2056
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1811
-
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2864
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1814
+
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2867
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
Definition: vk_mem_alloc.h:1785
-
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2384
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1765
+
Definition: vk_mem_alloc.h:1788
+
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2387
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1768
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:2015
-
Definition: vk_mem_alloc.h:2119
-
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2817
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1757
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2484
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1808
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2900
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2273
-
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1652
+
Definition: vk_mem_alloc.h:2018
+
Definition: vk_mem_alloc.h:2122
+
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2820
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1760
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2487
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1811
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2903
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2276
+
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1655
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2365
-
Definition: vk_mem_alloc.h:2090
-
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2820
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1746
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2172
-
Definition: vk_mem_alloc.h:2042
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1820
-
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2301
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2368
+
Definition: vk_mem_alloc.h:2093
+
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2823
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1749
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2175
+
Definition: vk_mem_alloc.h:2045
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1823
+
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2304
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1874
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1805
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1877
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1808
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2046
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2049
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1946
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1762
-
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2854
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1945
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2904
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1949
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1765
+
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2857
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1948
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2907
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1837
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1955
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2912
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2156
-
Definition: vk_mem_alloc.h:2114
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2895
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1763
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1688
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1840
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1958
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2915
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2159
+
Definition: vk_mem_alloc.h:2117
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2898
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1766
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1691
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1814
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1817
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2315
-
Definition: vk_mem_alloc.h:2309
-
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1769
-
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1881
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2494
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2318
+
Definition: vk_mem_alloc.h:2312
+
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1772
+
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1884
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2497
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1758
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1761
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
-
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1783
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2193
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2335
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2371
+
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1786
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2196
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2338
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2374
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1744
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2318
+
Definition: vk_mem_alloc.h:1747
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2321
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2869
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:1993
+
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2872
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:1996
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2829
+
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2832
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2890
+
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2893
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2908
-
Definition: vk_mem_alloc.h:2032
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2180
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1761
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2911
+
Definition: vk_mem_alloc.h:2035
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2183
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1764
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1951
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1694
-
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2808
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1954
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1697
+
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2811
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
Definition: vk_mem_alloc.h:2806
-
Definition: vk_mem_alloc.h:2140
-
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2835
+
Definition: vk_mem_alloc.h:2809
+
Definition: vk_mem_alloc.h:2143
+
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2838
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1715
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1718
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1787
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1720
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2910
+
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1790
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1723
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2913
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2167
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2381
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2170
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2384
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1754
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1934
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2330
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1707
-
Definition: vk_mem_alloc.h:2305
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1757
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1937
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2333
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1710
+
Definition: vk_mem_alloc.h:2308
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2097
+
Definition: vk_mem_alloc.h:2100
Represents Opaque object that represents started defragmentation process.
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1947
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1711
-
Definition: vk_mem_alloc.h:2130
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2321
-
Definition: vk_mem_alloc.h:2041
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1760
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1950
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1714
+
Definition: vk_mem_alloc.h:2133
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2324
+
Definition: vk_mem_alloc.h:2044
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1763
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2162
-
Definition: vk_mem_alloc.h:2153
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2165
+
Definition: vk_mem_alloc.h:2156
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1937
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1756
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2343
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1823
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2374
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2151
-
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2859
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2186
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1940
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1759
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2346
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1826
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2377
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2154
+
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2862
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2189
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1862
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1953
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2077
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1946
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1865
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1956
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2080
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1949
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1767
-
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1793
-
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2805
-
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2883
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1709
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1766
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1770
+
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1796
+
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2808
+
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2886
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1712
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1769
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2357
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1759
-
Definition: vk_mem_alloc.h:2108
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2360
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1762
+
Definition: vk_mem_alloc.h:2111
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1801
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2508
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1817
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1946
+
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1804
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2511
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1820
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1949
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1943
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1946
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2362
-
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2814
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2365
+
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2817
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
-
Definition: vk_mem_alloc.h:2123
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2489
-
Definition: vk_mem_alloc.h:2137
-
Definition: vk_mem_alloc.h:2149
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2906
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1752
+
Definition: vk_mem_alloc.h:2126
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2492
+
Definition: vk_mem_alloc.h:2140
+
Definition: vk_mem_alloc.h:2152
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2909
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1755
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1941
-
Definition: vk_mem_alloc.h:1998
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2311
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1944
+
Definition: vk_mem_alloc.h:2001
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2314
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1790
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1939
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1764
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1768
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2064
-
Definition: vk_mem_alloc.h:2144
-
Definition: vk_mem_alloc.h:2025
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2503
+
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1793
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1942
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1767
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1771
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2067
+
Definition: vk_mem_alloc.h:2147
+
Definition: vk_mem_alloc.h:2028
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2506
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1742
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1745
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1755
-
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2290
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1758
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2293
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2470
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2473
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2134
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2255
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1947
+
Definition: vk_mem_alloc.h:2137
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2258
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1950
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
-
Definition: vk_mem_alloc.h:2103
-
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1777
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1954
+
Definition: vk_mem_alloc.h:2106
+
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1780
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1957
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2368
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1947
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2371
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1950
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
-
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2874
+
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2877
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2475
-
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:2838
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2478
+
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:2841