23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1639 #ifndef VMA_RECORDING_ENABLED 1641 #define VMA_RECORDING_ENABLED 1 1643 #define VMA_RECORDING_ENABLED 0 1648 #define NOMINMAX // For windows.h 1652 #include <vulkan/vulkan.h> 1655 #if VMA_RECORDING_ENABLED 1656 #include <windows.h> 1659 #if !defined(VMA_DEDICATED_ALLOCATION) 1660 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1661 #define VMA_DEDICATED_ALLOCATION 1 1663 #define VMA_DEDICATED_ALLOCATION 0 1681 uint32_t memoryType,
1682 VkDeviceMemory memory,
1687 uint32_t memoryType,
1688 VkDeviceMemory memory,
1761 #if VMA_DEDICATED_ALLOCATION 1762 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1763 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1890 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1898 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1908 uint32_t memoryTypeIndex,
1909 VkMemoryPropertyFlags* pFlags);
1921 uint32_t frameIndex);
1954 #ifndef VMA_STATS_STRING_ENABLED 1955 #define VMA_STATS_STRING_ENABLED 1 1958 #if VMA_STATS_STRING_ENABLED 1965 char** ppStatsString,
1966 VkBool32 detailedMap);
1970 char* pStatsString);
1972 #endif // #if VMA_STATS_STRING_ENABLED 2205 uint32_t memoryTypeBits,
2207 uint32_t* pMemoryTypeIndex);
2223 const VkBufferCreateInfo* pBufferCreateInfo,
2225 uint32_t* pMemoryTypeIndex);
2241 const VkImageCreateInfo* pImageCreateInfo,
2243 uint32_t* pMemoryTypeIndex);
2415 size_t* pLostAllocationCount);
2514 const VkMemoryRequirements* pVkMemoryRequirements,
2540 const VkMemoryRequirements* pVkMemoryRequirements,
2542 size_t allocationCount,
2587 size_t allocationCount,
2613 VkDeviceSize newSize);
2993 size_t allocationCount,
2994 VkBool32* pAllocationsChanged,
3060 const VkBufferCreateInfo* pBufferCreateInfo,
3085 const VkImageCreateInfo* pImageCreateInfo,
3111 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3114 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3115 #define VMA_IMPLEMENTATION 3118 #ifdef VMA_IMPLEMENTATION 3119 #undef VMA_IMPLEMENTATION 3141 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3142 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3154 #if VMA_USE_STL_CONTAINERS 3155 #define VMA_USE_STL_VECTOR 1 3156 #define VMA_USE_STL_UNORDERED_MAP 1 3157 #define VMA_USE_STL_LIST 1 3160 #ifndef VMA_USE_STL_SHARED_MUTEX 3162 #if __cplusplus >= 201703L 3163 #define VMA_USE_STL_SHARED_MUTEX 1 3167 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 3168 #define VMA_USE_STL_SHARED_MUTEX 1 3170 #define VMA_USE_STL_SHARED_MUTEX 0 3178 #if VMA_USE_STL_VECTOR 3182 #if VMA_USE_STL_UNORDERED_MAP 3183 #include <unordered_map> 3186 #if VMA_USE_STL_LIST 3195 #include <algorithm> 3200 #define VMA_NULL nullptr 3203 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3205 void *aligned_alloc(
size_t alignment,
size_t size)
3208 if(alignment <
sizeof(
void*))
3210 alignment =
sizeof(
void*);
3213 return memalign(alignment, size);
3215 #elif defined(__APPLE__) || defined(__ANDROID__) 3217 void *aligned_alloc(
size_t alignment,
size_t size)
3220 if(alignment <
sizeof(
void*))
3222 alignment =
sizeof(
void*);
3226 if(posix_memalign(&pointer, alignment, size) == 0)
3240 #define VMA_ASSERT(expr) assert(expr) 3242 #define VMA_ASSERT(expr) 3248 #ifndef VMA_HEAVY_ASSERT 3250 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3252 #define VMA_HEAVY_ASSERT(expr) 3256 #ifndef VMA_ALIGN_OF 3257 #define VMA_ALIGN_OF(type) (__alignof(type)) 3260 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3262 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3264 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3268 #ifndef VMA_SYSTEM_FREE 3270 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3272 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3277 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3281 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3285 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3289 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3292 #ifndef VMA_DEBUG_LOG 3293 #define VMA_DEBUG_LOG(format, ...) 3303 #if VMA_STATS_STRING_ENABLED 3304 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
3306 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats num as decimal text into outStr (buffer of strLen bytes, truncating
// per snprintf semantics).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Formats a pointer value into outStr (buffer of strLen bytes). The exact
// "%p" representation is implementation-defined.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
3322 void Lock() { m_Mutex.lock(); }
3323 void Unlock() { m_Mutex.unlock(); }
3327 #define VMA_MUTEX VmaMutex 3331 #ifndef VMA_RW_MUTEX 3332 #if VMA_USE_STL_SHARED_MUTEX 3334 #include <shared_mutex> 3338 void LockRead() { m_Mutex.lock_shared(); }
3339 void UnlockRead() { m_Mutex.unlock_shared(); }
3340 void LockWrite() { m_Mutex.lock(); }
3341 void UnlockWrite() { m_Mutex.unlock(); }
3343 std::shared_mutex m_Mutex;
3345 #define VMA_RW_MUTEX VmaRWMutex 3346 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 3352 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3353 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3354 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3355 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3356 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3360 #define VMA_RW_MUTEX VmaRWMutex 3366 void LockRead() { m_Mutex.Lock(); }
3367 void UnlockRead() { m_Mutex.Unlock(); }
3368 void LockWrite() { m_Mutex.Lock(); }
3369 void UnlockWrite() { m_Mutex.Unlock(); }
3373 #define VMA_RW_MUTEX VmaRWMutex 3374 #endif // #if VMA_USE_STL_SHARED_MUTEX 3375 #endif // #ifndef VMA_RW_MUTEX 3385 #ifndef VMA_ATOMIC_UINT32 3387 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3390 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3395 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3398 #ifndef VMA_DEBUG_ALIGNMENT 3403 #define VMA_DEBUG_ALIGNMENT (1) 3406 #ifndef VMA_DEBUG_MARGIN 3411 #define VMA_DEBUG_MARGIN (0) 3414 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3419 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3422 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3428 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3431 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3436 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3439 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3444 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3447 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3448 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3452 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3453 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3457 #ifndef VMA_CLASS_NO_COPY 3458 #define VMA_CLASS_NO_COPY(className) \ 3460 className(const className&) = delete; \ 3461 className& operator=(const className&) = delete; 3464 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3467 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3469 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3470 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3476 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3478 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3479 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    // Kernighan's method: each iteration clears the lowest set bit, so the
    // loop runs once per set bit. Identical results to the parallel-add trick.
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1;
        ++count;
    }
    return count;
}
// Rounds val up to the nearest multiple of align. align must be > 0.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T multiples = (val + align - 1) / align;
    return multiples * align;
}
// Rounds val down to the nearest multiple of align. align must be > 0.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    const T multiples = val / align;
    return multiples * align;
}
// Integer division x / y rounded to nearest (ties round up for unsigned T).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / (T)2;
    return (x + halfDivisor) / y;
}
// Returns true if x is a power of two.
// NOTE: like the classic bit trick it is based on, this also returns true
// for x == 0 — callers are expected to pass nonzero values.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T withLowestBitCleared = x & (x - 1);
    return withLowestBitCleared == 0;
}
3526 static inline uint32_t VmaNextPow2(uint32_t v)
3537 static inline uint64_t VmaNextPow2(uint64_t v)
3551 static inline uint32_t VmaPrevPow2(uint32_t v)
3561 static inline uint64_t VmaPrevPow2(uint64_t v)
3573 static inline bool VmaStrIsEmpty(
const char* pStr)
3575 return pStr == VMA_NULL || *pStr ==
'\0';
3578 #if VMA_STATS_STRING_ENABLED 3580 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3596 #endif // #if VMA_STATS_STRING_ENABLED 3600 template<
typename Iterator,
typename Compare>
3601 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
3603 Iterator centerValue = end; --centerValue;
3604 Iterator insertIndex = beg;
3605 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3607 if(cmp(*memTypeIndex, *centerValue))
3609 if(insertIndex != memTypeIndex)
3611 VMA_SWAP(*memTypeIndex, *insertIndex);
3616 if(insertIndex != centerValue)
3618 VMA_SWAP(*insertIndex, *centerValue);
3623 template<
typename Iterator,
typename Compare>
3624 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3628 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3629 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3630 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3634 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3636 #endif // #ifndef VMA_SORT 3645 static inline bool VmaBlocksOnSamePage(
3646 VkDeviceSize resourceAOffset,
3647 VkDeviceSize resourceASize,
3648 VkDeviceSize resourceBOffset,
3649 VkDeviceSize pageSize)
3651 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3652 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3653 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3654 VkDeviceSize resourceBStart = resourceBOffset;
3655 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3656 return resourceAEndPage == resourceBStartPage;
// Kind of content stored in a suballocation; ordering matters for the
// granularity-conflict logic (see VmaIsBufferImageGranularityConflict).
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,           // Unused region.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,        // Allocation of unknown usage.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,  // Image with unknown tiling.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3676 static inline bool VmaIsBufferImageGranularityConflict(
3677 VmaSuballocationType suballocType1,
3678 VmaSuballocationType suballocType2)
3680 if(suballocType1 > suballocType2)
3682 VMA_SWAP(suballocType1, suballocType2);
3685 switch(suballocType1)
3687 case VMA_SUBALLOCATION_TYPE_FREE:
3689 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3691 case VMA_SUBALLOCATION_TYPE_BUFFER:
3693 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3694 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3695 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3697 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3698 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3699 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3700 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3702 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3703 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3711 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3713 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3714 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3715 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3717 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3721 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3723 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3724 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3725 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3727 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3739 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
3741 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
3742 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
3743 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3744 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
// RAII guard: locks the given VMA_MUTEX for this object's lifetime.
// Locking can be disabled entirely by passing useMutex = false.
3750 VMA_CLASS_NO_COPY(VmaMutexLock)
// Acquires the mutex in the constructor when useMutex is true; otherwise
// stores VMA_NULL and all operations become no-ops.
3752 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3753 m_pMutex(useMutex ? &mutex : VMA_NULL)
3754 {
if(m_pMutex) { m_pMutex->Lock(); } }
// Destructor: releases the mutex only if it was acquired.
3756 {
if(m_pMutex) { m_pMutex->Unlock(); } }
// Non-null only when locking is enabled.
3758 VMA_MUTEX* m_pMutex;
3762 struct VmaMutexLockRead
3764 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3766 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3767 m_pMutex(useMutex ? &mutex : VMA_NULL)
3768 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3769 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3771 VMA_RW_MUTEX* m_pMutex;
3775 struct VmaMutexLockWrite
3777 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3779 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3780 m_pMutex(useMutex ? &mutex : VMA_NULL)
3781 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3782 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3784 VMA_RW_MUTEX* m_pMutex;
3787 #if VMA_DEBUG_GLOBAL_MUTEX 3788 static VMA_MUTEX gDebugGlobalMutex;
3789 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3791 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3795 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over sorted range [beg, end): returns an iterator to the
// first element NOT ordered before key (i.e. lower_bound under cmp), or end
// if every element is less than key.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t lo = 0;
    size_t hi = (size_t)(end - beg);
    while(lo < hi)
    {
        const size_t mid = (lo + hi) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1; // Element is less than key: answer lies to the right.
        }
        else
        {
            hi = mid;     // Candidate found: narrow toward the left.
        }
    }
    return beg + lo;
}
3830 template<
typename T>
3831 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
3833 for(uint32_t i = 0; i < count; ++i)
3835 const T iPtr = arr[i];
3836 if(iPtr == VMA_NULL)
3840 for(uint32_t j = i + 1; j < count; ++j)
3854 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3856 if((pAllocationCallbacks != VMA_NULL) &&
3857 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3859 return (*pAllocationCallbacks->pfnAllocation)(
3860 pAllocationCallbacks->pUserData,
3863 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3867 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3871 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3873 if((pAllocationCallbacks != VMA_NULL) &&
3874 (pAllocationCallbacks->pfnFree != VMA_NULL))
3876 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3880 VMA_SYSTEM_FREE(ptr);
3884 template<
typename T>
3885 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3887 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3890 template<
typename T>
3891 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3893 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3896 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3898 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3900 template<
typename T>
3901 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3904 VmaFree(pAllocationCallbacks, ptr);
3907 template<
typename T>
3908 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3912 for(
size_t i = count; i--; )
3916 VmaFree(pAllocationCallbacks, ptr);
3921 template<
typename T>
3922 class VmaStlAllocator
3925 const VkAllocationCallbacks*
const m_pCallbacks;
3926 typedef T value_type;
3928 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3929 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3931 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3932 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3934 template<
typename U>
3935 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3937 return m_pCallbacks == rhs.m_pCallbacks;
3939 template<
typename U>
3940 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3942 return m_pCallbacks != rhs.m_pCallbacks;
3945 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3948 #if VMA_USE_STL_VECTOR 3950 #define VmaVector std::vector 3952 template<
typename T,
typename allocatorT>
// Inserts item at position index (std::vector backend of VmaVector).
3953 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3955 vec.insert(vec.begin() + index, item);
3958 template<
typename T,
typename allocatorT>
// Removes the element at position index (std::vector backend of VmaVector).
3959 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3961 vec.erase(vec.begin() + index);
3964 #else // #if VMA_USE_STL_VECTOR 3969 template<
typename T,
typename AllocatorT>
3973 typedef T value_type;
3975 VmaVector(
const AllocatorT& allocator) :
3976 m_Allocator(allocator),
3983 VmaVector(
size_t count,
const AllocatorT& allocator) :
3984 m_Allocator(allocator),
3985 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3991 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3992 m_Allocator(src.m_Allocator),
3993 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3994 m_Count(src.m_Count),
3995 m_Capacity(src.m_Count)
3999 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
4005 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4008 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
4012 resize(rhs.m_Count);
4015 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
4021 bool empty()
const {
return m_Count == 0; }
4022 size_t size()
const {
return m_Count; }
4023 T* data() {
return m_pArray; }
4024 const T* data()
const {
return m_pArray; }
4026 T& operator[](
size_t index)
4028 VMA_HEAVY_ASSERT(index < m_Count);
4029 return m_pArray[index];
4031 const T& operator[](
size_t index)
const 4033 VMA_HEAVY_ASSERT(index < m_Count);
4034 return m_pArray[index];
4039 VMA_HEAVY_ASSERT(m_Count > 0);
4042 const T& front()
const 4044 VMA_HEAVY_ASSERT(m_Count > 0);
4049 VMA_HEAVY_ASSERT(m_Count > 0);
4050 return m_pArray[m_Count - 1];
4052 const T& back()
const 4054 VMA_HEAVY_ASSERT(m_Count > 0);
4055 return m_pArray[m_Count - 1];
4058 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4060 newCapacity = VMA_MAX(newCapacity, m_Count);
4062 if((newCapacity < m_Capacity) && !freeMemory)
4064 newCapacity = m_Capacity;
4067 if(newCapacity != m_Capacity)
4069 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4072 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4074 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4075 m_Capacity = newCapacity;
4076 m_pArray = newArray;
4080 void resize(
size_t newCount,
bool freeMemory =
false)
4082 size_t newCapacity = m_Capacity;
4083 if(newCount > m_Capacity)
4085 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4089 newCapacity = newCount;
4092 if(newCapacity != m_Capacity)
4094 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4095 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4096 if(elementsToCopy != 0)
4098 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4100 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4101 m_Capacity = newCapacity;
4102 m_pArray = newArray;
4108 void clear(
bool freeMemory =
false)
4110 resize(0, freeMemory);
4113 void insert(
size_t index,
const T& src)
4115 VMA_HEAVY_ASSERT(index <= m_Count);
4116 const size_t oldCount = size();
4117 resize(oldCount + 1);
4118 if(index < oldCount)
4120 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4122 m_pArray[index] = src;
4125 void remove(
size_t index)
4127 VMA_HEAVY_ASSERT(index < m_Count);
4128 const size_t oldCount = size();
4129 if(index < oldCount - 1)
4131 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4133 resize(oldCount - 1);
4136 void push_back(
const T& src)
4138 const size_t newIndex = size();
4139 resize(newIndex + 1);
4140 m_pArray[newIndex] = src;
4145 VMA_HEAVY_ASSERT(m_Count > 0);
4149 void push_front(
const T& src)
4156 VMA_HEAVY_ASSERT(m_Count > 0);
4160 typedef T* iterator;
4162 iterator begin() {
return m_pArray; }
4163 iterator end() {
return m_pArray + m_Count; }
4166 AllocatorT m_Allocator;
4172 template<
typename T,
typename allocatorT>
4173 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4175 vec.insert(index, item);
4178 template<
typename T,
typename allocatorT>
4179 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4184 #endif // #if VMA_USE_STL_VECTOR 4186 template<
typename CmpLess,
typename VectorT>
4187 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4189 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4191 vector.data() + vector.size(),
4193 CmpLess()) - vector.data();
4194 VmaVectorInsert(vector, indexToInsert, value);
4195 return indexToInsert;
// Removes the first element equivalent to value (under CmpLess) from a
// sorted vector. Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalence expressed via the strict weak ordering: !(a<b) && !(b<a).
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
4216 template<
typename CmpLess,
typename IterT,
typename KeyT>
4217 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
4220 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4221 beg, end, value, comparator);
4223 (!comparator(*it, value) && !comparator(value, *it)))
4238 template<
typename T>
4239 class VmaPoolAllocator
4241 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4243 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
4244 ~VmaPoolAllocator();
4252 uint32_t NextFreeIndex;
4260 uint32_t FirstFreeIndex;
4263 const VkAllocationCallbacks* m_pAllocationCallbacks;
4264 const uint32_t m_FirstBlockCapacity;
4265 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4267 ItemBlock& CreateNewBlock();
4270 template<
typename T>
4271 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
4272 m_pAllocationCallbacks(pAllocationCallbacks),
4273 m_FirstBlockCapacity(firstBlockCapacity),
4274 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4276 VMA_ASSERT(m_FirstBlockCapacity > 1);
4279 template<
typename T>
4280 VmaPoolAllocator<T>::~VmaPoolAllocator()
4285 template<
typename T>
4286 void VmaPoolAllocator<T>::Clear()
4288 for(
size_t i = m_ItemBlocks.size(); i--; )
4289 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4290 m_ItemBlocks.clear();
4293 template<
typename T>
4294 T* VmaPoolAllocator<T>::Alloc()
4296 for(
size_t i = m_ItemBlocks.size(); i--; )
4298 ItemBlock& block = m_ItemBlocks[i];
4300 if(block.FirstFreeIndex != UINT32_MAX)
4302 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4303 block.FirstFreeIndex = pItem->NextFreeIndex;
4304 return &pItem->Value;
4309 ItemBlock& newBlock = CreateNewBlock();
4310 Item*
const pItem = &newBlock.pItems[0];
4311 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4312 return &pItem->Value;
4315 template<
typename T>
4316 void VmaPoolAllocator<T>::Free(T* ptr)
4319 for(
size_t i = m_ItemBlocks.size(); i--; )
4321 ItemBlock& block = m_ItemBlocks[i];
4325 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4328 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4330 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
4331 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4332 block.FirstFreeIndex = index;
4336 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4339 template<
typename T>
4340 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4342 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4343 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4345 const ItemBlock newBlock = {
4346 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4350 m_ItemBlocks.push_back(newBlock);
4353 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4354 newBlock.pItems[i].NextFreeIndex = i + 1;
4355 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
4356 return m_ItemBlocks.back();
4362 #if VMA_USE_STL_LIST 4364 #define VmaList std::list 4366 #else // #if VMA_USE_STL_LIST 4368 template<
typename T>
4377 template<
typename T>
4380 VMA_CLASS_NO_COPY(VmaRawList)
4382 typedef VmaListItem<T> ItemType;
4384 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4388 size_t GetCount()
const {
return m_Count; }
4389 bool IsEmpty()
const {
return m_Count == 0; }
4391 ItemType* Front() {
return m_pFront; }
4392 const ItemType* Front()
const {
return m_pFront; }
4393 ItemType* Back() {
return m_pBack; }
4394 const ItemType* Back()
const {
return m_pBack; }
4396 ItemType* PushBack();
4397 ItemType* PushFront();
4398 ItemType* PushBack(
const T& value);
4399 ItemType* PushFront(
const T& value);
4404 ItemType* InsertBefore(ItemType* pItem);
4406 ItemType* InsertAfter(ItemType* pItem);
4408 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4409 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4411 void Remove(ItemType* pItem);
4414 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4415 VmaPoolAllocator<ItemType> m_ItemAllocator;
4421 template<
typename T>
4422 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4423 m_pAllocationCallbacks(pAllocationCallbacks),
4424 m_ItemAllocator(pAllocationCallbacks, 128),
4431 template<
typename T>
4432 VmaRawList<T>::~VmaRawList()
4438 template<
typename T>
4439 void VmaRawList<T>::Clear()
4441 if(IsEmpty() ==
false)
4443 ItemType* pItem = m_pBack;
4444 while(pItem != VMA_NULL)
4446 ItemType*
const pPrevItem = pItem->pPrev;
4447 m_ItemAllocator.Free(pItem);
4450 m_pFront = VMA_NULL;
4456 template<
typename T>
4457 VmaListItem<T>* VmaRawList<T>::PushBack()
4459 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4460 pNewItem->pNext = VMA_NULL;
4463 pNewItem->pPrev = VMA_NULL;
4464 m_pFront = pNewItem;
4470 pNewItem->pPrev = m_pBack;
4471 m_pBack->pNext = pNewItem;
4478 template<
typename T>
4479 VmaListItem<T>* VmaRawList<T>::PushFront()
4481 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4482 pNewItem->pPrev = VMA_NULL;
4485 pNewItem->pNext = VMA_NULL;
4486 m_pFront = pNewItem;
4492 pNewItem->pNext = m_pFront;
4493 m_pFront->pPrev = pNewItem;
4494 m_pFront = pNewItem;
4500 template<
typename T>
4501 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4503 ItemType*
const pNewItem = PushBack();
4504 pNewItem->Value = value;
4508 template<
typename T>
4509 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4511 ItemType*
const pNewItem = PushFront();
4512 pNewItem->Value = value;
4516 template<
typename T>
4517 void VmaRawList<T>::PopBack()
4519 VMA_HEAVY_ASSERT(m_Count > 0);
4520 ItemType*
const pBackItem = m_pBack;
4521 ItemType*
const pPrevItem = pBackItem->pPrev;
4522 if(pPrevItem != VMA_NULL)
4524 pPrevItem->pNext = VMA_NULL;
4526 m_pBack = pPrevItem;
4527 m_ItemAllocator.Free(pBackItem);
4531 template<
typename T>
4532 void VmaRawList<T>::PopFront()
4534 VMA_HEAVY_ASSERT(m_Count > 0);
4535 ItemType*
const pFrontItem = m_pFront;
4536 ItemType*
const pNextItem = pFrontItem->pNext;
4537 if(pNextItem != VMA_NULL)
4539 pNextItem->pPrev = VMA_NULL;
4541 m_pFront = pNextItem;
4542 m_ItemAllocator.Free(pFrontItem);
4546 template<
typename T>
4547 void VmaRawList<T>::Remove(ItemType* pItem)
4549 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4550 VMA_HEAVY_ASSERT(m_Count > 0);
4552 if(pItem->pPrev != VMA_NULL)
4554 pItem->pPrev->pNext = pItem->pNext;
4558 VMA_HEAVY_ASSERT(m_pFront == pItem);
4559 m_pFront = pItem->pNext;
4562 if(pItem->pNext != VMA_NULL)
4564 pItem->pNext->pPrev = pItem->pPrev;
4568 VMA_HEAVY_ASSERT(m_pBack == pItem);
4569 m_pBack = pItem->pPrev;
4572 m_ItemAllocator.Free(pItem);
4576 template<
typename T>
4577 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4579 if(pItem != VMA_NULL)
4581 ItemType*
const prevItem = pItem->pPrev;
4582 ItemType*
const newItem = m_ItemAllocator.Alloc();
4583 newItem->pPrev = prevItem;
4584 newItem->pNext = pItem;
4585 pItem->pPrev = newItem;
4586 if(prevItem != VMA_NULL)
4588 prevItem->pNext = newItem;
4592 VMA_HEAVY_ASSERT(m_pFront == pItem);
4602 template<
typename T>
4603 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4605 if(pItem != VMA_NULL)
4607 ItemType*
const nextItem = pItem->pNext;
4608 ItemType*
const newItem = m_ItemAllocator.Alloc();
4609 newItem->pNext = nextItem;
4610 newItem->pPrev = pItem;
4611 pItem->pNext = newItem;
4612 if(nextItem != VMA_NULL)
4614 nextItem->pPrev = newItem;
4618 VMA_HEAVY_ASSERT(m_pBack == pItem);
4628 template<
typename T>
4629 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4631 ItemType*
const newItem = InsertBefore(pItem);
4632 newItem->Value = value;
4636 template<
typename T>
4637 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4639 ItemType*
const newItem = InsertAfter(pItem);
4640 newItem->Value = value;
4644 template<
typename T,
typename AllocatorT>
4647 VMA_CLASS_NO_COPY(VmaList)
4658 T& operator*()
const 4660 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4661 return m_pItem->Value;
4663 T* operator->()
const 4665 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4666 return &m_pItem->Value;
4669 iterator& operator++()
4671 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4672 m_pItem = m_pItem->pNext;
4675 iterator& operator--()
4677 if(m_pItem != VMA_NULL)
4679 m_pItem = m_pItem->pPrev;
4683 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4684 m_pItem = m_pList->Back();
4689 iterator operator++(
int)
4691 iterator result = *
this;
4695 iterator operator--(
int)
4697 iterator result = *
this;
4702 bool operator==(
const iterator& rhs)
const 4704 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4705 return m_pItem == rhs.m_pItem;
4707 bool operator!=(
const iterator& rhs)
const 4709 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4710 return m_pItem != rhs.m_pItem;
4714 VmaRawList<T>* m_pList;
4715 VmaListItem<T>* m_pItem;
4717 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4723 friend class VmaList<T, AllocatorT>;
4726 class const_iterator
4735 const_iterator(
const iterator& src) :
4736 m_pList(src.m_pList),
4737 m_pItem(src.m_pItem)
4741 const T& operator*()
const 4743 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4744 return m_pItem->Value;
4746 const T* operator->()
const 4748 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4749 return &m_pItem->Value;
4752 const_iterator& operator++()
4754 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4755 m_pItem = m_pItem->pNext;
4758 const_iterator& operator--()
4760 if(m_pItem != VMA_NULL)
4762 m_pItem = m_pItem->pPrev;
4766 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4767 m_pItem = m_pList->Back();
4772 const_iterator operator++(
int)
4774 const_iterator result = *
this;
4778 const_iterator operator--(
int)
4780 const_iterator result = *
this;
4785 bool operator==(
const const_iterator& rhs)
const 4787 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4788 return m_pItem == rhs.m_pItem;
4790 bool operator!=(
const const_iterator& rhs)
const 4792 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4793 return m_pItem != rhs.m_pItem;
4797 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4803 const VmaRawList<T>* m_pList;
4804 const VmaListItem<T>* m_pItem;
4806 friend class VmaList<T, AllocatorT>;
4809 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4811 bool empty()
const {
return m_RawList.IsEmpty(); }
4812 size_t size()
const {
return m_RawList.GetCount(); }
4814 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4815 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4817 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4818 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4820 void clear() { m_RawList.Clear(); }
4821 void push_back(
const T& value) { m_RawList.PushBack(value); }
4822 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4823 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4826 VmaRawList<T> m_RawList;
4829 #endif // #if VMA_USE_STL_LIST 4837 #if VMA_USE_STL_UNORDERED_MAP 4839 #define VmaPair std::pair 4841 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4842 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4844 #else // #if VMA_USE_STL_UNORDERED_MAP 4846 template<
typename T1,
typename T2>
4852 VmaPair() : first(), second() { }
4853 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4859 template<
typename KeyT,
typename ValueT>
4863 typedef VmaPair<KeyT, ValueT> PairType;
4864 typedef PairType* iterator;
4866 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4868 iterator begin() {
return m_Vector.begin(); }
4869 iterator end() {
return m_Vector.end(); }
4871 void insert(
const PairType& pair);
4872 iterator find(
const KeyT& key);
4873 void erase(iterator it);
4876 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4879 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4881 template<
typename FirstT,
typename SecondT>
4882 struct VmaPairFirstLess
4884 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4886 return lhs.first < rhs.first;
4888 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4890 return lhs.first < rhsFirst;
4894 template<
typename KeyT,
typename ValueT>
4895 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4897 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4899 m_Vector.data() + m_Vector.size(),
4901 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4902 VmaVectorInsert(m_Vector, indexToInsert, pair);
4905 template<
typename KeyT,
typename ValueT>
4906 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4908 PairType* it = VmaBinaryFindFirstNotLess(
4910 m_Vector.data() + m_Vector.size(),
4912 VmaPairFirstLess<KeyT, ValueT>());
4913 if((it != m_Vector.end()) && (it->first == key))
4919 return m_Vector.end();
4923 template<
typename KeyT,
typename ValueT>
4924 void VmaMap<KeyT, ValueT>::erase(iterator it)
4926 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4929 #endif // #if VMA_USE_STL_UNORDERED_MAP 4935 class VmaDeviceMemoryBlock;
4937 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4939 struct VmaAllocation_T
4942 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4946 FLAG_USER_DATA_STRING = 0x01,
4950 enum ALLOCATION_TYPE
4952 ALLOCATION_TYPE_NONE,
4953 ALLOCATION_TYPE_BLOCK,
4954 ALLOCATION_TYPE_DEDICATED,
4962 void Ctor(uint32_t currentFrameIndex,
bool userDataString)
4966 m_pUserData = VMA_NULL;
4967 m_LastUseFrameIndex = currentFrameIndex;
4968 m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
4969 m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
4971 m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
4973 #if VMA_STATS_STRING_ENABLED 4974 m_CreationFrameIndex = currentFrameIndex;
4975 m_BufferImageUsage = 0;
4981 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4984 VMA_ASSERT(m_pUserData == VMA_NULL);
4987 void InitBlockAllocation(
4988 VmaDeviceMemoryBlock* block,
4989 VkDeviceSize offset,
4990 VkDeviceSize alignment,
4992 VmaSuballocationType suballocationType,
4996 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4997 VMA_ASSERT(block != VMA_NULL);
4998 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4999 m_Alignment = alignment;
5001 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5002 m_SuballocationType = (uint8_t)suballocationType;
5003 m_BlockAllocation.m_Block = block;
5004 m_BlockAllocation.m_Offset = offset;
5005 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
5010 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5011 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
5012 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5013 m_BlockAllocation.m_Block = VMA_NULL;
5014 m_BlockAllocation.m_Offset = 0;
5015 m_BlockAllocation.m_CanBecomeLost =
true;
5018 void ChangeBlockAllocation(
5020 VmaDeviceMemoryBlock* block,
5021 VkDeviceSize offset);
5023 void ChangeSize(VkDeviceSize newSize);
5024 void ChangeOffset(VkDeviceSize newOffset);
5027 void InitDedicatedAllocation(
5028 uint32_t memoryTypeIndex,
5029 VkDeviceMemory hMemory,
5030 VmaSuballocationType suballocationType,
5034 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5035 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
5036 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5039 m_SuballocationType = (uint8_t)suballocationType;
5040 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5041 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
5042 m_DedicatedAllocation.m_hMemory = hMemory;
5043 m_DedicatedAllocation.m_pMappedData = pMappedData;
5046 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5047 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5048 VkDeviceSize GetSize()
const {
return m_Size; }
5049 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5050 void* GetUserData()
const {
return m_pUserData; }
5051 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5052 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
5054 VmaDeviceMemoryBlock* GetBlock()
const 5056 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5057 return m_BlockAllocation.m_Block;
5059 VkDeviceSize GetOffset()
const;
5060 VkDeviceMemory GetMemory()
const;
5061 uint32_t GetMemoryTypeIndex()
const;
5062 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5063 void* GetMappedData()
const;
5064 bool CanBecomeLost()
const;
5066 uint32_t GetLastUseFrameIndex()
const 5068 return m_LastUseFrameIndex.load();
5070 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5072 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5082 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5084 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5086 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
5097 void BlockAllocMap();
5098 void BlockAllocUnmap();
5099 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
5102 #if VMA_STATS_STRING_ENABLED 5103 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5104 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5106 void InitBufferImageUsage(uint32_t bufferImageUsage)
5108 VMA_ASSERT(m_BufferImageUsage == 0);
5109 m_BufferImageUsage = bufferImageUsage;
5112 void PrintParameters(
class VmaJsonWriter& json)
const;
5116 VkDeviceSize m_Alignment;
5117 VkDeviceSize m_Size;
5119 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5121 uint8_t m_SuballocationType;
5128 struct BlockAllocation
5130 VmaDeviceMemoryBlock* m_Block;
5131 VkDeviceSize m_Offset;
5132 bool m_CanBecomeLost;
5136 struct DedicatedAllocation
5138 uint32_t m_MemoryTypeIndex;
5139 VkDeviceMemory m_hMemory;
5140 void* m_pMappedData;
5146 BlockAllocation m_BlockAllocation;
5148 DedicatedAllocation m_DedicatedAllocation;
5151 #if VMA_STATS_STRING_ENABLED 5152 uint32_t m_CreationFrameIndex;
5153 uint32_t m_BufferImageUsage;
5163 struct VmaSuballocation
5165 VkDeviceSize offset;
5168 VmaSuballocationType type;
5172 struct VmaSuballocationOffsetLess
5174 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5176 return lhs.offset < rhs.offset;
5179 struct VmaSuballocationOffsetGreater
5181 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5183 return lhs.offset > rhs.offset;
5187 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
5190 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5192 enum class VmaAllocationRequestType
5214 struct VmaAllocationRequest
5216 VkDeviceSize offset;
5217 VkDeviceSize sumFreeSize;
5218 VkDeviceSize sumItemSize;
5219 VmaSuballocationList::iterator item;
5220 size_t itemsToMakeLostCount;
5222 VmaAllocationRequestType type;
5224 VkDeviceSize CalcCost()
const 5226 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
5234 class VmaBlockMetadata
5238 virtual ~VmaBlockMetadata() { }
5239 virtual void Init(VkDeviceSize size) { m_Size = size; }
5242 virtual bool Validate()
const = 0;
5243 VkDeviceSize GetSize()
const {
return m_Size; }
5244 virtual size_t GetAllocationCount()
const = 0;
5245 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5246 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5248 virtual bool IsEmpty()
const = 0;
5250 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5252 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5254 #if VMA_STATS_STRING_ENABLED 5255 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
5261 virtual bool CreateAllocationRequest(
5262 uint32_t currentFrameIndex,
5263 uint32_t frameInUseCount,
5264 VkDeviceSize bufferImageGranularity,
5265 VkDeviceSize allocSize,
5266 VkDeviceSize allocAlignment,
5268 VmaSuballocationType allocType,
5269 bool canMakeOtherLost,
5272 VmaAllocationRequest* pAllocationRequest) = 0;
5274 virtual bool MakeRequestedAllocationsLost(
5275 uint32_t currentFrameIndex,
5276 uint32_t frameInUseCount,
5277 VmaAllocationRequest* pAllocationRequest) = 0;
5279 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5281 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
5285 const VmaAllocationRequest& request,
5286 VmaSuballocationType type,
5287 VkDeviceSize allocSize,
5292 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5295 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5298 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
5300 #if VMA_STATS_STRING_ENABLED 5301 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5302 VkDeviceSize unusedBytes,
5303 size_t allocationCount,
5304 size_t unusedRangeCount)
const;
5305 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5306 VkDeviceSize offset,
5308 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5309 VkDeviceSize offset,
5310 VkDeviceSize size)
const;
5311 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5315 VkDeviceSize m_Size;
5316 const VkAllocationCallbacks* m_pAllocationCallbacks;
5319 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5320 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5324 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5326 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5329 virtual ~VmaBlockMetadata_Generic();
5330 virtual void Init(VkDeviceSize size);
5332 virtual bool Validate()
const;
5333 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5334 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5335 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5336 virtual bool IsEmpty()
const;
5338 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5339 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5341 #if VMA_STATS_STRING_ENABLED 5342 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5345 virtual bool CreateAllocationRequest(
5346 uint32_t currentFrameIndex,
5347 uint32_t frameInUseCount,
5348 VkDeviceSize bufferImageGranularity,
5349 VkDeviceSize allocSize,
5350 VkDeviceSize allocAlignment,
5352 VmaSuballocationType allocType,
5353 bool canMakeOtherLost,
5355 VmaAllocationRequest* pAllocationRequest);
5357 virtual bool MakeRequestedAllocationsLost(
5358 uint32_t currentFrameIndex,
5359 uint32_t frameInUseCount,
5360 VmaAllocationRequest* pAllocationRequest);
5362 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5364 virtual VkResult CheckCorruption(
const void* pBlockData);
5367 const VmaAllocationRequest& request,
5368 VmaSuballocationType type,
5369 VkDeviceSize allocSize,
5373 virtual void FreeAtOffset(VkDeviceSize offset);
5375 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5380 bool IsBufferImageGranularityConflictPossible(
5381 VkDeviceSize bufferImageGranularity,
5382 VmaSuballocationType& inOutPrevSuballocType)
const;
5385 friend class VmaDefragmentationAlgorithm_Generic;
5386 friend class VmaDefragmentationAlgorithm_Fast;
5388 uint32_t m_FreeCount;
5389 VkDeviceSize m_SumFreeSize;
5390 VmaSuballocationList m_Suballocations;
5393 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5395 bool ValidateFreeSuballocationList()
const;
5399 bool CheckAllocation(
5400 uint32_t currentFrameIndex,
5401 uint32_t frameInUseCount,
5402 VkDeviceSize bufferImageGranularity,
5403 VkDeviceSize allocSize,
5404 VkDeviceSize allocAlignment,
5405 VmaSuballocationType allocType,
5406 VmaSuballocationList::const_iterator suballocItem,
5407 bool canMakeOtherLost,
5408 VkDeviceSize* pOffset,
5409 size_t* itemsToMakeLostCount,
5410 VkDeviceSize* pSumFreeSize,
5411 VkDeviceSize* pSumItemSize)
const;
5413 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5417 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5420 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5423 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5504 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5506 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5509 virtual ~VmaBlockMetadata_Linear();
5510 virtual void Init(VkDeviceSize size);
5512 virtual bool Validate()
const;
5513 virtual size_t GetAllocationCount()
const;
5514 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5515 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5516 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5518 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5519 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5521 #if VMA_STATS_STRING_ENABLED 5522 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5525 virtual bool CreateAllocationRequest(
5526 uint32_t currentFrameIndex,
5527 uint32_t frameInUseCount,
5528 VkDeviceSize bufferImageGranularity,
5529 VkDeviceSize allocSize,
5530 VkDeviceSize allocAlignment,
5532 VmaSuballocationType allocType,
5533 bool canMakeOtherLost,
5535 VmaAllocationRequest* pAllocationRequest);
5537 virtual bool MakeRequestedAllocationsLost(
5538 uint32_t currentFrameIndex,
5539 uint32_t frameInUseCount,
5540 VmaAllocationRequest* pAllocationRequest);
5542 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5544 virtual VkResult CheckCorruption(
const void* pBlockData);
5547 const VmaAllocationRequest& request,
5548 VmaSuballocationType type,
5549 VkDeviceSize allocSize,
5553 virtual void FreeAtOffset(VkDeviceSize offset);
5563 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5565 enum SECOND_VECTOR_MODE
5567 SECOND_VECTOR_EMPTY,
5572 SECOND_VECTOR_RING_BUFFER,
5578 SECOND_VECTOR_DOUBLE_STACK,
5581 VkDeviceSize m_SumFreeSize;
5582 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5583 uint32_t m_1stVectorIndex;
5584 SECOND_VECTOR_MODE m_2ndVectorMode;
5586 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5587 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5588 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5589 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5592 size_t m_1stNullItemsBeginCount;
5594 size_t m_1stNullItemsMiddleCount;
5596 size_t m_2ndNullItemsCount;
5598 bool ShouldCompact1st()
const;
5599 void CleanupAfterFree();
5601 bool CreateAllocationRequest_LowerAddress(
5602 uint32_t currentFrameIndex,
5603 uint32_t frameInUseCount,
5604 VkDeviceSize bufferImageGranularity,
5605 VkDeviceSize allocSize,
5606 VkDeviceSize allocAlignment,
5607 VmaSuballocationType allocType,
5608 bool canMakeOtherLost,
5610 VmaAllocationRequest* pAllocationRequest);
5611 bool CreateAllocationRequest_UpperAddress(
5612 uint32_t currentFrameIndex,
5613 uint32_t frameInUseCount,
5614 VkDeviceSize bufferImageGranularity,
5615 VkDeviceSize allocSize,
5616 VkDeviceSize allocAlignment,
5617 VmaSuballocationType allocType,
5618 bool canMakeOtherLost,
5620 VmaAllocationRequest* pAllocationRequest);
5634 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5636 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5639 virtual ~VmaBlockMetadata_Buddy();
5640 virtual void Init(VkDeviceSize size);
5642 virtual bool Validate()
const;
5643 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5644 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5645 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5646 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5648 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5649 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5651 #if VMA_STATS_STRING_ENABLED 5652 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5655 virtual bool CreateAllocationRequest(
5656 uint32_t currentFrameIndex,
5657 uint32_t frameInUseCount,
5658 VkDeviceSize bufferImageGranularity,
5659 VkDeviceSize allocSize,
5660 VkDeviceSize allocAlignment,
5662 VmaSuballocationType allocType,
5663 bool canMakeOtherLost,
5665 VmaAllocationRequest* pAllocationRequest);
5667 virtual bool MakeRequestedAllocationsLost(
5668 uint32_t currentFrameIndex,
5669 uint32_t frameInUseCount,
5670 VmaAllocationRequest* pAllocationRequest);
5672 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5674 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5677 const VmaAllocationRequest& request,
5678 VmaSuballocationType type,
5679 VkDeviceSize allocSize,
5682 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5683 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5686 static const VkDeviceSize MIN_NODE_SIZE = 32;
5687 static const size_t MAX_LEVELS = 30;
5689 struct ValidationContext
5691 size_t calculatedAllocationCount;
5692 size_t calculatedFreeCount;
5693 VkDeviceSize calculatedSumFreeSize;
5695 ValidationContext() :
5696 calculatedAllocationCount(0),
5697 calculatedFreeCount(0),
5698 calculatedSumFreeSize(0) { }
5703 VkDeviceSize offset;
5733 VkDeviceSize m_UsableSize;
5734 uint32_t m_LevelCount;
5740 } m_FreeList[MAX_LEVELS];
5742 size_t m_AllocationCount;
5746 VkDeviceSize m_SumFreeSize;
5748 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5749 void DeleteNode(Node* node);
5750 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5751 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5752 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5754 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5755 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5759 void AddToFreeListFront(uint32_t level, Node* node);
5763 void RemoveFromFreeList(uint32_t level, Node* node);
5765 #if VMA_STATS_STRING_ENABLED 5766 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5776 class VmaDeviceMemoryBlock
5778 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5780 VmaBlockMetadata* m_pMetadata;
5784 ~VmaDeviceMemoryBlock()
5786 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5787 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5794 uint32_t newMemoryTypeIndex,
5795 VkDeviceMemory newMemory,
5796 VkDeviceSize newSize,
5798 uint32_t algorithm);
5802 VmaPool GetParentPool()
const {
return m_hParentPool; }
5803 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5804 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5805 uint32_t GetId()
const {
return m_Id; }
5806 void* GetMappedData()
const {
return m_pMappedData; }
5809 bool Validate()
const;
5814 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5817 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5818 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5820 VkResult BindBufferMemory(
5824 VkResult BindImageMemory(
5831 uint32_t m_MemoryTypeIndex;
5833 VkDeviceMemory m_hMemory;
5841 uint32_t m_MapCount;
5842 void* m_pMappedData;
5845 struct VmaPointerLess
5847 bool operator()(
const void* lhs,
const void* rhs)
const 5853 struct VmaDefragmentationMove
5855 size_t srcBlockIndex;
5856 size_t dstBlockIndex;
5857 VkDeviceSize srcOffset;
5858 VkDeviceSize dstOffset;
5862 class VmaDefragmentationAlgorithm;
5870 struct VmaBlockVector
5872 VMA_CLASS_NO_COPY(VmaBlockVector)
5877 uint32_t memoryTypeIndex,
5878 VkDeviceSize preferredBlockSize,
5879 size_t minBlockCount,
5880 size_t maxBlockCount,
5881 VkDeviceSize bufferImageGranularity,
5882 uint32_t frameInUseCount,
5884 bool explicitBlockSize,
5885 uint32_t algorithm);
5888 VkResult CreateMinBlocks();
5890 VmaPool GetParentPool()
const {
return m_hParentPool; }
5891 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5892 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5893 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5894 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5895 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5899 bool IsEmpty()
const {
return m_Blocks.empty(); }
5900 bool IsCorruptionDetectionEnabled()
const;
5903 uint32_t currentFrameIndex,
5905 VkDeviceSize alignment,
5907 VmaSuballocationType suballocType,
5908 size_t allocationCount,
5917 #if VMA_STATS_STRING_ENABLED 5918 void PrintDetailedMap(
class VmaJsonWriter& json);
5921 void MakePoolAllocationsLost(
5922 uint32_t currentFrameIndex,
5923 size_t* pLostAllocationCount);
5924 VkResult CheckCorruption();
5928 class VmaBlockVectorDefragmentationContext* pCtx,
5930 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5931 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5932 VkCommandBuffer commandBuffer);
5933 void DefragmentationEnd(
5934 class VmaBlockVectorDefragmentationContext* pCtx,
5940 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5941 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5942 size_t CalcAllocationCount()
const;
5943 bool IsBufferImageGranularityConflictPossible()
const;
5946 friend class VmaDefragmentationAlgorithm_Generic;
5950 const uint32_t m_MemoryTypeIndex;
5951 const VkDeviceSize m_PreferredBlockSize;
5952 const size_t m_MinBlockCount;
5953 const size_t m_MaxBlockCount;
5954 const VkDeviceSize m_BufferImageGranularity;
5955 const uint32_t m_FrameInUseCount;
5956 const bool m_IsCustomPool;
5957 const bool m_ExplicitBlockSize;
5958 const uint32_t m_Algorithm;
5962 bool m_HasEmptyBlock;
5963 VMA_RW_MUTEX m_Mutex;
5965 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5966 uint32_t m_NextBlockId;
5968 VkDeviceSize CalcMaxBlockSize()
const;
5971 void Remove(VmaDeviceMemoryBlock* pBlock);
5975 void IncrementallySortBlocks();
5977 VkResult AllocatePage(
5978 uint32_t currentFrameIndex,
5980 VkDeviceSize alignment,
5982 VmaSuballocationType suballocType,
5986 VkResult AllocateFromBlock(
5987 VmaDeviceMemoryBlock* pBlock,
5988 uint32_t currentFrameIndex,
5990 VkDeviceSize alignment,
5993 VmaSuballocationType suballocType,
5997 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
6000 void ApplyDefragmentationMovesCpu(
6001 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6002 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
6004 void ApplyDefragmentationMovesGpu(
6005 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6006 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6007 VkCommandBuffer commandBuffer);
6018 VMA_CLASS_NO_COPY(VmaPool_T)
6020 VmaBlockVector m_BlockVector;
6025 VkDeviceSize preferredBlockSize);
6028 uint32_t GetId()
const {
return m_Id; }
6029 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
6031 #if VMA_STATS_STRING_ENABLED 6046 class VmaDefragmentationAlgorithm
6048 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
6050 VmaDefragmentationAlgorithm(
6052 VmaBlockVector* pBlockVector,
6053 uint32_t currentFrameIndex) :
6054 m_hAllocator(hAllocator),
6055 m_pBlockVector(pBlockVector),
6056 m_CurrentFrameIndex(currentFrameIndex)
6059 virtual ~VmaDefragmentationAlgorithm()
6063 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
6064 virtual void AddAll() = 0;
6066 virtual VkResult Defragment(
6067 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6068 VkDeviceSize maxBytesToMove,
6069 uint32_t maxAllocationsToMove) = 0;
6071 virtual VkDeviceSize GetBytesMoved()
const = 0;
6072 virtual uint32_t GetAllocationsMoved()
const = 0;
6076 VmaBlockVector*
const m_pBlockVector;
6077 const uint32_t m_CurrentFrameIndex;
6079 struct AllocationInfo
6082 VkBool32* m_pChanged;
6085 m_hAllocation(VK_NULL_HANDLE),
6086 m_pChanged(VMA_NULL)
6090 m_hAllocation(hAlloc),
6091 m_pChanged(pChanged)
6097 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6099 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6101 VmaDefragmentationAlgorithm_Generic(
6103 VmaBlockVector* pBlockVector,
6104 uint32_t currentFrameIndex,
6105 bool overlappingMoveSupported);
6106 virtual ~VmaDefragmentationAlgorithm_Generic();
6108 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6109 virtual void AddAll() { m_AllAllocations =
true; }
6111 virtual VkResult Defragment(
6112 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6113 VkDeviceSize maxBytesToMove,
6114 uint32_t maxAllocationsToMove);
6116 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6117 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6120 uint32_t m_AllocationCount;
6121 bool m_AllAllocations;
6123 VkDeviceSize m_BytesMoved;
6124 uint32_t m_AllocationsMoved;
6126 struct AllocationInfoSizeGreater
6128 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6130 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6134 struct AllocationInfoOffsetGreater
6136 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6138 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
6144 size_t m_OriginalBlockIndex;
6145 VmaDeviceMemoryBlock* m_pBlock;
6146 bool m_HasNonMovableAllocations;
6147 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6149 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6150 m_OriginalBlockIndex(SIZE_MAX),
6152 m_HasNonMovableAllocations(true),
6153 m_Allocations(pAllocationCallbacks)
6157 void CalcHasNonMovableAllocations()
6159 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6160 const size_t defragmentAllocCount = m_Allocations.size();
6161 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6164 void SortAllocationsBySizeDescending()
6166 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6169 void SortAllocationsByOffsetDescending()
6171 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
6175 struct BlockPointerLess
6177 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6179 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6181 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6183 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
6189 struct BlockInfoCompareMoveDestination
6191 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6193 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6197 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6201 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6209 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6210 BlockInfoVector m_Blocks;
6212 VkResult DefragmentRound(
6213 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6214 VkDeviceSize maxBytesToMove,
6215 uint32_t maxAllocationsToMove);
6217 size_t CalcBlocksWithNonMovableCount()
const;
6219 static bool MoveMakesSense(
6220 size_t dstBlockIndex, VkDeviceSize dstOffset,
6221 size_t srcBlockIndex, VkDeviceSize srcOffset);
6224 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6226 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6228 VmaDefragmentationAlgorithm_Fast(
6230 VmaBlockVector* pBlockVector,
6231 uint32_t currentFrameIndex,
6232 bool overlappingMoveSupported);
6233 virtual ~VmaDefragmentationAlgorithm_Fast();
6235 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6236 virtual void AddAll() { m_AllAllocations =
true; }
6238 virtual VkResult Defragment(
6239 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6240 VkDeviceSize maxBytesToMove,
6241 uint32_t maxAllocationsToMove);
6243 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6244 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6249 size_t origBlockIndex;
6252 class FreeSpaceDatabase
6258 s.blockInfoIndex = SIZE_MAX;
6259 for(
size_t i = 0; i < MAX_COUNT; ++i)
6261 m_FreeSpaces[i] = s;
6265 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6267 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6273 size_t bestIndex = SIZE_MAX;
6274 for(
size_t i = 0; i < MAX_COUNT; ++i)
6277 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6282 if(m_FreeSpaces[i].size < size &&
6283 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6289 if(bestIndex != SIZE_MAX)
6291 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6292 m_FreeSpaces[bestIndex].offset = offset;
6293 m_FreeSpaces[bestIndex].size = size;
6297 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6298 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6300 size_t bestIndex = SIZE_MAX;
6301 VkDeviceSize bestFreeSpaceAfter = 0;
6302 for(
size_t i = 0; i < MAX_COUNT; ++i)
6305 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6307 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6309 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6311 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6313 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6316 bestFreeSpaceAfter = freeSpaceAfter;
6322 if(bestIndex != SIZE_MAX)
6324 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6325 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6327 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6330 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6331 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6332 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6337 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6347 static const size_t MAX_COUNT = 4;
6351 size_t blockInfoIndex;
6352 VkDeviceSize offset;
6354 } m_FreeSpaces[MAX_COUNT];
6357 const bool m_OverlappingMoveSupported;
6359 uint32_t m_AllocationCount;
6360 bool m_AllAllocations;
6362 VkDeviceSize m_BytesMoved;
6363 uint32_t m_AllocationsMoved;
6365 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6367 void PreprocessMetadata();
6368 void PostprocessMetadata();
6369 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
6372 struct VmaBlockDefragmentationContext
6376 BLOCK_FLAG_USED = 0x00000001,
6381 VmaBlockDefragmentationContext() :
6383 hBuffer(VK_NULL_HANDLE)
6388 class VmaBlockVectorDefragmentationContext
6390 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6394 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6396 VmaBlockVectorDefragmentationContext(
6399 VmaBlockVector* pBlockVector,
6400 uint32_t currFrameIndex,
6402 ~VmaBlockVectorDefragmentationContext();
6404 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6405 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6406 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6408 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6409 void AddAll() { m_AllAllocations =
true; }
6411 void Begin(
bool overlappingMoveSupported);
6418 VmaBlockVector*
const m_pBlockVector;
6419 const uint32_t m_CurrFrameIndex;
6420 const uint32_t m_AlgorithmFlags;
6422 VmaDefragmentationAlgorithm* m_pAlgorithm;
6430 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6431 bool m_AllAllocations;
6434 struct VmaDefragmentationContext_T
6437 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6439 VmaDefragmentationContext_T(
6441 uint32_t currFrameIndex,
6444 ~VmaDefragmentationContext_T();
6446 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6447 void AddAllocations(
6448 uint32_t allocationCount,
6450 VkBool32* pAllocationsChanged);
6458 VkResult Defragment(
6459 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6460 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6465 const uint32_t m_CurrFrameIndex;
6466 const uint32_t m_Flags;
6469 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6471 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
6474 #if VMA_RECORDING_ENABLED 6481 void WriteConfiguration(
6482 const VkPhysicalDeviceProperties& devProps,
6483 const VkPhysicalDeviceMemoryProperties& memProps,
6484 bool dedicatedAllocationExtensionEnabled);
6487 void RecordCreateAllocator(uint32_t frameIndex);
6488 void RecordDestroyAllocator(uint32_t frameIndex);
6489 void RecordCreatePool(uint32_t frameIndex,
6492 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6493 void RecordAllocateMemory(uint32_t frameIndex,
6494 const VkMemoryRequirements& vkMemReq,
6497 void RecordAllocateMemoryPages(uint32_t frameIndex,
6498 const VkMemoryRequirements& vkMemReq,
6500 uint64_t allocationCount,
6502 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6503 const VkMemoryRequirements& vkMemReq,
6504 bool requiresDedicatedAllocation,
6505 bool prefersDedicatedAllocation,
6508 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6509 const VkMemoryRequirements& vkMemReq,
6510 bool requiresDedicatedAllocation,
6511 bool prefersDedicatedAllocation,
6514 void RecordFreeMemory(uint32_t frameIndex,
6516 void RecordFreeMemoryPages(uint32_t frameIndex,
6517 uint64_t allocationCount,
6519 void RecordResizeAllocation(
6520 uint32_t frameIndex,
6522 VkDeviceSize newSize);
6523 void RecordSetAllocationUserData(uint32_t frameIndex,
6525 const void* pUserData);
6526 void RecordCreateLostAllocation(uint32_t frameIndex,
6528 void RecordMapMemory(uint32_t frameIndex,
6530 void RecordUnmapMemory(uint32_t frameIndex,
6532 void RecordFlushAllocation(uint32_t frameIndex,
6533 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6534 void RecordInvalidateAllocation(uint32_t frameIndex,
6535 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6536 void RecordCreateBuffer(uint32_t frameIndex,
6537 const VkBufferCreateInfo& bufCreateInfo,
6540 void RecordCreateImage(uint32_t frameIndex,
6541 const VkImageCreateInfo& imageCreateInfo,
6544 void RecordDestroyBuffer(uint32_t frameIndex,
6546 void RecordDestroyImage(uint32_t frameIndex,
6548 void RecordTouchAllocation(uint32_t frameIndex,
6550 void RecordGetAllocationInfo(uint32_t frameIndex,
6552 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6554 void RecordDefragmentationBegin(uint32_t frameIndex,
6557 void RecordDefragmentationEnd(uint32_t frameIndex,
6567 class UserDataString
6571 const char* GetString()
const {
return m_Str; }
6581 VMA_MUTEX m_FileMutex;
6583 int64_t m_StartCounter;
6585 void GetBasicParams(CallParams& outParams);
6588 template<
typename T>
6589 void PrintPointerList(uint64_t count,
const T* pItems)
6593 fprintf(m_File,
"%p", pItems[0]);
6594 for(uint64_t i = 1; i < count; ++i)
6596 fprintf(m_File,
" %p", pItems[i]);
6601 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
6605 #endif // #if VMA_RECORDING_ENABLED 6610 class VmaAllocationObjectAllocator
6612 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
6614 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
6621 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
6625 struct VmaAllocator_T
6627 VMA_CLASS_NO_COPY(VmaAllocator_T)
6630 bool m_UseKhrDedicatedAllocation;
6632 bool m_AllocationCallbacksSpecified;
6633 VkAllocationCallbacks m_AllocationCallbacks;
6635 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
6638 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6639 VMA_MUTEX m_HeapSizeLimitMutex;
6641 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6642 VkPhysicalDeviceMemoryProperties m_MemProps;
6645 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
6648 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6649 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6650 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
6656 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6658 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6662 return m_VulkanFunctions;
6665 VkDeviceSize GetBufferImageGranularity()
const 6668 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6669 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6672 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6673 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6675 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6677 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6678 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6681 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6683 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6684 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6687 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6689 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6690 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6691 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6694 bool IsIntegratedGpu()
const 6696 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6699 #if VMA_RECORDING_ENABLED 6700 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6703 void GetBufferMemoryRequirements(
6705 VkMemoryRequirements& memReq,
6706 bool& requiresDedicatedAllocation,
6707 bool& prefersDedicatedAllocation)
const;
6708 void GetImageMemoryRequirements(
6710 VkMemoryRequirements& memReq,
6711 bool& requiresDedicatedAllocation,
6712 bool& prefersDedicatedAllocation)
const;
6715 VkResult AllocateMemory(
6716 const VkMemoryRequirements& vkMemReq,
6717 bool requiresDedicatedAllocation,
6718 bool prefersDedicatedAllocation,
6719 VkBuffer dedicatedBuffer,
6720 VkImage dedicatedImage,
6722 VmaSuballocationType suballocType,
6723 size_t allocationCount,
6728 size_t allocationCount,
6731 VkResult ResizeAllocation(
6733 VkDeviceSize newSize);
6735 void CalculateStats(
VmaStats* pStats);
6737 #if VMA_STATS_STRING_ENABLED 6738 void PrintDetailedMap(
class VmaJsonWriter& json);
6741 VkResult DefragmentationBegin(
6745 VkResult DefragmentationEnd(
6752 void DestroyPool(
VmaPool pool);
6755 void SetCurrentFrameIndex(uint32_t frameIndex);
6756 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6758 void MakePoolAllocationsLost(
6760 size_t* pLostAllocationCount);
6761 VkResult CheckPoolCorruption(
VmaPool hPool);
6762 VkResult CheckCorruption(uint32_t memoryTypeBits);
6766 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6767 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6772 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6773 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6775 void FlushOrInvalidateAllocation(
6777 VkDeviceSize offset, VkDeviceSize size,
6778 VMA_CACHE_OPERATION op);
6780 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6786 uint32_t GetGpuDefragmentationMemoryTypeBits();
6789 VkDeviceSize m_PreferredLargeHeapBlockSize;
6791 VkPhysicalDevice m_PhysicalDevice;
6792 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6793 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
6795 VMA_RW_MUTEX m_PoolsMutex;
6797 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6798 uint32_t m_NextPoolId;
6802 #if VMA_RECORDING_ENABLED 6803 VmaRecorder* m_pRecorder;
6808 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
6810 VkResult AllocateMemoryOfType(
6812 VkDeviceSize alignment,
6813 bool dedicatedAllocation,
6814 VkBuffer dedicatedBuffer,
6815 VkImage dedicatedImage,
6817 uint32_t memTypeIndex,
6818 VmaSuballocationType suballocType,
6819 size_t allocationCount,
6823 VkResult AllocateDedicatedMemoryPage(
6825 VmaSuballocationType suballocType,
6826 uint32_t memTypeIndex,
6827 const VkMemoryAllocateInfo& allocInfo,
6829 bool isUserDataString,
6834 VkResult AllocateDedicatedMemory(
6836 VmaSuballocationType suballocType,
6837 uint32_t memTypeIndex,
6839 bool isUserDataString,
6841 VkBuffer dedicatedBuffer,
6842 VkImage dedicatedImage,
6843 size_t allocationCount,
6853 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
6859 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6861 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6864 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6866 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
6869 template<
typename T>
6872 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6875 template<
typename T>
6876 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6878 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
6881 template<
typename T>
6882 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6887 VmaFree(hAllocator, ptr);
6891 template<
typename T>
6892 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6896 for(
size_t i = count; i--; )
6898 VmaFree(hAllocator, ptr);
6905 #if VMA_STATS_STRING_ENABLED 6907 class VmaStringBuilder
6910 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6911 size_t GetLength()
const {
return m_Data.size(); }
6912 const char* GetData()
const {
return m_Data.data(); }
6914 void Add(
char ch) { m_Data.push_back(ch); }
6915 void Add(
const char* pStr);
6916 void AddNewLine() { Add(
'\n'); }
6917 void AddNumber(uint32_t num);
6918 void AddNumber(uint64_t num);
6919 void AddPointer(
const void* ptr);
6922 VmaVector< char, VmaStlAllocator<char> > m_Data;
6925 void VmaStringBuilder::Add(
const char* pStr)
6927 const size_t strLen = strlen(pStr);
6930 const size_t oldCount = m_Data.size();
6931 m_Data.resize(oldCount + strLen);
6932 memcpy(m_Data.data() + oldCount, pStr, strLen);
6936 void VmaStringBuilder::AddNumber(uint32_t num)
6939 VmaUint32ToStr(buf,
sizeof(buf), num);
6943 void VmaStringBuilder::AddNumber(uint64_t num)
6946 VmaUint64ToStr(buf,
sizeof(buf), num);
6950 void VmaStringBuilder::AddPointer(
const void* ptr)
6953 VmaPtrToStr(buf,
sizeof(buf), ptr);
6957 #endif // #if VMA_STATS_STRING_ENABLED 6962 #if VMA_STATS_STRING_ENABLED 6966 VMA_CLASS_NO_COPY(VmaJsonWriter)
6968 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
6971 void BeginObject(
bool singleLine =
false);
6974 void BeginArray(
bool singleLine =
false);
6977 void WriteString(
const char* pStr);
6978 void BeginString(
const char* pStr = VMA_NULL);
6979 void ContinueString(
const char* pStr);
6980 void ContinueString(uint32_t n);
6981 void ContinueString(uint64_t n);
6982 void ContinueString_Pointer(
const void* ptr);
6983 void EndString(
const char* pStr = VMA_NULL);
6985 void WriteNumber(uint32_t n);
6986 void WriteNumber(uint64_t n);
6987 void WriteBool(
bool b);
6991 static const char*
const INDENT;
6993 enum COLLECTION_TYPE
6995 COLLECTION_TYPE_OBJECT,
6996 COLLECTION_TYPE_ARRAY,
7000 COLLECTION_TYPE type;
7001 uint32_t valueCount;
7002 bool singleLineMode;
7005 VmaStringBuilder& m_SB;
7006 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
7007 bool m_InsideString;
7009 void BeginValue(
bool isString);
7010 void WriteIndent(
bool oneLess =
false);
// Indentation unit (two spaces) emitted once per nesting level by WriteIndent().
const char* const VmaJsonWriter::INDENT = "  ";
7015 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
7017 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
7018 m_InsideString(false)
7022 VmaJsonWriter::~VmaJsonWriter()
7024 VMA_ASSERT(!m_InsideString);
7025 VMA_ASSERT(m_Stack.empty());
7028 void VmaJsonWriter::BeginObject(
bool singleLine)
7030 VMA_ASSERT(!m_InsideString);
7036 item.type = COLLECTION_TYPE_OBJECT;
7037 item.valueCount = 0;
7038 item.singleLineMode = singleLine;
7039 m_Stack.push_back(item);
7042 void VmaJsonWriter::EndObject()
7044 VMA_ASSERT(!m_InsideString);
7049 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
7053 void VmaJsonWriter::BeginArray(
bool singleLine)
7055 VMA_ASSERT(!m_InsideString);
7061 item.type = COLLECTION_TYPE_ARRAY;
7062 item.valueCount = 0;
7063 item.singleLineMode = singleLine;
7064 m_Stack.push_back(item);
7067 void VmaJsonWriter::EndArray()
7069 VMA_ASSERT(!m_InsideString);
7074 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
7078 void VmaJsonWriter::WriteString(
const char* pStr)
7084 void VmaJsonWriter::BeginString(
const char* pStr)
7086 VMA_ASSERT(!m_InsideString);
7090 m_InsideString =
true;
7091 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7093 ContinueString(pStr);
7097 void VmaJsonWriter::ContinueString(
const char* pStr)
7099 VMA_ASSERT(m_InsideString);
7101 const size_t strLen = strlen(pStr);
7102 for(
size_t i = 0; i < strLen; ++i)
7135 VMA_ASSERT(0 &&
"Character not currently supported.");
7141 void VmaJsonWriter::ContinueString(uint32_t n)
7143 VMA_ASSERT(m_InsideString);
7147 void VmaJsonWriter::ContinueString(uint64_t n)
7149 VMA_ASSERT(m_InsideString);
7153 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7155 VMA_ASSERT(m_InsideString);
7156 m_SB.AddPointer(ptr);
7159 void VmaJsonWriter::EndString(
const char* pStr)
7161 VMA_ASSERT(m_InsideString);
7162 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7164 ContinueString(pStr);
7167 m_InsideString =
false;
7170 void VmaJsonWriter::WriteNumber(uint32_t n)
7172 VMA_ASSERT(!m_InsideString);
7177 void VmaJsonWriter::WriteNumber(uint64_t n)
7179 VMA_ASSERT(!m_InsideString);
7184 void VmaJsonWriter::WriteBool(
bool b)
7186 VMA_ASSERT(!m_InsideString);
7188 m_SB.Add(b ?
"true" :
"false");
7191 void VmaJsonWriter::WriteNull()
7193 VMA_ASSERT(!m_InsideString);
7198 void VmaJsonWriter::BeginValue(
bool isString)
7200 if(!m_Stack.empty())
7202 StackItem& currItem = m_Stack.back();
7203 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7204 currItem.valueCount % 2 == 0)
7206 VMA_ASSERT(isString);
7209 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7210 currItem.valueCount % 2 != 0)
7214 else if(currItem.valueCount > 0)
7223 ++currItem.valueCount;
7227 void VmaJsonWriter::WriteIndent(
bool oneLess)
7229 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7233 size_t count = m_Stack.size();
7234 if(count > 0 && oneLess)
7238 for(
size_t i = 0; i < count; ++i)
7245 #endif // #if VMA_STATS_STRING_ENABLED 7249 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7251 if(IsUserDataString())
7253 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7255 FreeUserDataString(hAllocator);
7257 if(pUserData != VMA_NULL)
7259 const char*
const newStrSrc = (
char*)pUserData;
7260 const size_t newStrLen = strlen(newStrSrc);
7261 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7262 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7263 m_pUserData = newStrDst;
7268 m_pUserData = pUserData;
7272 void VmaAllocation_T::ChangeBlockAllocation(
7274 VmaDeviceMemoryBlock* block,
7275 VkDeviceSize offset)
7277 VMA_ASSERT(block != VMA_NULL);
7278 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7281 if(block != m_BlockAllocation.m_Block)
7283 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7284 if(IsPersistentMap())
7286 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7287 block->Map(hAllocator, mapRefCount, VMA_NULL);
7290 m_BlockAllocation.m_Block = block;
7291 m_BlockAllocation.m_Offset = offset;
7294 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7296 VMA_ASSERT(newSize > 0);
7300 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7302 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7303 m_BlockAllocation.m_Offset = newOffset;
7306 VkDeviceSize VmaAllocation_T::GetOffset()
const 7310 case ALLOCATION_TYPE_BLOCK:
7311 return m_BlockAllocation.m_Offset;
7312 case ALLOCATION_TYPE_DEDICATED:
7320 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7324 case ALLOCATION_TYPE_BLOCK:
7325 return m_BlockAllocation.m_Block->GetDeviceMemory();
7326 case ALLOCATION_TYPE_DEDICATED:
7327 return m_DedicatedAllocation.m_hMemory;
7330 return VK_NULL_HANDLE;
7334 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7338 case ALLOCATION_TYPE_BLOCK:
7339 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7340 case ALLOCATION_TYPE_DEDICATED:
7341 return m_DedicatedAllocation.m_MemoryTypeIndex;
7348 void* VmaAllocation_T::GetMappedData()
const 7352 case ALLOCATION_TYPE_BLOCK:
7355 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7356 VMA_ASSERT(pBlockData != VMA_NULL);
7357 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7364 case ALLOCATION_TYPE_DEDICATED:
7365 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7366 return m_DedicatedAllocation.m_pMappedData;
7373 bool VmaAllocation_T::CanBecomeLost()
const 7377 case ALLOCATION_TYPE_BLOCK:
7378 return m_BlockAllocation.m_CanBecomeLost;
7379 case ALLOCATION_TYPE_DEDICATED:
7387 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7389 VMA_ASSERT(CanBecomeLost());
7395 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7398 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7403 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7409 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
7419 #if VMA_STATS_STRING_ENABLED 7422 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
7431 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7433 json.WriteString(
"Type");
7434 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7436 json.WriteString(
"Size");
7437 json.WriteNumber(m_Size);
7439 if(m_pUserData != VMA_NULL)
7441 json.WriteString(
"UserData");
7442 if(IsUserDataString())
7444 json.WriteString((
const char*)m_pUserData);
7449 json.ContinueString_Pointer(m_pUserData);
7454 json.WriteString(
"CreationFrameIndex");
7455 json.WriteNumber(m_CreationFrameIndex);
7457 json.WriteString(
"LastUseFrameIndex");
7458 json.WriteNumber(GetLastUseFrameIndex());
7460 if(m_BufferImageUsage != 0)
7462 json.WriteString(
"Usage");
7463 json.WriteNumber(m_BufferImageUsage);
7469 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7471 VMA_ASSERT(IsUserDataString());
7472 if(m_pUserData != VMA_NULL)
7474 char*
const oldStr = (
char*)m_pUserData;
7475 const size_t oldStrLen = strlen(oldStr);
7476 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7477 m_pUserData = VMA_NULL;
7481 void VmaAllocation_T::BlockAllocMap()
7483 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7485 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7491 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
7495 void VmaAllocation_T::BlockAllocUnmap()
7497 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7499 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7505 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
7509 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7511 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7515 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7517 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7518 *ppData = m_DedicatedAllocation.m_pMappedData;
7524 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7525 return VK_ERROR_MEMORY_MAP_FAILED;
7530 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7531 hAllocator->m_hDevice,
7532 m_DedicatedAllocation.m_hMemory,
7537 if(result == VK_SUCCESS)
7539 m_DedicatedAllocation.m_pMappedData = *ppData;
7546 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7548 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7550 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7555 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7556 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7557 hAllocator->m_hDevice,
7558 m_DedicatedAllocation.m_hMemory);
7563 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
7567 #if VMA_STATS_STRING_ENABLED 7569 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7573 json.WriteString(
"Blocks");
7576 json.WriteString(
"Allocations");
7579 json.WriteString(
"UnusedRanges");
7582 json.WriteString(
"UsedBytes");
7585 json.WriteString(
"UnusedBytes");
7590 json.WriteString(
"AllocationSize");
7591 json.BeginObject(
true);
7592 json.WriteString(
"Min");
7594 json.WriteString(
"Avg");
7596 json.WriteString(
"Max");
7603 json.WriteString(
"UnusedRangeSize");
7604 json.BeginObject(
true);
7605 json.WriteString(
"Min");
7607 json.WriteString(
"Avg");
7609 json.WriteString(
"Max");
7617 #endif // #if VMA_STATS_STRING_ENABLED 7619 struct VmaSuballocationItemSizeLess
7622 const VmaSuballocationList::iterator lhs,
7623 const VmaSuballocationList::iterator rhs)
const 7625 return lhs->size < rhs->size;
7628 const VmaSuballocationList::iterator lhs,
7629 VkDeviceSize rhsSize)
const 7631 return lhs->size < rhsSize;
7639 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7641 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
7645 #if VMA_STATS_STRING_ENABLED 7647 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7648 VkDeviceSize unusedBytes,
7649 size_t allocationCount,
7650 size_t unusedRangeCount)
const 7654 json.WriteString(
"TotalBytes");
7655 json.WriteNumber(GetSize());
7657 json.WriteString(
"UnusedBytes");
7658 json.WriteNumber(unusedBytes);
7660 json.WriteString(
"Allocations");
7661 json.WriteNumber((uint64_t)allocationCount);
7663 json.WriteString(
"UnusedRanges");
7664 json.WriteNumber((uint64_t)unusedRangeCount);
7666 json.WriteString(
"Suballocations");
7670 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7671 VkDeviceSize offset,
7674 json.BeginObject(
true);
7676 json.WriteString(
"Offset");
7677 json.WriteNumber(offset);
7679 hAllocation->PrintParameters(json);
7684 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7685 VkDeviceSize offset,
7686 VkDeviceSize size)
const 7688 json.BeginObject(
true);
7690 json.WriteString(
"Offset");
7691 json.WriteNumber(offset);
7693 json.WriteString(
"Type");
7694 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7696 json.WriteString(
"Size");
7697 json.WriteNumber(size);
7702 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7708 #endif // #if VMA_STATS_STRING_ENABLED 7713 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7714 VmaBlockMetadata(hAllocator),
7717 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7718 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7722 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
7726 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7728 VmaBlockMetadata::Init(size);
7731 m_SumFreeSize = size;
7733 VmaSuballocation suballoc = {};
7734 suballoc.offset = 0;
7735 suballoc.size = size;
7736 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7737 suballoc.hAllocation = VK_NULL_HANDLE;
7739 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7740 m_Suballocations.push_back(suballoc);
7741 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7743 m_FreeSuballocationsBySize.push_back(suballocItem);
7746 bool VmaBlockMetadata_Generic::Validate()
const 7748 VMA_VALIDATE(!m_Suballocations.empty());
7751 VkDeviceSize calculatedOffset = 0;
7753 uint32_t calculatedFreeCount = 0;
7755 VkDeviceSize calculatedSumFreeSize = 0;
7758 size_t freeSuballocationsToRegister = 0;
7760 bool prevFree =
false;
7762 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7763 suballocItem != m_Suballocations.cend();
7766 const VmaSuballocation& subAlloc = *suballocItem;
7769 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7771 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
7773 VMA_VALIDATE(!prevFree || !currFree);
7775 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7779 calculatedSumFreeSize += subAlloc.size;
7780 ++calculatedFreeCount;
7781 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7783 ++freeSuballocationsToRegister;
7787 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7791 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7792 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
7795 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7798 calculatedOffset += subAlloc.size;
7799 prevFree = currFree;
7804 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7806 VkDeviceSize lastSize = 0;
7807 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7809 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7812 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7814 VMA_VALIDATE(suballocItem->size >= lastSize);
7816 lastSize = suballocItem->size;
7820 VMA_VALIDATE(ValidateFreeSuballocationList());
7821 VMA_VALIDATE(calculatedOffset == GetSize());
7822 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7823 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
7828 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7830 if(!m_FreeSuballocationsBySize.empty())
7832 return m_FreeSuballocationsBySize.back()->size;
7840 bool VmaBlockMetadata_Generic::IsEmpty()
const 7842 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
7845 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7849 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7861 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7862 suballocItem != m_Suballocations.cend();
7865 const VmaSuballocation& suballoc = *suballocItem;
7866 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7879 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7881 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7883 inoutStats.
size += GetSize();
7890 #if VMA_STATS_STRING_ENABLED 7892 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7894 PrintDetailedMap_Begin(json,
7896 m_Suballocations.size() - (size_t)m_FreeCount,
7900 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7901 suballocItem != m_Suballocations.cend();
7902 ++suballocItem, ++i)
7904 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7906 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7910 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7914 PrintDetailedMap_End(json);
// Searches this block for a place to put a new allocation of allocSize/allocAlignment.
// On success fills *pAllocationRequest (offset, target item, cost bookkeeping) and
// returns true; does NOT modify block state — the caller later commits via Alloc().
// Three strategies: best-fit over m_FreeSuballocationsBySize (sorted by size),
// min-offset linear scan, and worst-fit (largest-first); if that fails and
// canMakeOtherLost is set, brute-force search counting allocations that could be
// sacrificed ("made lost"), picking the candidate with the lowest CalcCost().
// NOTE(review): garbled text — original line numbers fused in; the strategy
// dispatch (if/else on `strategy`), CheckAllocation argument lists, brace
// structure and final returns are elided. Restore from pinned vk_mem_alloc.h.
7917 #endif // #if VMA_STATS_STRING_ENABLED 7919 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7920 uint32_t currentFrameIndex,
7921 uint32_t frameInUseCount,
7922 VkDeviceSize bufferImageGranularity,
7923 VkDeviceSize allocSize,
7924 VkDeviceSize allocAlignment,
7926 VmaSuballocationType allocType,
7927 bool canMakeOtherLost,
7929 VmaAllocationRequest* pAllocationRequest)
7931 VMA_ASSERT(allocSize > 0);
// upperAddress (linear-allocator feature) is not supported by the generic metadata.
7932 VMA_ASSERT(!upperAddress);
7933 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7934 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7935 VMA_HEAVY_ASSERT(Validate());
7937 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Early out: without the option to evict, there must be enough total free space
// (including a debug margin on both sides) before any per-item search is worthwhile.
7940 if(canMakeOtherLost ==
false &&
7941 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7947 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7948 if(freeSuballocCount > 0)
// Best-fit: binary search the size-sorted vector for the first free range that
// could possibly hold allocSize + margins, then probe forward.
7953 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7954 m_FreeSuballocationsBySize.data(),
7955 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7956 allocSize + 2 * VMA_DEBUG_MARGIN,
7957 VmaSuballocationItemSizeLess());
7958 size_t index = it - m_FreeSuballocationsBySize.data();
7959 for(; index < freeSuballocCount; ++index)
7964 bufferImageGranularity,
7968 m_FreeSuballocationsBySize[index],
7970 &pAllocationRequest->offset,
7971 &pAllocationRequest->itemsToMakeLostCount,
7972 &pAllocationRequest->sumFreeSize,
7973 &pAllocationRequest->sumItemSize))
7975 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Min-offset strategy: scan suballocations in address order, take the first fit.
7980 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7982 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7983 it != m_Suballocations.end();
7986 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
7989 bufferImageGranularity,
7995 &pAllocationRequest->offset,
7996 &pAllocationRequest->itemsToMakeLostCount,
7997 &pAllocationRequest->sumFreeSize,
7998 &pAllocationRequest->sumItemSize))
8000 pAllocationRequest->item = it;
// Worst-fit: iterate the size-sorted vector backwards (largest free range first).
8008 for(
size_t index = freeSuballocCount; index--; )
8013 bufferImageGranularity,
8017 m_FreeSuballocationsBySize[index],
8019 &pAllocationRequest->offset,
8020 &pAllocationRequest->itemsToMakeLostCount,
8021 &pAllocationRequest->sumFreeSize,
8022 &pAllocationRequest->sumItemSize))
8024 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Fallback: allow evicting "lost-able" allocations. Brute-force every start item,
// keep the candidate with the lowest cost (fewest/smallest allocations sacrificed).
8031 if(canMakeOtherLost)
8036 VmaAllocationRequest tmpAllocRequest = {};
8037 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
8038 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
8039 suballocIt != m_Suballocations.end();
8042 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
8043 suballocIt->hAllocation->CanBecomeLost())
8048 bufferImageGranularity,
8054 &tmpAllocRequest.offset,
8055 &tmpAllocRequest.itemsToMakeLostCount,
8056 &tmpAllocRequest.sumFreeSize,
8057 &tmpAllocRequest.sumItemSize))
8061 *pAllocationRequest = tmpAllocRequest;
8062 pAllocationRequest->item = suballocIt;
8065 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
8067 *pAllocationRequest = tmpAllocRequest;
8068 pAllocationRequest->item = suballocIt;
8081 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
8082 uint32_t currentFrameIndex,
8083 uint32_t frameInUseCount,
8084 VmaAllocationRequest* pAllocationRequest)
8086 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
8088 while(pAllocationRequest->itemsToMakeLostCount > 0)
8090 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
8092 ++pAllocationRequest->item;
8094 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8095 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
8096 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8097 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8099 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
8100 --pAllocationRequest->itemsToMakeLostCount;
8108 VMA_HEAVY_ASSERT(Validate());
8109 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8110 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
8115 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8117 uint32_t lostAllocationCount = 0;
8118 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8119 it != m_Suballocations.end();
8122 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8123 it->hAllocation->CanBecomeLost() &&
8124 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8126 it = FreeSuballocation(it);
8127 ++lostAllocationCount;
8130 return lostAllocationCount;
8133 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8135 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8136 it != m_Suballocations.end();
8139 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
8141 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8143 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8144 return VK_ERROR_VALIDATION_FAILED_EXT;
8146 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8148 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8149 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: converts the chosen free
// suballocation into a used one, splitting off free "padding" items before and/or
// after it when the request does not consume the whole range, and updates
// m_FreeCount / m_SumFreeSize accordingly.
// NOTE(review): garbled text — original line numbers fused in; the `hAllocation`
// parameter line, the `if(paddingEnd > VMA_DEBUG_MARGIN)` / `if(paddingBegin > ...)`
// headers and the counter-adjustment branches (orig. 8212-8218) are elided.
8157 void VmaBlockMetadata_Generic::Alloc(
8158 const VmaAllocationRequest& request,
8159 VmaSuballocationType type,
8160 VkDeviceSize allocSize,
8163 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
8164 VMA_ASSERT(request.item != m_Suballocations.end());
8165 VmaSuballocation& suballoc = *request.item;
// The target must still be free; CreateAllocationRequest chose it without mutating state.
8167 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8169 VMA_ASSERT(request.offset >= suballoc.offset);
// Space left in the free range before the aligned offset and after the allocation.
8170 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8171 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8172 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the size-sorted free vector before mutating size/type.
8176 UnregisterFreeSuballocation(request.item);
8178 suballoc.offset = request.offset;
8179 suballoc.size = allocSize;
8180 suballoc.type = type;
8181 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation inserted after the item.
8186 VmaSuballocation paddingSuballoc = {};
8187 paddingSuballoc.offset = request.offset + allocSize;
8188 paddingSuballoc.size = paddingEnd;
8189 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8190 VmaSuballocationList::iterator next = request.item;
8192 const VmaSuballocationList::iterator paddingEndItem =
8193 m_Suballocations.insert(next, paddingSuballoc);
8194 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation inserted before the item.
8200 VmaSuballocation paddingSuballoc = {};
8201 paddingSuballoc.offset = request.offset - paddingBegin;
8202 paddingSuballoc.size = paddingBegin;
8203 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8204 const VmaSuballocationList::iterator paddingBeginItem =
8205 m_Suballocations.insert(request.item, paddingSuballoc);
8206 RegisterFreeSuballocation(paddingBeginItem);
// One free range was consumed; each non-empty padding adds one back (branches elided).
8210 m_FreeCount = m_FreeCount - 1;
8211 if(paddingBegin > 0)
8219 m_SumFreeSize -= allocSize;
8222 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8224 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8225 suballocItem != m_Suballocations.end();
8228 VmaSuballocation& suballoc = *suballocItem;
8229 if(suballoc.hAllocation == allocation)
8231 FreeSuballocation(suballocItem);
8232 VMA_HEAVY_ASSERT(Validate());
8236 VMA_ASSERT(0 &&
"Not found!");
8239 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8241 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8242 suballocItem != m_Suballocations.end();
8245 VmaSuballocation& suballoc = *suballocItem;
8246 if(suballoc.offset == offset)
8248 FreeSuballocation(suballocItem);
8252 VMA_ASSERT(0 &&
"Not found!");
// Attempts to grow or shrink `alloc` in place to newSize without moving it.
// Returns true on success. Shrinking always succeeds (the reclaimed tail becomes
// or merges into a free suballocation); growing succeeds only if the next
// suballocation is free and large enough to absorb the difference.
// NOTE(review): garbled text — original line numbers fused in; `++nextItem`,
// several if/else headers, `return true` statements and closing braces are
// elided. Restore from the project's pinned vk_mem_alloc.h.
8255 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
8257 typedef VmaSuballocationList::iterator iter_type;
// Locate the suballocation that owns `alloc` by handle.
8258 for(iter_type suballocItem = m_Suballocations.begin();
8259 suballocItem != m_Suballocations.end();
8262 VmaSuballocation& suballoc = *suballocItem;
8263 if(suballoc.hAllocation == alloc)
8265 iter_type nextItem = suballocItem;
8269 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// --- Shrink path ---
8272 if(newSize < alloc->GetSize())
8274 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8277 if(nextItem != m_Suballocations.end())
// Next item is free: extend it backwards over the reclaimed bytes
// (re-register because its size changes in the sorted free vector).
8280 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8283 UnregisterFreeSuballocation(nextItem)
8284 nextItem->offset -= sizeDiff;
8285 nextItem->size += sizeDiff;
8286 RegisterFreeSuballocation(nextItem);
// Next item is used: insert a brand-new free suballocation for the tail.
8292 VmaSuballocation newFreeSuballoc;
8293 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8294 newFreeSuballoc.offset = suballoc.offset + newSize;
8295 newFreeSuballoc.size = sizeDiff;
8296 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8297 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8298 RegisterFreeSuballocation(newFreeSuballocIt);
// This allocation was the last item: append the free tail at the end.
8307 VmaSuballocation newFreeSuballoc;
8308 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8309 newFreeSuballoc.offset = suballoc.offset + newSize;
8310 newFreeSuballoc.size = sizeDiff;
8311 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8312 m_Suballocations.push_back(newFreeSuballoc);
8314 iter_type newFreeSuballocIt = m_Suballocations.end();
8315 RegisterFreeSuballocation(--newFreeSuballocIt);
8320 suballoc.size = newSize;
8321 m_SumFreeSize += sizeDiff;
// --- Grow path ---
8326 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8329 if(nextItem != m_Suballocations.end())
8332 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Not enough free space right after the allocation (incl. debug margin) -> fail.
8335 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
// Next free item larger than needed: shrink it from the front.
8341 if(nextItem->size > sizeDiff)
8344 UnregisterFreeSuballocation(nextItem);
8345 nextItem->offset += sizeDiff;
8346 nextItem->size -= sizeDiff;
8347 RegisterFreeSuballocation(nextItem);
// Next free item exactly consumed: remove it entirely.
8353 UnregisterFreeSuballocation(nextItem);
8354 m_Suballocations.erase(nextItem);
8370 suballoc.size = newSize;
8371 m_SumFreeSize -= sizeDiff;
8378 VMA_ASSERT(0 &&
"Not found!");
8382 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8384 VkDeviceSize lastSize = 0;
8385 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8387 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8389 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8390 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8391 VMA_VALIDATE(it->size >= lastSize);
8392 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment/allocType can be placed
// starting at `suballocItem`. On success writes the final aligned *pOffset and, in
// the canMakeOtherLost path, how many existing allocations would have to be made
// lost (*itemsToMakeLostCount) plus the free/used byte sums used for cost ranking.
// Read-only with respect to block state. Two major paths: with and without
// permission to evict ("make lost") existing allocations.
// NOTE(review): garbled text — original line numbers fused in; many early
// `return false` branches, loop increments/decrements and closing braces are
// elided. Restore from the project's pinned vk_mem_alloc.h.
8397 bool VmaBlockMetadata_Generic::CheckAllocation(
8398 uint32_t currentFrameIndex,
8399 uint32_t frameInUseCount,
8400 VkDeviceSize bufferImageGranularity,
8401 VkDeviceSize allocSize,
8402 VkDeviceSize allocAlignment,
8403 VmaSuballocationType allocType,
8404 VmaSuballocationList::const_iterator suballocItem,
8405 bool canMakeOtherLost,
8406 VkDeviceSize* pOffset,
8407 size_t* itemsToMakeLostCount,
8408 VkDeviceSize* pSumFreeSize,
8409 VkDeviceSize* pSumItemSize)
const 8411 VMA_ASSERT(allocSize > 0);
8412 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8413 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8414 VMA_ASSERT(pOffset != VMA_NULL);
8416 *itemsToMakeLostCount = 0;
// ---- Path 1: eviction allowed — the start item may itself be a lost-able allocation.
8420 if(canMakeOtherLost)
8422 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8424 *pSumFreeSize = suballocItem->size;
// Start item is used: it counts toward the request only if it can be made lost
// and is old enough (last use more than frameInUseCount frames ago).
8428 if(suballocItem->hAllocation->CanBecomeLost() &&
8429 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8431 ++*itemsToMakeLostCount;
8432 *pSumItemSize = suballocItem->size;
// Reject if the block simply ends too soon after this item's offset.
8441 if(GetSize() - suballocItem->offset < allocSize)
8447 *pOffset = suballocItem->offset;
// Reserve the leading debug margin, then align up to the requested alignment.
8450 if(VMA_DEBUG_MARGIN > 0)
8452 *pOffset += VMA_DEBUG_MARGIN;
8456 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// bufferImageGranularity: if a previous suballocation of conflicting type shares
// the same "page", bump the offset up to the granularity boundary.
8460 if(bufferImageGranularity > 1)
8462 bool bufferImageGranularityConflict =
false;
8463 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8464 while(prevSuballocItem != m_Suballocations.cbegin())
8467 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8468 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8470 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8472 bufferImageGranularityConflict =
true;
8480 if(bufferImageGranularityConflict)
8482 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
8488 if(*pOffset >= suballocItem->offset + suballocItem->size)
8494 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8497 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8499 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8501 if(suballocItem->offset + totalSize > GetSize())
// The request may span several consecutive items; walk forward accumulating
// free space and lost-able allocations until totalSize is covered.
8508 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8509 if(totalSize > suballocItem->size)
8511 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8512 while(remainingSize > 0)
8515 if(lastSuballocItem == m_Suballocations.cend())
8519 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8521 *pSumFreeSize += lastSuballocItem->size;
8525 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8526 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8527 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8529 ++*itemsToMakeLostCount;
8530 *pSumItemSize += lastSuballocItem->size;
8537 remainingSize = (lastSuballocItem->size < remainingSize) ?
8538 remainingSize - lastSuballocItem->size : 0;
// Conflicting later neighbors on the same granularity page must also be lost-able.
8544 if(bufferImageGranularity > 1)
8546 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8548 while(nextSuballocItem != m_Suballocations.cend())
8550 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8551 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8553 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8555 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8556 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8557 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8559 ++*itemsToMakeLostCount;
// ---- Path 2: no eviction — the start item must be a single free range that fits.
8578 const VmaSuballocation& suballoc = *suballocItem;
8579 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8581 *pSumFreeSize = suballoc.size;
8584 if(suballoc.size < allocSize)
8590 *pOffset = suballoc.offset;
8593 if(VMA_DEBUG_MARGIN > 0)
8595 *pOffset += VMA_DEBUG_MARGIN;
8599 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same granularity adjustment against preceding suballocations as in path 1.
8603 if(bufferImageGranularity > 1)
8605 bool bufferImageGranularityConflict =
false;
8606 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8607 while(prevSuballocItem != m_Suballocations.cbegin())
8610 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8611 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8613 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8615 bufferImageGranularityConflict =
true;
8623 if(bufferImageGranularityConflict)
8625 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8630 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8633 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fail if padding + allocation + end margin exceed this single free range.
8636 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A conflicting later neighbor on the same page makes this placement invalid.
8643 if(bufferImageGranularity > 1)
8645 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8647 while(nextSuballocItem != m_Suballocations.cend())
8649 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8650 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8652 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8671 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8673 VMA_ASSERT(item != m_Suballocations.end());
8674 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8676 VmaSuballocationList::iterator nextItem = item;
8678 VMA_ASSERT(nextItem != m_Suballocations.end());
8679 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8681 item->size += nextItem->size;
8683 m_Suballocations.erase(nextItem);
8686 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8689 VmaSuballocation& suballoc = *suballocItem;
8690 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8691 suballoc.hAllocation = VK_NULL_HANDLE;
8695 m_SumFreeSize += suballoc.size;
8698 bool mergeWithNext =
false;
8699 bool mergeWithPrev =
false;
8701 VmaSuballocationList::iterator nextItem = suballocItem;
8703 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8705 mergeWithNext =
true;
8708 VmaSuballocationList::iterator prevItem = suballocItem;
8709 if(suballocItem != m_Suballocations.begin())
8712 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8714 mergeWithPrev =
true;
8720 UnregisterFreeSuballocation(nextItem);
8721 MergeFreeWithNext(suballocItem);
8726 UnregisterFreeSuballocation(prevItem);
8727 MergeFreeWithNext(prevItem);
8728 RegisterFreeSuballocation(prevItem);
8733 RegisterFreeSuballocation(suballocItem);
8734 return suballocItem;
8738 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8740 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8741 VMA_ASSERT(item->size > 0);
8745 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8747 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8749 if(m_FreeSuballocationsBySize.empty())
8751 m_FreeSuballocationsBySize.push_back(item);
8755 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
8763 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8765 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8766 VMA_ASSERT(item->size > 0);
8770 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8772 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8774 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8775 m_FreeSuballocationsBySize.data(),
8776 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8778 VmaSuballocationItemSizeLess());
8779 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8780 index < m_FreeSuballocationsBySize.size();
8783 if(m_FreeSuballocationsBySize[index] == item)
8785 VmaVectorRemove(m_FreeSuballocationsBySize, index);
8788 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8790 VMA_ASSERT(0 &&
"Not found.");
8796 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8797 VkDeviceSize bufferImageGranularity,
8798 VmaSuballocationType& inOutPrevSuballocType)
const 8800 if(bufferImageGranularity == 1 || IsEmpty())
8805 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8806 bool typeConflictFound =
false;
8807 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8808 it != m_Suballocations.cend();
8811 const VmaSuballocationType suballocType = it->type;
8812 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8814 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8815 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8817 typeConflictFound =
true;
8819 inOutPrevSuballocType = suballocType;
8823 return typeConflictFound || minAlignment >= bufferImageGranularity;
8829 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8830 VmaBlockMetadata(hAllocator),
8832 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8833 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8834 m_1stVectorIndex(0),
8835 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8836 m_1stNullItemsBeginCount(0),
8837 m_1stNullItemsMiddleCount(0),
8838 m_2ndNullItemsCount(0)
8842 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
8846 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8848 VmaBlockMetadata::Init(size);
8849 m_SumFreeSize = size;
// Full consistency check of the linear allocator's double-vector representation:
// validates null-item counters, offset monotonicity across the 2nd vector
// (ring-buffer part), the 1st vector, and the 2nd vector again (double-stack
// part, iterated top-down), and finally that m_SumFreeSize matches size - used.
// Returns true if every VMA_VALIDATE condition holds.
// NOTE(review): garbled text — original line numbers fused in; the `else`
// branches counting null items, `++nullItem*Count` updates, loop braces and the
// final `return true` are elided. Restore from the pinned vk_mem_alloc.h.
8852 bool VmaBlockMetadata_Linear::Validate()
const 8854 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8855 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Mode/emptiness invariants for the 2nd vector.
8857 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8858 VMA_VALIDATE(!suballocations1st.empty() ||
8859 suballocations2nd.empty() ||
8860 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8862 if(!suballocations1st.empty())
// First non-null and last items of the 1st vector must be real allocations.
8865 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8867 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8869 if(!suballocations2nd.empty())
8872 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
8875 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8876 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8878 VkDeviceSize sumUsedSize = 0;
8879 const size_t suballoc1stCount = suballocations1st.size();
8880 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the space before the 1st vector's start.
8882 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8884 const size_t suballoc2ndCount = suballocations2nd.size();
8885 size_t nullItem2ndCount = 0;
8886 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8888 const VmaSuballocation& suballoc = suballocations2nd[i];
8889 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8891 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8892 VMA_VALIDATE(suballoc.offset >= offset);
8896 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8897 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8898 sumUsedSize += suballoc.size;
8905 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8908 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must be genuinely empty.
8911 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8913 const VmaSuballocation& suballoc = suballocations1st[i];
8914 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8915 suballoc.hAllocation == VK_NULL_HANDLE);
8918 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Remaining 1st-vector items: offsets ascending, used sizes accumulated.
8920 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8922 const VmaSuballocation& suballoc = suballocations1st[i];
8923 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8925 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8926 VMA_VALIDATE(suballoc.offset >= offset);
8927 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8931 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8932 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8933 sumUsedSize += suballoc.size;
8940 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8942 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows from the top of the block; iterate top-down.
8944 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8946 const size_t suballoc2ndCount = suballocations2nd.size();
8947 size_t nullItem2ndCount = 0;
8948 for(
size_t i = suballoc2ndCount; i--; )
8950 const VmaSuballocation& suballoc = suballocations2nd[i];
8951 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8953 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8954 VMA_VALIDATE(suballoc.offset >= offset);
8958 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8959 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8960 sumUsedSize += suballoc.size;
8967 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8970 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Final global invariants.
8973 VMA_VALIDATE(offset <= GetSize());
8974 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
8979 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8981 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8982 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous unused range in this linear block,
// computed per 2nd-vector mode: space before the first / after the last item
// (EMPTY), the gap between ring tail and 1st-vector head (RING_BUFFER), or the
// gap between the two stacks (DOUBLE_STACK).
// NOTE(review): garbled text — original line numbers fused in; the empty-block
// early return, VMA_MAX wrapper around the two EMPTY-mode candidates, default
// case and closing braces are elided. Restore from the pinned vk_mem_alloc.h.
8985 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8987 const VkDeviceSize size = GetSize();
8999 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9001 switch(m_2ndVectorMode)
9003 case SECOND_VECTOR_EMPTY:
// Largest of: space before the first used item, space after the last item.
9009 const size_t suballocations1stCount = suballocations1st.size();
9010 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
9011 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9012 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
9014 firstSuballoc.offset,
9015 size - (lastSuballoc.offset + lastSuballoc.size));
9019 case SECOND_VECTOR_RING_BUFFER:
// Gap between the end of the 2nd (wrapped) vector and the start of the 1st.
9024 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9025 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
9026 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
9027 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
9031 case SECOND_VECTOR_DOUBLE_STACK:
// Gap between the top of the bottom stack and the bottom of the top stack.
9036 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9037 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9038 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9039 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with per-block statistics (allocation count, unused-range count,
// min/max/total sizes) by walking the address space in offset order: first the
// ring-buffer part of the 2nd vector, then the 1st vector, then the double-stack
// part of the 2nd vector (iterated top-down), registering every gap as an unused
// range. Null items (hAllocation == VK_NULL_HANDLE) are skipped.
// NOTE(review): garbled text — original line numbers fused in; VmaInitStatInfo /
// accumulation calls, `else { break; }` loop exits and closing braces are elided.
// Restore from the pinned vk_mem_alloc.h.
9049 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9051 const VkDeviceSize size = GetSize();
9052 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9053 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9054 const size_t suballoc1stCount = suballocations1st.size();
9055 const size_t suballoc2ndCount = suballocations2nd.size();
// lastOffset tracks the end of the last range processed, in address order.
9066 VkDeviceSize lastOffset = 0;
// --- Pass 1: ring-buffer part of the 2nd vector (addresses below the 1st vector).
9068 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9070 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9071 size_t nextAlloc2ndIndex = 0;
9072 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null items to reach the next real allocation.
9075 while(nextAlloc2ndIndex < suballoc2ndCount &&
9076 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9078 ++nextAlloc2ndIndex;
9082 if(nextAlloc2ndIndex < suballoc2ndCount)
9084 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9087 if(lastOffset < suballoc.offset)
9090 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9104 lastOffset = suballoc.offset + suballoc.size;
9105 ++nextAlloc2ndIndex;
// Trailing gap up to the 1st vector's first item.
9111 if(lastOffset < freeSpace2ndTo1stEnd)
9113 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9121 lastOffset = freeSpace2ndTo1stEnd;
// --- Pass 2: the 1st vector, up to the bottom of the top stack (or block end).
9126 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9127 const VkDeviceSize freeSpace1stTo2ndEnd =
9128 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9129 while(lastOffset < freeSpace1stTo2ndEnd)
9132 while(nextAlloc1stIndex < suballoc1stCount &&
9133 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9135 ++nextAlloc1stIndex;
9139 if(nextAlloc1stIndex < suballoc1stCount)
9141 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9144 if(lastOffset < suballoc.offset)
9147 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9161 lastOffset = suballoc.offset + suballoc.size;
9162 ++nextAlloc1stIndex;
9168 if(lastOffset < freeSpace1stTo2ndEnd)
9170 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9178 lastOffset = freeSpace1stTo2ndEnd;
// --- Pass 3: double-stack part of the 2nd vector, iterated from the top index down.
9182 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9184 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9185 while(lastOffset < size)
9188 while(nextAlloc2ndIndex != SIZE_MAX &&
9189 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9191 --nextAlloc2ndIndex;
9195 if(nextAlloc2ndIndex != SIZE_MAX)
9197 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9200 if(lastOffset < suballoc.offset)
9203 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9217 lastOffset = suballoc.offset + suballoc.size;
9218 --nextAlloc2ndIndex;
9224 if(lastOffset < size)
9226 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this linear block's statistics into inoutStats. Same three-pass,
// address-ordered walk as CalcAllocationStatInfo (ring-buffer part of the 2nd
// vector, then the 1st vector, then the double-stack part top-down), but adding
// to pool-level counters instead of per-block stat info.
// NOTE(review): garbled text — original line numbers fused in; the counter
// updates (allocationCount / unusedRangeCount / unusedSize accumulation),
// `else { break; }` exits and closing braces are elided. Restore from the
// pinned vk_mem_alloc.h. Also note orig. 9257 initializes nextAlloc2ndIndex
// with m_1stNullItemsBeginCount (upstream quirk) — confirm against the pinned
// version rather than "fixing" it blindly.
9242 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9244 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9245 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9246 const VkDeviceSize size = GetSize();
9247 const size_t suballoc1stCount = suballocations1st.size();
9248 const size_t suballoc2ndCount = suballocations2nd.size();
9250 inoutStats.
size += size;
9252 VkDeviceSize lastOffset = 0;
// --- Pass 1: ring-buffer part of the 2nd vector.
9254 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9256 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9257 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9258 while(lastOffset < freeSpace2ndTo1stEnd)
9261 while(nextAlloc2ndIndex < suballoc2ndCount &&
9262 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9264 ++nextAlloc2ndIndex;
9268 if(nextAlloc2ndIndex < suballoc2ndCount)
9270 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9273 if(lastOffset < suballoc.offset)
9276 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9287 lastOffset = suballoc.offset + suballoc.size;
9288 ++nextAlloc2ndIndex;
9293 if(lastOffset < freeSpace2ndTo1stEnd)
9296 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9303 lastOffset = freeSpace2ndTo1stEnd;
// --- Pass 2: the 1st vector.
9308 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9309 const VkDeviceSize freeSpace1stTo2ndEnd =
9310 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9311 while(lastOffset < freeSpace1stTo2ndEnd)
9314 while(nextAlloc1stIndex < suballoc1stCount &&
9315 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9317 ++nextAlloc1stIndex;
9321 if(nextAlloc1stIndex < suballoc1stCount)
9323 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9326 if(lastOffset < suballoc.offset)
9329 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9340 lastOffset = suballoc.offset + suballoc.size;
9341 ++nextAlloc1stIndex;
9346 if(lastOffset < freeSpace1stTo2ndEnd)
9349 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9356 lastOffset = freeSpace1stTo2ndEnd;
// --- Pass 3: double-stack part of the 2nd vector, top index down.
9360 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9362 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9363 while(lastOffset < size)
9366 while(nextAlloc2ndIndex != SIZE_MAX &&
9367 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9369 --nextAlloc2ndIndex;
9373 if(nextAlloc2ndIndex != SIZE_MAX)
9375 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9378 if(lastOffset < suballoc.offset)
9381 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9392 lastOffset = suballoc.offset + suballoc.size;
9393 --nextAlloc2ndIndex;
9398 if(lastOffset < size)
9401 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Dumps this linear-algorithm block's layout as JSON (compiled only when
// VMA_STATS_STRING_ENABLED). Pass 1 walks the suballocation vectors to count
// allocations, unused ranges and used bytes so PrintDetailedMap_Begin can be
// called with totals; pass 2 re-walks the same regions and prints every
// allocation / unused range via the PrintDetailedMap_* helpers.
// NOTE(review): numeric tokens at line starts are residue of the original
// file's line numbering, and several lines (braces, counter increments) are
// missing from this excerpt — comments below describe only visible code.
9414 #if VMA_STATS_STRING_ENABLED 9415 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9417 const VkDeviceSize size = GetSize();
9418 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9419 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9420 const size_t suballoc1stCount = suballocations1st.size();
9421 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: statistics only.
9425 size_t unusedRangeCount = 0;
9426 VkDeviceSize usedBytes = 0;
9428 VkDeviceSize lastOffset = 0;
9430 size_t alloc2ndCount = 0;
// Region 1: 2nd vector in ring-buffer mode occupies [0, first live 1st item).
9431 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9433 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9434 size_t nextAlloc2ndIndex = 0;
9435 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) items.
9438 while(nextAlloc2ndIndex < suballoc2ndCount &&
9439 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9441 ++nextAlloc2ndIndex;
9445 if(nextAlloc2ndIndex < suballoc2ndCount)
9447 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9450 if(lastOffset < suballoc.offset)
9459 usedBytes += suballoc.size;
9462 lastOffset = suballoc.offset + suballoc.size;
9463 ++nextAlloc2ndIndex;
9468 if(lastOffset < freeSpace2ndTo1stEnd)
9475 lastOffset = freeSpace2ndTo1stEnd;
// Region 2: the 1st vector, up to block end or to the top of the 2nd stack.
9480 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9481 size_t alloc1stCount = 0;
9482 const VkDeviceSize freeSpace1stTo2ndEnd =
9483 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9484 while(lastOffset < freeSpace1stTo2ndEnd)
9487 while(nextAlloc1stIndex < suballoc1stCount &&
9488 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9490 ++nextAlloc1stIndex;
9494 if(nextAlloc1stIndex < suballoc1stCount)
9496 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9499 if(lastOffset < suballoc.offset)
9508 usedBytes += suballoc.size;
9511 lastOffset = suballoc.offset + suballoc.size;
9512 ++nextAlloc1stIndex;
9517 if(lastOffset < size)
9524 lastOffset = freeSpace1stTo2ndEnd;
// Region 3: 2nd vector in double-stack mode, iterated back-to-front since it
// grows downward from the end of the block.
9528 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9530 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9531 while(lastOffset < size)
9534 while(nextAlloc2ndIndex != SIZE_MAX &&
9535 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9537 --nextAlloc2ndIndex;
9541 if(nextAlloc2ndIndex != SIZE_MAX)
9543 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9546 if(lastOffset < suballoc.offset)
9555 usedBytes += suballoc.size;
9558 lastOffset = suballoc.offset + suballoc.size;
9559 --nextAlloc2ndIndex;
9564 if(lastOffset < size)
9576 const VkDeviceSize unusedBytes = size - usedBytes;
9577 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// SECOND PASS: identical traversal, now emitting JSON items.
9582 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9584 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9585 size_t nextAlloc2ndIndex = 0;
9586 while(lastOffset < freeSpace2ndTo1stEnd)
9589 while(nextAlloc2ndIndex < suballoc2ndCount &&
9590 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9592 ++nextAlloc2ndIndex;
9596 if(nextAlloc2ndIndex < suballoc2ndCount)
9598 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9601 if(lastOffset < suballoc.offset)
9604 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9605 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9610 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9613 lastOffset = suballoc.offset + suballoc.size;
9614 ++nextAlloc2ndIndex;
9619 if(lastOffset < freeSpace2ndTo1stEnd)
9622 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9623 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9627 lastOffset = freeSpace2ndTo1stEnd;
9632 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9633 while(lastOffset < freeSpace1stTo2ndEnd)
9636 while(nextAlloc1stIndex < suballoc1stCount &&
9637 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9639 ++nextAlloc1stIndex;
9643 if(nextAlloc1stIndex < suballoc1stCount)
9645 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9648 if(lastOffset < suballoc.offset)
9651 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9652 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9657 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9660 lastOffset = suballoc.offset + suballoc.size;
9661 ++nextAlloc1stIndex;
9666 if(lastOffset < freeSpace1stTo2ndEnd)
9669 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9670 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9674 lastOffset = freeSpace1stTo2ndEnd;
9678 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9680 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9681 while(lastOffset < size)
9684 while(nextAlloc2ndIndex != SIZE_MAX &&
9685 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9687 --nextAlloc2ndIndex;
9691 if(nextAlloc2ndIndex != SIZE_MAX)
9693 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9696 if(lastOffset < suballoc.offset)
9699 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9700 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9705 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9708 lastOffset = suballoc.offset + suballoc.size;
9709 --nextAlloc2ndIndex;
9714 if(lastOffset < size)
9717 const VkDeviceSize unusedRangeSize = size - lastOffset;
9718 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9727 PrintDetailedMap_End(json);
// Entry point for finding a place for a new allocation in a linear block.
// Validates the request, then dispatches to the upper-address (double-stack
// top) or lower-address variant with identical arguments.
// NOTE(review): parameters `upperAddress` and `strategy` (original lines
// 9737/9740) are referenced below but their declaration lines are missing
// from this excerpt — confirm against the full file.
9729 #endif // #if VMA_STATS_STRING_ENABLED 9731 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9732 uint32_t currentFrameIndex,
9733 uint32_t frameInUseCount,
9734 VkDeviceSize bufferImageGranularity,
9735 VkDeviceSize allocSize,
9736 VkDeviceSize allocAlignment,
9738 VmaSuballocationType allocType,
9739 bool canMakeOtherLost,
9741 VmaAllocationRequest* pAllocationRequest)
// Preconditions: non-empty allocation of a concrete type, valid out-pointer.
9743 VMA_ASSERT(allocSize > 0);
9744 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9745 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9746 VMA_HEAVY_ASSERT(Validate());
9747 return upperAddress ?
9748 CreateAllocationRequest_UpperAddress(
9749 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9750 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
9751 CreateAllocationRequest_LowerAddress(
9752 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9753 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block (double-stack mode):
// the 2nd vector grows downward from the end of the block. Computes a
// candidate offset below the current stack top, aligns it downward, checks
// bufferImageGranularity conflicts against neighbors, and fills
// *pAllocationRequest on success.
// NOTE(review): some lines (braces, `return false;` paths) are elided here.
9756 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9757 uint32_t currentFrameIndex,
9758 uint32_t frameInUseCount,
9759 VkDeviceSize bufferImageGranularity,
9760 VkDeviceSize allocSize,
9761 VkDeviceSize allocAlignment,
9762 VmaSuballocationType allocType,
9763 bool canMakeOtherLost,
9765 VmaAllocationRequest* pAllocationRequest)
9767 const VkDeviceSize size = GetSize();
9768 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9769 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocation is only valid in double-stack mode.
9771 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9773 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9778 if(allocSize > size)
// Start just below the current top of the down-growing 2nd stack.
9782 VkDeviceSize resultBaseOffset = size - allocSize;
9783 if(!suballocations2nd.empty())
9785 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9786 resultBaseOffset = lastSuballoc.offset - allocSize;
9787 if(allocSize > lastSuballoc.offset)
9794 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin and alignment, both downward (toward lower addresses).
9797 if(VMA_DEBUG_MARGIN > 0)
9799 if(resultOffset < VMA_DEBUG_MARGIN)
9803 resultOffset -= VMA_DEBUG_MARGIN;
9807 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Check granularity conflicts with already-placed 2nd-stack neighbors above.
9811 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9813 bool bufferImageGranularityConflict =
false;
9814 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9816 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9817 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9819 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9821 bufferImageGranularityConflict =
true;
9829 if(bufferImageGranularityConflict)
9831 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// The candidate must not overlap the end of the 1st (up-growing) vector.
9836 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9837 suballocations1st.back().offset + suballocations1st.back().size :
9839 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
9843 if(bufferImageGranularity > 1)
9845 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9847 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9848 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9850 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: describe the placement for the caller.
9864 pAllocationRequest->offset = resultOffset;
9865 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9866 pAllocationRequest->sumItemSize = 0;
9868 pAllocationRequest->itemsToMakeLostCount = 0;
9869 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place an allocation at a lower address: either at the end of the
// 1st vector (growing up, while the 2nd vector is empty or a double stack),
// or at the end of the 2nd vector when it is used as a ring buffer wrapping
// before the 1st. May also count older allocations that could be "made lost"
// to make room when canMakeOtherLost is set.
// NOTE(review): several lines (braces, `return`s, index1st increments) are
// elided from this excerpt; comments describe only the visible statements.
9876 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
9877 uint32_t currentFrameIndex,
9878 uint32_t frameInUseCount,
9879 VkDeviceSize bufferImageGranularity,
9880 VkDeviceSize allocSize,
9881 VkDeviceSize allocAlignment,
9882 VmaSuballocationType allocType,
9883 bool canMakeOtherLost,
9885 VmaAllocationRequest* pAllocationRequest)
9887 const VkDeviceSize size = GetSize();
9888 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9889 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Case A: try to append at the end of the 1st vector.
9891 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9895 VkDeviceSize resultBaseOffset = 0;
9896 if(!suballocations1st.empty())
9898 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9899 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9903 VkDeviceSize resultOffset = resultBaseOffset;
// Debug margin and alignment, applied upward.
9906 if(VMA_DEBUG_MARGIN > 0)
9908 resultOffset += VMA_DEBUG_MARGIN;
9912 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflicts with previous 1st-vector allocations.
9916 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9918 bool bufferImageGranularityConflict =
false;
9919 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9921 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9922 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9924 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9926 bufferImageGranularityConflict =
true;
9934 if(bufferImageGranularityConflict)
9936 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd stack (double-stack) or block end.
9940 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9941 suballocations2nd.back().offset : size;
9944 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
9948 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9950 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9952 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9953 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9955 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success at the end of the 1st vector.
9969 pAllocationRequest->offset = resultOffset;
9970 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9971 pAllocationRequest->sumItemSize = 0;
9973 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
9974 pAllocationRequest->itemsToMakeLostCount = 0;
// Case B: append at the end of the 2nd vector (starting/continuing a ring
// buffer that wraps around before the live part of the 1st vector).
9981 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9983 VMA_ASSERT(!suballocations1st.empty());
9985 VkDeviceSize resultBaseOffset = 0;
9986 if(!suballocations2nd.empty())
9988 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9989 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9993 VkDeviceSize resultOffset = resultBaseOffset;
9996 if(VMA_DEBUG_MARGIN > 0)
9998 resultOffset += VMA_DEBUG_MARGIN;
10002 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
10006 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10008 bool bufferImageGranularityConflict =
false;
10009 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
10011 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
10012 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10014 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10016 bufferImageGranularityConflict =
true;
10024 if(bufferImageGranularityConflict)
10026 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10030 pAllocationRequest->itemsToMakeLostCount = 0;
10031 pAllocationRequest->sumItemSize = 0;
10032 size_t index1st = m_1stNullItemsBeginCount;
// Optionally count 1st-vector allocations overlapping the candidate range
// that are old enough (by frame index) to be sacrificed ("made lost").
10034 if(canMakeOtherLost)
10036 while(index1st < suballocations1st.size() &&
10037 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10040 const VmaSuballocation& suballoc = suballocations1st[index1st];
10041 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10047 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10048 if(suballoc.hAllocation->CanBecomeLost() &&
10049 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10051 ++pAllocationRequest->itemsToMakeLostCount;
10052 pAllocationRequest->sumItemSize += suballoc.size;
// Also sacrifice items that merely share a granularity page with the range.
10064 if(bufferImageGranularity > 1)
10066 while(index1st < suballocations1st.size())
10068 const VmaSuballocation& suballoc = suballocations1st[index1st];
10069 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10071 if(suballoc.hAllocation != VK_NULL_HANDLE)
10074 if(suballoc.hAllocation->CanBecomeLost() &&
10075 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10077 ++pAllocationRequest->itemsToMakeLostCount;
10078 pAllocationRequest->sumItemSize += suballoc.size;
// Wrapping past the end of the whole 1st vector is not supported here.
10096 if(index1st == suballocations1st.size() &&
10097 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10100 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// The range must fit before the next surviving 1st-vector item (or block end).
10105 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10106 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
10110 if(bufferImageGranularity > 1)
10112 for(
size_t nextSuballocIndex = index1st;
10113 nextSuballocIndex < suballocations1st.size();
10114 nextSuballocIndex++)
10116 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10117 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10119 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success at the end of the 2nd vector.
10133 pAllocationRequest->offset = resultOffset;
10134 pAllocationRequest->sumFreeSize =
10135 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10137 - pAllocationRequest->sumItemSize;
10138 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Makes the allocations counted by CreateAllocationRequest_LowerAddress
// actually lost: walks the 1st vector (switching to the 2nd in ring-buffer
// mode when the 1st is exhausted), calls MakeLost() on each live item, and
// converts it to a free/null entry while updating the null-item counters and
// m_SumFreeSize. Calls CleanupAfterFree() at the end.
// NOTE(review): braces, `return` statements and the index increment are
// elided from this excerpt.
10147 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10148 uint32_t currentFrameIndex,
10149 uint32_t frameInUseCount,
10150 VmaAllocationRequest* pAllocationRequest)
// Nothing to do when the request needs no sacrifices.
10152 if(pAllocationRequest->itemsToMakeLostCount == 0)
10157 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10160 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10161 size_t index = m_1stNullItemsBeginCount;
10162 size_t madeLostCount = 0;
10163 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// Past the end of the 1st vector: continue in the 2nd (ring-buffer mode).
10165 if(index == suballocations->size())
10169 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10171 suballocations = &AccessSuballocations2nd();
10175 VMA_ASSERT(!suballocations->empty());
10177 VmaSuballocation& suballoc = (*suballocations)[index];
10178 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10180 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10181 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10182 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Turn the item into a null/free entry and account for it.
10184 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10185 suballoc.hAllocation = VK_NULL_HANDLE;
10186 m_SumFreeSize += suballoc.size;
10187 if(suballocations == &AccessSuballocations1st())
10189 ++m_1stNullItemsMiddleCount;
10193 ++m_2ndNullItemsCount;
10205 CleanupAfterFree();
// Makes lost every allocation in this block that is allowed to become lost
// and is older than frameInUseCount frames. Scans both suballocation vectors,
// nulls out each lost item, updates the null-item counters and m_SumFreeSize,
// and returns the number of allocations made lost.
10211 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10213 uint32_t lostAllocationCount = 0;
// 1st vector: only the live tail past the leading null items.
10215 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10216 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10218 VmaSuballocation& suballoc = suballocations1st[i];
10219 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10220 suballoc.hAllocation->CanBecomeLost() &&
10221 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10223 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10224 suballoc.hAllocation = VK_NULL_HANDLE;
10225 ++m_1stNullItemsMiddleCount;
10226 m_SumFreeSize += suballoc.size;
10227 ++lostAllocationCount;
// 2nd vector: full scan.
10231 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10232 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10234 VmaSuballocation& suballoc = suballocations2nd[i];
10235 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10236 suballoc.hAllocation->CanBecomeLost() &&
10237 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10239 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10240 suballoc.hAllocation = VK_NULL_HANDLE;
10241 ++m_2ndNullItemsCount;
10242 m_SumFreeSize += suballoc.size;
10243 ++lostAllocationCount;
// Compact / normalize the vectors if anything was freed.
10247 if(lostAllocationCount)
10249 CleanupAfterFree();
10252 return lostAllocationCount;
// Validates the debug-margin magic values written before and after every live
// allocation in both suballocation vectors. pBlockData points at the mapped
// start of the block's memory. Returns VK_ERROR_VALIDATION_FAILED_EXT on the
// first corrupted margin (the final success `return` is elided here).
10255 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10257 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10258 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10260 const VmaSuballocation& suballoc = suballocations1st[i];
10261 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Magic value just below the allocation (inside the debug margin).
10263 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10265 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10266 return VK_ERROR_VALIDATION_FAILED_EXT;
// Magic value just past the allocation's end.
10268 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10270 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10271 return VK_ERROR_VALIDATION_FAILED_EXT;
10276 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10277 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10279 const VmaSuballocation& suballoc = suballocations2nd[i];
10280 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10282 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10284 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10285 return VK_ERROR_VALIDATION_FAILED_EXT;
10287 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10289 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10290 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: appends the new
// suballocation to the vector indicated by request.type and updates the 2nd
// vector's mode and m_SumFreeSize accordingly.
// NOTE(review): `case` bodies' `break;` lines and closing braces are elided
// from this excerpt; the `hAllocation` parameter declaration (original line
// ~10302) is also missing — confirm against the full file.
10298 void VmaBlockMetadata_Linear::Alloc(
10299 const VmaAllocationRequest& request,
10300 VmaSuballocationType type,
10301 VkDeviceSize allocSize,
10304 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10306 switch(request.type)
// Placed at the top of the down-growing 2nd stack.
10308 case VmaAllocationRequestType::UpperAddress:
10310 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10311 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10312 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10313 suballocations2nd.push_back(newSuballoc);
10314 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Appended after the last item of the 1st vector.
10317 case VmaAllocationRequestType::EndOf1st:
10319 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10321 VMA_ASSERT(suballocations1st.empty() ||
10322 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10324 VMA_ASSERT(request.offset + allocSize <= GetSize());
10326 suballocations1st.push_back(newSuballoc);
// Appended to the 2nd vector as a ring buffer wrapping before the 1st.
10329 case VmaAllocationRequestType::EndOf2nd:
10331 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// Must end before the first live item of the 1st vector.
10333 VMA_ASSERT(!suballocations1st.empty() &&
10334 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10335 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10337 switch(m_2ndVectorMode)
10339 case SECOND_VECTOR_EMPTY:
10341 VMA_ASSERT(suballocations2nd.empty());
10342 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10344 case SECOND_VECTOR_RING_BUFFER:
10346 VMA_ASSERT(!suballocations2nd.empty());
10348 case SECOND_VECTOR_DOUBLE_STACK:
10349 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10355 suballocations2nd.push_back(newSuballoc);
10359 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10362 m_SumFreeSize -= newSuballoc.size;
10365 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10367 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation starting at `offset`. Fast paths handle the first
// live item of the 1st vector and the last item of either vector; otherwise a
// binary search (VmaVectorFindSorted) locates the item in the middle of the
// 1st or 2nd vector. Every path updates m_SumFreeSize, the null-item
// counters, and finishes with CleanupAfterFree().
// NOTE(review): braces and early `return`s are elided from this excerpt.
10370 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10372 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10373 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Fast path: first live item of the 1st vector.
10375 if(!suballocations1st.empty())
10378 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10379 if(firstSuballoc.offset == offset)
10381 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10382 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10383 m_SumFreeSize += firstSuballoc.size;
10384 ++m_1stNullItemsBeginCount;
10385 CleanupAfterFree();
// Fast path: last item of the 2nd vector (ring buffer or stack top).
10391 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10392 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10394 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10395 if(lastSuballoc.offset == offset)
10397 m_SumFreeSize += lastSuballoc.size;
10398 suballocations2nd.pop_back();
10399 CleanupAfterFree();
// Fast path: last item of the 1st vector when the 2nd is unused.
10404 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10406 VmaSuballocation& lastSuballoc = suballocations1st.back();
10407 if(lastSuballoc.offset == offset)
10409 m_SumFreeSize += lastSuballoc.size;
10410 suballocations1st.pop_back();
10411 CleanupAfterFree();
// Slow path: binary search in the live range of the 1st vector (sorted by
// increasing offset).
10418 VmaSuballocation refSuballoc;
10419 refSuballoc.offset = offset;
10421 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
10422 suballocations1st.begin() + m_1stNullItemsBeginCount,
10423 suballocations1st.end(),
10425 if(it != suballocations1st.end())
10427 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10428 it->hAllocation = VK_NULL_HANDLE;
10429 ++m_1stNullItemsMiddleCount;
10430 m_SumFreeSize += it->size;
10431 CleanupAfterFree();
10436 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
// Slow path: binary search in the 2nd vector. A ring buffer is sorted by
// increasing offset; a double stack by decreasing offset, hence the two
// comparator variants.
10439 VmaSuballocation refSuballoc;
10440 refSuballoc.offset = offset;
10442 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10443 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
10444 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
10445 if(it != suballocations2nd.end())
10447 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10448 it->hAllocation = VK_NULL_HANDLE;
10449 ++m_2ndNullItemsCount;
10450 m_SumFreeSize += it->size;
10451 CleanupAfterFree();
10456 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
10459 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10461 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10462 const size_t suballocCount = AccessSuballocations1st().size();
10463 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Normalizes internal state after any free: drops trailing/leading null items
// from both vectors, optionally compacts the 1st vector (ShouldCompact1st),
// and handles the transitions "2nd vector emptied" and "1st vector emptied
// while 2nd is a ring buffer" (in which case the two vectors swap roles via
// m_1stVectorIndex ^= 1).
// NOTE(review): the `IsEmpty()` guard and several braces/`++srcIndex` lines
// are elided from this excerpt.
10466 void VmaBlockMetadata_Linear::CleanupAfterFree()
10468 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10469 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Whole block empty: reset everything to the initial state.
10473 suballocations1st.clear();
10474 suballocations2nd.clear();
10475 m_1stNullItemsBeginCount = 0;
10476 m_1stNullItemsMiddleCount = 0;
10477 m_2ndNullItemsCount = 0;
10478 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10482 const size_t suballoc1stCount = suballocations1st.size();
10483 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10484 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Absorb null items at the start of the 1st vector into the begin-count.
10487 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10488 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10490 ++m_1stNullItemsBeginCount;
10491 --m_1stNullItemsMiddleCount;
// Pop null items from the end of the 1st vector.
10495 while(m_1stNullItemsMiddleCount > 0 &&
10496 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10498 --m_1stNullItemsMiddleCount;
10499 suballocations1st.pop_back();
// Pop null items from the end of the 2nd vector.
10503 while(m_2ndNullItemsCount > 0 &&
10504 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10506 --m_2ndNullItemsCount;
10507 suballocations2nd.pop_back();
// Remove null items from the start of the 2nd vector.
10511 while(m_2ndNullItemsCount > 0 &&
10512 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10514 --m_2ndNullItemsCount;
10515 suballocations2nd.remove(0);
// Optionally compact the 1st vector by shifting live items to the front.
10518 if(ShouldCompact1st())
10520 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10521 size_t srcIndex = m_1stNullItemsBeginCount;
10522 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10524 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10528 if(dstIndex != srcIndex)
10530 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10534 suballocations1st.resize(nonNullItemCount);
10535 m_1stNullItemsBeginCount = 0;
10536 m_1stNullItemsMiddleCount = 0;
// 2nd vector emptied: revert to the no-2nd-vector mode.
10540 if(suballocations2nd.empty())
10542 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items left.
10546 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10548 suballocations1st.clear();
10549 m_1stNullItemsBeginCount = 0;
10551 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// Ring buffer survives alone: swap the roles of the two vectors so the
// remaining items become the new 1st vector.
10554 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10555 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10556 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10557 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10559 ++m_1stNullItemsBeginCount;
10560 --m_1stNullItemsMiddleCount;
10562 m_2ndNullItemsCount = 0;
10563 m_1stVectorIndex ^= 1;
10568 VMA_HEAVY_ASSERT(Validate());
// Constructor of the buddy-allocator metadata: forwards the allocator handle
// to the base class, zero-initializes counters, and clears the per-level free
// lists. NOTE(review): part of the member-initializer list (e.g. m_Root,
// m_LevelCount — original lines 10577/10579-10581) is elided from this
// excerpt — confirm against the full file.
10575 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10576 VmaBlockMetadata(hAllocator),
10578 m_AllocationCount(0),
10582 memset(m_FreeList, 0,
sizeof(m_FreeList));
10585 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10587 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of `size` bytes. Only the
// largest power-of-two prefix of the block (m_UsableSize = VmaPrevPow2(size))
// is managed; the rest is "unusable". Determines the number of levels, then
// creates the root node covering the whole usable range and puts it on the
// level-0 free list.
// NOTE(review): the loop body incrementing m_LevelCount (original lines
// 10601-10602) is elided from this excerpt.
10590 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10592 VmaBlockMetadata::Init(size);
10594 m_UsableSize = VmaPrevPow2(size);
10595 m_SumFreeSize = m_UsableSize;
// Grow level count while nodes at the next level are still >= MIN_NODE_SIZE.
10599 while(m_LevelCount < MAX_LEVELS &&
10600 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Root node: free, spans the entire usable size, no parent/buddy.
10605 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10606 rootNode->offset = 0;
10607 rootNode->type = Node::TYPE_FREE;
10608 rootNode->parent = VMA_NULL;
10609 rootNode->buddy = VMA_NULL;
10612 AddToFreeListFront(0, rootNode);
// Full consistency check of the buddy metadata: validates the whole node tree
// (ValidateNode), cross-checks the allocation count and free-size totals
// against recomputed values, then verifies every per-level free list is a
// well-formed doubly linked list of TYPE_FREE nodes whose `back` pointer
// matches, and that levels beyond m_LevelCount are empty.
10615 bool VmaBlockMetadata_Buddy::Validate()
const 10618 ValidationContext ctx;
10619 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10621 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10623 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10624 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Validate free-list linkage per level.
10627 for(uint32_t level = 0; level < m_LevelCount; ++level)
10629 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10630 m_FreeList[level].front->free.prev == VMA_NULL)
10632 for(Node* node = m_FreeList[level].front;
10634 node = node->free.next)
10636 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10638 if(node->free.next == VMA_NULL)
10640 VMA_VALIDATE(m_FreeList[level].back == node);
10644 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels past the configured count must hold no nodes.
10650 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10652 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Returns the size of the largest free node: the first non-empty free list,
// scanned from the largest level (0) downward, determines the answer.
// NOTE(review): the fallback `return 0;` for a fully allocated block
// (original line ~10667) is elided from this excerpt.
10658 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10660 for(uint32_t level = 0; level < m_LevelCount; ++level)
10662 if(m_FreeList[level].front != VMA_NULL)
10664 return LevelToNodeSize(level);
// Fills `outInfo` with statistics for this buddy block by walking the node
// tree (CalcAllocationStatInfoNode) and then accounting for the unusable
// tail beyond the power-of-two usable size.
// NOTE(review): the outInfo initialization and the unusable-range accounting
// body (original lines 10673-10682, 10686-10691) are elided here.
10670 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10672 const VkDeviceSize unusableSize = GetUnusableSize();
10683 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10685 if(unusableSize > 0)
// Accumulates this block's contribution into pool-level statistics: the full
// block size, and unused bytes counted as free space plus the unusable tail.
// NOTE(review): the allocation/unused-range count updates and the
// unusable-range branch body (after line 10704) are elided here.
10694 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10696 const VkDeviceSize unusableSize = GetUnusableSize();
10698 inoutStats.
size += GetSize();
10699 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10704 if(unusableSize > 0)
// JSON dump of the buddy block (compiled only when VMA_STATS_STRING_ENABLED):
// computes statistics, opens the map, recursively prints the node tree, adds
// the unusable tail as one unused range, and closes the map.
// NOTE(review): the `stat` declaration and the argument lists of
// PrintDetailedMap_Begin / _UnusedRange are partially elided here.
10711 #if VMA_STATS_STRING_ENABLED 10713 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10717 CalcAllocationStatInfo(stat);
10719 PrintDetailedMap_Begin(
10725 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10727 const VkDeviceSize unusableSize = GetUnusableSize();
10728 if(unusableSize > 0)
10730 PrintDetailedMap_UnusedRange(json,
10735 PrintDetailedMap_End(json);
// Finds a free buddy node for the requested size: rounds size/alignment up to
// bufferImageGranularity for image-like allocation types, rejects requests
// larger than the usable size, then scans free lists from the tightest
// fitting level upward for a suitably aligned node. On success stores the
// node's level in pAllocationRequest->customData for Alloc() to split later.
// NOTE(review): the `upperAddress` parameter line (original ~10746) and the
// `return true;` / `return false;` lines are elided from this excerpt.
10738 #endif // #if VMA_STATS_STRING_ENABLED 10740 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10741 uint32_t currentFrameIndex,
10742 uint32_t frameInUseCount,
10743 VkDeviceSize bufferImageGranularity,
10744 VkDeviceSize allocSize,
10745 VkDeviceSize allocAlignment,
10747 VmaSuballocationType allocType,
10748 bool canMakeOtherLost,
10750 VmaAllocationRequest* pAllocationRequest)
10752 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservatively satisfy bufferImageGranularity for types that may be images.
10756 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10757 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10758 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10760 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10761 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10764 if(allocSize > m_UsableSize)
// Search from the smallest fitting node size toward larger ones.
10769 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10770 for(uint32_t level = targetLevel + 1; level--; )
10772 for(Node* freeNode = m_FreeList[level].front;
10773 freeNode != VMA_NULL;
10774 freeNode = freeNode->free.next)
10776 if(freeNode->offset % allocAlignment == 0)
10778 pAllocationRequest->type = VmaAllocationRequestType::Normal;
10779 pAllocationRequest->offset = freeNode->offset;
10780 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10781 pAllocationRequest->sumItemSize = 0;
10782 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember which level the chosen node lives on.
10783 pAllocationRequest->customData = (
void*)(uintptr_t)level;
10792 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10793 uint32_t currentFrameIndex,
10794 uint32_t frameInUseCount,
10795 VmaAllocationRequest* pAllocationRequest)
10801 return pAllocationRequest->itemsToMakeLostCount == 0;
10804 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation request: locates the chosen free node at the
// level recorded in request.customData, repeatedly splits it (creating
// left/right child pairs) until reaching the target level for allocSize,
// then marks the final node as TYPE_ALLOCATION and updates the counters.
// NOTE(review): the `hAllocation` parameter line (original ~10817), the
// `++currLevel` / left-child reselection details and closing braces are
// elided from this excerpt.
10813 void VmaBlockMetadata_Buddy::Alloc(
10814 const VmaAllocationRequest& request,
10815 VmaSuballocationType type,
10816 VkDeviceSize allocSize,
10819 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
10821 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10822 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the free node with the offset promised by CreateAllocationRequest.
10824 Node* currNode = m_FreeList[currLevel].front;
10825 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10826 while(currNode->offset != request.offset)
10828 currNode = currNode->free.next;
10829 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split until the node size matches the target level.
10833 while(currLevel < targetLevel)
10837 RemoveFromFreeList(currLevel, currNode);
10839 const uint32_t childrenLevel = currLevel + 1;
// Create the two buddies covering the halves of currNode.
10842 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10843 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10845 leftChild->offset = currNode->offset;
10846 leftChild->type = Node::TYPE_FREE;
10847 leftChild->parent = currNode;
10848 leftChild->buddy = rightChild;
10850 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10851 rightChild->type = Node::TYPE_FREE;
10852 rightChild->parent = currNode;
10853 rightChild->buddy = leftChild;
10856 currNode->type = Node::TYPE_SPLIT;
10857 currNode->split.leftChild = leftChild;
// Left child ends up at the front so the next iteration picks it.
10860 AddToFreeListFront(childrenLevel, rightChild);
10861 AddToFreeListFront(childrenLevel, leftChild);
10866 currNode = m_FreeList[currLevel].front;
10875 VMA_ASSERT(currLevel == targetLevel &&
10876 currNode != VMA_NULL &&
10877 currNode->type == Node::TYPE_FREE);
10878 RemoveFromFreeList(currLevel, currNode);
// Convert the node into an allocation and account for it.
10881 currNode->type = Node::TYPE_ALLOCATION;
10882 currNode->allocation.alloc = hAllocation;
10884 ++m_AllocationCount;
10886 m_SumFreeSize -= allocSize;
10889 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10891 if(node->type == Node::TYPE_SPLIT)
10893 DeleteNode(node->split.leftChild->buddy);
10894 DeleteNode(node->split.leftChild);
10897 vma_delete(GetAllocationCallbacks(), node);
// Recursively validates one buddy-tree node: parent/buddy linkage, then
// per-type invariants — free nodes add to the calculated free size,
// allocation nodes must hold a valid handle, split nodes must have two
// children at correct offsets that validate in turn.
// NOTE(review): the `switch(curr->type)` header, `break`s, default case and
// the final `return true;` are elided from this excerpt.
10900 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10902 VMA_VALIDATE(level < m_LevelCount);
10903 VMA_VALIDATE(curr->parent == parent);
// Only the root (no parent) may lack a buddy; buddies must be mutual.
10904 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10905 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10908 case Node::TYPE_FREE:
10910 ctx.calculatedSumFreeSize += levelNodeSize;
10911 ++ctx.calculatedFreeCount;
10913 case Node::TYPE_ALLOCATION:
10914 ++ctx.calculatedAllocationCount;
// Padding inside the node beyond the allocation's size counts as free.
10915 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10916 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10918 case Node::TYPE_SPLIT:
10920 const uint32_t childrenLevel = level + 1;
10921 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10922 const Node*
const leftChild = curr->split.leftChild;
10923 VMA_VALIDATE(leftChild != VMA_NULL);
10924 VMA_VALIDATE(leftChild->offset == curr->offset);
10925 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10927 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10929 const Node*
const rightChild = leftChild->buddy;
10930 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10931 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10933 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
10944 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10947 uint32_t level = 0;
10948 VkDeviceSize currLevelNodeSize = m_UsableSize;
10949 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
10950 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10953 currLevelNodeSize = nextLevelNodeSize;
10954 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at `offset`: walks the buddy tree from the root to the
// owning leaf, marks it free, then merges free buddies upward.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// braces, the `++level` in the descent loop, the removal from the free list,
// and the `node = parent; --level;` step of the merge loop are missing.
// Tokens below are kept verbatim.
10959 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10962 Node* node = m_Root;
10963 VkDeviceSize nodeOffset = 0;
10964 uint32_t level = 0;
10965 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend: at each split, go left if offset falls in the first half,
// otherwise go right and advance nodeOffset by the half size.
10966 while(node->type == Node::TYPE_SPLIT)
10968 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10969 if(offset < nodeOffset + nextLevelSize)
10971 node = node->split.leftChild;
10975 node = node->split.leftChild->buddy;
10976 nodeOffset += nextLevelSize;
10979 levelNodeSize = nextLevelSize;
// `alloc` may be VK_NULL_HANDLE when called for a "lost" allocation.
10982 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
10983 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
10986 --m_AllocationCount;
10987 m_SumFreeSize += alloc->GetSize();
10989 node->type = Node::TYPE_FREE;
// Merge loop: while the buddy is also free, delete both children and turn
// the parent into a free node (continues upward in elided lines).
10992 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
10994 RemoveFromFreeList(level, node->buddy);
10995 Node*
const parent = node->parent;
10997 vma_delete(GetAllocationCallbacks(), node->buddy);
10998 vma_delete(GetAllocationCallbacks(), node);
10999 parent->type = Node::TYPE_FREE;
// Finally re-insert the (possibly merged) free node at its level.
11007 AddToFreeListFront(level, node);
// Recursively accumulates per-node statistics (allocated ranges, unused
// ranges) into `outInfo`, visiting the buddy tree depth-first.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// the enclosing switch(node->type), braces, and the actual outInfo updates
// for the FREE/ALLOCATION cases are missing. Tokens below are kept verbatim.
11010 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 11014 case Node::TYPE_FREE:
11020 case Node::TYPE_ALLOCATION:
11022 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Internal fragmentation of the node is reported as an unused range.
11028 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11029 if(unusedRangeSize > 0)
// Split node: recurse into both half-sized children.
11038 case Node::TYPE_SPLIT:
11040 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11041 const Node*
const leftChild = node->split.leftChild;
11042 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11043 const Node*
const rightChild = leftChild->buddy;
11044 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
11052 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11054 VMA_ASSERT(node->type == Node::TYPE_FREE);
11057 Node*
const frontNode = m_FreeList[level].front;
11058 if(frontNode == VMA_NULL)
11060 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11061 node->free.prev = node->free.next = VMA_NULL;
11062 m_FreeList[level].front = m_FreeList[level].back = node;
11066 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11067 node->free.prev = VMA_NULL;
11068 node->free.next = frontNode;
11069 frontNode->free.prev = node;
11070 m_FreeList[level].front = node;
11074 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11076 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
11079 if(node->free.prev == VMA_NULL)
11081 VMA_ASSERT(m_FreeList[level].front == node);
11082 m_FreeList[level].front = node->free.next;
11086 Node*
const prevFreeNode = node->free.prev;
11087 VMA_ASSERT(prevFreeNode->free.next == node);
11088 prevFreeNode->free.next = node->free.next;
11092 if(node->free.next == VMA_NULL)
11094 VMA_ASSERT(m_FreeList[level].back == node);
11095 m_FreeList[level].back = node->free.prev;
11099 Node*
const nextFreeNode = node->free.next;
11100 VMA_ASSERT(nextFreeNode->free.prev == node);
11101 nextFreeNode->free.prev = node->free.prev;
// PrintDetailedMapNode: recursively emits JSON for one buddy-tree node —
// unused ranges for free nodes, allocation entries (plus trailing slack) for
// allocated nodes, and a recursion into both children for split nodes.
// Followed (fused on the same extracted lines) by the VmaDeviceMemoryBlock
// constructor, which only zero-initializes members.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// switch headers, braces, and some ctor initializer entries are missing.
// Tokens below are kept verbatim.
11105 #if VMA_STATS_STRING_ENABLED 11106 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 11110 case Node::TYPE_FREE:
11111 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11113 case Node::TYPE_ALLOCATION:
11115 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
// Internal fragmentation of the node is reported as an unused range.
11116 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11117 if(allocSize < levelNodeSize)
11119 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11123 case Node::TYPE_SPLIT:
11125 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11126 const Node*
const leftChild = node->split.leftChild;
11127 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11128 const Node*
const rightChild = leftChild->buddy;
11129 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: members start empty; real initialization happens in Init().
11136 #endif // #if VMA_STATS_STRING_ENABLED 11142 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11143 m_pMetadata(VMA_NULL),
11144 m_MemoryTypeIndex(UINT32_MAX),
11146 m_hMemory(VK_NULL_HANDLE),
11148 m_pMappedData(VMA_NULL)
// Binds this wrapper to a freshly allocated VkDeviceMemory and creates the
// metadata object matching the requested suballocation algorithm
// (linear / buddy / generic), then sizes the metadata.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// the switch over `algorithm` and some parameters are missing. Tokens below
// are kept verbatim.
11152 void VmaDeviceMemoryBlock::Init(
11155 uint32_t newMemoryTypeIndex,
11156 VkDeviceMemory newMemory,
11157 VkDeviceSize newSize,
11159 uint32_t algorithm)
// Init must only run once per block.
11161 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11163 m_hParentPool = hParentPool;
11164 m_MemoryTypeIndex = newMemoryTypeIndex;
11166 m_hMemory = newMemory;
// One metadata strategy is instantiated per block, chosen by `algorithm`.
11171 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11174 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11180 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11182 m_pMetadata->Init(newSize);
11185 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11189 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11191 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11192 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11193 m_hMemory = VK_NULL_HANDLE;
11195 vma_delete(allocator, m_pMetadata);
11196 m_pMetadata = VMA_NULL;
11199 bool VmaDeviceMemoryBlock::Validate()
const 11201 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11202 (m_pMetadata->GetSize() != 0));
11204 return m_pMetadata->Validate();
11207 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11209 void* pData =
nullptr;
11210 VkResult res = Map(hAllocator, 1, &pData);
11211 if(res != VK_SUCCESS)
11216 res = m_pMetadata->CheckCorruption(pData);
11218 Unmap(hAllocator, 1);
// Reference-counted mapping of the block's VkDeviceMemory: the first caller
// performs vkMapMemory; subsequent callers just bump m_MapCount and receive
// the cached pointer. Guarded by the block's mutex.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// the remaining vkMapMemory arguments, a count==0 early-out, and the return
// statements are missing. Tokens below are kept verbatim.
11223 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
11230 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: reuse the existing mapping, just add references.
11231 if(m_MapCount != 0)
11233 m_MapCount += count;
11234 VMA_ASSERT(m_pMappedData != VMA_NULL);
11235 if(ppData != VMA_NULL)
11237 *ppData = m_pMappedData;
// First mapping: call through the dispatch table.
11243 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11244 hAllocator->m_hDevice,
11250 if(result == VK_SUCCESS)
11252 if(ppData != VMA_NULL)
11254 *ppData = m_pMappedData;
11256 m_MapCount = count;
// Decrements the mapping reference count and calls vkUnmapMemory when it
// reaches zero. Asserts on unbalanced unmapping. Guarded by the block mutex.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// presumably a count==0 early-out and braces; verify against the full file.
// Tokens below are kept verbatim.
11262 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11269 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11270 if(m_MapCount >= count)
11272 m_MapCount -= count;
// Last reference gone: actually unmap the device memory.
11273 if(m_MapCount == 0)
11275 m_pMappedData = VMA_NULL;
11276 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11281 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Debug helper: maps the block and writes the corruption-detection magic
// value into the margins immediately before and after an allocation.
// Only meaningful when VMA_DEBUG_MARGIN/VMA_DEBUG_DETECT_CORRUPTION are on.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// the pData declaration and return statements are missing. Tokens below are
// kept verbatim.
11285 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11287 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11288 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11291 VkResult res = Map(hAllocator, 1, &pData);
11292 if(res != VK_SUCCESS)
// Stamp the margin before the allocation and the margin right after it.
11297 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11298 VmaWriteMagicValue(pData, allocOffset + allocSize);
11300 Unmap(hAllocator, 1);
// Debug helper: maps the block and verifies the magic values around a freed
// allocation are intact; a mismatch asserts with a corruption message.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// the pData declaration and return statements are missing. Tokens below are
// kept verbatim.
11305 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11307 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11308 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11311 VkResult res = Map(hAllocator, 1, &pData);
11312 if(res != VK_SUCCESS)
// Check the margin before the allocation, then the margin after it.
11317 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11319 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11321 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11323 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11326 Unmap(hAllocator, 1);
// Binds a VkBuffer to this block's memory at the allocation's offset, under
// the block mutex so concurrent binds to one VkDeviceMemory are serialized.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// the parameter list and the m_hMemory argument are missing. Tokens below are
// kept verbatim.
11331 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11336 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11337 hAllocation->GetBlock() ==
this);
// Serialize vkBindBufferMemory calls targeting the same VkDeviceMemory.
11339 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11340 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11341 hAllocator->m_hDevice,
11344 hAllocation->GetOffset());
// Image counterpart of BindBufferMemory: binds a VkImage to this block's
// memory at the allocation's offset, serialized by the block mutex.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// the parameter list and the m_hMemory argument are missing. Tokens below are
// kept verbatim.
11347 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11352 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11353 hAllocation->GetBlock() ==
this);
11355 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11356 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11357 hAllocator->m_hDevice,
11360 hAllocation->GetOffset());
// Fragments of three adjacent definitions, all heavily elided by extraction:
//  - a stat-info initializer whose header is missing (zeroes outInfo),
//  - VmaPostprocessCalcStatInfo (body elided),
//  - VmaPool_T constructor (forwards pool create-info to its VmaBlockVector;
//    a zero blockSize falls back to the allocator's preferred block size)
//    and the VmaPool_T destructor (body elided).
// NOTE(review): tokens below are kept verbatim; consult the full file before
// editing — most surrounding lines are missing here.
11365 memset(&outInfo, 0,
sizeof(outInfo));
11384 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
11392 VmaPool_T::VmaPool_T(
11395 VkDeviceSize preferredBlockSize) :
// Arguments forwarded into the member block vector's constructor.
11399 createInfo.memoryTypeIndex,
11400 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11401 createInfo.minBlockCount,
11402 createInfo.maxBlockCount,
11404 createInfo.frameInUseCount,
// explicitBlockSize: true only when the user supplied a fixed block size.
11406 createInfo.blockSize != 0,
11412 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores the configuration for one sequence of
// VkDeviceMemory blocks of a single memory type and initializes the block
// container with the allocator's callbacks. No blocks are created here.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// some parameters/initializers (e.g. isCustomPool) are missing. Tokens below
// are kept verbatim.
11416 #if VMA_STATS_STRING_ENABLED 11418 #endif // #if VMA_STATS_STRING_ENABLED 11420 VmaBlockVector::VmaBlockVector(
11423 uint32_t memoryTypeIndex,
11424 VkDeviceSize preferredBlockSize,
11425 size_t minBlockCount,
11426 size_t maxBlockCount,
11427 VkDeviceSize bufferImageGranularity,
11428 uint32_t frameInUseCount,
11430 bool explicitBlockSize,
11431 uint32_t algorithm) :
11432 m_hAllocator(hAllocator),
11433 m_hParentPool(hParentPool),
11434 m_MemoryTypeIndex(memoryTypeIndex),
11435 m_PreferredBlockSize(preferredBlockSize),
11436 m_MinBlockCount(minBlockCount),
11437 m_MaxBlockCount(maxBlockCount),
11438 m_BufferImageGranularity(bufferImageGranularity),
11439 m_FrameInUseCount(frameInUseCount),
11440 m_IsCustomPool(isCustomPool),
11441 m_ExplicitBlockSize(explicitBlockSize),
11442 m_Algorithm(algorithm),
11443 m_HasEmptyBlock(false),
// The block vector uses the allocator's custom allocation callbacks.
11444 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
11449 VmaBlockVector::~VmaBlockVector()
11451 for(
size_t i = m_Blocks.size(); i--; )
11453 m_Blocks[i]->Destroy(m_hAllocator);
11454 vma_delete(m_hAllocator, m_Blocks[i]);
11458 VkResult VmaBlockVector::CreateMinBlocks()
11460 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11462 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11463 if(res != VK_SUCCESS)
// GetPoolStats: under a shared (read) lock, accumulates per-block metadata
// statistics into *pStats. Followed by IsCorruptionDetectionEnabled(), which
// requires HOST_VISIBLE|HOST_COHERENT memory plus the debug-margin /
// corruption-detection compile-time switches, and the retry-count constant
// used by AllocatePage's make-lost loop.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// the pStats field initialization before the loop is missing. Tokens below are
// kept verbatim.
11471 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11473 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11475 const size_t blockCount = m_Blocks.size();
11484 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11486 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11487 VMA_ASSERT(pBlock);
11488 VMA_HEAVY_ASSERT(pBlock->Validate());
11489 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption detection needs host-visible, host-coherent memory so the magic
// margins can be written/read through a mapping.
11493 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11495 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11496 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11497 (VMA_DEBUG_MARGIN > 0) &&
11499 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on make-allocations-lost retry iterations in AllocatePage.
11502 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates `allocationCount` pages under one write lock by calling
// AllocatePage repeatedly; on any failure it frees the pages created so far
// and zeroes the output array so callers never see partial results.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// several parameters, the allocIndex declaration, and AllocatePage's argument
// list are missing. Tokens below are kept verbatim.
11504 VkResult VmaBlockVector::Allocate(
11505 uint32_t currentFrameIndex,
11507 VkDeviceSize alignment,
11509 VmaSuballocationType suballocType,
11510 size_t allocationCount,
11514 VkResult res = VK_SUCCESS;
// With corruption detection on, size/alignment are rounded up so the magic
// margins stay aligned to the magic value's granularity.
11516 if(IsCorruptionDetectionEnabled())
11518 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11519 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11523 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11524 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11526 res = AllocatePage(
11532 pAllocations + allocIndex);
11533 if(res != VK_SUCCESS)
// Roll back: free the allocations that did succeed, then clear the array.
11540 if(res != VK_SUCCESS)
11543 while(allocIndex--)
11545 Free(pAllocations[allocIndex]);
11547 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single page. Strategy, in order:
//  1. early rejections (unsupported flag combinations; request larger than a
//     whole preferred block),
//  2. try existing blocks (last block first, then forward or backward scan
//     depending on strategy),
//  3. create a new block, shrinking the preferred size by halves (up to 3
//     shifts) when allowed and retrying on allocation failure,
//  4. as a last resort, repeatedly (up to VMA_ALLOCATION_TRY_COUNT) find the
//     cheapest request that can "lose" existing allocations and claim it.
// NOTE(review): the extraction dropped many lines here (original numbering
// jumps throughout) — parameters, argument lists, braces and returns are
// missing. Tokens below are kept verbatim.
11553 VkResult VmaBlockVector::AllocatePage(
11554 uint32_t currentFrameIndex,
11556 VkDeviceSize alignment,
11558 VmaSuballocationType suballocType,
11565 const bool canCreateNewBlock =
11567 (m_Blocks.size() < m_MaxBlockCount);
11574 canMakeOtherLost =
false;
// Upper-address allocation is only meaningful for linear algorithm pools.
11578 if(isUpperAddress &&
11581 return VK_ERROR_FEATURE_NOT_PRESENT;
11595 return VK_ERROR_FEATURE_NOT_PRESENT;
// A request that cannot fit into one block (including debug margins) can
// never succeed.
11599 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11601 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11609 if(!canMakeOtherLost || canCreateNewBlock)
// Fast path: the most recently added block is the most likely to have room.
11618 if(!m_Blocks.empty())
11620 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11621 VMA_ASSERT(pCurrBlock);
11622 VkResult res = AllocateFromBlock(
11632 if(res == VK_SUCCESS)
11634 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// Forward scan over existing blocks.
11644 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11646 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11647 VMA_ASSERT(pCurrBlock);
11648 VkResult res = AllocateFromBlock(
11658 if(res == VK_SUCCESS)
11660 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// Backward scan variant (used by the alternative allocation strategy).
11668 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11670 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11671 VMA_ASSERT(pCurrBlock);
11672 VkResult res = AllocateFromBlock(
11682 if(res == VK_SUCCESS)
11684 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// No block had room: create a new one, possibly smaller than preferred.
11692 if(canCreateNewBlock)
11695 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11696 uint32_t newBlockSizeShift = 0;
11697 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic: start small when existing blocks are small and the request
// would still fit twice, to avoid overcommitting device memory.
11699 if(!m_ExplicitBlockSize)
11702 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11703 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11705 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11706 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11708 newBlockSize = smallerNewBlockSize;
11709 ++newBlockSizeShift;
11718 size_t newBlockIndex = 0;
11719 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device-memory failure, retry with progressively halved block sizes.
11721 if(!m_ExplicitBlockSize)
11723 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11725 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11726 if(smallerNewBlockSize >= size)
11728 newBlockSize = smallerNewBlockSize;
11729 ++newBlockSizeShift;
11730 res = CreateBlock(newBlockSize, &newBlockIndex);
11739 if(res == VK_SUCCESS)
11741 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11742 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11744 res = AllocateFromBlock(
11754 if(res == VK_SUCCESS)
11756 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11762 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Last resort: evict ("make lost") old allocations to carve out space.
11769 if(canMakeOtherLost)
11771 uint32_t tryIndex = 0;
11772 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11774 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11775 VmaAllocationRequest bestRequest = {};
11776 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward search for the cheapest candidate request across all blocks.
11782 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11784 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11785 VMA_ASSERT(pCurrBlock);
11786 VmaAllocationRequest currRequest = {};
11787 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11790 m_BufferImageGranularity,
11799 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11800 if(pBestRequestBlock == VMA_NULL ||
11801 currRequestCost < bestRequestCost)
11803 pBestRequestBlock = pCurrBlock;
11804 bestRequest = currRequest;
11805 bestRequestCost = currRequestCost;
// Cost 0 means nothing would be lost — cannot do better; stop searching.
11807 if(bestRequestCost == 0)
// Backward search variant for the alternative strategy.
11818 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11820 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11821 VMA_ASSERT(pCurrBlock);
11822 VmaAllocationRequest currRequest = {};
11823 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11826 m_BufferImageGranularity,
11835 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11836 if(pBestRequestBlock == VMA_NULL ||
11837 currRequestCost < bestRequestCost ||
11840 pBestRequestBlock = pCurrBlock;
11841 bestRequest = currRequest;
11842 bestRequestCost = currRequestCost;
11844 if(bestRequestCost == 0 ||
// Commit: map if needed, make the victim allocations lost, then allocate.
11854 if(pBestRequestBlock != VMA_NULL)
11858 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11859 if(res != VK_SUCCESS)
11865 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11871 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11873 m_HasEmptyBlock =
false;
11876 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
11877 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
11878 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
11879 (*pAllocation)->InitBlockAllocation(
11881 bestRequest.offset,
11887 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11888 VMA_DEBUG_LOG(
" Returned from existing block");
11889 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11890 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11892 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11894 if(IsCorruptionDetectionEnabled())
11896 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11897 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Exhausted all retries: report as "too many objects" per VMA convention.
11912 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11914 return VK_ERROR_TOO_MANY_OBJECTS;
11918 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation: validates debug margins, unmaps persistent mappings,
// returns the range to the block's metadata, and keeps at most ONE empty
// block alive (deleting surplus empties while respecting m_MinBlockCount).
// The actual VkDeviceMemory destruction happens after the mutex is released.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// parameters, braces and some branches are missing. Tokens below are kept
// verbatim.
11921 void VmaBlockVector::Free(
// Deferred so Vulkan calls happen outside the lock scope below.
11924 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
11928 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11930 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11932 if(IsCorruptionDetectionEnabled())
11934 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11935 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Persistently mapped allocations hold one map reference — drop it.
11938 if(hAllocation->IsPersistentMap())
11940 pBlock->Unmap(m_hAllocator, 1);
11943 pBlock->m_pMetadata->Free(hAllocation);
11944 VMA_HEAVY_ASSERT(pBlock->Validate());
11946 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
// Policy: keep at most one empty block as a cache; delete the second one.
11949 if(pBlock->m_pMetadata->IsEmpty())
11952 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11954 pBlockToDelete = pBlock;
11960 m_HasEmptyBlock =
true;
// Freeing made this block non-empty-relevant; if an empty block exists at
// the back of the sorted vector, it may now be released.
11965 else if(m_HasEmptyBlock)
11967 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11968 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11970 pBlockToDelete = pLastBlock;
11971 m_Blocks.pop_back();
11972 m_HasEmptyBlock =
false;
11976 IncrementallySortBlocks();
// Destruction of VkDeviceMemory happens outside the mutex.
11981 if(pBlockToDelete != VMA_NULL)
11983 VMA_DEBUG_LOG(
" Deleted empty allocation");
11984 pBlockToDelete->Destroy(m_hAllocator);
11985 vma_delete(m_hAllocator, pBlockToDelete);
11989 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 11991 VkDeviceSize result = 0;
11992 for(
size_t i = m_Blocks.size(); i--; )
11994 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
11995 if(result >= m_PreferredBlockSize)
// Removes (but does not destroy) the given block from m_Blocks by linear
// search and VmaVectorRemove.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// presumably a `return;` after removal and a trailing assert for the
// not-found case; verify against the full file. Tokens below are kept
// verbatim.
12003 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
12005 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12007 if(m_Blocks[blockIndex] == pBlock)
12009 VmaVectorRemove(m_Blocks, blockIndex);
// Keeps m_Blocks approximately sorted by ascending free space using one
// bubble-sort pass; blocks with less free space sort first so allocation
// scans fill fuller blocks before emptier ones.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// presumably the pass stops after the first swap (early return); verify
// against the full file. Tokens below are kept verbatim.
12016 void VmaBlockVector::IncrementallySortBlocks()
12021 for(
size_t i = 1; i < m_Blocks.size(); ++i)
12023 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
12025 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts to allocate from one specific block: asks the block's metadata for
// an allocation request (no losses allowed here), maps the block if the
// allocation should be persistently mapped, constructs the VmaAllocation
// object, and applies debug fill / magic margins when enabled.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when the block has no fitting space.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// parameters, argument lists, braces and the success return are missing.
// Tokens below are kept verbatim.
12032 VkResult VmaBlockVector::AllocateFromBlock(
12033 VmaDeviceMemoryBlock* pBlock,
12034 uint32_t currentFrameIndex,
12036 VkDeviceSize alignment,
12039 VmaSuballocationType suballocType,
12048 VmaAllocationRequest currRequest = {};
12049 if(pBlock->m_pMetadata->CreateAllocationRequest(
12052 m_BufferImageGranularity,
// This path never evicts existing allocations.
12062 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
12066 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12067 if(res != VK_SUCCESS)
// The block is about to hold an allocation, so it is no longer the cached
// empty block.
12074 if(pBlock->m_pMetadata->IsEmpty())
12076 m_HasEmptyBlock =
false;
12079 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12080 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12081 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12082 (*pAllocation)->InitBlockAllocation(
12084 currRequest.offset,
12090 VMA_HEAVY_ASSERT(pBlock->Validate());
12091 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12092 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12094 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12096 if(IsCorruptionDetectionEnabled())
12098 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12099 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
12103 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of `blockSize` for this vector's memory
// type, wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and
// optionally reports the new block's index.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// the error-return after AllocateVulkanMemory, pBlock->Init's full argument
// list, and the success return are missing. Tokens below are kept verbatim.
12106 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12108 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12109 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12110 allocInfo.allocationSize = blockSize;
12111 VkDeviceMemory mem = VK_NULL_HANDLE;
12112 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Wrap the raw device memory; Init (arguments elided) sets up metadata.
12121 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12127 allocInfo.allocationSize,
12131 m_Blocks.push_back(pBlock);
12132 if(pNewBlockIndex != VMA_NULL)
12134 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU via memcpy between mapped blocks:
// (1) mark which blocks participate, (2) map them (remembering which maps we
// created so only those are unmapped), (3) for each move invalidate the
// source range (non-coherent memory), memcpy, rewrite magic margins, and
// flush the destination range, (4) unmap blocks mapped here, back-to-front.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// braces, the BlockInfo struct body, memcpy's own line, and error branches
// are missing. Tokens below are kept verbatim.
12140 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12141 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12142 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12144 const size_t blockCount = m_Blocks.size();
12145 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12149 BLOCK_FLAG_USED = 0x00000001,
12150 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12158 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12159 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12160 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Pass 1: flag every block referenced by a move as used.
12163 const size_t moveCount = moves.size();
12164 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12166 const VmaDefragmentationMove& move = moves[moveIndex];
12167 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12168 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12171 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: ensure every used block is mapped; remember maps created here so
// only those are undone in pass 4.
12174 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12176 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12177 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12178 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12180 currBlockInfo.pMappedData = pBlock->GetMappedData();
12182 if(currBlockInfo.pMappedData == VMA_NULL)
12184 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12185 if(pDefragCtx->res == VK_SUCCESS)
12187 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Pass 3: perform the copies with non-coherent cache maintenance.
12194 if(pDefragCtx->res == VK_SUCCESS)
12196 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12197 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12199 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12201 const VmaDefragmentationMove& move = moves[moveIndex];
12203 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12204 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12206 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range, aligned to nonCoherentAtomSize and clamped
// to the block size, before reading through the mapping.
12211 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12212 memRange.memory = pSrcBlock->GetDeviceMemory();
12213 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12214 memRange.size = VMA_MIN(
12215 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12216 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12217 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// The memcpy destination/source/count (the memcpy call itself is elided).
12222 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12223 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12224 static_cast<size_t>(move.size));
12226 if(IsCorruptionDetectionEnabled())
12228 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12229 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range after writing through the mapping.
12235 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12236 memRange.memory = pDstBlock->GetDeviceMemory();
12237 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12238 memRange.size = VMA_MIN(
12239 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12240 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12241 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Pass 4: undo only the mappings created in pass 2, back-to-front.
12248 for(
size_t blockIndex = blockCount; blockIndex--; )
12250 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12251 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12253 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12254 pBlock->Unmap(m_hAllocator, 1);
// Executes defragmentation moves on the GPU: creates one staging VkBuffer
// bound to each participating block's memory, records vkCmdCopyBuffer
// commands for every move into the given command buffer, then marks the
// context VK_NOT_READY because the moves complete only after submission.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// braces, the VkBufferCopy field initializers, and error branches are
// missing. Also `®ion` below is a mis-encoded `&region` (stray HTML
// entity from extraction) — restore it when fixing the file's encoding.
// Tokens below are kept verbatim.
12259 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12260 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12261 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12262 VkCommandBuffer commandBuffer)
12264 const size_t blockCount = m_Blocks.size();
12266 pDefragCtx->blockContexts.resize(blockCount);
12267 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Pass 1: flag every block referenced by a move.
12270 const size_t moveCount = moves.size();
12271 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12273 const VmaDefragmentationMove& move = moves[moveIndex];
12274 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12275 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12278 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: create a whole-block staging buffer per used block and bind it at
// offset 0 of the block's memory.
12282 VkBufferCreateInfo bufCreateInfo;
12283 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
12285 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12287 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12288 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12289 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12291 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12292 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12293 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12294 if(pDefragCtx->res == VK_SUCCESS)
12296 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12297 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Pass 3: record one buffer-to-buffer copy per move.
12304 if(pDefragCtx->res == VK_SUCCESS)
12306 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12308 const VmaDefragmentationMove& move = moves[moveIndex];
12310 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12311 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12313 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12315 VkBufferCopy region = {
12319 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12320 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
// The result is pending until the command buffer is submitted and completes.
12325 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12327 pDefragCtx->res = VK_NOT_READY;
// Body fragment of a block-freeing routine whose signature was elided by the
// extraction (it takes a VmaDefragmentationStats* per the usage below):
// walks blocks back-to-front, destroys empty ones above the configured
// minimum (crediting their size to the defragmentation stats), and
// recomputes m_HasEmptyBlock for any empty block that must be kept.
// NOTE(review): tokens below are kept verbatim; the function header, braces
// and closing lines are missing.
12333 m_HasEmptyBlock =
false;
12334 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12336 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12337 if(pBlock->m_pMetadata->IsEmpty())
// Above the minimum block count: release the block entirely.
12339 if(m_Blocks.size() > m_MinBlockCount)
12341 if(pDefragmentationStats != VMA_NULL)
12344 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12347 VmaVectorRemove(m_Blocks, blockIndex);
12348 pBlock->Destroy(m_hAllocator);
12349 vma_delete(m_hAllocator, pBlock);
// At or below the minimum: keep the empty block and remember it exists.
12353 m_HasEmptyBlock =
true;
// Emits this block vector's state as JSON under a shared (read) lock:
// pool configuration (memory type, block size/count limits, frame-in-use
// count, algorithm) for custom pools, or just the preferred block size for
// default vectors, followed by a "Blocks" object keyed by block id with each
// block's detailed metadata map.
// NOTE(review): the extraction dropped lines here (original numbering jumps) —
// the custom-pool/default branch, braces, EndString/EndObject calls are
// missing. Tokens below are kept verbatim.
12359 #if VMA_STATS_STRING_ENABLED 12361 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12363 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12365 json.BeginObject();
// Custom-pool branch: full configuration is printed.
12369 json.WriteString(
"MemoryTypeIndex");
12370 json.WriteNumber(m_MemoryTypeIndex);
12372 json.WriteString(
"BlockSize");
12373 json.WriteNumber(m_PreferredBlockSize);
12375 json.WriteString(
"BlockCount");
12376 json.BeginObject(
true);
12377 if(m_MinBlockCount > 0)
12379 json.WriteString(
"Min");
12380 json.WriteNumber((uint64_t)m_MinBlockCount);
12382 if(m_MaxBlockCount < SIZE_MAX)
12384 json.WriteString(
"Max");
12385 json.WriteNumber((uint64_t)m_MaxBlockCount);
12387 json.WriteString(
"Cur");
12388 json.WriteNumber((uint64_t)m_Blocks.size());
12391 if(m_FrameInUseCount > 0)
12393 json.WriteString(
"FrameInUseCount");
12394 json.WriteNumber(m_FrameInUseCount);
12397 if(m_Algorithm != 0)
12399 json.WriteString(
"Algorithm");
12400 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default (non-custom) branch: only the preferred block size.
12405 json.WriteString(
"PreferredBlockSize");
12406 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps, keyed by block id.
12409 json.WriteString(
"Blocks");
12410 json.BeginObject();
12411 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12413 json.BeginString();
12414 json.ContinueString(m_Blocks[i]->GetId());
12417 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation pass over this block vector.
// Decides CPU (memcpy via mapped memory) vs GPU (vkCmdCopyBuffer into commandBuffer)
// defragmentation based on memory properties and remaining budgets, asks the chosen
// algorithm for a list of moves, then applies them. The max*BytesToMove /
// max*AllocationsToMove parameters are in-out budgets, decremented by what was used.
// NOTE(review): extraction-garbled text; brace/else lines and parts of some
// conditions (e.g. the CPU eligibility expression at 12440) are missing.
12424 #endif // #if VMA_STATS_STRING_ENABLED 12426 void VmaBlockVector::Defragment(
12427 class VmaBlockVectorDefragmentationContext* pCtx,
12429 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12430 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12431 VkCommandBuffer commandBuffer)
12433 pCtx->res = VK_SUCCESS;
12435 const VkMemoryPropertyFlags memPropFlags =
12436 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12437 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12438 const bool isHostCoherent = (memPropFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;
// CPU path needs budget (the rest of this condition was lost to garbling —
// presumably it also requires host visibility; confirm against the original).
12440 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
// GPU path needs budget, no corruption-detection margins, and this memory type
// enabled in the GPU-defragmentation mask.
12442 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12443 !IsCorruptionDetectionEnabled() &&
12444 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
12447 if(canDefragmentOnCpu || canDefragmentOnGpu)
12449 bool defragmentOnGpu;
// Exactly one path available: take it. Otherwise prefer GPU for device-local
// memory or on integrated GPUs.
12451 if(canDefragmentOnGpu != canDefragmentOnCpu)
12453 defragmentOnGpu = canDefragmentOnGpu;
12458 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12459 m_hAllocator->IsIntegratedGpu();
// GPU copies of overlapping ranges are not safe; CPU memmove-style copies are.
12462 bool overlappingMoveSupported = !defragmentOnGpu;
// Take the write lock for the whole operation; released in DefragmentationEnd.
12464 if(m_hAllocator->m_UseMutex)
12466 m_Mutex.LockWrite();
12467 pCtx->mutexLocked =
true;
12470 pCtx->Begin(overlappingMoveSupported);
12474 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12475 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12476 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12477 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
12478 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Report consumed budget back to the caller via the in-out parameters.
12481 if(pStats != VMA_NULL)
12483 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12484 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12487 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12488 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12489 if(defragmentOnGpu)
12491 maxGpuBytesToMove -= bytesMoved;
12492 maxGpuAllocationsToMove -= allocationsMoved;
12496 maxCpuBytesToMove -= bytesMoved;
12497 maxCpuAllocationsToMove -= allocationsMoved;
// Apply the planned moves on the chosen path.
12501 if(pCtx->res >= VK_SUCCESS)
12503 if(defragmentOnGpu)
12505 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12509 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finishes a defragmentation pass started by Defragment():
// destroys the temporary VkBuffers created per block for GPU copies, frees blocks
// that became empty (on success), and releases the write lock taken in Defragment().
// NOTE(review): extraction-garbled text; brace lines are missing.
12515 void VmaBlockVector::DefragmentationEnd(
12516 class VmaBlockVectorDefragmentationContext* pCtx,
// Destroy temporary buffers in reverse order of creation.
12520 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12522 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12523 if(blockCtx.hBuffer)
12525 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12526 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
// Only reclaim empty blocks if the pass did not fail.
12530 if(pCtx->res >= VK_SUCCESS)
12532 FreeEmptyBlocks(pStats);
// Pair with the LockWrite() performed in Defragment() when m_UseMutex is set.
12535 if(pCtx->mutexLocked)
12537 VMA_ASSERT(m_hAllocator->m_UseMutex);
12538 m_Mutex.UnlockWrite();
12542 size_t VmaBlockVector::CalcAllocationCount()
const 12545 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12547 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
12552 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12554 if(m_BufferImageGranularity == 1)
12558 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12559 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12561 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
12562 VMA_ASSERT(m_Algorithm == 0);
12563 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12564 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
12572 void VmaBlockVector::MakePoolAllocationsLost(
12573 uint32_t currentFrameIndex,
12574 size_t* pLostAllocationCount)
12576 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12577 size_t lostAllocationCount = 0;
12578 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12580 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12581 VMA_ASSERT(pBlock);
12582 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
12584 if(pLostAllocationCount != VMA_NULL)
12586 *pLostAllocationCount = lostAllocationCount;
12590 VkResult VmaBlockVector::CheckCorruption()
12592 if(!IsCorruptionDetectionEnabled())
12594 return VK_ERROR_FEATURE_NOT_PRESENT;
12597 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12598 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12600 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12601 VMA_ASSERT(pBlock);
12602 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12603 if(res != VK_SUCCESS)
// Accumulates this vector's per-block statistics into pStats: into the global
// total, the per-memory-type entry, and the per-heap entry.
// NOTE(review): extraction-garbled text — the local VmaStatInfo variable
// declaration (used at 12624) was lost; brace lines are missing.
12611 void VmaBlockVector::AddStats(
VmaStats* pStats)
12613 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12614 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
// Read lock: statistics gathering does not mutate blocks.
12616 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12618 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12620 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12621 VMA_ASSERT(pBlock);
12622 VMA_HEAVY_ASSERT(pBlock->Validate());
// allocationStatInfo is a per-block VmaStatInfo local (declaration lost to garbling).
12624 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12625 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12626 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12627 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
12634 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12636 VmaBlockVector* pBlockVector,
12637 uint32_t currentFrameIndex,
12638 bool overlappingMoveSupported) :
12639 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12640 m_AllocationCount(0),
12641 m_AllAllocations(false),
12643 m_AllocationsMoved(0),
12644 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12647 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12648 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12650 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12651 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12652 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12653 m_Blocks.push_back(pBlockInfo);
12657 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
12660 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12662 for(
size_t i = m_Blocks.size(); i--; )
12664 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a candidate for defragmentation. Ignores
// allocations already lost; otherwise finds the BlockInfo owning the
// allocation's block (binary search over the pointer-sorted m_Blocks) and
// appends the allocation to that block's list.
// NOTE(review): extraction-garbled text; brace lines (and possibly an else
// branch after the block lookup) are missing.
12668 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
// Final lost-check is safe here: we are inside VmaBlockVector::m_Mutex.
12671 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12673 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12674 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12675 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
// pChanged is stored so the caller can later learn whether this allocation moved.
12677 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12678 (*it)->m_Allocations.push_back(allocInfo);
12685 ++m_AllocationCount;
// One round of the generic algorithm: repeatedly takes the "last" allocation
// (from the highest-index source block, allocations pre-sorted by descending
// offset) and tries to re-place it into an earlier block / lower offset via
// CreateAllocationRequest. Stops when budgets (maxBytesToMove /
// maxAllocationsToMove) would be exceeded or no more sensible moves exist.
// Appends performed moves to `moves` and updates metadata + the allocation's
// block/offset immediately.
// NOTE(review): extraction-garbled text; many brace/else/return lines and
// several argument lines (e.g. inside CreateAllocationRequest call, the
// MoveMakesSense call at 12773, and the Alloc call at 12792) are missing.
12689 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12690 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12691 VkDeviceSize maxBytesToMove,
12692 uint32_t maxAllocationsToMove)
12694 if(m_Blocks.empty())
12707 size_t srcBlockMinIndex = 0;
// Start from the last block / last (highest-offset) allocation.
12720 size_t srcBlockIndex = m_Blocks.size() - 1;
12721 size_t srcAllocIndex = SIZE_MAX;
// SIZE_MAX sentinel means "pick the last allocation of the current source block";
// when a block is exhausted, step to the previous block.
12727 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12729 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12732 if(srcBlockIndex == srcBlockMinIndex)
12739 srcAllocIndex = SIZE_MAX;
12744 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12748 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12749 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12751 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12752 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12753 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12754 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to and including the source block.
12757 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12759 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12760 VmaAllocationRequest dstAllocRequest;
12761 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12762 m_CurrentFrameIndex,
12763 m_pBlockVector->GetFrameInUseCount(),
12764 m_pBlockVector->GetBufferImageGranularity(),
12771 &dstAllocRequest) &&
// Second conjunct (garbled) is MoveMakesSense(...) — only move to earlier
// block or lower offset within the same block.
12773 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12775 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: abort the round before exceeding either limit.
12778 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12779 (m_BytesMoved + size > maxBytesToMove))
12784 VmaDefragmentationMove move;
12785 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12786 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12787 move.srcOffset = srcOffset;
12788 move.dstOffset = dstAllocRequest.offset;
12790 moves.push_back(move);
// Commit the move in metadata: allocate at destination, free at source,
// repoint the allocation to its new block/offset.
12792 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12796 allocInfo.m_hAllocation);
12797 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12799 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12801 if(allocInfo.m_pChanged != VMA_NULL)
12803 *allocInfo.m_pChanged = VK_TRUE;
12806 ++m_AllocationsMoved;
12807 m_BytesMoved += size;
12809 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous allocation / previous block.
12817 if(srcAllocIndex > 0)
12823 if(srcBlockIndex > 0)
12826 srcAllocIndex = SIZE_MAX;
12836 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12839 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12841 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: prepares per-block allocation lists
// (all allocations when m_AllAllocations, else only those added via
// AddAllocation), sorts blocks into destination-preference order, then runs up
// to two DefragmentRound passes within the given budgets.
// NOTE(review): extraction-garbled text; brace lines and the early-return body
// after 12854 are missing.
12849 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12850 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12851 VkDeviceSize maxBytesToMove,
12852 uint32_t maxAllocationsToMove)
// Nothing registered and not in "move everything" mode — nothing to do.
12854 if(!m_AllAllocations && m_AllocationCount == 0)
12859 const size_t blockCount = m_Blocks.size();
12860 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12862 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
12864 if(m_AllAllocations)
// In all-allocations mode, harvest every non-free suballocation from metadata.
12866 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12867 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12868 it != pMetadata->m_Suballocations.end();
12871 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12873 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12874 pBlockInfo->m_Allocations.push_back(allocInfo);
12879 pBlockInfo->CalcHasNonMovableAllocations();
// Sources are consumed from the back, so keep allocations sorted by
// descending offset within each block.
12883 pBlockInfo->SortAllocationsByOffsetDescending();
12889 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Second round can move allocations again after the first round freed space.
12892 const uint32_t roundCount = 2;
12895 VkResult result = VK_SUCCESS;
12896 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12898 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
12904 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12905 size_t dstBlockIndex, VkDeviceSize dstOffset,
12906 size_t srcBlockIndex, VkDeviceSize srcOffset)
12908 if(dstBlockIndex < srcBlockIndex)
12912 if(dstBlockIndex > srcBlockIndex)
12916 if(dstOffset < srcOffset)
12926 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12928 VmaBlockVector* pBlockVector,
12929 uint32_t currentFrameIndex,
12930 bool overlappingMoveSupported) :
12931 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12932 m_OverlappingMoveSupported(overlappingMoveSupported),
12933 m_AllocationCount(0),
12934 m_AllAllocations(false),
12936 m_AllocationsMoved(0),
12937 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
12939 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
12943 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast algorithm entry point: compacts all allocations front-to-back.
// Blocks are processed in order of increasing free space; each source
// suballocation is either placed into previously-registered free space
// (FreeSpaceDatabase), moved within its own block toward offset 0, or appended
// to the current destination block. Every performed move is appended to `moves`;
// metadata is rewritten via Preprocess/PostprocessMetadata.
// NOTE(review): extraction-garbled text; brace/else lines, the `end` flag
// declaration, iterator advances, and parts of some aggregate initializers are
// missing. Kept byte-identical — too intricate to reconstruct safely.
12947 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12948 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12949 VkDeviceSize maxBytesToMove,
12950 uint32_t maxAllocationsToMove)
12952 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12954 const size_t blockCount = m_pBlockVector->GetBlockCount();
12955 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Strip free suballocations from metadata so only real allocations remain.
12960 PreprocessMetadata();
// Sort block indices by ascending sum of free space: fullest blocks become
// destinations first.
12964 m_BlockInfos.resize(blockCount);
12965 for(
size_t i = 0; i < blockCount; ++i)
12967 m_BlockInfos[i].origBlockIndex = i;
12970 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12971 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12972 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Tracks gaps left behind in destination blocks so later small allocations can
// be placed into them.
12977 FreeSpaceDatabase freeSpaceDb;
12979 size_t dstBlockInfoIndex = 0;
12980 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12981 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12982 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12983 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12984 VkDeviceSize dstOffset = 0;
// `end` (declaration lost to garbling) stops the scan when a budget is exhausted.
12987 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12989 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12990 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12991 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12992 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12993 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12995 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12996 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12997 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Budget check before considering this allocation.
12998 if(m_AllocationsMoved == maxAllocationsToMove ||
12999 m_BytesMoved + srcAllocSize > maxBytesToMove)
13004 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// Case 1: the allocation fits into a previously registered free gap.
13007 size_t freeSpaceInfoIndex;
13008 VkDeviceSize dstAllocOffset;
13009 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
13010 freeSpaceInfoIndex, dstAllocOffset))
13012 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
13013 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
13014 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// 1a: gap is in the same block — move within the block (offset decreases).
13017 if(freeSpaceInfoIndex == srcBlockInfoIndex)
13019 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13023 VmaSuballocation suballoc = *srcSuballocIt;
13024 suballoc.offset = dstAllocOffset;
13025 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
13026 m_BytesMoved += srcAllocSize;
13027 ++m_AllocationsMoved;
// Erase + re-insert keeps the suballocation list sorted by offset.
13029 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13031 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13032 srcSuballocIt = nextSuballocIt;
13034 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13036 VmaDefragmentationMove move = {
13037 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13038 srcAllocOffset, dstAllocOffset,
13040 moves.push_back(move);
// 1b: gap is in an earlier block — move the allocation across blocks.
13047 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13049 VmaSuballocation suballoc = *srcSuballocIt;
13050 suballoc.offset = dstAllocOffset;
13051 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13052 m_BytesMoved += srcAllocSize;
13053 ++m_AllocationsMoved;
13055 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13057 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13058 srcSuballocIt = nextSuballocIt;
13060 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13062 VmaDefragmentationMove move = {
13063 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13064 srcAllocOffset, dstAllocOffset,
13066 moves.push_back(move);
// Case 2: no gap — append at the current destination cursor.
13071 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance to the next destination block while the allocation does not fit;
// register the unusable tail as free space.
13074 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13075 dstAllocOffset + srcAllocSize > dstBlockSize)
13078 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13080 ++dstBlockInfoIndex;
13081 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13082 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13083 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13084 dstBlockSize = pDstMetadata->GetSize();
13086 dstAllocOffset = 0;
// 2a: destination is the same block — only shift down, possibly overlapping.
13090 if(dstBlockInfoIndex == srcBlockInfoIndex)
13092 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13094 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
13096 bool skipOver = overlap;
13097 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: skip overlapping moves whose gain (< size/64) is too small.
13101 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
13106 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13108 dstOffset = srcAllocOffset + srcAllocSize;
// Perform the in-block shift.
13114 srcSuballocIt->offset = dstAllocOffset;
13115 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
13116 dstOffset = dstAllocOffset + srcAllocSize;
13117 m_BytesMoved += srcAllocSize;
13118 ++m_AllocationsMoved;
13120 VmaDefragmentationMove move = {
13121 srcOrigBlockIndex, dstOrigBlockIndex,
13122 srcAllocOffset, dstAllocOffset,
13124 moves.push_back(move);
// 2b: destination is an earlier block — append there and transfer ownership.
13132 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13133 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13135 VmaSuballocation suballoc = *srcSuballocIt;
13136 suballoc.offset = dstAllocOffset;
13137 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13138 dstOffset = dstAllocOffset + srcAllocSize;
13139 m_BytesMoved += srcAllocSize;
13140 ++m_AllocationsMoved;
13142 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13144 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13145 srcSuballocIt = nextSuballocIt;
13147 pDstMetadata->m_Suballocations.push_back(suballoc);
13149 VmaDefragmentationMove move = {
13150 srcOrigBlockIndex, dstOrigBlockIndex,
13151 srcAllocOffset, dstAllocOffset,
13153 moves.push_back(move);
13159 m_BlockInfos.clear();
// Rebuild free suballocations / free-size bookkeeping from the rewritten lists.
13161 PostprocessMetadata();
// Strips all FREE suballocations from every block's metadata and resets free
// bookkeeping (m_FreeCount, m_SumFreeSize, m_FreeSuballocationsBySize) so the
// fast algorithm can operate on lists that contain real allocations only.
// Paired with PostprocessMetadata(), which rebuilds the free structures.
// NOTE(review): extraction-garbled text; the iterator-advance/else lines inside
// the inner loop are missing.
13166 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13168 const size_t blockCount = m_pBlockVector->GetBlockCount();
13169 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13171 VmaBlockMetadata_Generic*
const pMetadata =
13172 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
// Reset free-space bookkeeping; PostprocessMetadata() recomputes it.
13173 pMetadata->m_FreeCount = 0;
13174 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13175 pMetadata->m_FreeSuballocationsBySize.clear();
13176 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13177 it != pMetadata->m_Suballocations.end(); )
13179 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
// Save the successor before erasing so iteration can continue safely.
13181 VmaSuballocationList::iterator nextIt = it;
13183 pMetadata->m_Suballocations.erase(it);
// Rebuilds each block's free-space metadata after the fast algorithm rewrote the
// suballocation lists: inserts FREE suballocations into every gap (between
// allocations and at the block tail), recomputes m_FreeCount / m_SumFreeSize,
// re-registers large-enough free ranges in m_FreeSuballocationsBySize, and
// re-sorts that index.
// NOTE(review): extraction-garbled text; aggregate-initializer member lines for
// the VmaSuballocation literals (offset/size fields) and some brace lines are
// missing.
13194 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13196 const size_t blockCount = m_pBlockVector->GetBlockCount();
13197 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13199 VmaBlockMetadata_Generic*
const pMetadata =
13200 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13201 const VkDeviceSize blockSize = pMetadata->GetSize();
// Block fully emptied: one FREE suballocation covering the whole block.
13204 if(pMetadata->m_Suballocations.empty())
13206 pMetadata->m_FreeCount = 1;
13208 VmaSuballocation suballoc = {
13212 VMA_SUBALLOCATION_TYPE_FREE };
13213 pMetadata->m_Suballocations.push_back(suballoc);
13214 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Otherwise walk the (offset-sorted) allocations and fill each gap with a
// FREE suballocation.
13219 VkDeviceSize offset = 0;
13220 VmaSuballocationList::iterator it;
13221 for(it = pMetadata->m_Suballocations.begin();
13222 it != pMetadata->m_Suballocations.end();
13225 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13226 VMA_ASSERT(it->offset >= offset);
13229 if(it->offset > offset)
13231 ++pMetadata->m_FreeCount;
13232 const VkDeviceSize freeSize = it->offset - offset;
13233 VmaSuballocation suballoc = {
13237 VMA_SUBALLOCATION_TYPE_FREE };
13238 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// Only free ranges at or above the registration threshold are indexed by size.
13239 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13241 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13245 pMetadata->m_SumFreeSize -= it->size;
13246 offset = it->offset + it->size;
// Trailing gap at the end of the block.
13250 if(offset < blockSize)
13252 ++pMetadata->m_FreeCount;
13253 const VkDeviceSize freeSize = blockSize - offset;
13254 VmaSuballocation suballoc = {
13258 VMA_SUBALLOCATION_TYPE_FREE };
13259 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13260 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13261 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13263 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
// Restore the size-sorted invariant of the free-suballocation index.
13268 pMetadata->m_FreeSuballocationsBySize.begin(),
13269 pMetadata->m_FreeSuballocationsBySize.end(),
13270 VmaSuballocationItemSizeLess());
13273 VMA_HEAVY_ASSERT(pMetadata->Validate());
13277 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13280 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13281 while(it != pMetadata->m_Suballocations.end())
13283 if(it->offset < suballoc.offset)
13288 pMetadata->m_Suballocations.insert(it, suballoc);
// Constructor: records the block vector (and optional custom pool) to
// defragment; the algorithm object itself is created lazily in Begin().
// NOTE(review): extraction-garbled text — some parameter lines (hAllocator,
// hCustomPool) and at least one leading initializer (likely `res(VK_SUCCESS)`)
// were dropped; kept byte-identical.
13294 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13297 VmaBlockVector* pBlockVector,
13298 uint32_t currFrameIndex,
13299 uint32_t algorithmFlags) :
13301 mutexLocked(false),
13302 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13303 m_hAllocator(hAllocator),
13304 m_hCustomPool(hCustomPool),
13305 m_pBlockVector(pBlockVector),
13306 m_CurrFrameIndex(currFrameIndex),
13307 m_AlgorithmFlags(algorithmFlags),
13308 m_pAlgorithm(VMA_NULL),
13309 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13310 m_AllAllocations(false)
13314 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13316 vma_delete(m_hAllocator, m_pAlgorithm);
13319 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13321 AllocInfo info = { hAlloc, pChanged };
13322 m_Allocations.push_back(info);
// Creates the defragmentation algorithm object and feeds it the registered
// allocations. Chooses the Fast algorithm when its preconditions hold
// (no debug margin, no buffer/image-granularity conflicts — part of the
// condition was lost to garbling), otherwise the Generic one.
// NOTE(review): extraction-garbled text; brace/else lines and parts of the
// condition at 13340 are missing.
13325 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
// "All allocations" mode also triggers implicitly when every allocation of the
// vector was registered explicitly.
13327 const bool allAllocations = m_AllAllocations ||
13328 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13340 if(VMA_DEBUG_MARGIN == 0 &&
13342 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13344 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13345 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13349 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13350 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13355 m_pAlgorithm->AddAll();
// Explicit-allocation mode: forward each queued allocation with its out-flag.
13359 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13361 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Constructor of the top-level defragmentation context: stores allocator/frame
// (and, per the dropped lines, presumably flags and the pStats pointer), and
// zeroes the per-memory-type default-pool context array.
// NOTE(review): extraction-garbled text — parameter lines (e.g. hAllocator,
// flags, pStats) and matching initializers were dropped; kept byte-identical.
13369 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13371 uint32_t currFrameIndex,
13374 m_hAllocator(hAllocator),
13375 m_CurrFrameIndex(currFrameIndex),
13378 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// Null out the fixed-size array so unused memory types are detectable.
13380 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
13383 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13385 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13387 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13388 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13389 vma_delete(m_hAllocator, pBlockVectorCtx);
13391 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13393 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13394 if(pBlockVectorCtx)
13396 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13397 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers custom pools for defragmentation. Only pools using the default
// algorithm are supported; for each, an existing per-pool context is reused or
// a new one is created, then switched to "defragment all allocations" mode.
// NOTE(review): extraction-garbled text; brace lines and some constructor
// argument lines (13425-13429 region) are missing.
13402 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13404 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13406 VmaPool pool = pPools[poolIndex];
// Pools with custom algorithms (linear/buddy) are not defragmented.
13409 if(pool->m_BlockVector.GetAlgorithm() == 0)
13411 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Reuse an existing context for this pool if one was already created.
13413 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13415 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13417 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13422 if(!pBlockVectorDefragCtx)
13424 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13427 &pool->m_BlockVector,
13430 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13433 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. For each block-backed,
// not-lost allocation, finds or creates the per-block-vector context (per
// custom pool, or per memory type for the default pools) and adds the
// allocation together with an optional per-allocation "changed" output flag.
// NOTE(review): extraction-garbled text; brace lines, the pAllocations
// parameter line, the hAlloc local, and some constructor argument lines are
// missing.
13438 void VmaDefragmentationContext_T::AddAllocations(
13439 uint32_t allocationCount,
13441 VkBool32* pAllocationsChanged)
13444 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13447 VMA_ASSERT(hAlloc);
// Only block-suballocated (not dedicated) and not-lost allocations can move.
13449 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13451 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13453 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13455 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation belongs to a custom pool.
13457 if(hAllocPool != VK_NULL_HANDLE)
// Pools with custom algorithms are not defragmented.
13460 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13462 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13464 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13466 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13470 if(!pBlockVectorDefragCtx)
13472 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13475 &hAllocPool->m_BlockVector,
13478 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to a default pool: context cached per memory type.
13485 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13486 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13487 if(!pBlockVectorDefragCtx)
13489 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13492 m_hAllocator->m_pBlockVectors[memTypeIndex],
13495 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13499 if(pBlockVectorDefragCtx)
// Each allocation's change-flag slot parallels the input array, if provided.
13501 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13502 &pAllocationsChanged[allocIndex] : VMA_NULL;
13503 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Executes defragmentation on every registered block-vector context:
// first the default per-memory-type contexts, then the custom-pool contexts.
// With no command buffer the GPU budgets are zeroed, making this a CPU-only
// pass. Both loops stop early as soon as a context reports res < VK_SUCCESS;
// the first non-success result is propagated to the caller.
// NOTE(review): this extract elides some original lines; code kept byte-identical.
13509 VkResult VmaDefragmentationContext_T::Defragment(
13510 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13511 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// No command buffer => GPU-side moves are impossible; disable them.
13519 if(commandBuffer == VK_NULL_HANDLE)
13521 maxGpuBytesToMove = 0;
13522 maxGpuAllocationsToMove = 0;
13525 VkResult res = VK_SUCCESS;
// Process default pools (one context per memory type, may be null).
13528 for(uint32_t memTypeIndex = 0;
13529 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13532 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13533 if(pBlockVectorCtx)
13535 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13536 pBlockVectorCtx->GetBlockVector()->Defragment(
13539 maxCpuBytesToMove, maxCpuAllocationsToMove,
13540 maxGpuBytesToMove, maxGpuAllocationsToMove,
13542 if(pBlockVectorCtx->res != VK_SUCCESS)
13544 res = pBlockVectorCtx->res;
// Process custom pools (every context in this vector is non-null).
13550 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13551 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13554 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13555 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13556 pBlockVectorCtx->GetBlockVector()->Defragment(
13559 maxCpuBytesToMove, maxCpuAllocationsToMove,
13560 maxGpuBytesToMove, maxGpuAllocationsToMove,
13562 if(pBlockVectorCtx->res != VK_SUCCESS)
13564 res = pBlockVectorCtx->res;
// VmaRecorder constructor and (fragment of) Init(): the recorder captures the
// QueryPerformanceCounter frequency and a start timestamp, then opens the
// recording file given by settings.pFilePath with fopen_s ("wb"); on failure
// Init returns VK_ERROR_INITIALIZATION_FAILED. On success it writes the CSV
// header and file-format version "1,5".
// NOTE(review): Init's signature line is elided from this extract; code kept byte-identical.
#if VMA_RECORDING_ENABLED 13576 VmaRecorder::VmaRecorder() :
13581 m_StartCounter(INT64_MAX)
13587 m_UseMutex = useMutex;
13588 m_Flags = settings.
flags;
// Time base for the per-call timestamps written by GetBasicParams().
13590 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13591 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13594 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
13597 return VK_ERROR_INITIALIZATION_FAILED;
// CSV header, then recording file format version (major,minor).
13601 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13602 fprintf(m_File,
"%s\n",
"1,5");
// Destructor: releases the recording file if Init() opened one.
// NOTE(review): the body after the null check is elided from this extract —
// presumably fclose(m_File); verify against the full source.
13607 VmaRecorder::~VmaRecorder()
13609 if(m_File != VMA_NULL)
// Appends CSV line "<threadId>,<time>,<frameIndex>,vmaCreateAllocator" under the file mutex.
13615 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13617 CallParams callParams;
13618 GetBasicParams(callParams);
13620 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13621 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends CSV line "<threadId>,<time>,<frameIndex>,vmaDestroyAllocator" under the file mutex.
13625 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13627 CallParams callParams;
13628 GetBasicParams(callParams);
13630 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13631 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Fragment of RecordCreatePool (signature line elided from this extract):
// appends CSV line "...,vmaCreatePool,<create-info fields>,<pool ptr>" under the file mutex.
13637 CallParams callParams;
13638 GetBasicParams(callParams);
13640 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13641 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends CSV line "...,vmaDestroyPool,<pool ptr>" under the file mutex.
13652 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13654 CallParams callParams;
13655 GetBasicParams(callParams);
13657 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13658 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends CSV line "...,vmaAllocateMemory,<memReq>,<createInfo>,<alloc ptr>,<userData>"
// under the file mutex. UserDataString stringifies createInfo.pUserData
// (as text or pointer depending on flags).
// NOTE(review): some fprintf arguments are elided from this extract.
13663 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13664 const VkMemoryRequirements& vkMemReq,
13668 CallParams callParams;
13669 GetBasicParams(callParams);
13671 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13672 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13673 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13675 vkMemReq.alignment,
13676 vkMemReq.memoryTypeBits,
13684 userDataStr.GetString());
// Appends CSV line "...,vmaAllocateMemoryPages,<memReq>,<createInfo>," followed by
// the space-separated allocation pointer list (PrintPointerList) and ",<userData>".
13688 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13689 const VkMemoryRequirements& vkMemReq,
13691 uint64_t allocationCount,
13694 CallParams callParams;
13695 GetBasicParams(callParams);
13697 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13698 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13699 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13701 vkMemReq.alignment,
13702 vkMemReq.memoryTypeBits,
13709 PrintPointerList(allocationCount, pAllocations);
13710 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Appends CSV line "...,vmaAllocateMemoryForBuffer,..." including the
// requires/prefers-dedicated flags encoded as 0/1.
13714 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13715 const VkMemoryRequirements& vkMemReq,
13716 bool requiresDedicatedAllocation,
13717 bool prefersDedicatedAllocation,
13721 CallParams callParams;
13722 GetBasicParams(callParams);
13724 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13725 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13726 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13728 vkMemReq.alignment,
13729 vkMemReq.memoryTypeBits,
13730 requiresDedicatedAllocation ? 1 : 0,
13731 prefersDedicatedAllocation ? 1 : 0,
13739 userDataStr.GetString());
// Appends CSV line "...,vmaAllocateMemoryForImage,..." — same shape as the
// buffer variant above, with the dedicated-allocation hints encoded as 0/1.
13743 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13744 const VkMemoryRequirements& vkMemReq,
13745 bool requiresDedicatedAllocation,
13746 bool prefersDedicatedAllocation,
13750 CallParams callParams;
13751 GetBasicParams(callParams);
13753 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13754 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13755 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13757 vkMemReq.alignment,
13758 vkMemReq.memoryTypeBits,
13759 requiresDedicatedAllocation ? 1 : 0,
13760 prefersDedicatedAllocation ? 1 : 0,
13768 userDataStr.GetString());
// Appends CSV line "...,vmaFreeMemory,<allocation ptr>" under the file mutex.
13772 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13775 CallParams callParams;
13776 GetBasicParams(callParams);
13778 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13779 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends CSV line "...,vmaFreeMemoryPages," followed by the space-separated
// allocation pointer list and a newline.
13784 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13785 uint64_t allocationCount,
13788 CallParams callParams;
13789 GetBasicParams(callParams);
13791 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13792 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13793 PrintPointerList(allocationCount, pAllocations);
13794 fprintf(m_File,
"\n");
// Appends CSV line "...,vmaResizeAllocation,<allocation ptr>,<newSize>" under the file mutex.
13798 void VmaRecorder::RecordResizeAllocation(
13799 uint32_t frameIndex,
13801 VkDeviceSize newSize)
13803 CallParams callParams;
13804 GetBasicParams(callParams);
13806 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13807 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13808 allocation, newSize);
// Appends CSV line "...,vmaSetAllocationUserData,<allocation ptr>,<userData>",
// stringifying pUserData via UserDataString.
13812 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13814 const void* pUserData)
13816 CallParams callParams;
13817 GetBasicParams(callParams);
13819 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13820 UserDataString userDataStr(
13823 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13825 userDataStr.GetString());
// Appends CSV line "...,vmaCreateLostAllocation,<allocation ptr>" under the file mutex.
13829 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13832 CallParams callParams;
13833 GetBasicParams(callParams);
13835 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13836 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends CSV line "...,vmaMapMemory,<allocation ptr>" under the file mutex.
13841 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13844 CallParams callParams;
13845 GetBasicParams(callParams);
13847 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13848 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends CSV line "...,vmaUnmapMemory,<allocation ptr>" under the file mutex.
13853 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13856 CallParams callParams;
13857 GetBasicParams(callParams);
13859 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13860 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends CSV line "...,vmaFlushAllocation,<allocation ptr>,<offset>,<size>" under the file mutex.
13865 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13866 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13868 CallParams callParams;
13869 GetBasicParams(callParams);
13871 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13872 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends CSV line "...,vmaInvalidateAllocation,<allocation ptr>,<offset>,<size>" under the file mutex.
13879 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13880 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13882 CallParams callParams;
13883 GetBasicParams(callParams);
13885 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13886 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends CSV line "...,vmaCreateBuffer,<bufCreateInfo fields>,<allocCreateInfo
// fields>,<pool>,<alloc ptr>,<userData>" under the file mutex.
// NOTE(review): some fprintf arguments are elided from this extract.
13893 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13894 const VkBufferCreateInfo& bufCreateInfo,
13898 CallParams callParams;
13899 GetBasicParams(callParams);
13901 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13902 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13903 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13904 bufCreateInfo.flags,
13905 bufCreateInfo.size,
13906 bufCreateInfo.usage,
13907 bufCreateInfo.sharingMode,
13908 allocCreateInfo.
flags,
13909 allocCreateInfo.
usage,
13913 allocCreateInfo.
pool,
13915 userDataStr.GetString());
// Appends CSV line "...,vmaCreateImage,<full VkImageCreateInfo field dump>,
// <allocCreateInfo fields>,<pool>,<alloc ptr>,<userData>" under the file mutex.
// NOTE(review): some fprintf arguments are elided from this extract.
13919 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13920 const VkImageCreateInfo& imageCreateInfo,
13924 CallParams callParams;
13925 GetBasicParams(callParams);
13927 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13928 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13929 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13930 imageCreateInfo.flags,
13931 imageCreateInfo.imageType,
13932 imageCreateInfo.format,
13933 imageCreateInfo.extent.width,
13934 imageCreateInfo.extent.height,
13935 imageCreateInfo.extent.depth,
13936 imageCreateInfo.mipLevels,
13937 imageCreateInfo.arrayLayers,
13938 imageCreateInfo.samples,
13939 imageCreateInfo.tiling,
13940 imageCreateInfo.usage,
13941 imageCreateInfo.sharingMode,
13942 imageCreateInfo.initialLayout,
13943 allocCreateInfo.
flags,
13944 allocCreateInfo.
usage,
13948 allocCreateInfo.
pool,
13950 userDataStr.GetString());
// Appends CSV line "...,vmaDestroyBuffer,<allocation ptr>" under the file mutex.
13954 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13957 CallParams callParams;
13958 GetBasicParams(callParams);
13960 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13961 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends CSV line "...,vmaDestroyImage,<allocation ptr>" under the file mutex.
13966 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13969 CallParams callParams;
13970 GetBasicParams(callParams);
13972 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13973 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends CSV line "...,vmaTouchAllocation,<allocation ptr>" under the file mutex.
13978 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13981 CallParams callParams;
13982 GetBasicParams(callParams);
13984 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13985 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends CSV line "...,vmaGetAllocationInfo,<allocation ptr>" under the file mutex.
13990 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13993 CallParams callParams;
13994 GetBasicParams(callParams);
13996 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13997 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends CSV line "...,vmaMakePoolAllocationsLost,<pool ptr>" under the file mutex.
14002 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
14005 CallParams callParams;
14006 GetBasicParams(callParams);
14008 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14009 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends CSV line "...,vmaDefragmentationBegin,<flags>,<pointer lists>,
// <budgets>,<cmd buffer>,<context>" under the file mutex. The pointer lists
// between the two trailing fprintf calls are elided from this extract.
14014 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
14018 CallParams callParams;
14019 GetBasicParams(callParams);
14021 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14022 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
14025 fprintf(m_File,
",");
14027 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Appends CSV line "...,vmaDefragmentationEnd,<context ptr>" under the file mutex.
14037 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
14040 CallParams callParams;
14041 GetBasicParams(callParams);
14043 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14044 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of VmaRecorder::UserDataString's constructor (signature elided from
// this extract): when pUserData is non-null it is either used directly as a
// C string (m_Str) or formatted as a pointer into m_PtrStr via sprintf_s —
// the flag check selecting between the two is in elided lines.
14051 if(pUserData != VMA_NULL)
14055 m_Str = (
const char*)pUserData;
14059 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" ... "Config,End" section of the recording file:
// physical-device identity and limits, the full memory heap/type layout,
// whether VK_KHR_dedicated_allocation is enabled, and the values of the
// VMA_DEBUG_* / block-size compile-time macros. This lets the replay tool
// reproduce the environment the recording was made in.
14069 void VmaRecorder::WriteConfiguration(
14070 const VkPhysicalDeviceProperties& devProps,
14071 const VkPhysicalDeviceMemoryProperties& memProps,
14072 bool dedicatedAllocationExtensionEnabled)
14074 fprintf(m_File,
"Config,Begin\n");
// Device identity.
14076 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14077 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14078 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14079 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14080 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14081 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits relevant to allocation behavior.
14083 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14084 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14085 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps and types.
14087 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14088 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
14090 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
14091 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
14093 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14094 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
14096 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
14097 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
14100 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time configuration of this VMA build.
14102 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14103 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14104 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14105 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14106 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14107 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14108 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14109 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14110 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14112 fprintf(m_File,
"Config,End\n");
// Fills outParams with the calling thread's id and the time in seconds since
// the recorder was constructed, derived from QueryPerformanceCounter relative
// to m_StartCounter, scaled by the cached frequency m_Freq (Windows-only).
14115 void VmaRecorder::GetBasicParams(CallParams& outParams)
14117 outParams.threadId = GetCurrentThreadId();
14119 LARGE_INTEGER counter;
14120 QueryPerformanceCounter(&counter);
14121 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Writes `count` allocation handles to m_File as "%p" values separated by
// single spaces (first element unprefixed, the rest prefixed with a space).
// NOTE(review): the guard for count == 0 before indexing pItems[0] is in
// lines elided from this extract — confirm against the full source.
14124 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
14128 fprintf(m_File,
"%p", pItems[0]);
14129 for(uint64_t i = 1; i < count; ++i)
14131 fprintf(m_File,
" %p", pItems[i]);
14136 void VmaRecorder::Flush()
// Pool allocator for VmaAllocation_T objects: a VmaPoolAllocator with 1024
// items per block, guarded by m_Mutex. Alloc() (its signature line is elided
// here) and Free() simply forward to the pool under the lock.
14144 #endif // #if VMA_RECORDING_ENABLED 14149 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14150 m_Allocator(pAllocationCallbacks, 1024)
14156 VmaMutexLock mutexLock(m_Mutex);
14157 return m_Allocator.Alloc();
14160 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14162 VmaMutexLock mutexLock(m_Mutex);
14163 m_Allocator.Free(hAlloc);
// Fragment of VmaAllocator_T's constructor (signature and some member
// initializers are elided from this extract). Visible work:
//  - initializer list copies create-info fields and zeroes bookkeeping;
//  - zero-fills callback/property/vector arrays;
//  - queries physical-device and memory properties via the imported
//    function pointers;
//  - applies pHeapSizeLimit, clamping reported heap sizes;
//  - creates one default VmaBlockVector and one dedicated-allocation vector
//    per memory type;
//  - optionally constructs the VmaRecorder (pRecordSettings) or fails with
//    VK_ERROR_FEATURE_NOT_PRESENT when recording support is compiled out.
14172 m_hDevice(pCreateInfo->device),
14173 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
14174 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14175 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14176 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14177 m_PreferredLargeHeapBlockSize(0),
14178 m_PhysicalDevice(pCreateInfo->physicalDevice),
14179 m_CurrentFrameIndex(0),
14180 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
14181 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14184 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32_t magic values into the margins,
// so the margin must be a multiple of 4 bytes.
14187 if(VMA_DEBUG_DETECT_CORRUPTION)
14190 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
14195 #if !(VMA_DEDICATED_ALLOCATION) 14198 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14202 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14203 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14204 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14206 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14207 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
14209 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14211 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
14222 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14223 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14225 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14226 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14227 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14228 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// User-imposed heap size limits also clamp the sizes VMA reports/uses.
14235 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14237 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14238 if(limit != VK_WHOLE_SIZE)
14240 m_HeapSizeLimit[heapIndex] = limit;
14241 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14243 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + dedicated-allocation list per memory type.
14249 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14251 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14253 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14257 preferredBlockSize,
14260 GetBufferImageGranularity(),
14267 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14274 VkResult res = VK_SUCCESS;
14279 #if VMA_RECORDING_ENABLED 14280 m_pRecorder = vma_new(
this, VmaRecorder)();
14282 if(res != VK_SUCCESS)
14286 m_pRecorder->WriteConfiguration(
14287 m_PhysicalDeviceProperties,
14289 m_UseKhrDedicatedAllocation);
14290 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14292 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14293 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records and deletes the recorder (if recording was enabled),
// asserts all custom pools were destroyed, asserts no dedicated allocations
// leaked, then deletes the per-memory-type dedicated lists and block vectors
// in reverse order.
14300 VmaAllocator_T::~VmaAllocator_T()
14302 #if VMA_RECORDING_ENABLED 14303 if(m_pRecorder != VMA_NULL)
14305 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14306 vma_delete(
this, m_pRecorder);
14310 VMA_ASSERT(m_Pools.empty());
14312 for(
size_t i = GetMemoryTypeCount(); i--; )
14314 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14316 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14319 vma_delete(
this, m_pDedicatedAllocations[i]);
14320 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions in three stages:
//  1. When statically linked (VMA_STATIC_VULKAN_FUNCTIONS == 1), take the
//     global Vulkan entry points; the two KHR dedicated-allocation functions
//     are fetched via vkGetDeviceProcAddr when that extension is in use.
//  2. Override any entry with a non-null pointer from the caller-supplied
//     pVulkanFunctions (VMA_COPY_IF_NOT_NULL).
//  3. Assert that every required function pointer ended up non-null.
14324 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14326 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14327 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14328 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14329 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14330 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14331 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14332 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14333 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14334 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14335 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14336 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14337 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14338 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14339 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14340 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14341 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14342 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14343 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// Extension functions have no static prototypes; load them per-device.
14344 #if VMA_DEDICATED_ALLOCATION 14345 if(m_UseKhrDedicatedAllocation)
14347 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14348 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14349 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14350 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
14352 #endif // #if VMA_DEDICATED_ALLOCATION 14353 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14355 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14356 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14358 if(pVulkanFunctions != VMA_NULL)
14360 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14361 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14362 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14363 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14364 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14365 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14366 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14367 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14368 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14369 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14370 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14371 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14372 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14373 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14374 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14375 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14376 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14377 #if VMA_DEDICATED_ALLOCATION 14378 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14379 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every required pointer must be resolved by now,
// either statically or by the caller.
14383 #undef VMA_COPY_IF_NOT_NULL 14387 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14388 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14389 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14390 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14391 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14392 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14393 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14394 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14395 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14396 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14397 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14398 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14399 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14400 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14401 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14402 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14403 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14404 #if VMA_DEDICATED_ALLOCATION 14405 if(m_UseKhrDedicatedAllocation)
14407 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14408 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14413 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14415 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14416 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14417 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14418 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates `allocationCount` allocations from a specific memory type.
// Strategy: prefer a dedicated allocation when forced by debug macro, by the
// caller, or when the request exceeds half the preferred block size (default
// pools only); otherwise try the memory type's block vector, and on failure
// fall back to dedicated memory — unless NEVER_ALLOCATE semantics forbid it,
// in which case VK_ERROR_OUT_OF_DEVICE_MEMORY is returned.
// NOTE(review): this extract elides several condition/argument lines; code kept byte-identical.
14421 VkResult VmaAllocator_T::AllocateMemoryOfType(
14423 VkDeviceSize alignment,
14424 bool dedicatedAllocation,
14425 VkBuffer dedicatedBuffer,
14426 VkImage dedicatedImage,
14428 uint32_t memTypeIndex,
14429 VmaSuballocationType suballocType,
14430 size_t allocationCount,
14433 VMA_ASSERT(pAllocations != VMA_NULL);
14434 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Mapping is only possible on HOST_VISIBLE memory; related flags are
// dropped for non-host-visible types (condition partially elided here).
14440 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14445 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14446 VMA_ASSERT(blockVector);
14448 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
14449 bool preferDedicatedMemory =
14450 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14451 dedicatedAllocation ||
// Heuristic: very large requests go straight to dedicated memory.
14453 size > preferredBlockSize / 2;
14455 if(preferDedicatedMemory &&
14457 finalCreateInfo.
pool == VK_NULL_HANDLE)
14466 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14470 return AllocateDedicatedMemory(
14485 VkResult res = blockVector->Allocate(
14486 m_CurrentFrameIndex.load(),
14493 if(res == VK_SUCCESS)
// Block-vector allocation failed and new allocations are forbidden.
14501 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: try a dedicated allocation.
14505 res = AllocateDedicatedMemory(
14511 finalCreateInfo.pUserData,
14516 if(res == VK_SUCCESS)
14519 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14525 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates `allocationCount` dedicated VkDeviceMemory allocations of `size`
// from `memTypeIndex`. When VK_KHR_dedicated_allocation is in use, chains
// VkMemoryDedicatedAllocateInfoKHR naming the dedicated buffer XOR image.
// All-or-nothing: on any page failure the already-created allocations are
// unwound (unmap/free handled in elided lines, then FreeVulkanMemory,
// SetUserData(null), object free) and pAllocations is zero-filled.
// On success the new allocations are registered, sorted, in the per-type
// dedicated-allocations list under its write lock.
// NOTE(review): this extract elides several lines; code kept byte-identical.
14532 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14534 VmaSuballocationType suballocType,
14535 uint32_t memTypeIndex,
14537 bool isUserDataString,
14539 VkBuffer dedicatedBuffer,
14540 VkImage dedicatedImage,
14541 size_t allocationCount,
14544 VMA_ASSERT(allocationCount > 0 && pAllocations);
14546 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14547 allocInfo.memoryTypeIndex = memTypeIndex;
14548 allocInfo.allocationSize = size;
14550 #if VMA_DEDICATED_ALLOCATION 14551 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14552 if(m_UseKhrDedicatedAllocation)
14554 if(dedicatedBuffer != VK_NULL_HANDLE)
14556 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14557 dedicatedAllocInfo.buffer = dedicatedBuffer;
14558 allocInfo.pNext = &dedicatedAllocInfo;
14560 else if(dedicatedImage != VK_NULL_HANDLE)
14562 dedicatedAllocInfo.image = dedicatedImage;
14563 allocInfo.pNext = &dedicatedAllocInfo;
14566 #endif // #if VMA_DEDICATED_ALLOCATION 14569 VkResult res = VK_SUCCESS;
14570 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14572 res = AllocateDedicatedMemoryPage(
14580 pAllocations + allocIndex);
14581 if(res != VK_SUCCESS)
14587 if(res == VK_SUCCESS)
// Register all new allocations, keeping the vector sorted by pointer.
14591 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14592 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14593 VMA_ASSERT(pDedicatedAllocations);
14594 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14596 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14600 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure path: roll back pages created so far, in reverse order.
14605 while(allocIndex--)
14608 VkDeviceMemory hMemory = currAlloc->GetMemory();
14620 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14622 currAlloc->SetUserData(
this, VMA_NULL);
14624 m_AllocationObjectAllocator.Free(currAlloc);
14627 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates one dedicated VkDeviceMemory page: AllocateVulkanMemory, optional
// persistent map (vkMapMemory; on failure the fresh memory is freed and the
// error returned), then constructs the VmaAllocation object from the pool
// allocator, initializes it as a dedicated allocation, attaches user data,
// and optionally fills the memory with the "created" debug pattern.
// NOTE(review): this extract elides several lines (e.g. the map-flag check);
// code kept byte-identical.
14633 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14635 VmaSuballocationType suballocType,
14636 uint32_t memTypeIndex,
14637 const VkMemoryAllocateInfo& allocInfo,
14639 bool isUserDataString,
14643 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14644 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14647 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
14651 void* pMappedData = VMA_NULL;
14654 res = (*m_VulkanFunctions.vkMapMemory)(
// Mapping failed: release the just-allocated memory before bailing out.
14663 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
14664 FreeVulkanMemory(memTypeIndex, size, hMemory);
14669 *pAllocation = m_AllocationObjectAllocator.Allocate();
14670 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
14671 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14672 (*pAllocation)->SetUserData(
this, pUserData);
14673 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14675 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR to also report whether a dedicated
// allocation is required/preferred; otherwise it falls back to the core
// vkGetBufferMemoryRequirements and reports both hints as false.
14681 void VmaAllocator_T::GetBufferMemoryRequirements(
14683 VkMemoryRequirements& memReq,
14684 bool& requiresDedicatedAllocation,
14685 bool& prefersDedicatedAllocation)
const 14687 #if VMA_DEDICATED_ALLOCATION 14688 if(m_UseKhrDedicatedAllocation)
14690 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14691 memReqInfo.buffer = hBuffer;
14693 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14695 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14696 memReq2.pNext = &memDedicatedReq;
14698 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14700 memReq = memReq2.memoryRequirements;
14701 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14702 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
14705 #endif // #if VMA_DEDICATED_ALLOCATION 14707 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14708 requiresDedicatedAllocation =
false;
14709 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR + VkMemoryDedicatedRequirementsKHR when
// the dedicated-allocation extension is in use, otherwise the core query
// with both dedicated-allocation hints set to false.
14713 void VmaAllocator_T::GetImageMemoryRequirements(
14715 VkMemoryRequirements& memReq,
14716 bool& requiresDedicatedAllocation,
14717 bool& prefersDedicatedAllocation)
const 14719 #if VMA_DEDICATED_ALLOCATION 14720 if(m_UseKhrDedicatedAllocation)
14722 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14723 memReqInfo.image = hImage;
14725 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14727 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14728 memReq2.pNext = &memDedicatedReq;
14730 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14732 memReq = memReq2.memoryRequirements;
14733 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14734 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
14737 #endif // #if VMA_DEDICATED_ALLOCATION 14739 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14740 requiresDedicatedAllocation =
false;
14741 prefersDedicatedAllocation =
false;
// Central allocation entry point. Validates the request, rejects contradictory
// create-flag combinations, routes pool allocations to the pool's block vector,
// and otherwise iterates candidate memory types (best first), retrying with the
// next type when allocation in one fails.
// NOTE(review): many original lines (braces, flag checks, argument lists) are
// elided in this extraction view; the leading numbers are residual line numbers.
14745 VkResult VmaAllocator_T::AllocateMemory(
14746 const VkMemoryRequirements& vkMemReq,
14747 bool requiresDedicatedAllocation,
14748 bool prefersDedicatedAllocation,
14749 VkBuffer dedicatedBuffer,
14750 VkImage dedicatedImage,
14752 VmaSuballocationType suballocType,
14753 size_t allocationCount,
// Zero out the output array up front so callers never see garbage handles on failure.
14756 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
14758 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
// Zero-size allocations are invalid requests.
14760 if(vkMemReq.size == 0)
14762 return VK_ERROR_VALIDATION_FAILED_EXT;
// Reject mutually exclusive create flags (dedicated + never-allocate).
14767 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14768 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Reject mapped + can-become-lost: a lost allocation cannot stay mapped.
14773 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14774 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// If the driver requires a dedicated allocation, flags/pools that forbid one are errors.
14776 if(requiresDedicatedAllocation)
14780 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14781 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14783 if(createInfo.
pool != VK_NULL_HANDLE)
14785 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14786 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14789 if((createInfo.
pool != VK_NULL_HANDLE) &&
14792 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14793 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: honor the memory type's minimum alignment and delegate
// to the pool's block vector.
14796 if(createInfo.
pool != VK_NULL_HANDLE)
14798 const VkDeviceSize alignmentForPool = VMA_MAX(
14799 vkMemReq.alignment,
14800 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()))
14801 return createInfo.
pool->m_BlockVector.Allocate(
14802 m_CurrentFrameIndex.load(),
// Default path: try candidate memory types from vkMemReq.memoryTypeBits,
// best-cost first, masking out each type that fails and retrying.
14813 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14814 uint32_t memTypeIndex = UINT32_MAX;
14816 if(res == VK_SUCCESS)
14818 VkDeviceSize alignmentForMemType = VMA_MAX(
14819 vkMemReq.alignment,
14820 GetMemoryTypeMinAlignment(memTypeIndex));
14822 res = AllocateMemoryOfType(
14824 alignmentForMemType,
14825 requiresDedicatedAllocation || prefersDedicatedAllocation,
14834 if(res == VK_SUCCESS)
// Allocation in this memory type failed: remove it from the candidate set.
14844 memoryTypeBits &= ~(1u << memTypeIndex);
14847 if(res == VK_SUCCESS)
14849 alignmentForMemType = VMA_MAX(
14850 vkMemReq.alignment,
14851 GetMemoryTypeMinAlignment(memTypeIndex));
14853 res = AllocateMemoryOfType(
14855 alignmentForMemType,
14856 requiresDedicatedAllocation || prefersDedicatedAllocation,
14865 if(res == VK_SUCCESS)
// All candidate memory types exhausted.
14875 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations (in reverse order). For each live allocation it
// optionally fills the memory with a debug "destroyed" pattern, returns block
// suballocations to the owning block vector (custom pool or default per-type
// vector), frees dedicated allocations outright, then destroys the allocation
// object and returns it to the object allocator.
// NOTE(review): braces and some lines are elided in this extraction view.
14886 void VmaAllocator_T::FreeMemory(
14887 size_t allocationCount,
14890 VMA_ASSERT(pAllocations);
// Iterate backwards over the array.
14892 for(
size_t allocIndex = allocationCount; allocIndex--; )
14896 if(allocation != VK_NULL_HANDLE)
// TouchAllocation returns whether the allocation is still valid (not lost).
14898 if(TouchAllocation(allocation))
14900 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14902 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14905 switch(allocation->GetType())
14907 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14909 VmaBlockVector* pBlockVector = VMA_NULL;
14910 VmaPool hPool = allocation->GetBlock()->GetParentPool();
14911 if(hPool != VK_NULL_HANDLE)
// Allocation came from a custom pool.
14913 pBlockVector = &hPool->m_BlockVector;
// Otherwise it came from the default block vector for its memory type.
14917 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14918 pBlockVector = m_pBlockVectors[memTypeIndex];
14920 pBlockVector->Free(allocation);
14923 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14924 FreeDedicatedMemory(allocation);
// Clear user data before destruction so destroy callbacks don't see stale pointers.
14931 allocation->SetUserData(
this, VMA_NULL);
14932 allocation->Dtor();
14933 m_AllocationObjectAllocator.Free(allocation);
// Attempts an in-place resize of an allocation. Fails for zero size or lost
// allocations; succeeds trivially when the size is unchanged; dedicated
// allocations cannot be resized; block allocations succeed only if the block's
// metadata can grow/shrink the suballocation in place.
14938 VkResult VmaAllocator_T::ResizeAllocation(
14940 VkDeviceSize newSize)
14942 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14944 return VK_ERROR_VALIDATION_FAILED_EXT;
// No-op resize.
14946 if(newSize == alloc->GetSize())
14951 switch(alloc->GetType())
14953 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Dedicated VkDeviceMemory cannot be resized in place.
14954 return VK_ERROR_FEATURE_NOT_PRESENT;
14955 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14956 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
14958 alloc->ChangeSize(newSize);
14959 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
// Metadata could not accommodate the new size in place.
14964 return VK_ERROR_OUT_OF_POOL_MEMORY;
14968 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates allocator-wide statistics into pStats: initializes all per-type,
// per-heap, and total stat infos, then accumulates stats from the default block
// vectors, the custom pools, and every dedicated allocation, finally
// post-processing (averages etc.) each stat info.
// NOTE(review): braces and the per-entry InitStatInfo calls inside the first two
// loops are elided in this extraction view.
14972 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Zero-initialize all stat buckets.
14975 InitStatInfo(pStats->
total);
14976 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14978 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
14982 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14984 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14985 VMA_ASSERT(pBlockVector);
14986 pBlockVector->AddStats(pStats);
// Custom pools (guarded by the pools mutex, read lock).
14991 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14992 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14994 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type (each guarded by its own mutex).
14999 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15001 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
15002 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15003 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15004 VMA_ASSERT(pDedicatedAllocVector);
15005 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
15008 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
// Each dedicated allocation contributes to total, its type, and its heap.
15009 VmaAddStatInfo(pStats->
total, allocationStatInfo);
15010 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
15011 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Finalize derived values (e.g. averages) for every bucket.
15016 VmaPostprocessCalcStatInfo(pStats->
total);
15017 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
15018 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
15019 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
15020 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI-SIG vendor ID assigned to AMD, used to detect AMD hardware.
static const uint32_t VMA_VENDOR_ID_AMD = 0x1002; // == 4098 decimal
// Starts a defragmentation pass: creates a VmaDefragmentationContext_T, feeds it
// the allocations/pools to process, and runs Defragment. If the result is
// anything other than VK_NOT_READY (i.e. the pass completed or failed
// immediately), the context is destroyed and nulled before returning.
// NOTE(review): several argument lists are elided in this extraction view.
15025 VkResult VmaAllocator_T::DefragmentationBegin(
15035 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
15036 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
15039 (*pContext)->AddAllocations(
15042 VkResult res = (*pContext)->Defragment(
// VK_NOT_READY means the context must stay alive until DefragmentationEnd.
15047 if(res != VK_NOT_READY)
15049 vma_delete(
this, *pContext);
15050 *pContext = VMA_NULL;
// Finishes a defragmentation pass begun with DefragmentationBegin by destroying
// the context object.
15056 VkResult VmaAllocator_T::DefragmentationEnd(
15059 vma_delete(
this, context);
// Body of VmaAllocator_T::GetAllocationInfo (function header elided in this
// view). Fills pAllocationInfo from the allocation. For allocations that can
// become lost, it loops on a compare-exchange of the last-use frame index: a
// lost allocation reports null/zero memory fields, an up-to-date one reports
// its real parameters, and otherwise the frame index is bumped and re-checked.
15065 if(hAllocation->CanBecomeLost())
15071 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15072 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost allocation: report empty info (size/userdata preserved).
15075 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15079 pAllocationInfo->
offset = 0;
15080 pAllocationInfo->
size = hAllocation->GetSize();
15082 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report the real parameters.
15085 else if(localLastUseFrameIndex == localCurrFrameIndex)
15087 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15088 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15089 pAllocationInfo->
offset = hAllocation->GetOffset();
15090 pAllocationInfo->
size = hAllocation->GetSize();
15092 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to bump the last-use frame index and loop again.
15097 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15099 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: with stats enabled, still update the last-use frame index
// (for statistics), then report the allocation's parameters directly.
15106 #if VMA_STATS_STRING_ENABLED 15107 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15108 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15111 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15112 if(localLastUseFrameIndex == localCurrFrameIndex)
15118 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15120 localLastUseFrameIndex = localCurrFrameIndex;
15126 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15127 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15128 pAllocationInfo->
offset = hAllocation->GetOffset();
15129 pAllocationInfo->
size = hAllocation->GetSize();
15130 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15131 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks an allocation as used in the current frame. Returns false if the
// allocation is lost; otherwise compare-exchanges its last-use frame index up
// to the current frame and returns true. Mirrors the frame-index logic of
// GetAllocationInfo without filling any output struct.
15135 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
15138 if(hAllocation->CanBecomeLost())
15140 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15141 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: caller must treat the allocation as invalid.
15144 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15148 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Retry loop: publish the current frame index via CAS.
15154 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15156 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: with stats enabled, still record the touch.
15163 #if VMA_STATS_STRING_ENABLED 15164 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15165 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15168 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15169 if(localLastUseFrameIndex == localCurrFrameIndex)
15175 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15177 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (header and createInfo validation partially
// elided in this view). Creates a custom pool: computes the preferred block
// size for its memory type, constructs the VmaPool_T, eagerly creates the
// pool's minimum block count (destroying the pool again on failure), then
// registers the pool in m_Pools under the pools write lock with a fresh id.
15189 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
15199 return VK_ERROR_INITIALIZATION_FAILED;
15202 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15204 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Pre-create minBlockCount blocks; roll back the pool object if that fails.
15206 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15207 if(res != VK_SUCCESS)
15209 vma_delete(
this, *pPool);
// Register the new pool (sorted insert keeps m_Pools ordered for later removal).
15216 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15217 (*pPool)->SetId(m_NextPoolId++);
15218 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Destroys a custom pool: removes it from the sorted m_Pools vector under the
// write lock (asserting it was actually registered), then deletes the object.
15224 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15228 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15229 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15230 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15233 vma_delete(
this, pool);
// Body of VmaAllocator_T::GetPoolStats (header elided in this view):
// delegates statistics gathering to the pool's block vector.
15238 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Publishes the application's current frame index (atomically) for the
// lost-allocation bookkeeping used by TouchAllocation/GetAllocationInfo.
15241 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15243 m_CurrentFrameIndex.store(frameIndex);
// Marks eligible allocations in a custom pool as lost, reporting how many were
// lost via pLostAllocationCount; delegates to the pool's block vector using the
// current frame index.
15246 void VmaAllocator_T::MakePoolAllocationsLost(
15248 size_t* pLostAllocationCount)
15250 hPool->m_BlockVector.MakePoolAllocationsLost(
15251 m_CurrentFrameIndex.load(),
15252 pLostAllocationCount);
// Runs corruption detection (margin-pattern validation) over a custom pool's
// block vector.
15255 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15257 return hPool->m_BlockVector.CheckCorruption();
// Checks corruption across all default block vectors and custom pools whose
// memory type is in memoryTypeBits. Starts from VK_ERROR_FEATURE_NOT_PRESENT
// (nothing checkable) and upgrades to VK_SUCCESS when at least one vector
// supports the check; other error codes from CheckCorruption propagate via the
// (elided) switch default cases.
15260 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15262 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default block vectors, filtered by the requested memory-type mask.
15265 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15267 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15269 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15270 VMA_ASSERT(pBlockVector);
15271 VkResult localRes = pBlockVector->CheckCorruption();
15274 case VK_ERROR_FEATURE_NOT_PRESENT:
15277 finalRes = VK_SUCCESS;
// Custom pools, same mask filter, under the pools read lock.
15287 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15288 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15290 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15292 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15295 case VK_ERROR_FEATURE_NOT_PRESENT:
15298 finalRes = VK_SUCCESS;
// Creates a placeholder allocation that is permanently in the "lost" state
// (frame index VMA_FRAME_INDEX_LOST, no user-data string, InitLost).
15310 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15312 *pAllocation = m_AllocationObjectAllocator.Allocate();
15313 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15314 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory, enforcing an optional per-heap size limit: when a
// limit is set for the target heap, the allocation is performed under the
// heap-size-limit mutex and the remaining budget is decremented on success (or
// the call fails with VK_ERROR_OUT_OF_DEVICE_MEMORY without touching the
// driver). On success the user's pfnAllocate device-memory callback is invoked.
15317 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15319 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// VK_WHOLE_SIZE sentinel means "no limit configured for this heap".
15322 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15324 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15325 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15327 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15328 if(res == VK_SUCCESS)
// Charge the allocation against the remaining heap budget.
15330 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exceeded: fail without calling the driver.
15335 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited heap: allocate directly.
15340 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user's allocate callback, if installed.
15343 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15345 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Wraps vkFreeMemory: invokes the user's pfnFree device-memory callback first,
// frees the memory, then returns the freed size to the per-heap budget if a
// heap size limit is configured (mirror of AllocateVulkanMemory's accounting).
15351 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15353 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15355 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15358 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Refund the freed size to the heap budget when a limit is in effect.
15360 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15361 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15363 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15364 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation's memory and returns a pointer to its start via ppData.
// Lost-capable allocations cannot be mapped. Block allocations map the whole
// owning block (ref-counted) and offset the returned pointer to the
// suballocation; dedicated allocations map their own VkDeviceMemory.
15368 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15370 if(hAllocation->CanBecomeLost())
15372 return VK_ERROR_MEMORY_MAP_FAILED;
15375 switch(hAllocation->GetType())
15377 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15379 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15380 char *pBytes = VMA_NULL;
// Map the whole block (Map is ref-counted inside VmaDeviceMemoryBlock).
15381 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15382 if(res == VK_SUCCESS)
// Offset the block's base pointer to this suballocation.
15384 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15385 hAllocation->BlockAllocMap();
15389 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15390 return hAllocation->DedicatedAllocMap(
this, ppData);
15393 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (header elided in this view). Reverses Map:
// decrements the allocation's map count and unmaps the owning block (block
// allocations) or the dedicated memory (dedicated allocations).
15399 switch(hAllocation->GetType())
15401 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15403 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15404 hAllocation->BlockAllocUnmap();
15405 pBlock->Unmap(
this, 1);
15408 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15409 hAllocation->DedicatedAllocUnmap(
this);
// Binds a buffer to an allocation's memory: dedicated allocations call
// vkBindBufferMemory directly on the whole VkDeviceMemory; block allocations
// delegate to the block so the bind uses the correct offset (the block
// serializes binds internally).
15416 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
15418 VkResult res = VK_SUCCESS;
15419 switch(hAllocation->GetType())
15421 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15422 res = GetVulkanFunctions().vkBindBufferMemory(
15425 hAllocation->GetMemory(),
15428 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15430 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15431 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15432 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: dedicated allocations use
// vkBindImageMemory on the whole VkDeviceMemory; block allocations delegate to
// the owning block so the correct offset is used.
15441 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
15443 VkResult res = VK_SUCCESS;
15444 switch(hAllocation->GetType())
15446 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15447 res = GetVulkanFunctions().vkBindImageMemory(
15450 hAllocation->GetMemory(),
15453 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15455 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15456 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15457 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a byte range of an allocation on non-coherent memory
// types. Builds a VkMappedMemoryRange whose offset is aligned down and size
// aligned up to nonCoherentAtomSize (as required by the Vulkan spec for
// vkFlush/InvalidateMappedMemoryRanges), clamped to the allocation (dedicated)
// or the containing block (block allocations). No-op for coherent memory or
// size == 0.
15466 void VmaAllocator_T::FlushOrInvalidateAllocation(
15468 VkDeviceSize offset, VkDeviceSize size,
15469 VMA_CACHE_OPERATION op)
15471 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
15472 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15474 const VkDeviceSize allocationSize = hAllocation->GetSize();
15475 VMA_ASSERT(offset <= allocationSize);
15477 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15479 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15480 memRange.memory = hAllocation->GetMemory();
15482 switch(hAllocation->GetType())
15484 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Align the range to nonCoherentAtomSize within the dedicated allocation.
15485 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15486 if(size == VK_WHOLE_SIZE)
15488 memRange.size = allocationSize - memRange.offset;
15492 VMA_ASSERT(offset + size <= allocationSize);
15493 memRange.size = VMA_MIN(
15494 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15495 allocationSize - memRange.offset);
15499 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// First compute the aligned range relative to the suballocation...
15502 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15503 if(size == VK_WHOLE_SIZE)
15505 size = allocationSize - offset;
15509 VMA_ASSERT(offset + size <= allocationSize);
15511 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// ...then translate into block coordinates and clamp to the block size.
15514 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15515 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15516 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15517 memRange.offset += allocationOffset;
15518 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch to the requested cache operation.
15529 case VMA_CACHE_FLUSH:
15530 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15532 case VMA_CACHE_INVALIDATE:
15533 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated allocation: removes it from the per-memory-type dedicated
// allocation registry (under that type's write lock), then releases the
// underlying VkDeviceMemory via FreeVulkanMemory.
15542 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15544 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15546 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15548 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15549 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15550 VMA_ASSERT(pDedicatedAllocations);
15551 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15552 VMA_ASSERT(success);
15555 VkDeviceMemory hMemory = allocation->GetMemory();
15567 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15569 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Determines which memory types are usable for GPU defragmentation by creating
// a temporary dummy buffer (parameters filled in by
// VmaFillGpuDefragmentationBufferCreateInfo), reading its memoryTypeBits, and
// destroying it again. Returns 0 if the dummy buffer cannot be created.
15572 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const 15574 VkBufferCreateInfo dummyBufCreateInfo;
15575 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
15577 uint32_t memoryTypeBits = 0;
// Create the throwaway buffer just to query its memory requirements.
15580 VkBuffer buf = VMA_NULL;
15581 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
15582 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
15583 if(res == VK_SUCCESS)
15586 VkMemoryRequirements memReq;
15587 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
15588 memoryTypeBits = memReq.memoryTypeBits;
15591 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
15594 return memoryTypeBits;
// Debug helper: fills an allocation's memory with a byte pattern (created /
// destroyed markers). Only acts when VMA_DEBUG_INITIALIZE_ALLOCATIONS is on,
// the allocation cannot become lost, and its memory type is HOST_VISIBLE.
// Maps, memsets, flushes (for non-coherent memory), and unmaps; asserts if the
// map fails.
15597 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15599 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15600 !hAllocation->CanBecomeLost() &&
15601 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15603 void* pData = VMA_NULL;
15604 VkResult res = Map(hAllocation, &pData);
15605 if(res == VK_SUCCESS)
15607 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush so the pattern is visible even on non-coherent memory.
15608 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15609 Unmap(hAllocation);
15613 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Lazily computes and caches the GPU-defragmentation memory-type mask.
// UINT32_MAX is the "not yet computed" sentinel. The check-then-store on the
// atomic is not a single atomic operation, so two threads may both compute the
// value — presumably benign because the result is deterministic (TODO confirm).
15618 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
15620 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
15621 if(memoryTypeBits == UINT32_MAX)
15623 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
15624 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
15626 return memoryTypeBits;
// (Compiled only when VMA_STATS_STRING_ENABLED.) Writes the detailed memory map
// as JSON: a "DedicatedAllocations" object keyed by "Type N", a "DefaultPools"
// object for non-empty default block vectors, and a "Pools" object keyed by
// pool id. The *Started flags lazily open each JSON object only when there is
// content to report.
15629 #if VMA_STATS_STRING_ENABLED 15631 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15633 bool dedicatedAllocationsStarted =
false;
15634 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15636 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15637 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15638 VMA_ASSERT(pDedicatedAllocVector);
15639 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object on first non-empty type.
15641 if(dedicatedAllocationsStarted ==
false)
15643 dedicatedAllocationsStarted =
true;
15644 json.WriteString(
"DedicatedAllocations");
15645 json.BeginObject();
15648 json.BeginString(
"Type ");
15649 json.ContinueString(memTypeIndex);
15654 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15656 json.BeginObject(
true);
15658 hAlloc->PrintParameters(json);
15665 if(dedicatedAllocationsStarted)
// Default (non-pool) block vectors, one entry per non-empty memory type.
15671 bool allocationsStarted =
false;
15672 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15674 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15676 if(allocationsStarted ==
false)
15678 allocationsStarted =
true;
15679 json.WriteString(
"DefaultPools");
15680 json.BeginObject();
15683 json.BeginString(
"Type ");
15684 json.ContinueString(memTypeIndex);
15687 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15690 if(allocationsStarted)
// Custom pools, keyed by pool id, under the pools read lock.
15698 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15699 const size_t poolCount = m_Pools.size();
15702 json.WriteString(
"Pools");
15703 json.BeginObject();
15704 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15706 json.BeginString();
15707 json.ContinueString(m_Pools[poolIndex]->GetId());
15710 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Public C API: vmaCreateAllocator (header/allocation of the VmaAllocator_T
// object elided in this view). Validates arguments and runs the allocator's
// Init with the user-supplied create info.
15717 #endif // #if VMA_STATS_STRING_ENABLED 15726 VMA_ASSERT(pCreateInfo && pAllocator);
15727 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15729 return (*pAllocator)->Init(pCreateInfo);
// Public C API: vmaDestroyAllocator body (header elided in this view).
// Destroys the allocator; the allocation callbacks are copied to a local first
// because the allocator object (which owns them) is deleted through them.
15735 if(allocator != VK_NULL_HANDLE)
15737 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15738 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15739 vma_delete(&allocationCallbacks, allocator);
// Public C API: returns a pointer to the allocator's cached
// VkPhysicalDeviceProperties (valid for the allocator's lifetime).
15745 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15747 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15748 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// Public C API: returns a pointer to the allocator's cached
// VkPhysicalDeviceMemoryProperties (valid for the allocator's lifetime).
15753 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15755 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15756 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// Public C API: looks up the property flags of one memory type from the cached
// memory properties; asserts the index is in range.
15761 uint32_t memoryTypeIndex,
15762 VkMemoryPropertyFlags* pFlags)
15764 VMA_ASSERT(allocator && pFlags);
15765 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15766 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// Public C API: forwards the application's frame index to the allocator.
// VMA_FRAME_INDEX_LOST is reserved as a sentinel and rejected by assert.
15771 uint32_t frameIndex)
15773 VMA_ASSERT(allocator);
15774 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15776 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15778 allocator->SetCurrentFrameIndex(frameIndex);
// Public C API: vmaCalculateStats body (header elided in this view); delegates
// to VmaAllocator_T::CalculateStats under the optional global debug mutex.
15785 VMA_ASSERT(allocator && pStats);
15786 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15787 allocator->CalculateStats(pStats);
// (Compiled only when VMA_STATS_STRING_ENABLED.) Public C API:
// vmaBuildStatsString. Serializes allocator statistics to a JSON string:
// "Total" stats, then per-heap objects (size, flags, stats) containing
// per-type sub-objects (property flags, stats), plus the detailed memory map
// when detailedMap is VK_TRUE. The string is allocated with the allocator's
// callbacks and must be released with vmaFreeStatsString.
15790 #if VMA_STATS_STRING_ENABLED 15794 char** ppStatsString,
15795 VkBool32 detailedMap)
15797 VMA_ASSERT(allocator && ppStatsString);
15798 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15800 VmaStringBuilder sb(allocator);
15802 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15803 json.BeginObject();
15806 allocator->CalculateStats(&stats);
15808 json.WriteString(
"Total");
15809 VmaPrintStatInfo(json, stats.
total);
// One JSON object per memory heap.
15811 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15813 json.BeginString(
"Heap ");
15814 json.ContinueString(heapIndex);
15816 json.BeginObject();
15818 json.WriteString(
"Size");
15819 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15821 json.WriteString(
"Flags");
15822 json.BeginArray(
true);
15823 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15825 json.WriteString(
"DEVICE_LOCAL");
15831 json.WriteString(
"Stats");
15832 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nested objects for each memory type belonging to this heap.
15835 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15837 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15839 json.BeginString(
"Type ");
15840 json.ContinueString(typeIndex);
15843 json.BeginObject();
15845 json.WriteString(
"Flags");
15846 json.BeginArray(
true);
15847 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15848 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15850 json.WriteString(
"DEVICE_LOCAL");
15852 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15854 json.WriteString(
"HOST_VISIBLE");
15856 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15858 json.WriteString(
"HOST_COHERENT");
15860 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15862 json.WriteString(
"HOST_CACHED");
15864 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15866 json.WriteString(
"LAZILY_ALLOCATED");
15872 json.WriteString(
"Stats");
15873 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional detailed per-block map.
15882 if(detailedMap == VK_TRUE)
15884 allocator->PrintDetailedMap(json);
// Copy the builder's buffer into a NUL-terminated string owned by the caller.
15890 const size_t len = sb.GetLength();
15891 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15894 memcpy(pChars, sb.GetData(), len);
15896 pChars[len] =
'\0';
15897 *ppStatsString = pChars;
15902 char* pStatsString)
15904 if(pStatsString != VMA_NULL)
15906 VMA_ASSERT(allocator);
15907 size_t len = strlen(pStatsString);
15908 vma_delete_array(allocator, pStatsString, len + 1);
15912 #endif // #if VMA_STATS_STRING_ENABLED 15919 uint32_t memoryTypeBits,
15921 uint32_t* pMemoryTypeIndex)
15923 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15924 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15925 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15932 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15933 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15936 switch(pAllocationCreateInfo->
usage)
15941 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15943 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15947 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15950 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15951 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15953 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15957 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15958 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
15964 *pMemoryTypeIndex = UINT32_MAX;
15965 uint32_t minCost = UINT32_MAX;
15966 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15967 memTypeIndex < allocator->GetMemoryTypeCount();
15968 ++memTypeIndex, memTypeBit <<= 1)
15971 if((memTypeBit & memoryTypeBits) != 0)
15973 const VkMemoryPropertyFlags currFlags =
15974 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
15976 if((requiredFlags & ~currFlags) == 0)
15979 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
15981 if(currCost < minCost)
15983 *pMemoryTypeIndex = memTypeIndex;
15988 minCost = currCost;
15993 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
15998 const VkBufferCreateInfo* pBufferCreateInfo,
16000 uint32_t* pMemoryTypeIndex)
16002 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16003 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
16004 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16005 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16007 const VkDevice hDev = allocator->m_hDevice;
16008 VkBuffer hBuffer = VK_NULL_HANDLE;
16009 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
16010 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
16011 if(res == VK_SUCCESS)
16013 VkMemoryRequirements memReq = {};
16014 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
16015 hDev, hBuffer, &memReq);
16019 memReq.memoryTypeBits,
16020 pAllocationCreateInfo,
16023 allocator->GetVulkanFunctions().vkDestroyBuffer(
16024 hDev, hBuffer, allocator->GetAllocationCallbacks());
16031 const VkImageCreateInfo* pImageCreateInfo,
16033 uint32_t* pMemoryTypeIndex)
16035 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16036 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
16037 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16038 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16040 const VkDevice hDev = allocator->m_hDevice;
16041 VkImage hImage = VK_NULL_HANDLE;
16042 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
16043 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
16044 if(res == VK_SUCCESS)
16046 VkMemoryRequirements memReq = {};
16047 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
16048 hDev, hImage, &memReq);
16052 memReq.memoryTypeBits,
16053 pAllocationCreateInfo,
16056 allocator->GetVulkanFunctions().vkDestroyImage(
16057 hDev, hImage, allocator->GetAllocationCallbacks());
16067 VMA_ASSERT(allocator && pCreateInfo && pPool);
16069 VMA_DEBUG_LOG(
"vmaCreatePool");
16071 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16073 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
16075 #if VMA_RECORDING_ENABLED 16076 if(allocator->GetRecorder() != VMA_NULL)
16078 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
16089 VMA_ASSERT(allocator);
16091 if(pool == VK_NULL_HANDLE)
16096 VMA_DEBUG_LOG(
"vmaDestroyPool");
16098 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16100 #if VMA_RECORDING_ENABLED 16101 if(allocator->GetRecorder() != VMA_NULL)
16103 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
16107 allocator->DestroyPool(pool);
16115 VMA_ASSERT(allocator && pool && pPoolStats);
16117 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16119 allocator->GetPoolStats(pool, pPoolStats);
16125 size_t* pLostAllocationCount)
16127 VMA_ASSERT(allocator && pool);
16129 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16131 #if VMA_RECORDING_ENABLED 16132 if(allocator->GetRecorder() != VMA_NULL)
16134 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
16138 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
16143 VMA_ASSERT(allocator && pool);
16145 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16147 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
16149 return allocator->CheckPoolCorruption(pool);
16154 const VkMemoryRequirements* pVkMemoryRequirements,
16159 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16161 VMA_DEBUG_LOG(
"vmaAllocateMemory");
16163 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16165 VkResult result = allocator->AllocateMemory(
16166 *pVkMemoryRequirements,
16172 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16176 #if VMA_RECORDING_ENABLED 16177 if(allocator->GetRecorder() != VMA_NULL)
16179 allocator->GetRecorder()->RecordAllocateMemory(
16180 allocator->GetCurrentFrameIndex(),
16181 *pVkMemoryRequirements,
16187 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16189 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16197 const VkMemoryRequirements* pVkMemoryRequirements,
16199 size_t allocationCount,
16203 if(allocationCount == 0)
16208 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16210 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
16212 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16214 VkResult result = allocator->AllocateMemory(
16215 *pVkMemoryRequirements,
16221 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16225 #if VMA_RECORDING_ENABLED 16226 if(allocator->GetRecorder() != VMA_NULL)
16228 allocator->GetRecorder()->RecordAllocateMemoryPages(
16229 allocator->GetCurrentFrameIndex(),
16230 *pVkMemoryRequirements,
16232 (uint64_t)allocationCount,
16237 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16239 for(
size_t i = 0; i < allocationCount; ++i)
16241 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16255 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16257 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16259 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16261 VkMemoryRequirements vkMemReq = {};
16262 bool requiresDedicatedAllocation =
false;
16263 bool prefersDedicatedAllocation =
false;
16264 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16265 requiresDedicatedAllocation,
16266 prefersDedicatedAllocation);
16268 VkResult result = allocator->AllocateMemory(
16270 requiresDedicatedAllocation,
16271 prefersDedicatedAllocation,
16275 VMA_SUBALLOCATION_TYPE_BUFFER,
16279 #if VMA_RECORDING_ENABLED 16280 if(allocator->GetRecorder() != VMA_NULL)
16282 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16283 allocator->GetCurrentFrameIndex(),
16285 requiresDedicatedAllocation,
16286 prefersDedicatedAllocation,
16292 if(pAllocationInfo && result == VK_SUCCESS)
16294 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16307 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16309 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16311 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16313 VkMemoryRequirements vkMemReq = {};
16314 bool requiresDedicatedAllocation =
false;
16315 bool prefersDedicatedAllocation =
false;
16316 allocator->GetImageMemoryRequirements(image, vkMemReq,
16317 requiresDedicatedAllocation, prefersDedicatedAllocation);
16319 VkResult result = allocator->AllocateMemory(
16321 requiresDedicatedAllocation,
16322 prefersDedicatedAllocation,
16326 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16330 #if VMA_RECORDING_ENABLED 16331 if(allocator->GetRecorder() != VMA_NULL)
16333 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16334 allocator->GetCurrentFrameIndex(),
16336 requiresDedicatedAllocation,
16337 prefersDedicatedAllocation,
16343 if(pAllocationInfo && result == VK_SUCCESS)
16345 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16355 VMA_ASSERT(allocator);
16357 if(allocation == VK_NULL_HANDLE)
16362 VMA_DEBUG_LOG(
"vmaFreeMemory");
16364 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16366 #if VMA_RECORDING_ENABLED 16367 if(allocator->GetRecorder() != VMA_NULL)
16369 allocator->GetRecorder()->RecordFreeMemory(
16370 allocator->GetCurrentFrameIndex(),
16375 allocator->FreeMemory(
16382 size_t allocationCount,
16385 if(allocationCount == 0)
16390 VMA_ASSERT(allocator);
16392 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16394 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16396 #if VMA_RECORDING_ENABLED 16397 if(allocator->GetRecorder() != VMA_NULL)
16399 allocator->GetRecorder()->RecordFreeMemoryPages(
16400 allocator->GetCurrentFrameIndex(),
16401 (uint64_t)allocationCount,
16406 allocator->FreeMemory(allocationCount, pAllocations);
16412 VkDeviceSize newSize)
16414 VMA_ASSERT(allocator && allocation);
16416 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16418 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16420 #if VMA_RECORDING_ENABLED 16421 if(allocator->GetRecorder() != VMA_NULL)
16423 allocator->GetRecorder()->RecordResizeAllocation(
16424 allocator->GetCurrentFrameIndex(),
16430 return allocator->ResizeAllocation(allocation, newSize);
16438 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16440 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16442 #if VMA_RECORDING_ENABLED 16443 if(allocator->GetRecorder() != VMA_NULL)
16445 allocator->GetRecorder()->RecordGetAllocationInfo(
16446 allocator->GetCurrentFrameIndex(),
16451 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16458 VMA_ASSERT(allocator && allocation);
16460 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16462 #if VMA_RECORDING_ENABLED 16463 if(allocator->GetRecorder() != VMA_NULL)
16465 allocator->GetRecorder()->RecordTouchAllocation(
16466 allocator->GetCurrentFrameIndex(),
16471 return allocator->TouchAllocation(allocation);
16479 VMA_ASSERT(allocator && allocation);
16481 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16483 allocation->SetUserData(allocator, pUserData);
16485 #if VMA_RECORDING_ENABLED 16486 if(allocator->GetRecorder() != VMA_NULL)
16488 allocator->GetRecorder()->RecordSetAllocationUserData(
16489 allocator->GetCurrentFrameIndex(),
16500 VMA_ASSERT(allocator && pAllocation);
16502 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16504 allocator->CreateLostAllocation(pAllocation);
16506 #if VMA_RECORDING_ENABLED 16507 if(allocator->GetRecorder() != VMA_NULL)
16509 allocator->GetRecorder()->RecordCreateLostAllocation(
16510 allocator->GetCurrentFrameIndex(),
16521 VMA_ASSERT(allocator && allocation && ppData);
16523 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16525 VkResult res = allocator->Map(allocation, ppData);
16527 #if VMA_RECORDING_ENABLED 16528 if(allocator->GetRecorder() != VMA_NULL)
16530 allocator->GetRecorder()->RecordMapMemory(
16531 allocator->GetCurrentFrameIndex(),
16543 VMA_ASSERT(allocator && allocation);
16545 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16547 #if VMA_RECORDING_ENABLED 16548 if(allocator->GetRecorder() != VMA_NULL)
16550 allocator->GetRecorder()->RecordUnmapMemory(
16551 allocator->GetCurrentFrameIndex(),
16556 allocator->Unmap(allocation);
16561 VMA_ASSERT(allocator && allocation);
16563 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16565 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16567 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16569 #if VMA_RECORDING_ENABLED 16570 if(allocator->GetRecorder() != VMA_NULL)
16572 allocator->GetRecorder()->RecordFlushAllocation(
16573 allocator->GetCurrentFrameIndex(),
16574 allocation, offset, size);
16581 VMA_ASSERT(allocator && allocation);
16583 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16585 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16587 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16589 #if VMA_RECORDING_ENABLED 16590 if(allocator->GetRecorder() != VMA_NULL)
16592 allocator->GetRecorder()->RecordInvalidateAllocation(
16593 allocator->GetCurrentFrameIndex(),
16594 allocation, offset, size);
16601 VMA_ASSERT(allocator);
16603 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16605 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16607 return allocator->CheckCorruption(memoryTypeBits);
16613 size_t allocationCount,
16614 VkBool32* pAllocationsChanged,
16624 if(pDefragmentationInfo != VMA_NULL)
16638 if(res == VK_NOT_READY)
16651 VMA_ASSERT(allocator && pInfo && pContext);
16662 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16664 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16666 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16668 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16670 #if VMA_RECORDING_ENABLED 16671 if(allocator->GetRecorder() != VMA_NULL)
16673 allocator->GetRecorder()->RecordDefragmentationBegin(
16674 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16685 VMA_ASSERT(allocator);
16687 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16689 if(context != VK_NULL_HANDLE)
16691 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16693 #if VMA_RECORDING_ENABLED 16694 if(allocator->GetRecorder() != VMA_NULL)
16696 allocator->GetRecorder()->RecordDefragmentationEnd(
16697 allocator->GetCurrentFrameIndex(), context);
16701 return allocator->DefragmentationEnd(context);
16714 VMA_ASSERT(allocator && allocation && buffer);
16716 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16718 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16720 return allocator->BindBufferMemory(allocation, buffer);
16728 VMA_ASSERT(allocator && allocation && image);
16730 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16732 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16734 return allocator->BindImageMemory(allocation, image);
16739 const VkBufferCreateInfo* pBufferCreateInfo,
16745 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16747 if(pBufferCreateInfo->size == 0)
16749 return VK_ERROR_VALIDATION_FAILED_EXT;
16752 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16754 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16756 *pBuffer = VK_NULL_HANDLE;
16757 *pAllocation = VK_NULL_HANDLE;
16760 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16761 allocator->m_hDevice,
16763 allocator->GetAllocationCallbacks(),
16768 VkMemoryRequirements vkMemReq = {};
16769 bool requiresDedicatedAllocation =
false;
16770 bool prefersDedicatedAllocation =
false;
16771 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16772 requiresDedicatedAllocation, prefersDedicatedAllocation);
16776 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16778 VMA_ASSERT(vkMemReq.alignment %
16779 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16781 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16783 VMA_ASSERT(vkMemReq.alignment %
16784 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16786 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16788 VMA_ASSERT(vkMemReq.alignment %
16789 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16793 res = allocator->AllocateMemory(
16795 requiresDedicatedAllocation,
16796 prefersDedicatedAllocation,
16799 *pAllocationCreateInfo,
16800 VMA_SUBALLOCATION_TYPE_BUFFER,
16804 #if VMA_RECORDING_ENABLED 16805 if(allocator->GetRecorder() != VMA_NULL)
16807 allocator->GetRecorder()->RecordCreateBuffer(
16808 allocator->GetCurrentFrameIndex(),
16809 *pBufferCreateInfo,
16810 *pAllocationCreateInfo,
16820 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16825 #if VMA_STATS_STRING_ENABLED 16826 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16828 if(pAllocationInfo != VMA_NULL)
16830 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16835 allocator->FreeMemory(
16838 *pAllocation = VK_NULL_HANDLE;
16839 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16840 *pBuffer = VK_NULL_HANDLE;
16843 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16844 *pBuffer = VK_NULL_HANDLE;
16855 VMA_ASSERT(allocator);
16857 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16862 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16864 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16866 #if VMA_RECORDING_ENABLED 16867 if(allocator->GetRecorder() != VMA_NULL)
16869 allocator->GetRecorder()->RecordDestroyBuffer(
16870 allocator->GetCurrentFrameIndex(),
16875 if(buffer != VK_NULL_HANDLE)
16877 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16880 if(allocation != VK_NULL_HANDLE)
16882 allocator->FreeMemory(
16890 const VkImageCreateInfo* pImageCreateInfo,
16896 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16898 if(pImageCreateInfo->extent.width == 0 ||
16899 pImageCreateInfo->extent.height == 0 ||
16900 pImageCreateInfo->extent.depth == 0 ||
16901 pImageCreateInfo->mipLevels == 0 ||
16902 pImageCreateInfo->arrayLayers == 0)
16904 return VK_ERROR_VALIDATION_FAILED_EXT;
16907 VMA_DEBUG_LOG(
"vmaCreateImage");
16909 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16911 *pImage = VK_NULL_HANDLE;
16912 *pAllocation = VK_NULL_HANDLE;
16915 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16916 allocator->m_hDevice,
16918 allocator->GetAllocationCallbacks(),
16922 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16923 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16924 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16927 VkMemoryRequirements vkMemReq = {};
16928 bool requiresDedicatedAllocation =
false;
16929 bool prefersDedicatedAllocation =
false;
16930 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16931 requiresDedicatedAllocation, prefersDedicatedAllocation);
16933 res = allocator->AllocateMemory(
16935 requiresDedicatedAllocation,
16936 prefersDedicatedAllocation,
16939 *pAllocationCreateInfo,
16944 #if VMA_RECORDING_ENABLED 16945 if(allocator->GetRecorder() != VMA_NULL)
16947 allocator->GetRecorder()->RecordCreateImage(
16948 allocator->GetCurrentFrameIndex(),
16950 *pAllocationCreateInfo,
16960 res = allocator->BindImageMemory(*pAllocation, *pImage);
16965 #if VMA_STATS_STRING_ENABLED 16966 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
16968 if(pAllocationInfo != VMA_NULL)
16970 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16975 allocator->FreeMemory(
16978 *pAllocation = VK_NULL_HANDLE;
16979 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16980 *pImage = VK_NULL_HANDLE;
16983 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16984 *pImage = VK_NULL_HANDLE;
16995 VMA_ASSERT(allocator);
16997 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
17002 VMA_DEBUG_LOG(
"vmaDestroyImage");
17004 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17006 #if VMA_RECORDING_ENABLED 17007 if(allocator->GetRecorder() != VMA_NULL)
17009 allocator->GetRecorder()->RecordDestroyImage(
17010 allocator->GetCurrentFrameIndex(),
17015 if(image != VK_NULL_HANDLE)
17017 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
17019 if(allocation != VK_NULL_HANDLE)
17021 allocator->FreeMemory(
17027 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1744
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2044
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1814
-
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2867
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1802
+
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2855
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
Definition: vk_mem_alloc.h:1788
-
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2387
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1768
+
Definition: vk_mem_alloc.h:1776
+
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2375
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1756
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:2018
-
Definition: vk_mem_alloc.h:2122
-
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2820
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1760
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2487
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1811
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2903
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2276
-
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1655
+
Definition: vk_mem_alloc.h:2006
+
Definition: vk_mem_alloc.h:2110
+
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2808
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1748
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2475
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1799
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2891
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2264
+
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1643
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2368
-
Definition: vk_mem_alloc.h:2093
-
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2823
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1749
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2175
-
Definition: vk_mem_alloc.h:2045
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1823
-
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2304
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2356
+
Definition: vk_mem_alloc.h:2081
+
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2811
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1737
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2163
+
Definition: vk_mem_alloc.h:2033
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1811
+
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2292
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1877
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1808
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1865
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1796
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2049
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2037
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1949
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1765
-
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2857
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1948
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2907
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1937
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1753
+
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2845
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1936
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2895
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1840
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1958
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2915
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2159
-
Definition: vk_mem_alloc.h:2117
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2898
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1766
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1691
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1828
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1946
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2903
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2147
+
Definition: vk_mem_alloc.h:2105
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2886
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1754
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1679
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1817
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1805
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2318
-
Definition: vk_mem_alloc.h:2312
-
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1772
-
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1884
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2497
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2306
+
Definition: vk_mem_alloc.h:2300
+
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1760
+
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1872
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2485
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1761
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1749
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
-
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1786
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2196
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2338
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2374
+
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1774
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2184
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2326
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2362
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1747
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2321
+
Definition: vk_mem_alloc.h:1735
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2309
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2872
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:1996
+
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2860
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:1984
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2832
+
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2820
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2893
+
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2881
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2911
-
Definition: vk_mem_alloc.h:2035
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2183
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1764
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2899
+
Definition: vk_mem_alloc.h:2023
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2171
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1752
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1954
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1697
-
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2811
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1942
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1685
+
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2799
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
Definition: vk_mem_alloc.h:2809
-
Definition: vk_mem_alloc.h:2143
-
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2838
+
Definition: vk_mem_alloc.h:2797
+
Definition: vk_mem_alloc.h:2131
+
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2826
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1718
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1706
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1790
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1723
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2913
+
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1778
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1711
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2901
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2170
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2384
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2158
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2372
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1757
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1937
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2333
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1710
-
Definition: vk_mem_alloc.h:2308
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1745
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1925
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2321
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1698
+
Definition: vk_mem_alloc.h:2296
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2100
+
Definition: vk_mem_alloc.h:2088
Represents Opaque object that represents started defragmentation process.
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1950
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1714
-
Definition: vk_mem_alloc.h:2133
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2324
-
Definition: vk_mem_alloc.h:2044
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1763
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1938
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1702
+
Definition: vk_mem_alloc.h:2121
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2312
+
Definition: vk_mem_alloc.h:2032
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1751
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2165
-
Definition: vk_mem_alloc.h:2156
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2153
+
Definition: vk_mem_alloc.h:2144
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1940
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1759
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2346
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1826
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2377
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2154
-
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2862
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2189
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1928
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1747
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2334
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1814
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2365
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2142
+
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2850
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2177
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1865
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1956
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2080
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1949
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1853
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1944
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2068
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1937
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1770
-
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1796
-
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2808
-
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2886
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1712
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1769
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1758
+
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1784
+
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2796
+
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2874
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1700
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1757
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2360
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1762
-
Definition: vk_mem_alloc.h:2111
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2348
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1750
+
Definition: vk_mem_alloc.h:2099
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1804
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2511
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1820
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1949
+
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1792
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2499
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1808
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1937
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1946
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1934
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2365
-
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2817
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2353
+
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2805
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
-
Definition: vk_mem_alloc.h:2126
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2492
-
Definition: vk_mem_alloc.h:2140
-
Definition: vk_mem_alloc.h:2152
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2909
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1755
+
Definition: vk_mem_alloc.h:2114
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2480
+
Definition: vk_mem_alloc.h:2128
+
Definition: vk_mem_alloc.h:2140
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2897
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1743
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1944
-
Definition: vk_mem_alloc.h:2001
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2314
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1932
+
Definition: vk_mem_alloc.h:1989
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2302
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1793
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1942
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1767
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1771
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2067
-
Definition: vk_mem_alloc.h:2147
-
Definition: vk_mem_alloc.h:2028
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2506
+
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1781
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1930
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1755
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1759
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2055
+
Definition: vk_mem_alloc.h:2135
+
Definition: vk_mem_alloc.h:2016
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2494
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1745
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1733
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1758
-
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2293
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1746
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2281
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2473
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2461
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2137
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2258
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1950
+
Definition: vk_mem_alloc.h:2125
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2246
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1938
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
-
Definition: vk_mem_alloc.h:2106
-
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1780
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1957
+
Definition: vk_mem_alloc.h:2094
+
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1768
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1945
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2371
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1950
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2359
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1938
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
-
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2877
+
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2865
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2478
-
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:2841
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2466
+
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:2829