23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1681 #ifndef VMA_RECORDING_ENABLED 1683 #define VMA_RECORDING_ENABLED 1 1685 #define VMA_RECORDING_ENABLED 0 1690 #define NOMINMAX // For windows.h 1694 #include <vulkan/vulkan.h> 1697 #if VMA_RECORDING_ENABLED 1698 #include <windows.h> 1701 #if !defined(VMA_DEDICATED_ALLOCATION) 1702 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1703 #define VMA_DEDICATED_ALLOCATION 1 1705 #define VMA_DEDICATED_ALLOCATION 0 1723 uint32_t memoryType,
1724 VkDeviceMemory memory,
1729 uint32_t memoryType,
1730 VkDeviceMemory memory,
1803 #if VMA_DEDICATED_ALLOCATION 1804 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1805 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1932 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1940 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1950 uint32_t memoryTypeIndex,
1951 VkMemoryPropertyFlags* pFlags);
1963 uint32_t frameIndex);
1996 #ifndef VMA_STATS_STRING_ENABLED 1997 #define VMA_STATS_STRING_ENABLED 1 2000 #if VMA_STATS_STRING_ENABLED 2007 char** ppStatsString,
2008 VkBool32 detailedMap);
2012 char* pStatsString);
2014 #endif // #if VMA_STATS_STRING_ENABLED 2247 uint32_t memoryTypeBits,
2249 uint32_t* pMemoryTypeIndex);
2265 const VkBufferCreateInfo* pBufferCreateInfo,
2267 uint32_t* pMemoryTypeIndex);
2283 const VkImageCreateInfo* pImageCreateInfo,
2285 uint32_t* pMemoryTypeIndex);
2457 size_t* pLostAllocationCount);
2556 const VkMemoryRequirements* pVkMemoryRequirements,
2582 const VkMemoryRequirements* pVkMemoryRequirements,
2584 size_t allocationCount,
2629 size_t allocationCount,
2655 VkDeviceSize newSize);
3035 size_t allocationCount,
3036 VkBool32* pAllocationsChanged,
3102 const VkBufferCreateInfo* pBufferCreateInfo,
3127 const VkImageCreateInfo* pImageCreateInfo,
3153 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3156 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3157 #define VMA_IMPLEMENTATION 3160 #ifdef VMA_IMPLEMENTATION 3161 #undef VMA_IMPLEMENTATION 3183 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3184 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3196 #if VMA_USE_STL_CONTAINERS 3197 #define VMA_USE_STL_VECTOR 1 3198 #define VMA_USE_STL_UNORDERED_MAP 1 3199 #define VMA_USE_STL_LIST 1 3202 #ifndef VMA_USE_STL_SHARED_MUTEX 3204 #if __cplusplus >= 201703L 3205 #define VMA_USE_STL_SHARED_MUTEX 1 3209 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 3210 #define VMA_USE_STL_SHARED_MUTEX 1 3212 #define VMA_USE_STL_SHARED_MUTEX 0 3220 #if VMA_USE_STL_VECTOR 3224 #if VMA_USE_STL_UNORDERED_MAP 3225 #include <unordered_map> 3228 #if VMA_USE_STL_LIST 3237 #include <algorithm> 3242 #define VMA_NULL nullptr 3245 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3247 void *aligned_alloc(
size_t alignment,
size_t size)
3250 if(alignment <
sizeof(
void*))
3252 alignment =
sizeof(
void*);
3255 return memalign(alignment, size);
3257 #elif defined(__APPLE__) || defined(__ANDROID__) 3259 void *aligned_alloc(
size_t alignment,
size_t size)
3262 if(alignment <
sizeof(
void*))
3264 alignment =
sizeof(
void*);
3268 if(posix_memalign(&pointer, alignment, size) == 0)
3282 #define VMA_ASSERT(expr) assert(expr) 3284 #define VMA_ASSERT(expr) 3290 #ifndef VMA_HEAVY_ASSERT 3292 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3294 #define VMA_HEAVY_ASSERT(expr) 3298 #ifndef VMA_ALIGN_OF 3299 #define VMA_ALIGN_OF(type) (__alignof(type)) 3302 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3304 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3306 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3310 #ifndef VMA_SYSTEM_FREE 3312 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3314 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3319 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3323 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3327 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3331 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3334 #ifndef VMA_DEBUG_LOG 3335 #define VMA_DEBUG_LOG(format, ...) 3345 #if VMA_STATS_STRING_ENABLED 3346 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
3348 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Prints decimal representation of num into outStr (at most strLen bytes incl. terminator).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}

// Prints the platform-specific representation of pointer ptr into outStr.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
3364 void Lock() { m_Mutex.lock(); }
3365 void Unlock() { m_Mutex.unlock(); }
3369 #define VMA_MUTEX VmaMutex 3373 #ifndef VMA_RW_MUTEX 3374 #if VMA_USE_STL_SHARED_MUTEX 3376 #include <shared_mutex> 3380 void LockRead() { m_Mutex.lock_shared(); }
3381 void UnlockRead() { m_Mutex.unlock_shared(); }
3382 void LockWrite() { m_Mutex.lock(); }
3383 void UnlockWrite() { m_Mutex.unlock(); }
3385 std::shared_mutex m_Mutex;
3387 #define VMA_RW_MUTEX VmaRWMutex 3388 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 3394 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3395 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3396 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3397 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3398 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3402 #define VMA_RW_MUTEX VmaRWMutex 3408 void LockRead() { m_Mutex.Lock(); }
3409 void UnlockRead() { m_Mutex.Unlock(); }
3410 void LockWrite() { m_Mutex.Lock(); }
3411 void UnlockWrite() { m_Mutex.Unlock(); }
3415 #define VMA_RW_MUTEX VmaRWMutex 3416 #endif // #if VMA_USE_STL_SHARED_MUTEX 3417 #endif // #ifndef VMA_RW_MUTEX 3427 #ifndef VMA_ATOMIC_UINT32 3429 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3432 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3437 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3440 #ifndef VMA_DEBUG_ALIGNMENT 3445 #define VMA_DEBUG_ALIGNMENT (1) 3448 #ifndef VMA_DEBUG_MARGIN 3453 #define VMA_DEBUG_MARGIN (0) 3456 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3461 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3464 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3470 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3473 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3478 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3481 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3486 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3489 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3490 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3494 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3495 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3499 #ifndef VMA_CLASS_NO_COPY 3500 #define VMA_CLASS_NO_COPY(className) \ 3502 className(const className&) = delete; \ 3503 className& operator=(const className&) = delete; 3506 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3509 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3511 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3512 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3518 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3520 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3521 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v), using the classic SWAR popcount.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}

// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T. align need not be a power of 2.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}

// Returns true if given number is a power of two.
// T must be unsigned integer number or signed integer but always nonnegative.
// For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}

// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}

// Returns smallest power of 2 greater or equal to v.
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
3615 static inline bool VmaStrIsEmpty(
const char* pStr)
3617 return pStr == VMA_NULL || *pStr ==
'\0';
3620 #if VMA_STATS_STRING_ENABLED 3622 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3638 #endif // #if VMA_STATS_STRING_ENABLED 3642 template<
typename Iterator,
typename Compare>
3643 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
3645 Iterator centerValue = end; --centerValue;
3646 Iterator insertIndex = beg;
3647 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3649 if(cmp(*memTypeIndex, *centerValue))
3651 if(insertIndex != memTypeIndex)
3653 VMA_SWAP(*memTypeIndex, *insertIndex);
3658 if(insertIndex != centerValue)
3660 VMA_SWAP(*insertIndex, *centerValue);
3665 template<
typename Iterator,
typename Compare>
3666 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3670 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3671 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3672 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3676 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3678 #endif // #ifndef VMA_SORT 3687 static inline bool VmaBlocksOnSamePage(
3688 VkDeviceSize resourceAOffset,
3689 VkDeviceSize resourceASize,
3690 VkDeviceSize resourceBOffset,
3691 VkDeviceSize pageSize)
3693 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3694 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3695 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3696 VkDeviceSize resourceBStart = resourceBOffset;
3697 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3698 return resourceAEndPage == resourceBStartPage;
// Type of the resource occupying a suballocation, used for buffer-image granularity checks.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3718 static inline bool VmaIsBufferImageGranularityConflict(
3719 VmaSuballocationType suballocType1,
3720 VmaSuballocationType suballocType2)
3722 if(suballocType1 > suballocType2)
3724 VMA_SWAP(suballocType1, suballocType2);
3727 switch(suballocType1)
3729 case VMA_SUBALLOCATION_TYPE_FREE:
3731 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3733 case VMA_SUBALLOCATION_TYPE_BUFFER:
3735 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3736 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3737 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3739 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3740 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3741 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3742 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3744 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3745 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3753 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3755 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3756 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3757 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3758 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3760 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3767 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3769 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3770 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3771 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3772 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3774 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3787 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
3789 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
3790 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
3791 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3792 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
3798 VMA_CLASS_NO_COPY(VmaMutexLock)
3800 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3801 m_pMutex(useMutex ? &mutex : VMA_NULL)
3802 {
if(m_pMutex) { m_pMutex->Lock(); } }
3804 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3806 VMA_MUTEX* m_pMutex;
3810 struct VmaMutexLockRead
3812 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3814 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3815 m_pMutex(useMutex ? &mutex : VMA_NULL)
3816 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3817 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3819 VMA_RW_MUTEX* m_pMutex;
3823 struct VmaMutexLockWrite
3825 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3827 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3828 m_pMutex(useMutex ? &mutex : VMA_NULL)
3829 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3830 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3832 VMA_RW_MUTEX* m_pMutex;
3835 #if VMA_DEBUG_GLOBAL_MUTEX 3836 static VMA_MUTEX gDebugGlobalMutex;
3837 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3839 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3843 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search on a range sorted by cmp and returns the iterator to the
first element that is not less than key (i.e. std::lower_bound semantics).
Returned value is the found element, if present, otherwise the place where to
insert it while preserving sorted order.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3873 template<
typename CmpLess,
typename IterT,
typename KeyT>
3874 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
3876 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3877 beg, end, value, cmp);
3879 (!cmp(*it, value) && !cmp(value, *it)))
3891 template<
typename T>
3892 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
3894 for(uint32_t i = 0; i < count; ++i)
3896 const T iPtr = arr[i];
3897 if(iPtr == VMA_NULL)
3901 for(uint32_t j = i + 1; j < count; ++j)
3915 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3917 if((pAllocationCallbacks != VMA_NULL) &&
3918 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3920 return (*pAllocationCallbacks->pfnAllocation)(
3921 pAllocationCallbacks->pUserData,
3924 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3928 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3932 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3934 if((pAllocationCallbacks != VMA_NULL) &&
3935 (pAllocationCallbacks->pfnFree != VMA_NULL))
3937 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3941 VMA_SYSTEM_FREE(ptr);
3945 template<
typename T>
3946 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3948 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3951 template<
typename T>
3952 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3954 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3957 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3959 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3961 template<
typename T>
3962 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3965 VmaFree(pAllocationCallbacks, ptr);
3968 template<
typename T>
3969 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3973 for(
size_t i = count; i--; )
3977 VmaFree(pAllocationCallbacks, ptr);
3982 template<
typename T>
3983 class VmaStlAllocator
3986 const VkAllocationCallbacks*
const m_pCallbacks;
3987 typedef T value_type;
3989 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3990 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3992 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3993 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3995 template<
typename U>
3996 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3998 return m_pCallbacks == rhs.m_pCallbacks;
4000 template<
typename U>
4001 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 4003 return m_pCallbacks != rhs.m_pCallbacks;
4006 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
4009 #if VMA_USE_STL_VECTOR 4011 #define VmaVector std::vector 4013 template<
typename T,
typename allocatorT>
4014 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
4016 vec.insert(vec.begin() + index, item);
4019 template<
typename T,
typename allocatorT>
4020 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
4022 vec.erase(vec.begin() + index);
4025 #else // #if VMA_USE_STL_VECTOR 4030 template<
typename T,
typename AllocatorT>
4034 typedef T value_type;
4036 VmaVector(
const AllocatorT& allocator) :
4037 m_Allocator(allocator),
4044 VmaVector(
size_t count,
const AllocatorT& allocator) :
4045 m_Allocator(allocator),
4046 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
4052 VmaVector(
const VmaVector<T, AllocatorT>& src) :
4053 m_Allocator(src.m_Allocator),
4054 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4055 m_Count(src.m_Count),
4056 m_Capacity(src.m_Count)
4060 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
4066 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4069 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
4073 resize(rhs.m_Count);
4076 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
4082 bool empty()
const {
return m_Count == 0; }
4083 size_t size()
const {
return m_Count; }
4084 T* data() {
return m_pArray; }
4085 const T* data()
const {
return m_pArray; }
4087 T& operator[](
size_t index)
4089 VMA_HEAVY_ASSERT(index < m_Count);
4090 return m_pArray[index];
4092 const T& operator[](
size_t index)
const 4094 VMA_HEAVY_ASSERT(index < m_Count);
4095 return m_pArray[index];
4100 VMA_HEAVY_ASSERT(m_Count > 0);
4103 const T& front()
const 4105 VMA_HEAVY_ASSERT(m_Count > 0);
4110 VMA_HEAVY_ASSERT(m_Count > 0);
4111 return m_pArray[m_Count - 1];
4113 const T& back()
const 4115 VMA_HEAVY_ASSERT(m_Count > 0);
4116 return m_pArray[m_Count - 1];
4119 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4121 newCapacity = VMA_MAX(newCapacity, m_Count);
4123 if((newCapacity < m_Capacity) && !freeMemory)
4125 newCapacity = m_Capacity;
4128 if(newCapacity != m_Capacity)
4130 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4133 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4135 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4136 m_Capacity = newCapacity;
4137 m_pArray = newArray;
4141 void resize(
size_t newCount,
bool freeMemory =
false)
4143 size_t newCapacity = m_Capacity;
4144 if(newCount > m_Capacity)
4146 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4150 newCapacity = newCount;
4153 if(newCapacity != m_Capacity)
4155 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4156 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4157 if(elementsToCopy != 0)
4159 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4161 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4162 m_Capacity = newCapacity;
4163 m_pArray = newArray;
4169 void clear(
bool freeMemory =
false)
4171 resize(0, freeMemory);
4174 void insert(
size_t index,
const T& src)
4176 VMA_HEAVY_ASSERT(index <= m_Count);
4177 const size_t oldCount = size();
4178 resize(oldCount + 1);
4179 if(index < oldCount)
4181 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4183 m_pArray[index] = src;
4186 void remove(
size_t index)
4188 VMA_HEAVY_ASSERT(index < m_Count);
4189 const size_t oldCount = size();
4190 if(index < oldCount - 1)
4192 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4194 resize(oldCount - 1);
4197 void push_back(
const T& src)
4199 const size_t newIndex = size();
4200 resize(newIndex + 1);
4201 m_pArray[newIndex] = src;
4206 VMA_HEAVY_ASSERT(m_Count > 0);
4210 void push_front(
const T& src)
4217 VMA_HEAVY_ASSERT(m_Count > 0);
4221 typedef T* iterator;
4223 iterator begin() {
return m_pArray; }
4224 iterator end() {
return m_pArray + m_Count; }
4227 AllocatorT m_Allocator;
4233 template<
typename T,
typename allocatorT>
4234 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4236 vec.insert(index, item);
4239 template<
typename T,
typename allocatorT>
4240 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4245 #endif // #if VMA_USE_STL_VECTOR 4247 template<
typename CmpLess,
typename VectorT>
4248 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4250 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4252 vector.data() + vector.size(),
4254 CmpLess()) - vector.data();
4255 VmaVectorInsert(vector, indexToInsert, value);
4256 return indexToInsert;
4259 template<
typename CmpLess,
typename VectorT>
4260 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
4263 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
4268 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
4270 size_t indexToRemove = it - vector.begin();
4271 VmaVectorRemove(vector, indexToRemove);
4285 template<
typename T>
4286 class VmaPoolAllocator
4288 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4290 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
4291 ~VmaPoolAllocator();
4299 uint32_t NextFreeIndex;
4307 uint32_t FirstFreeIndex;
4310 const VkAllocationCallbacks* m_pAllocationCallbacks;
4311 const uint32_t m_FirstBlockCapacity;
4312 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4314 ItemBlock& CreateNewBlock();
4317 template<
typename T>
4318 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
4319 m_pAllocationCallbacks(pAllocationCallbacks),
4320 m_FirstBlockCapacity(firstBlockCapacity),
4321 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4323 VMA_ASSERT(m_FirstBlockCapacity > 1);
4326 template<
typename T>
4327 VmaPoolAllocator<T>::~VmaPoolAllocator()
4332 template<
typename T>
4333 void VmaPoolAllocator<T>::Clear()
4335 for(
size_t i = m_ItemBlocks.size(); i--; )
4336 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4337 m_ItemBlocks.clear();
4340 template<
typename T>
4341 T* VmaPoolAllocator<T>::Alloc()
4343 for(
size_t i = m_ItemBlocks.size(); i--; )
4345 ItemBlock& block = m_ItemBlocks[i];
4347 if(block.FirstFreeIndex != UINT32_MAX)
4349 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4350 block.FirstFreeIndex = pItem->NextFreeIndex;
4351 return &pItem->Value;
4356 ItemBlock& newBlock = CreateNewBlock();
4357 Item*
const pItem = &newBlock.pItems[0];
4358 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4359 return &pItem->Value;
4362 template<
typename T>
4363 void VmaPoolAllocator<T>::Free(T* ptr)
4366 for(
size_t i = m_ItemBlocks.size(); i--; )
4368 ItemBlock& block = m_ItemBlocks[i];
4372 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4375 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4377 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
4378 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4379 block.FirstFreeIndex = index;
4383 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4386 template<
typename T>
4387 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4389 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4390 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4392 const ItemBlock newBlock = {
4393 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4397 m_ItemBlocks.push_back(newBlock);
4400 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4401 newBlock.pItems[i].NextFreeIndex = i + 1;
4402 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
4403 return m_ItemBlocks.back();
4409 #if VMA_USE_STL_LIST 4411 #define VmaList std::list 4413 #else // #if VMA_USE_STL_LIST 4415 template<
typename T>
4424 template<
typename T>
4427 VMA_CLASS_NO_COPY(VmaRawList)
4429 typedef VmaListItem<T> ItemType;
4431 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4435 size_t GetCount()
const {
return m_Count; }
4436 bool IsEmpty()
const {
return m_Count == 0; }
4438 ItemType* Front() {
return m_pFront; }
4439 const ItemType* Front()
const {
return m_pFront; }
4440 ItemType* Back() {
return m_pBack; }
4441 const ItemType* Back()
const {
return m_pBack; }
4443 ItemType* PushBack();
4444 ItemType* PushFront();
4445 ItemType* PushBack(
const T& value);
4446 ItemType* PushFront(
const T& value);
4451 ItemType* InsertBefore(ItemType* pItem);
4453 ItemType* InsertAfter(ItemType* pItem);
4455 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4456 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4458 void Remove(ItemType* pItem);
4461 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4462 VmaPoolAllocator<ItemType> m_ItemAllocator;
4468 template<
typename T>
4469 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4470 m_pAllocationCallbacks(pAllocationCallbacks),
4471 m_ItemAllocator(pAllocationCallbacks, 128),
4478 template<
typename T>
4479 VmaRawList<T>::~VmaRawList()
4485 template<
typename T>
4486 void VmaRawList<T>::Clear()
4488 if(IsEmpty() ==
false)
4490 ItemType* pItem = m_pBack;
4491 while(pItem != VMA_NULL)
4493 ItemType*
const pPrevItem = pItem->pPrev;
4494 m_ItemAllocator.Free(pItem);
4497 m_pFront = VMA_NULL;
4503 template<
typename T>
4504 VmaListItem<T>* VmaRawList<T>::PushBack()
4506 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4507 pNewItem->pNext = VMA_NULL;
4510 pNewItem->pPrev = VMA_NULL;
4511 m_pFront = pNewItem;
4517 pNewItem->pPrev = m_pBack;
4518 m_pBack->pNext = pNewItem;
4525 template<
typename T>
4526 VmaListItem<T>* VmaRawList<T>::PushFront()
4528 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4529 pNewItem->pPrev = VMA_NULL;
4532 pNewItem->pNext = VMA_NULL;
4533 m_pFront = pNewItem;
4539 pNewItem->pNext = m_pFront;
4540 m_pFront->pPrev = pNewItem;
4541 m_pFront = pNewItem;
4547 template<
typename T>
4548 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4550 ItemType*
const pNewItem = PushBack();
4551 pNewItem->Value = value;
4555 template<
typename T>
4556 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4558 ItemType*
const pNewItem = PushFront();
4559 pNewItem->Value = value;
4563 template<
typename T>
4564 void VmaRawList<T>::PopBack()
4566 VMA_HEAVY_ASSERT(m_Count > 0);
4567 ItemType*
const pBackItem = m_pBack;
4568 ItemType*
const pPrevItem = pBackItem->pPrev;
4569 if(pPrevItem != VMA_NULL)
4571 pPrevItem->pNext = VMA_NULL;
4573 m_pBack = pPrevItem;
4574 m_ItemAllocator.Free(pBackItem);
4578 template<
typename T>
4579 void VmaRawList<T>::PopFront()
4581 VMA_HEAVY_ASSERT(m_Count > 0);
4582 ItemType*
const pFrontItem = m_pFront;
4583 ItemType*
const pNextItem = pFrontItem->pNext;
4584 if(pNextItem != VMA_NULL)
4586 pNextItem->pPrev = VMA_NULL;
4588 m_pFront = pNextItem;
4589 m_ItemAllocator.Free(pFrontItem);
4593 template<
typename T>
4594 void VmaRawList<T>::Remove(ItemType* pItem)
4596 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4597 VMA_HEAVY_ASSERT(m_Count > 0);
4599 if(pItem->pPrev != VMA_NULL)
4601 pItem->pPrev->pNext = pItem->pNext;
4605 VMA_HEAVY_ASSERT(m_pFront == pItem);
4606 m_pFront = pItem->pNext;
4609 if(pItem->pNext != VMA_NULL)
4611 pItem->pNext->pPrev = pItem->pPrev;
4615 VMA_HEAVY_ASSERT(m_pBack == pItem);
4616 m_pBack = pItem->pPrev;
4619 m_ItemAllocator.Free(pItem);
4623 template<
typename T>
4624 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4626 if(pItem != VMA_NULL)
4628 ItemType*
const prevItem = pItem->pPrev;
4629 ItemType*
const newItem = m_ItemAllocator.Alloc();
4630 newItem->pPrev = prevItem;
4631 newItem->pNext = pItem;
4632 pItem->pPrev = newItem;
4633 if(prevItem != VMA_NULL)
4635 prevItem->pNext = newItem;
4639 VMA_HEAVY_ASSERT(m_pFront == pItem);
4649 template<
typename T>
4650 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4652 if(pItem != VMA_NULL)
4654 ItemType*
const nextItem = pItem->pNext;
4655 ItemType*
const newItem = m_ItemAllocator.Alloc();
4656 newItem->pNext = nextItem;
4657 newItem->pPrev = pItem;
4658 pItem->pNext = newItem;
4659 if(nextItem != VMA_NULL)
4661 nextItem->pPrev = newItem;
4665 VMA_HEAVY_ASSERT(m_pBack == pItem);
4675 template<
typename T>
4676 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4678 ItemType*
const newItem = InsertBefore(pItem);
4679 newItem->Value = value;
4683 template<
typename T>
4684 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4686 ItemType*
const newItem = InsertAfter(pItem);
4687 newItem->Value = value;
4691 template<
typename T,
typename AllocatorT>
4694 VMA_CLASS_NO_COPY(VmaList)
4705 T& operator*()
const 4707 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4708 return m_pItem->Value;
4710 T* operator->()
const 4712 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4713 return &m_pItem->Value;
4716 iterator& operator++()
4718 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4719 m_pItem = m_pItem->pNext;
4722 iterator& operator--()
4724 if(m_pItem != VMA_NULL)
4726 m_pItem = m_pItem->pPrev;
4730 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4731 m_pItem = m_pList->Back();
4736 iterator operator++(
int)
4738 iterator result = *
this;
4742 iterator operator--(
int)
4744 iterator result = *
this;
4749 bool operator==(
const iterator& rhs)
const 4751 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4752 return m_pItem == rhs.m_pItem;
4754 bool operator!=(
const iterator& rhs)
const 4756 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4757 return m_pItem != rhs.m_pItem;
4761 VmaRawList<T>* m_pList;
4762 VmaListItem<T>* m_pItem;
4764 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4770 friend class VmaList<T, AllocatorT>;
4773 class const_iterator
4782 const_iterator(
const iterator& src) :
4783 m_pList(src.m_pList),
4784 m_pItem(src.m_pItem)
4788 const T& operator*()
const 4790 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4791 return m_pItem->Value;
4793 const T* operator->()
const 4795 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4796 return &m_pItem->Value;
4799 const_iterator& operator++()
4801 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4802 m_pItem = m_pItem->pNext;
4805 const_iterator& operator--()
4807 if(m_pItem != VMA_NULL)
4809 m_pItem = m_pItem->pPrev;
4813 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4814 m_pItem = m_pList->Back();
4819 const_iterator operator++(
int)
4821 const_iterator result = *
this;
4825 const_iterator operator--(
int)
4827 const_iterator result = *
this;
4832 bool operator==(
const const_iterator& rhs)
const 4834 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4835 return m_pItem == rhs.m_pItem;
4837 bool operator!=(
const const_iterator& rhs)
const 4839 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4840 return m_pItem != rhs.m_pItem;
4844 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4850 const VmaRawList<T>* m_pList;
4851 const VmaListItem<T>* m_pItem;
4853 friend class VmaList<T, AllocatorT>;
4856 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4858 bool empty()
const {
return m_RawList.IsEmpty(); }
4859 size_t size()
const {
return m_RawList.GetCount(); }
4861 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4862 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4864 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4865 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4867 void clear() { m_RawList.Clear(); }
4868 void push_back(
const T& value) { m_RawList.PushBack(value); }
4869 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4870 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4873 VmaRawList<T> m_RawList;
4876 #endif // #if VMA_USE_STL_LIST 4884 #if VMA_USE_STL_UNORDERED_MAP 4886 #define VmaPair std::pair 4888 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4889 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4891 #else // #if VMA_USE_STL_UNORDERED_MAP 4893 template<
typename T1,
typename T2>
4899 VmaPair() : first(), second() { }
4900 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4906 template<
typename KeyT,
typename ValueT>
4910 typedef VmaPair<KeyT, ValueT> PairType;
4911 typedef PairType* iterator;
4913 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4915 iterator begin() {
return m_Vector.begin(); }
4916 iterator end() {
return m_Vector.end(); }
4918 void insert(
const PairType& pair);
4919 iterator find(
const KeyT& key);
4920 void erase(iterator it);
4923 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4926 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4928 template<
typename FirstT,
typename SecondT>
4929 struct VmaPairFirstLess
4931 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4933 return lhs.first < rhs.first;
4935 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4937 return lhs.first < rhsFirst;
4941 template<
typename KeyT,
typename ValueT>
4942 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4944 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4946 m_Vector.data() + m_Vector.size(),
4948 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4949 VmaVectorInsert(m_Vector, indexToInsert, pair);
4952 template<
typename KeyT,
typename ValueT>
4953 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4955 PairType* it = VmaBinaryFindFirstNotLess(
4957 m_Vector.data() + m_Vector.size(),
4959 VmaPairFirstLess<KeyT, ValueT>());
4960 if((it != m_Vector.end()) && (it->first == key))
4966 return m_Vector.end();
4970 template<
typename KeyT,
typename ValueT>
4971 void VmaMap<KeyT, ValueT>::erase(iterator it)
4973 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4976 #endif // #if VMA_USE_STL_UNORDERED_MAP 4982 class VmaDeviceMemoryBlock;
// Direction of a cache-maintenance operation on a mapped memory range —
// presumably dispatched to vkFlushMappedMemoryRanges /
// vkInvalidateMappedMemoryRanges respectively (confirm at the call site).
4984 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4986 struct VmaAllocation_T
4989 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4993 FLAG_USER_DATA_STRING = 0x01,
4997 enum ALLOCATION_TYPE
4999 ALLOCATION_TYPE_NONE,
5000 ALLOCATION_TYPE_BLOCK,
5001 ALLOCATION_TYPE_DEDICATED,
5009 void Ctor(uint32_t currentFrameIndex,
bool userDataString)
5013 m_pUserData = VMA_NULL;
5014 m_LastUseFrameIndex = currentFrameIndex;
5015 m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
5016 m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
5018 m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
5020 #if VMA_STATS_STRING_ENABLED 5021 m_CreationFrameIndex = currentFrameIndex;
5022 m_BufferImageUsage = 0;
5028 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
5031 VMA_ASSERT(m_pUserData == VMA_NULL);
5034 void InitBlockAllocation(
5035 VmaDeviceMemoryBlock* block,
5036 VkDeviceSize offset,
5037 VkDeviceSize alignment,
5039 VmaSuballocationType suballocationType,
5043 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5044 VMA_ASSERT(block != VMA_NULL);
5045 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5046 m_Alignment = alignment;
5048 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5049 m_SuballocationType = (uint8_t)suballocationType;
5050 m_BlockAllocation.m_Block = block;
5051 m_BlockAllocation.m_Offset = offset;
5052 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
5057 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5058 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
5059 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5060 m_BlockAllocation.m_Block = VMA_NULL;
5061 m_BlockAllocation.m_Offset = 0;
5062 m_BlockAllocation.m_CanBecomeLost =
true;
5065 void ChangeBlockAllocation(
5067 VmaDeviceMemoryBlock* block,
5068 VkDeviceSize offset);
5070 void ChangeSize(VkDeviceSize newSize);
5071 void ChangeOffset(VkDeviceSize newOffset);
5074 void InitDedicatedAllocation(
5075 uint32_t memoryTypeIndex,
5076 VkDeviceMemory hMemory,
5077 VmaSuballocationType suballocationType,
5081 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5082 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
5083 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5086 m_SuballocationType = (uint8_t)suballocationType;
5087 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5088 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
5089 m_DedicatedAllocation.m_hMemory = hMemory;
5090 m_DedicatedAllocation.m_pMappedData = pMappedData;
5093 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5094 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5095 VkDeviceSize GetSize()
const {
return m_Size; }
5096 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5097 void* GetUserData()
const {
return m_pUserData; }
5098 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5099 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
5101 VmaDeviceMemoryBlock* GetBlock()
const 5103 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5104 return m_BlockAllocation.m_Block;
5106 VkDeviceSize GetOffset()
const;
5107 VkDeviceMemory GetMemory()
const;
5108 uint32_t GetMemoryTypeIndex()
const;
5109 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5110 void* GetMappedData()
const;
5111 bool CanBecomeLost()
const;
5113 uint32_t GetLastUseFrameIndex()
const 5115 return m_LastUseFrameIndex.load();
5117 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5119 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5129 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5131 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5133 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
5144 void BlockAllocMap();
5145 void BlockAllocUnmap();
5146 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
5149 #if VMA_STATS_STRING_ENABLED 5150 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5151 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5153 void InitBufferImageUsage(uint32_t bufferImageUsage)
5155 VMA_ASSERT(m_BufferImageUsage == 0);
5156 m_BufferImageUsage = bufferImageUsage;
5159 void PrintParameters(
class VmaJsonWriter& json)
const;
5163 VkDeviceSize m_Alignment;
5164 VkDeviceSize m_Size;
5166 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5168 uint8_t m_SuballocationType;
5175 struct BlockAllocation
5177 VmaDeviceMemoryBlock* m_Block;
5178 VkDeviceSize m_Offset;
5179 bool m_CanBecomeLost;
5183 struct DedicatedAllocation
5185 uint32_t m_MemoryTypeIndex;
5186 VkDeviceMemory m_hMemory;
5187 void* m_pMappedData;
5193 BlockAllocation m_BlockAllocation;
5195 DedicatedAllocation m_DedicatedAllocation;
5198 #if VMA_STATS_STRING_ENABLED 5199 uint32_t m_CreationFrameIndex;
5200 uint32_t m_BufferImageUsage;
5210 struct VmaSuballocation
5212 VkDeviceSize offset;
5215 VmaSuballocationType type;
5219 struct VmaSuballocationOffsetLess
5221 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5223 return lhs.offset < rhs.offset;
5226 struct VmaSuballocationOffsetGreater
5228 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5230 return lhs.offset > rhs.offset;
5234 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost (in byte-equivalents, 1 MiB) charged per allocation that would have
// to be made lost to satisfy a request; consumed by
// VmaAllocationRequest::CalcCost() below when ranking candidate requests.
5237 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5239 enum class VmaAllocationRequestType
// Describes where a prospective allocation would be placed inside a block,
// plus the cost of making it happen (space consumed and allocations lost).
5261 struct VmaAllocationRequest
// Offset from the start of the block at which the allocation would begin.
5263 VkDeviceSize offset;
// Total free space available around the chosen spot.
5264 VkDeviceSize sumFreeSize;
// Total size of existing allocations that would need to be made lost.
5265 VkDeviceSize sumItemSize;
// Suballocation-list position the request refers to.
5266 VmaSuballocationList::iterator item;
// Number of existing allocations that must be made lost first.
5267 size_t itemsToMakeLostCount;
5269 VmaAllocationRequestType type;
// Ranking metric: bytes sacrificed plus a fixed penalty per lost allocation
// (VMA_LOST_ALLOCATION_COST). Lower cost = better request.
5271 VkDeviceSize CalcCost()
const 5273 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
5281 class VmaBlockMetadata
5285 virtual ~VmaBlockMetadata() { }
5286 virtual void Init(VkDeviceSize size) { m_Size = size; }
5289 virtual bool Validate()
const = 0;
5290 VkDeviceSize GetSize()
const {
return m_Size; }
5291 virtual size_t GetAllocationCount()
const = 0;
5292 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5293 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5295 virtual bool IsEmpty()
const = 0;
5297 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5299 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5301 #if VMA_STATS_STRING_ENABLED 5302 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
5308 virtual bool CreateAllocationRequest(
5309 uint32_t currentFrameIndex,
5310 uint32_t frameInUseCount,
5311 VkDeviceSize bufferImageGranularity,
5312 VkDeviceSize allocSize,
5313 VkDeviceSize allocAlignment,
5315 VmaSuballocationType allocType,
5316 bool canMakeOtherLost,
5319 VmaAllocationRequest* pAllocationRequest) = 0;
5321 virtual bool MakeRequestedAllocationsLost(
5322 uint32_t currentFrameIndex,
5323 uint32_t frameInUseCount,
5324 VmaAllocationRequest* pAllocationRequest) = 0;
5326 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5328 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
5332 const VmaAllocationRequest& request,
5333 VmaSuballocationType type,
5334 VkDeviceSize allocSize,
5339 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5342 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5345 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
5347 #if VMA_STATS_STRING_ENABLED 5348 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5349 VkDeviceSize unusedBytes,
5350 size_t allocationCount,
5351 size_t unusedRangeCount)
const;
5352 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5353 VkDeviceSize offset,
5355 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5356 VkDeviceSize offset,
5357 VkDeviceSize size)
const;
5358 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5362 VkDeviceSize m_Size;
5363 const VkAllocationCallbacks* m_pAllocationCallbacks;
5366 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5367 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5371 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5373 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5376 virtual ~VmaBlockMetadata_Generic();
5377 virtual void Init(VkDeviceSize size);
5379 virtual bool Validate()
const;
5380 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5381 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5382 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5383 virtual bool IsEmpty()
const;
5385 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5386 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5388 #if VMA_STATS_STRING_ENABLED 5389 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5392 virtual bool CreateAllocationRequest(
5393 uint32_t currentFrameIndex,
5394 uint32_t frameInUseCount,
5395 VkDeviceSize bufferImageGranularity,
5396 VkDeviceSize allocSize,
5397 VkDeviceSize allocAlignment,
5399 VmaSuballocationType allocType,
5400 bool canMakeOtherLost,
5402 VmaAllocationRequest* pAllocationRequest);
5404 virtual bool MakeRequestedAllocationsLost(
5405 uint32_t currentFrameIndex,
5406 uint32_t frameInUseCount,
5407 VmaAllocationRequest* pAllocationRequest);
5409 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5411 virtual VkResult CheckCorruption(
const void* pBlockData);
5414 const VmaAllocationRequest& request,
5415 VmaSuballocationType type,
5416 VkDeviceSize allocSize,
5420 virtual void FreeAtOffset(VkDeviceSize offset);
5422 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5427 bool IsBufferImageGranularityConflictPossible(
5428 VkDeviceSize bufferImageGranularity,
5429 VmaSuballocationType& inOutPrevSuballocType)
const;
5432 friend class VmaDefragmentationAlgorithm_Generic;
5433 friend class VmaDefragmentationAlgorithm_Fast;
5435 uint32_t m_FreeCount;
5436 VkDeviceSize m_SumFreeSize;
5437 VmaSuballocationList m_Suballocations;
5440 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5442 bool ValidateFreeSuballocationList()
const;
5446 bool CheckAllocation(
5447 uint32_t currentFrameIndex,
5448 uint32_t frameInUseCount,
5449 VkDeviceSize bufferImageGranularity,
5450 VkDeviceSize allocSize,
5451 VkDeviceSize allocAlignment,
5452 VmaSuballocationType allocType,
5453 VmaSuballocationList::const_iterator suballocItem,
5454 bool canMakeOtherLost,
5455 VkDeviceSize* pOffset,
5456 size_t* itemsToMakeLostCount,
5457 VkDeviceSize* pSumFreeSize,
5458 VkDeviceSize* pSumItemSize)
const;
5460 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5464 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5467 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5470 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5551 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5553 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5556 virtual ~VmaBlockMetadata_Linear();
5557 virtual void Init(VkDeviceSize size);
5559 virtual bool Validate()
const;
5560 virtual size_t GetAllocationCount()
const;
5561 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5562 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5563 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5565 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5566 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5568 #if VMA_STATS_STRING_ENABLED 5569 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5572 virtual bool CreateAllocationRequest(
5573 uint32_t currentFrameIndex,
5574 uint32_t frameInUseCount,
5575 VkDeviceSize bufferImageGranularity,
5576 VkDeviceSize allocSize,
5577 VkDeviceSize allocAlignment,
5579 VmaSuballocationType allocType,
5580 bool canMakeOtherLost,
5582 VmaAllocationRequest* pAllocationRequest);
5584 virtual bool MakeRequestedAllocationsLost(
5585 uint32_t currentFrameIndex,
5586 uint32_t frameInUseCount,
5587 VmaAllocationRequest* pAllocationRequest);
5589 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5591 virtual VkResult CheckCorruption(
const void* pBlockData);
5594 const VmaAllocationRequest& request,
5595 VmaSuballocationType type,
5596 VkDeviceSize allocSize,
5600 virtual void FreeAtOffset(VkDeviceSize offset);
5610 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5612 enum SECOND_VECTOR_MODE
5614 SECOND_VECTOR_EMPTY,
5619 SECOND_VECTOR_RING_BUFFER,
5625 SECOND_VECTOR_DOUBLE_STACK,
5628 VkDeviceSize m_SumFreeSize;
5629 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5630 uint32_t m_1stVectorIndex;
5631 SECOND_VECTOR_MODE m_2ndVectorMode;
5633 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5634 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5635 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5636 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5639 size_t m_1stNullItemsBeginCount;
5641 size_t m_1stNullItemsMiddleCount;
5643 size_t m_2ndNullItemsCount;
5645 bool ShouldCompact1st()
const;
5646 void CleanupAfterFree();
5648 bool CreateAllocationRequest_LowerAddress(
5649 uint32_t currentFrameIndex,
5650 uint32_t frameInUseCount,
5651 VkDeviceSize bufferImageGranularity,
5652 VkDeviceSize allocSize,
5653 VkDeviceSize allocAlignment,
5654 VmaSuballocationType allocType,
5655 bool canMakeOtherLost,
5657 VmaAllocationRequest* pAllocationRequest);
5658 bool CreateAllocationRequest_UpperAddress(
5659 uint32_t currentFrameIndex,
5660 uint32_t frameInUseCount,
5661 VkDeviceSize bufferImageGranularity,
5662 VkDeviceSize allocSize,
5663 VkDeviceSize allocAlignment,
5664 VmaSuballocationType allocType,
5665 bool canMakeOtherLost,
5667 VmaAllocationRequest* pAllocationRequest);
5681 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5683 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5686 virtual ~VmaBlockMetadata_Buddy();
5687 virtual void Init(VkDeviceSize size);
5689 virtual bool Validate()
const;
5690 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5691 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5692 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5693 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5695 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5696 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5698 #if VMA_STATS_STRING_ENABLED 5699 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5702 virtual bool CreateAllocationRequest(
5703 uint32_t currentFrameIndex,
5704 uint32_t frameInUseCount,
5705 VkDeviceSize bufferImageGranularity,
5706 VkDeviceSize allocSize,
5707 VkDeviceSize allocAlignment,
5709 VmaSuballocationType allocType,
5710 bool canMakeOtherLost,
5712 VmaAllocationRequest* pAllocationRequest);
5714 virtual bool MakeRequestedAllocationsLost(
5715 uint32_t currentFrameIndex,
5716 uint32_t frameInUseCount,
5717 VmaAllocationRequest* pAllocationRequest);
5719 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5721 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5724 const VmaAllocationRequest& request,
5725 VmaSuballocationType type,
5726 VkDeviceSize allocSize,
5729 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5730 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5733 static const VkDeviceSize MIN_NODE_SIZE = 32;
5734 static const size_t MAX_LEVELS = 30;
5736 struct ValidationContext
5738 size_t calculatedAllocationCount;
5739 size_t calculatedFreeCount;
5740 VkDeviceSize calculatedSumFreeSize;
5742 ValidationContext() :
5743 calculatedAllocationCount(0),
5744 calculatedFreeCount(0),
5745 calculatedSumFreeSize(0) { }
5750 VkDeviceSize offset;
5780 VkDeviceSize m_UsableSize;
5781 uint32_t m_LevelCount;
5787 } m_FreeList[MAX_LEVELS];
5789 size_t m_AllocationCount;
5793 VkDeviceSize m_SumFreeSize;
5795 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5796 void DeleteNode(Node* node);
5797 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5798 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5799 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5801 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5802 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5806 void AddToFreeListFront(uint32_t level, Node* node);
5810 void RemoveFromFreeList(uint32_t level, Node* node);
5812 #if VMA_STATS_STRING_ENABLED 5813 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5823 class VmaDeviceMemoryBlock
5825 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5827 VmaBlockMetadata* m_pMetadata;
5831 ~VmaDeviceMemoryBlock()
5833 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5834 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5841 uint32_t newMemoryTypeIndex,
5842 VkDeviceMemory newMemory,
5843 VkDeviceSize newSize,
5845 uint32_t algorithm);
5849 VmaPool GetParentPool()
const {
return m_hParentPool; }
5850 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5851 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5852 uint32_t GetId()
const {
return m_Id; }
5853 void* GetMappedData()
const {
return m_pMappedData; }
5856 bool Validate()
const;
5861 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5864 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5865 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5867 VkResult BindBufferMemory(
5871 VkResult BindImageMemory(
5878 uint32_t m_MemoryTypeIndex;
5880 VkDeviceMemory m_hMemory;
5888 uint32_t m_MapCount;
5889 void* m_pMappedData;
5892 struct VmaPointerLess
5894 bool operator()(
const void* lhs,
const void* rhs)
const 5900 struct VmaDefragmentationMove
5902 size_t srcBlockIndex;
5903 size_t dstBlockIndex;
5904 VkDeviceSize srcOffset;
5905 VkDeviceSize dstOffset;
5909 class VmaDefragmentationAlgorithm;
5917 struct VmaBlockVector
5919 VMA_CLASS_NO_COPY(VmaBlockVector)
5924 uint32_t memoryTypeIndex,
5925 VkDeviceSize preferredBlockSize,
5926 size_t minBlockCount,
5927 size_t maxBlockCount,
5928 VkDeviceSize bufferImageGranularity,
5929 uint32_t frameInUseCount,
5931 bool explicitBlockSize,
5932 uint32_t algorithm);
5935 VkResult CreateMinBlocks();
5937 VmaPool GetParentPool()
const {
return m_hParentPool; }
5938 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5939 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5940 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5941 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5942 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5946 bool IsEmpty()
const {
return m_Blocks.empty(); }
5947 bool IsCorruptionDetectionEnabled()
const;
5950 uint32_t currentFrameIndex,
5952 VkDeviceSize alignment,
5954 VmaSuballocationType suballocType,
5955 size_t allocationCount,
5964 #if VMA_STATS_STRING_ENABLED 5965 void PrintDetailedMap(
class VmaJsonWriter& json);
5968 void MakePoolAllocationsLost(
5969 uint32_t currentFrameIndex,
5970 size_t* pLostAllocationCount);
5971 VkResult CheckCorruption();
5975 class VmaBlockVectorDefragmentationContext* pCtx,
5977 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5978 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5979 VkCommandBuffer commandBuffer);
5980 void DefragmentationEnd(
5981 class VmaBlockVectorDefragmentationContext* pCtx,
5987 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5988 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5989 size_t CalcAllocationCount()
const;
5990 bool IsBufferImageGranularityConflictPossible()
const;
5993 friend class VmaDefragmentationAlgorithm_Generic;
5997 const uint32_t m_MemoryTypeIndex;
5998 const VkDeviceSize m_PreferredBlockSize;
5999 const size_t m_MinBlockCount;
6000 const size_t m_MaxBlockCount;
6001 const VkDeviceSize m_BufferImageGranularity;
6002 const uint32_t m_FrameInUseCount;
6003 const bool m_IsCustomPool;
6004 const bool m_ExplicitBlockSize;
6005 const uint32_t m_Algorithm;
6009 bool m_HasEmptyBlock;
6010 VMA_RW_MUTEX m_Mutex;
6012 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
6013 uint32_t m_NextBlockId;
6015 VkDeviceSize CalcMaxBlockSize()
const;
6018 void Remove(VmaDeviceMemoryBlock* pBlock);
6022 void IncrementallySortBlocks();
6024 VkResult AllocatePage(
6025 uint32_t currentFrameIndex,
6027 VkDeviceSize alignment,
6029 VmaSuballocationType suballocType,
6033 VkResult AllocateFromBlock(
6034 VmaDeviceMemoryBlock* pBlock,
6035 uint32_t currentFrameIndex,
6037 VkDeviceSize alignment,
6040 VmaSuballocationType suballocType,
6044 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
6047 void ApplyDefragmentationMovesCpu(
6048 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6049 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
6051 void ApplyDefragmentationMovesGpu(
6052 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6053 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6054 VkCommandBuffer commandBuffer);
6065 VMA_CLASS_NO_COPY(VmaPool_T)
6067 VmaBlockVector m_BlockVector;
6072 VkDeviceSize preferredBlockSize);
6075 uint32_t GetId()
const {
return m_Id; }
6076 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
6078 #if VMA_STATS_STRING_ENABLED 6093 class VmaDefragmentationAlgorithm
6095 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
6097 VmaDefragmentationAlgorithm(
6099 VmaBlockVector* pBlockVector,
6100 uint32_t currentFrameIndex) :
6101 m_hAllocator(hAllocator),
6102 m_pBlockVector(pBlockVector),
6103 m_CurrentFrameIndex(currentFrameIndex)
6106 virtual ~VmaDefragmentationAlgorithm()
6110 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
6111 virtual void AddAll() = 0;
6113 virtual VkResult Defragment(
6114 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6115 VkDeviceSize maxBytesToMove,
6116 uint32_t maxAllocationsToMove) = 0;
6118 virtual VkDeviceSize GetBytesMoved()
const = 0;
6119 virtual uint32_t GetAllocationsMoved()
const = 0;
6123 VmaBlockVector*
const m_pBlockVector;
6124 const uint32_t m_CurrentFrameIndex;
6126 struct AllocationInfo
6129 VkBool32* m_pChanged;
6132 m_hAllocation(VK_NULL_HANDLE),
6133 m_pChanged(VMA_NULL)
6137 m_hAllocation(hAlloc),
6138 m_pChanged(pChanged)
6144 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6146 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6148 VmaDefragmentationAlgorithm_Generic(
6150 VmaBlockVector* pBlockVector,
6151 uint32_t currentFrameIndex,
6152 bool overlappingMoveSupported);
6153 virtual ~VmaDefragmentationAlgorithm_Generic();
6155 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6156 virtual void AddAll() { m_AllAllocations =
true; }
6158 virtual VkResult Defragment(
6159 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6160 VkDeviceSize maxBytesToMove,
6161 uint32_t maxAllocationsToMove);
6163 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6164 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6167 uint32_t m_AllocationCount;
6168 bool m_AllAllocations;
6170 VkDeviceSize m_BytesMoved;
6171 uint32_t m_AllocationsMoved;
6173 struct AllocationInfoSizeGreater
6175 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6177 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6181 struct AllocationInfoOffsetGreater
6183 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6185 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
6191 size_t m_OriginalBlockIndex;
6192 VmaDeviceMemoryBlock* m_pBlock;
6193 bool m_HasNonMovableAllocations;
6194 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6196 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6197 m_OriginalBlockIndex(SIZE_MAX),
6199 m_HasNonMovableAllocations(true),
6200 m_Allocations(pAllocationCallbacks)
6204 void CalcHasNonMovableAllocations()
6206 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6207 const size_t defragmentAllocCount = m_Allocations.size();
6208 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6211 void SortAllocationsBySizeDescending()
6213 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6216 void SortAllocationsByOffsetDescending()
6218 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
6222 struct BlockPointerLess
6224 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6226 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6228 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6230 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
6236 struct BlockInfoCompareMoveDestination
6238 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6240 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6244 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6248 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6256 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6257 BlockInfoVector m_Blocks;
6259 VkResult DefragmentRound(
6260 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6261 VkDeviceSize maxBytesToMove,
6262 uint32_t maxAllocationsToMove);
6264 size_t CalcBlocksWithNonMovableCount()
const;
6266 static bool MoveMakesSense(
6267 size_t dstBlockIndex, VkDeviceSize dstOffset,
6268 size_t srcBlockIndex, VkDeviceSize srcOffset);
6271 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6273 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6275 VmaDefragmentationAlgorithm_Fast(
6277 VmaBlockVector* pBlockVector,
6278 uint32_t currentFrameIndex,
6279 bool overlappingMoveSupported);
6280 virtual ~VmaDefragmentationAlgorithm_Fast();
6282 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6283 virtual void AddAll() { m_AllAllocations =
true; }
6285 virtual VkResult Defragment(
6286 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6287 VkDeviceSize maxBytesToMove,
6288 uint32_t maxAllocationsToMove);
6290 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6291 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6296 size_t origBlockIndex;
6299 class FreeSpaceDatabase
6305 s.blockInfoIndex = SIZE_MAX;
6306 for(
size_t i = 0; i < MAX_COUNT; ++i)
6308 m_FreeSpaces[i] = s;
6312 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6314 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6320 size_t bestIndex = SIZE_MAX;
6321 for(
size_t i = 0; i < MAX_COUNT; ++i)
6324 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6329 if(m_FreeSpaces[i].size < size &&
6330 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6336 if(bestIndex != SIZE_MAX)
6338 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6339 m_FreeSpaces[bestIndex].offset = offset;
6340 m_FreeSpaces[bestIndex].size = size;
6344 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6345 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6347 size_t bestIndex = SIZE_MAX;
6348 VkDeviceSize bestFreeSpaceAfter = 0;
6349 for(
size_t i = 0; i < MAX_COUNT; ++i)
6352 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6354 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6356 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6358 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6360 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6363 bestFreeSpaceAfter = freeSpaceAfter;
6369 if(bestIndex != SIZE_MAX)
6371 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6372 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6374 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6377 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6378 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6379 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6384 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6394 static const size_t MAX_COUNT = 4;
6398 size_t blockInfoIndex;
6399 VkDeviceSize offset;
6401 } m_FreeSpaces[MAX_COUNT];
6404 const bool m_OverlappingMoveSupported;
6406 uint32_t m_AllocationCount;
6407 bool m_AllAllocations;
6409 VkDeviceSize m_BytesMoved;
6410 uint32_t m_AllocationsMoved;
6412 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6414 void PreprocessMetadata();
6415 void PostprocessMetadata();
6416 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
6419 struct VmaBlockDefragmentationContext
6423 BLOCK_FLAG_USED = 0x00000001,
6429 class VmaBlockVectorDefragmentationContext
6431 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6435 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6437 VmaBlockVectorDefragmentationContext(
6440 VmaBlockVector* pBlockVector,
6441 uint32_t currFrameIndex);
6442 ~VmaBlockVectorDefragmentationContext();
6444 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6445 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6446 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6448 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6449 void AddAll() { m_AllAllocations =
true; }
6451 void Begin(
bool overlappingMoveSupported);
6458 VmaBlockVector*
const m_pBlockVector;
6459 const uint32_t m_CurrFrameIndex;
6461 VmaDefragmentationAlgorithm* m_pAlgorithm;
6469 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6470 bool m_AllAllocations;
6473 struct VmaDefragmentationContext_T
6476 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6478 VmaDefragmentationContext_T(
6480 uint32_t currFrameIndex,
6483 ~VmaDefragmentationContext_T();
6485 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6486 void AddAllocations(
6487 uint32_t allocationCount,
6489 VkBool32* pAllocationsChanged);
6497 VkResult Defragment(
6498 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6499 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6504 const uint32_t m_CurrFrameIndex;
6505 const uint32_t m_Flags;
6508 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6510 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
6513 #if VMA_RECORDING_ENABLED 6520 void WriteConfiguration(
6521 const VkPhysicalDeviceProperties& devProps,
6522 const VkPhysicalDeviceMemoryProperties& memProps,
6523 bool dedicatedAllocationExtensionEnabled);
6526 void RecordCreateAllocator(uint32_t frameIndex);
6527 void RecordDestroyAllocator(uint32_t frameIndex);
6528 void RecordCreatePool(uint32_t frameIndex,
6531 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6532 void RecordAllocateMemory(uint32_t frameIndex,
6533 const VkMemoryRequirements& vkMemReq,
6536 void RecordAllocateMemoryPages(uint32_t frameIndex,
6537 const VkMemoryRequirements& vkMemReq,
6539 uint64_t allocationCount,
6541 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6542 const VkMemoryRequirements& vkMemReq,
6543 bool requiresDedicatedAllocation,
6544 bool prefersDedicatedAllocation,
6547 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6548 const VkMemoryRequirements& vkMemReq,
6549 bool requiresDedicatedAllocation,
6550 bool prefersDedicatedAllocation,
6553 void RecordFreeMemory(uint32_t frameIndex,
6555 void RecordFreeMemoryPages(uint32_t frameIndex,
6556 uint64_t allocationCount,
6558 void RecordResizeAllocation(
6559 uint32_t frameIndex,
6561 VkDeviceSize newSize);
6562 void RecordSetAllocationUserData(uint32_t frameIndex,
6564 const void* pUserData);
6565 void RecordCreateLostAllocation(uint32_t frameIndex,
6567 void RecordMapMemory(uint32_t frameIndex,
6569 void RecordUnmapMemory(uint32_t frameIndex,
6571 void RecordFlushAllocation(uint32_t frameIndex,
6572 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6573 void RecordInvalidateAllocation(uint32_t frameIndex,
6574 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6575 void RecordCreateBuffer(uint32_t frameIndex,
6576 const VkBufferCreateInfo& bufCreateInfo,
6579 void RecordCreateImage(uint32_t frameIndex,
6580 const VkImageCreateInfo& imageCreateInfo,
6583 void RecordDestroyBuffer(uint32_t frameIndex,
6585 void RecordDestroyImage(uint32_t frameIndex,
6587 void RecordTouchAllocation(uint32_t frameIndex,
6589 void RecordGetAllocationInfo(uint32_t frameIndex,
6591 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6593 void RecordDefragmentationBegin(uint32_t frameIndex,
6596 void RecordDefragmentationEnd(uint32_t frameIndex,
6606 class UserDataString
6610 const char* GetString()
const {
return m_Str; }
6620 VMA_MUTEX m_FileMutex;
6622 int64_t m_StartCounter;
6624 void GetBasicParams(CallParams& outParams);
6627 template<
typename T>
6628 void PrintPointerList(uint64_t count,
const T* pItems)
6632 fprintf(m_File,
"%p", pItems[0]);
6633 for(uint64_t i = 1; i < count; ++i)
6635 fprintf(m_File,
" %p", pItems[i]);
6640 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
6644 #endif // #if VMA_RECORDING_ENABLED 6649 class VmaAllocationObjectAllocator
6651 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
6653 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
6660 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
6664 struct VmaAllocator_T
6666 VMA_CLASS_NO_COPY(VmaAllocator_T)
6669 bool m_UseKhrDedicatedAllocation;
6671 bool m_AllocationCallbacksSpecified;
6672 VkAllocationCallbacks m_AllocationCallbacks;
6674 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
6677 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6678 VMA_MUTEX m_HeapSizeLimitMutex;
6680 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6681 VkPhysicalDeviceMemoryProperties m_MemProps;
6684 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
6687 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6688 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6689 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
6695 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6697 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6701 return m_VulkanFunctions;
6704 VkDeviceSize GetBufferImageGranularity()
const 6707 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6708 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6711 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6712 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6714 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6716 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6717 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6720 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6722 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6723 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6726 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6728 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6729 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6730 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6733 bool IsIntegratedGpu()
const 6735 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6738 #if VMA_RECORDING_ENABLED 6739 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6742 void GetBufferMemoryRequirements(
6744 VkMemoryRequirements& memReq,
6745 bool& requiresDedicatedAllocation,
6746 bool& prefersDedicatedAllocation)
const;
6747 void GetImageMemoryRequirements(
6749 VkMemoryRequirements& memReq,
6750 bool& requiresDedicatedAllocation,
6751 bool& prefersDedicatedAllocation)
const;
6754 VkResult AllocateMemory(
6755 const VkMemoryRequirements& vkMemReq,
6756 bool requiresDedicatedAllocation,
6757 bool prefersDedicatedAllocation,
6758 VkBuffer dedicatedBuffer,
6759 VkImage dedicatedImage,
6761 VmaSuballocationType suballocType,
6762 size_t allocationCount,
6767 size_t allocationCount,
6770 VkResult ResizeAllocation(
6772 VkDeviceSize newSize);
6774 void CalculateStats(
VmaStats* pStats);
6776 #if VMA_STATS_STRING_ENABLED 6777 void PrintDetailedMap(
class VmaJsonWriter& json);
6780 VkResult DefragmentationBegin(
6784 VkResult DefragmentationEnd(
6791 void DestroyPool(
VmaPool pool);
6794 void SetCurrentFrameIndex(uint32_t frameIndex);
6795 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6797 void MakePoolAllocationsLost(
6799 size_t* pLostAllocationCount);
6800 VkResult CheckPoolCorruption(
VmaPool hPool);
6801 VkResult CheckCorruption(uint32_t memoryTypeBits);
6805 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6806 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6811 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6812 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6814 void FlushOrInvalidateAllocation(
6816 VkDeviceSize offset, VkDeviceSize size,
6817 VMA_CACHE_OPERATION op);
6819 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6825 uint32_t GetGpuDefragmentationMemoryTypeBits();
6828 VkDeviceSize m_PreferredLargeHeapBlockSize;
6830 VkPhysicalDevice m_PhysicalDevice;
6831 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6832 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
6834 VMA_RW_MUTEX m_PoolsMutex;
6836 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6837 uint32_t m_NextPoolId;
6841 #if VMA_RECORDING_ENABLED 6842 VmaRecorder* m_pRecorder;
6847 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
6849 VkResult AllocateMemoryOfType(
6851 VkDeviceSize alignment,
6852 bool dedicatedAllocation,
6853 VkBuffer dedicatedBuffer,
6854 VkImage dedicatedImage,
6856 uint32_t memTypeIndex,
6857 VmaSuballocationType suballocType,
6858 size_t allocationCount,
6862 VkResult AllocateDedicatedMemoryPage(
6864 VmaSuballocationType suballocType,
6865 uint32_t memTypeIndex,
6866 const VkMemoryAllocateInfo& allocInfo,
6868 bool isUserDataString,
6873 VkResult AllocateDedicatedMemory(
6875 VmaSuballocationType suballocType,
6876 uint32_t memTypeIndex,
6878 bool isUserDataString,
6880 VkBuffer dedicatedBuffer,
6881 VkImage dedicatedImage,
6882 size_t allocationCount,
6891 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
6897 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6899 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6902 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6904 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
6907 template<
typename T>
6910 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6913 template<
typename T>
6914 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6916 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
6919 template<
typename T>
6920 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6925 VmaFree(hAllocator, ptr);
6929 template<
typename T>
6930 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6934 for(
size_t i = count; i--; )
6936 VmaFree(hAllocator, ptr);
6943 #if VMA_STATS_STRING_ENABLED 6945 class VmaStringBuilder
6948 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6949 size_t GetLength()
const {
return m_Data.size(); }
6950 const char* GetData()
const {
return m_Data.data(); }
6952 void Add(
char ch) { m_Data.push_back(ch); }
6953 void Add(
const char* pStr);
6954 void AddNewLine() { Add(
'\n'); }
6955 void AddNumber(uint32_t num);
6956 void AddNumber(uint64_t num);
6957 void AddPointer(
const void* ptr);
6960 VmaVector< char, VmaStlAllocator<char> > m_Data;
6963 void VmaStringBuilder::Add(
const char* pStr)
6965 const size_t strLen = strlen(pStr);
6968 const size_t oldCount = m_Data.size();
6969 m_Data.resize(oldCount + strLen);
6970 memcpy(m_Data.data() + oldCount, pStr, strLen);
6974 void VmaStringBuilder::AddNumber(uint32_t num)
6977 VmaUint32ToStr(buf,
sizeof(buf), num);
6981 void VmaStringBuilder::AddNumber(uint64_t num)
6984 VmaUint64ToStr(buf,
sizeof(buf), num);
6988 void VmaStringBuilder::AddPointer(
const void* ptr)
6991 VmaPtrToStr(buf,
sizeof(buf), ptr);
6995 #endif // #if VMA_STATS_STRING_ENABLED 7000 #if VMA_STATS_STRING_ENABLED 7004 VMA_CLASS_NO_COPY(VmaJsonWriter)
7006 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
7009 void BeginObject(
bool singleLine =
false);
7012 void BeginArray(
bool singleLine =
false);
7015 void WriteString(
const char* pStr);
7016 void BeginString(
const char* pStr = VMA_NULL);
7017 void ContinueString(
const char* pStr);
7018 void ContinueString(uint32_t n);
7019 void ContinueString(uint64_t n);
7020 void ContinueString_Pointer(
const void* ptr);
7021 void EndString(
const char* pStr = VMA_NULL);
7023 void WriteNumber(uint32_t n);
7024 void WriteNumber(uint64_t n);
7025 void WriteBool(
bool b);
7029 static const char*
const INDENT;
7031 enum COLLECTION_TYPE
7033 COLLECTION_TYPE_OBJECT,
7034 COLLECTION_TYPE_ARRAY,
7038 COLLECTION_TYPE type;
7039 uint32_t valueCount;
7040 bool singleLineMode;
7043 VmaStringBuilder& m_SB;
7044 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
7045 bool m_InsideString;
7047 void BeginValue(
bool isString);
7048 void WriteIndent(
bool oneLess =
false);
7051 const char*
const VmaJsonWriter::INDENT =
" ";
7053 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
7055 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
7056 m_InsideString(false)
7060 VmaJsonWriter::~VmaJsonWriter()
7062 VMA_ASSERT(!m_InsideString);
7063 VMA_ASSERT(m_Stack.empty());
7066 void VmaJsonWriter::BeginObject(
bool singleLine)
7068 VMA_ASSERT(!m_InsideString);
7074 item.type = COLLECTION_TYPE_OBJECT;
7075 item.valueCount = 0;
7076 item.singleLineMode = singleLine;
7077 m_Stack.push_back(item);
7080 void VmaJsonWriter::EndObject()
7082 VMA_ASSERT(!m_InsideString);
7087 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
7091 void VmaJsonWriter::BeginArray(
bool singleLine)
7093 VMA_ASSERT(!m_InsideString);
7099 item.type = COLLECTION_TYPE_ARRAY;
7100 item.valueCount = 0;
7101 item.singleLineMode = singleLine;
7102 m_Stack.push_back(item);
7105 void VmaJsonWriter::EndArray()
7107 VMA_ASSERT(!m_InsideString);
7112 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
7116 void VmaJsonWriter::WriteString(
const char* pStr)
7122 void VmaJsonWriter::BeginString(
const char* pStr)
7124 VMA_ASSERT(!m_InsideString);
7128 m_InsideString =
true;
7129 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7131 ContinueString(pStr);
7135 void VmaJsonWriter::ContinueString(
const char* pStr)
7137 VMA_ASSERT(m_InsideString);
7139 const size_t strLen = strlen(pStr);
7140 for(
size_t i = 0; i < strLen; ++i)
7173 VMA_ASSERT(0 &&
"Character not currently supported.");
7179 void VmaJsonWriter::ContinueString(uint32_t n)
7181 VMA_ASSERT(m_InsideString);
7185 void VmaJsonWriter::ContinueString(uint64_t n)
7187 VMA_ASSERT(m_InsideString);
7191 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7193 VMA_ASSERT(m_InsideString);
7194 m_SB.AddPointer(ptr);
7197 void VmaJsonWriter::EndString(
const char* pStr)
7199 VMA_ASSERT(m_InsideString);
7200 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7202 ContinueString(pStr);
7205 m_InsideString =
false;
7208 void VmaJsonWriter::WriteNumber(uint32_t n)
7210 VMA_ASSERT(!m_InsideString);
7215 void VmaJsonWriter::WriteNumber(uint64_t n)
7217 VMA_ASSERT(!m_InsideString);
7222 void VmaJsonWriter::WriteBool(
bool b)
7224 VMA_ASSERT(!m_InsideString);
7226 m_SB.Add(b ?
"true" :
"false");
7229 void VmaJsonWriter::WriteNull()
7231 VMA_ASSERT(!m_InsideString);
7236 void VmaJsonWriter::BeginValue(
bool isString)
7238 if(!m_Stack.empty())
7240 StackItem& currItem = m_Stack.back();
7241 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7242 currItem.valueCount % 2 == 0)
7244 VMA_ASSERT(isString);
7247 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7248 currItem.valueCount % 2 != 0)
7252 else if(currItem.valueCount > 0)
7261 ++currItem.valueCount;
7265 void VmaJsonWriter::WriteIndent(
bool oneLess)
7267 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7271 size_t count = m_Stack.size();
7272 if(count > 0 && oneLess)
7276 for(
size_t i = 0; i < count; ++i)
7283 #endif // #if VMA_STATS_STRING_ENABLED 7287 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7289 if(IsUserDataString())
7291 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7293 FreeUserDataString(hAllocator);
7295 if(pUserData != VMA_NULL)
7297 const char*
const newStrSrc = (
char*)pUserData;
7298 const size_t newStrLen = strlen(newStrSrc);
7299 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7300 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7301 m_pUserData = newStrDst;
7306 m_pUserData = pUserData;
7310 void VmaAllocation_T::ChangeBlockAllocation(
7312 VmaDeviceMemoryBlock* block,
7313 VkDeviceSize offset)
7315 VMA_ASSERT(block != VMA_NULL);
7316 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7319 if(block != m_BlockAllocation.m_Block)
7321 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7322 if(IsPersistentMap())
7324 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7325 block->Map(hAllocator, mapRefCount, VMA_NULL);
7328 m_BlockAllocation.m_Block = block;
7329 m_BlockAllocation.m_Offset = offset;
7332 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7334 VMA_ASSERT(newSize > 0);
7338 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7340 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7341 m_BlockAllocation.m_Offset = newOffset;
7344 VkDeviceSize VmaAllocation_T::GetOffset()
const 7348 case ALLOCATION_TYPE_BLOCK:
7349 return m_BlockAllocation.m_Offset;
7350 case ALLOCATION_TYPE_DEDICATED:
7358 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7362 case ALLOCATION_TYPE_BLOCK:
7363 return m_BlockAllocation.m_Block->GetDeviceMemory();
7364 case ALLOCATION_TYPE_DEDICATED:
7365 return m_DedicatedAllocation.m_hMemory;
7368 return VK_NULL_HANDLE;
7372 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7376 case ALLOCATION_TYPE_BLOCK:
7377 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7378 case ALLOCATION_TYPE_DEDICATED:
7379 return m_DedicatedAllocation.m_MemoryTypeIndex;
7386 void* VmaAllocation_T::GetMappedData()
const 7390 case ALLOCATION_TYPE_BLOCK:
7393 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7394 VMA_ASSERT(pBlockData != VMA_NULL);
7395 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7402 case ALLOCATION_TYPE_DEDICATED:
7403 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7404 return m_DedicatedAllocation.m_pMappedData;
7411 bool VmaAllocation_T::CanBecomeLost()
const 7415 case ALLOCATION_TYPE_BLOCK:
7416 return m_BlockAllocation.m_CanBecomeLost;
7417 case ALLOCATION_TYPE_DEDICATED:
7425 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7427 VMA_ASSERT(CanBecomeLost());
7433 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7436 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7441 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7447 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
7457 #if VMA_STATS_STRING_ENABLED 7460 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
7469 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7471 json.WriteString(
"Type");
7472 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7474 json.WriteString(
"Size");
7475 json.WriteNumber(m_Size);
7477 if(m_pUserData != VMA_NULL)
7479 json.WriteString(
"UserData");
7480 if(IsUserDataString())
7482 json.WriteString((
const char*)m_pUserData);
7487 json.ContinueString_Pointer(m_pUserData);
7492 json.WriteString(
"CreationFrameIndex");
7493 json.WriteNumber(m_CreationFrameIndex);
7495 json.WriteString(
"LastUseFrameIndex");
7496 json.WriteNumber(GetLastUseFrameIndex());
7498 if(m_BufferImageUsage != 0)
7500 json.WriteString(
"Usage");
7501 json.WriteNumber(m_BufferImageUsage);
7507 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7509 VMA_ASSERT(IsUserDataString());
7510 if(m_pUserData != VMA_NULL)
7512 char*
const oldStr = (
char*)m_pUserData;
7513 const size_t oldStrLen = strlen(oldStr);
7514 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7515 m_pUserData = VMA_NULL;
7519 void VmaAllocation_T::BlockAllocMap()
7521 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7523 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7529 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
7533 void VmaAllocation_T::BlockAllocUnmap()
7535 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7537 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7543 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
7547 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7549 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7553 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7555 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7556 *ppData = m_DedicatedAllocation.m_pMappedData;
7562 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7563 return VK_ERROR_MEMORY_MAP_FAILED;
7568 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7569 hAllocator->m_hDevice,
7570 m_DedicatedAllocation.m_hMemory,
7575 if(result == VK_SUCCESS)
7577 m_DedicatedAllocation.m_pMappedData = *ppData;
7584 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7586 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7588 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7593 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7594 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7595 hAllocator->m_hDevice,
7596 m_DedicatedAllocation.m_hMemory);
7601 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
7605 #if VMA_STATS_STRING_ENABLED 7607 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7611 json.WriteString(
"Blocks");
7614 json.WriteString(
"Allocations");
7617 json.WriteString(
"UnusedRanges");
7620 json.WriteString(
"UsedBytes");
7623 json.WriteString(
"UnusedBytes");
7628 json.WriteString(
"AllocationSize");
7629 json.BeginObject(
true);
7630 json.WriteString(
"Min");
7632 json.WriteString(
"Avg");
7634 json.WriteString(
"Max");
7641 json.WriteString(
"UnusedRangeSize");
7642 json.BeginObject(
true);
7643 json.WriteString(
"Min");
7645 json.WriteString(
"Avg");
7647 json.WriteString(
"Max");
7655 #endif // #if VMA_STATS_STRING_ENABLED 7657 struct VmaSuballocationItemSizeLess
7660 const VmaSuballocationList::iterator lhs,
7661 const VmaSuballocationList::iterator rhs)
const 7663 return lhs->size < rhs->size;
7666 const VmaSuballocationList::iterator lhs,
7667 VkDeviceSize rhsSize)
const 7669 return lhs->size < rhsSize;
7677 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7679 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
7683 #if VMA_STATS_STRING_ENABLED 7685 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7686 VkDeviceSize unusedBytes,
7687 size_t allocationCount,
7688 size_t unusedRangeCount)
const 7692 json.WriteString(
"TotalBytes");
7693 json.WriteNumber(GetSize());
7695 json.WriteString(
"UnusedBytes");
7696 json.WriteNumber(unusedBytes);
7698 json.WriteString(
"Allocations");
7699 json.WriteNumber((uint64_t)allocationCount);
7701 json.WriteString(
"UnusedRanges");
7702 json.WriteNumber((uint64_t)unusedRangeCount);
7704 json.WriteString(
"Suballocations");
7708 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7709 VkDeviceSize offset,
7712 json.BeginObject(
true);
7714 json.WriteString(
"Offset");
7715 json.WriteNumber(offset);
7717 hAllocation->PrintParameters(json);
7722 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7723 VkDeviceSize offset,
7724 VkDeviceSize size)
const 7726 json.BeginObject(
true);
7728 json.WriteString(
"Offset");
7729 json.WriteNumber(offset);
7731 json.WriteString(
"Type");
7732 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7734 json.WriteString(
"Size");
7735 json.WriteNumber(size);
7740 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7746 #endif // #if VMA_STATS_STRING_ENABLED 7751 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7752 VmaBlockMetadata(hAllocator),
7755 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7756 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7760 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
7764 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7766 VmaBlockMetadata::Init(size);
7769 m_SumFreeSize = size;
7771 VmaSuballocation suballoc = {};
7772 suballoc.offset = 0;
7773 suballoc.size = size;
7774 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7775 suballoc.hAllocation = VK_NULL_HANDLE;
7777 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7778 m_Suballocations.push_back(suballoc);
7779 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7781 m_FreeSuballocationsBySize.push_back(suballocItem);
7784 bool VmaBlockMetadata_Generic::Validate()
const 7786 VMA_VALIDATE(!m_Suballocations.empty());
7789 VkDeviceSize calculatedOffset = 0;
7791 uint32_t calculatedFreeCount = 0;
7793 VkDeviceSize calculatedSumFreeSize = 0;
7796 size_t freeSuballocationsToRegister = 0;
7798 bool prevFree =
false;
7800 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7801 suballocItem != m_Suballocations.cend();
7804 const VmaSuballocation& subAlloc = *suballocItem;
7807 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7809 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
7811 VMA_VALIDATE(!prevFree || !currFree);
7813 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7817 calculatedSumFreeSize += subAlloc.size;
7818 ++calculatedFreeCount;
7819 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7821 ++freeSuballocationsToRegister;
7825 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7829 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7830 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
7833 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7836 calculatedOffset += subAlloc.size;
7837 prevFree = currFree;
7842 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7844 VkDeviceSize lastSize = 0;
7845 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7847 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7850 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7852 VMA_VALIDATE(suballocItem->size >= lastSize);
7854 lastSize = suballocItem->size;
7858 VMA_VALIDATE(ValidateFreeSuballocationList());
7859 VMA_VALIDATE(calculatedOffset == GetSize());
7860 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7861 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
7866 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7868 if(!m_FreeSuballocationsBySize.empty())
7870 return m_FreeSuballocationsBySize.back()->size;
7878 bool VmaBlockMetadata_Generic::IsEmpty()
const 7880 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
7883 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7887 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7899 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7900 suballocItem != m_Suballocations.cend();
7903 const VmaSuballocation& suballoc = *suballocItem;
7904 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7917 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7919 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7921 inoutStats.
size += GetSize();
7928 #if VMA_STATS_STRING_ENABLED 7930 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7932 PrintDetailedMap_Begin(json,
7934 m_Suballocations.size() - (size_t)m_FreeCount,
7938 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7939 suballocItem != m_Suballocations.cend();
7940 ++suballocItem, ++i)
7942 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7944 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7948 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7952 PrintDetailedMap_End(json);
// Searches this block for a place to put a new allocation of allocSize/allocAlignment.
// On success fills *pAllocationRequest (offset, item, cost bookkeeping) and — per the
// visible code paths — tries, in order:
//   1) best-fit via binary search in m_FreeSuballocationsBySize (sorted by size),
//   2) min-offset linear scan when strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET,
//   3) worst-fit (largest-first) reverse scan,
//   4) if canMakeOtherLost: brute-force scan that may sacrifice lost-able allocations,
//      keeping the candidate with the lowest CalcCost().
// NOTE(review): text garbled by extraction — original line numbers fused in; braces,
// early-return statements and several argument lines are missing; code left byte-identical.
7955 #endif // #if VMA_STATS_STRING_ENABLED 7957 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7958 uint32_t currentFrameIndex,
7959 uint32_t frameInUseCount,
7960 VkDeviceSize bufferImageGranularity,
7961 VkDeviceSize allocSize,
7962 VkDeviceSize allocAlignment,
7964 VmaSuballocationType allocType,
7965 bool canMakeOtherLost,
7967 VmaAllocationRequest* pAllocationRequest)
7969 VMA_ASSERT(allocSize > 0);
7970 VMA_ASSERT(!upperAddress);
7971 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7972 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7973 VMA_HEAVY_ASSERT(Validate());
7975 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Early-out: without the permission to make other allocations lost, the request
// cannot fit if total free space (incl. debug margins) is already too small.
7978 if(canMakeOtherLost ==
false &&
7979 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7985 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7986 if(freeSuballocCount > 0)
// Best-fit: binary-search the size-sorted vector for the first free range
// large enough to hold allocSize plus both debug margins.
7991 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7992 m_FreeSuballocationsBySize.data(),
7993 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7994 allocSize + 2 * VMA_DEBUG_MARGIN,
7995 VmaSuballocationItemSizeLess());
7996 size_t index = it - m_FreeSuballocationsBySize.data();
7997 for(; index < freeSuballocCount; ++index)
8002 bufferImageGranularity,
8006 m_FreeSuballocationsBySize[index],
8008 &pAllocationRequest->offset,
8009 &pAllocationRequest->itemsToMakeLostCount,
8010 &pAllocationRequest->sumFreeSize,
8011 &pAllocationRequest->sumItemSize))
8013 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Min-offset strategy: walk the suballocation list front-to-back and take the
// first free range that passes CheckAllocation.
8018 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
8020 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8021 it != m_Suballocations.end();
8024 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
8027 bufferImageGranularity,
8033 &pAllocationRequest->offset,
8034 &pAllocationRequest->itemsToMakeLostCount,
8035 &pAllocationRequest->sumFreeSize,
8036 &pAllocationRequest->sumItemSize))
8038 pAllocationRequest->item = it;
// Worst-fit fallback: iterate the size-sorted vector from largest to smallest.
8046 for(
size_t index = freeSuballocCount; index--; )
8051 bufferImageGranularity,
8055 m_FreeSuballocationsBySize[index],
8057 &pAllocationRequest->offset,
8058 &pAllocationRequest->itemsToMakeLostCount,
8059 &pAllocationRequest->sumFreeSize,
8060 &pAllocationRequest->sumItemSize))
8062 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Last resort: consider making existing lost-able allocations lost; keep the
// candidate request with the lowest cost (CalcCost comparison below).
8069 if(canMakeOtherLost)
8074 VmaAllocationRequest tmpAllocRequest = {};
8075 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
8076 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
8077 suballocIt != m_Suballocations.end();
8080 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
8081 suballocIt->hAllocation->CanBecomeLost())
8086 bufferImageGranularity,
8092 &tmpAllocRequest.offset,
8093 &tmpAllocRequest.itemsToMakeLostCount,
8094 &tmpAllocRequest.sumFreeSize,
8095 &tmpAllocRequest.sumItemSize))
8099 *pAllocationRequest = tmpAllocRequest;
8100 pAllocationRequest->item = suballocIt;
8103 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
8105 *pAllocationRequest = tmpAllocRequest;
8106 pAllocationRequest->item = suballocIt;
// Executes the "make lost" part of a previously computed allocation request:
// walks forward from pAllocationRequest->item, skipping free suballocations, and
// makes each lost-able allocation lost (freeing its range) until
// itemsToMakeLostCount reaches zero. Postcondition (asserted): item points at a
// free suballocation.
// NOTE(review): text garbled by extraction — original line numbers fused in,
// braces and return statements missing; code left byte-identical.
8119 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
8120 uint32_t currentFrameIndex,
8121 uint32_t frameInUseCount,
8122 VmaAllocationRequest* pAllocationRequest)
8124 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
8126 while(pAllocationRequest->itemsToMakeLostCount > 0)
8128 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
8130 ++pAllocationRequest->item;
8132 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8133 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
8134 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8135 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the (possibly merged) iterator.
8137 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
8138 --pAllocationRequest->itemsToMakeLostCount;
8146 VMA_HEAVY_ASSERT(Validate());
8147 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8148 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that can become lost for the given
// frame window, freeing its range; returns how many allocations were lost.
// NOTE(review): text garbled by extraction — original line numbers fused in,
// braces missing; code left byte-identical.
8153 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8155 uint32_t lostAllocationCount = 0;
8156 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8157 it != m_Suballocations.end();
8160 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8161 it->hAllocation->CanBecomeLost() &&
8162 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation returns an iterator valid after potential neighbor merging.
8164 it = FreeSuballocation(it);
8165 ++lostAllocationCount;
8168 return lostAllocationCount;
// Validates the magic-value guard bytes written before and after every live
// allocation (at offset - VMA_DEBUG_MARGIN and offset + size) in the mapped
// block memory pBlockData. Returns VK_ERROR_VALIDATION_FAILED_EXT on corruption.
// NOTE(review): text garbled by extraction — original line numbers fused in,
// braces and the success-return missing; code left byte-identical.
8171 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8173 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8174 it != m_Suballocations.end();
8177 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
8179 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8181 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8182 return VK_ERROR_VALIDATION_FAILED_EXT;
8184 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8186 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8187 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously validated allocation request: converts the chosen free
// suballocation into a used one, and splits off any leftover space before
// (paddingBegin) and after (paddingEnd) the allocation into new free
// suballocations registered in m_FreeSuballocationsBySize. Updates m_FreeCount
// and m_SumFreeSize accordingly.
// NOTE(review): text garbled by extraction — original line numbers fused in;
// braces, the `++next` advance and some m_FreeCount updates are missing;
// code left byte-identical.
8195 void VmaBlockMetadata_Generic::Alloc(
8196 const VmaAllocationRequest& request,
8197 VmaSuballocationType type,
8198 VkDeviceSize allocSize,
8201 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
8202 VMA_ASSERT(request.item != m_Suballocations.end());
8203 VmaSuballocation& suballoc = *request.item;
8205 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8207 VMA_ASSERT(request.offset >= suballoc.offset);
8208 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8209 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8210 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The chosen free range stops being free: unregister before mutating its size.
8214 UnregisterFreeSuballocation(request.item);
8216 suballoc.offset = request.offset;
8217 suballoc.size = allocSize;
8218 suballoc.type = type;
8219 suballoc.hAllocation = hAllocation;
// Leftover space after the allocation becomes its own free suballocation.
8224 VmaSuballocation paddingSuballoc = {};
8225 paddingSuballoc.offset = request.offset + allocSize;
8226 paddingSuballoc.size = paddingEnd;
8227 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8228 VmaSuballocationList::iterator next = request.item;
8230 const VmaSuballocationList::iterator paddingEndItem =
8231 m_Suballocations.insert(next, paddingSuballoc);
8232 RegisterFreeSuballocation(paddingEndItem);
// Leftover space before the allocation becomes its own free suballocation.
8238 VmaSuballocation paddingSuballoc = {};
8239 paddingSuballoc.offset = request.offset - paddingBegin;
8240 paddingSuballoc.size = paddingBegin;
8241 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8242 const VmaSuballocationList::iterator paddingBeginItem =
8243 m_Suballocations.insert(request.item, paddingSuballoc);
8244 RegisterFreeSuballocation(paddingBeginItem);
8248 m_FreeCount = m_FreeCount - 1;
8249 if(paddingBegin > 0)
8257 m_SumFreeSize -= allocSize;
// Frees the given allocation: linear-scans the suballocation list for the entry
// whose hAllocation matches, then releases it via FreeSuballocation (which also
// merges adjacent free ranges). Asserts if the allocation is not found.
// NOTE(review): text garbled by extraction — original line numbers fused in,
// braces and the post-free `return` missing; code left byte-identical.
8260 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8262 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8263 suballocItem != m_Suballocations.end();
8266 VmaSuballocation& suballoc = *suballocItem;
8267 if(suballoc.hAllocation == allocation)
8269 FreeSuballocation(suballocItem);
8270 VMA_HEAVY_ASSERT(Validate());
8274 VMA_ASSERT(0 &&
"Not found!");
// Same as Free(), but locates the suballocation by its byte offset within the
// block instead of by allocation handle. Asserts if no suballocation starts at
// exactly that offset.
// NOTE(review): text garbled by extraction — original line numbers fused in,
// braces/return missing; code left byte-identical.
8277 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8279 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8280 suballocItem != m_Suballocations.end();
8283 VmaSuballocation& suballoc = *suballocItem;
8284 if(suballoc.offset == offset)
8286 FreeSuballocation(suballocItem);
8290 VMA_ASSERT(0 &&
"Not found!");
// In-place resize of an existing allocation.
// Shrinking: the freed tail (sizeDiff) is either merged into an adjacent free
// suballocation (shifting its offset down and growing it) or inserted as a new
// free suballocation; m_SumFreeSize grows by sizeDiff.
// Growing: only succeeds when the immediately following suballocation is free
// and large enough (incl. VMA_DEBUG_MARGIN); that neighbor is shrunk or, when
// fully consumed, erased; m_SumFreeSize shrinks by sizeDiff.
// Returns via the (not-visible) success/failure paths; asserts if alloc is not
// found in this block.
// NOTE(review): text garbled by extraction — original line numbers fused in;
// braces, `++nextItem`, m_FreeCount updates and returns are missing;
// code left byte-identical.
8293 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
8295 typedef VmaSuballocationList::iterator iter_type;
8296 for(iter_type suballocItem = m_Suballocations.begin();
8297 suballocItem != m_Suballocations.end();
8300 VmaSuballocation& suballoc = *suballocItem;
8301 if(suballoc.hAllocation == alloc)
8303 iter_type nextItem = suballocItem;
8307 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// --- Shrinking path ---
8310 if(newSize < alloc->GetSize())
8312 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8315 if(nextItem != m_Suballocations.end())
// Next range is free: extend it downward to absorb the freed tail.
8318 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8321 UnregisterFreeSuballocation(nextItem);
8322 nextItem->offset -= sizeDiff;
8323 nextItem->size += sizeDiff;
8324 RegisterFreeSuballocation(nextItem);
// Next range is used: insert a brand-new free suballocation for the tail.
8330 VmaSuballocation newFreeSuballoc;
8331 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8332 newFreeSuballoc.offset = suballoc.offset + newSize;
8333 newFreeSuballoc.size = sizeDiff;
8334 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8335 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8336 RegisterFreeSuballocation(newFreeSuballocIt);
// This allocation was the last suballocation: append the freed tail at the end.
8345 VmaSuballocation newFreeSuballoc;
8346 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8347 newFreeSuballoc.offset = suballoc.offset + newSize;
8348 newFreeSuballoc.size = sizeDiff;
8349 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8350 m_Suballocations.push_back(newFreeSuballoc);
8352 iter_type newFreeSuballocIt = m_Suballocations.end();
8353 RegisterFreeSuballocation(--newFreeSuballocIt);
8358 suballoc.size = newSize;
8359 m_SumFreeSize += sizeDiff;
// --- Growing path ---
8364 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8367 if(nextItem != m_Suballocations.end())
8370 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Neighbor too small to donate sizeDiff (plus debug margin): fail.
8373 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
// Neighbor larger than needed: shrink it from the front.
8379 if(nextItem->size > sizeDiff)
8382 UnregisterFreeSuballocation(nextItem);
8383 nextItem->offset += sizeDiff;
8384 nextItem->size -= sizeDiff;
8385 RegisterFreeSuballocation(nextItem);
// Neighbor exactly consumed: remove it entirely.
8391 UnregisterFreeSuballocation(nextItem);
8392 m_Suballocations.erase(nextItem);
8408 suballoc.size = newSize;
8409 m_SumFreeSize -= sizeDiff;
8416 VMA_ASSERT(0 &&
"Not found!");
// Validation helper: checks invariants of m_FreeSuballocationsBySize — every
// entry is a free suballocation of at least the registration threshold size,
// and the vector is sorted by non-decreasing size.
// NOTE(review): text garbled by extraction — original line numbers fused in,
// braces and final `return true` missing; code left byte-identical.
8420 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8422 VkDeviceSize lastSize = 0;
8423 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8425 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8427 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8428 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8429 VMA_VALIDATE(it->size >= lastSize);
8430 lastSize = it->size;
// Core fit test: decides whether an allocation of allocSize/allocAlignment/allocType
// can be placed starting at suballocItem, computing the final *pOffset after debug
// margin, alignment, and bufferImageGranularity adjustments.
// Two major branches (both visible below):
//   - canMakeOtherLost == true: the candidate range may span several suballocations;
//     used ones that CanBecomeLost (and are stale per frameInUseCount) are counted
//     in *itemsToMakeLostCount / *pSumItemSize, free ones in *pSumFreeSize. Also
//     checks granularity conflicts against previous and following suballocations.
//   - canMakeOtherLost == false: suballocItem must itself be a free range large
//     enough for paddingBegin + allocSize + end margin; same granularity checks.
// NOTE(review): text garbled by extraction — original line numbers fused in; braces,
// `--prevSuballocItem`/`++nextSuballocItem` advances, and all `return true/false`
// statements are missing; code left byte-identical.
8435 bool VmaBlockMetadata_Generic::CheckAllocation(
8436 uint32_t currentFrameIndex,
8437 uint32_t frameInUseCount,
8438 VkDeviceSize bufferImageGranularity,
8439 VkDeviceSize allocSize,
8440 VkDeviceSize allocAlignment,
8441 VmaSuballocationType allocType,
8442 VmaSuballocationList::const_iterator suballocItem,
8443 bool canMakeOtherLost,
8444 VkDeviceSize* pOffset,
8445 size_t* itemsToMakeLostCount,
8446 VkDeviceSize* pSumFreeSize,
8447 VkDeviceSize* pSumItemSize)
const 8449 VMA_ASSERT(allocSize > 0);
8450 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8451 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8452 VMA_ASSERT(pOffset != VMA_NULL);
8454 *itemsToMakeLostCount = 0;
// ---- Branch 1: may sacrifice other (lost-able) allocations ----
8458 if(canMakeOtherLost)
8460 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8462 *pSumFreeSize = suballocItem->size;
8466 if(suballocItem->hAllocation->CanBecomeLost() &&
8467 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8469 ++*itemsToMakeLostCount;
8470 *pSumItemSize = suballocItem->size;
// Quick reject: not enough room between this offset and the end of the block.
8479 if(GetSize() - suballocItem->offset < allocSize)
8485 *pOffset = suballocItem->offset;
8488 if(VMA_DEBUG_MARGIN > 0)
8490 *pOffset += VMA_DEBUG_MARGIN;
8494 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Check preceding suballocations for buffer/image granularity conflicts; if any
// shares a page and conflicts, bump the offset up to the granularity boundary.
8498 if(bufferImageGranularity > 1)
8500 bool bufferImageGranularityConflict =
false;
8501 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8502 while(prevSuballocItem != m_Suballocations.cbegin())
8505 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8506 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8508 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8510 bufferImageGranularityConflict =
true;
8518 if(bufferImageGranularityConflict)
8520 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
8526 if(*pOffset >= suballocItem->offset + suballocItem->size)
8532 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8535 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8537 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8539 if(suballocItem->offset + totalSize > GetSize())
// The request may span multiple suballocations: walk forward accumulating free
// space and lost-able allocations until totalSize is covered (or we fail).
8546 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8547 if(totalSize > suballocItem->size)
8549 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8550 while(remainingSize > 0)
8553 if(lastSuballocItem == m_Suballocations.cend())
8557 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8559 *pSumFreeSize += lastSuballocItem->size;
8563 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8564 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8565 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8567 ++*itemsToMakeLostCount;
8568 *pSumItemSize += lastSuballocItem->size;
8575 remainingSize = (lastSuballocItem->size < remainingSize) ?
8576 remainingSize - lastSuballocItem->size : 0;
// Following suballocations that share a page and conflict on granularity must
// themselves be lost-able, otherwise the placement fails.
8582 if(bufferImageGranularity > 1)
8584 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8586 while(nextSuballocItem != m_Suballocations.cend())
8588 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8589 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8591 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8593 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8594 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8595 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8597 ++*itemsToMakeLostCount;
// ---- Branch 2: plain free-range fit (no sacrificing) ----
8616 const VmaSuballocation& suballoc = *suballocItem;
8617 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8619 *pSumFreeSize = suballoc.size;
8622 if(suballoc.size < allocSize)
8628 *pOffset = suballoc.offset;
8631 if(VMA_DEBUG_MARGIN > 0)
8633 *pOffset += VMA_DEBUG_MARGIN;
8637 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backward granularity-conflict scan as in branch 1.
8641 if(bufferImageGranularity > 1)
8643 bool bufferImageGranularityConflict =
false;
8644 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8645 while(prevSuballocItem != m_Suballocations.cbegin())
8648 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8649 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8651 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8653 bufferImageGranularityConflict =
true;
8661 if(bufferImageGranularityConflict)
8663 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8668 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8671 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fail if the aligned allocation plus margins does not fit this free range.
8674 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
8681 if(bufferImageGranularity > 1)
8683 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8685 while(nextSuballocItem != m_Suballocations.cend())
8687 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8688 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8690 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a free suballocation with its (asserted-free) successor: the item's
// size absorbs the successor's, the successor is erased from the list.
// Caller is responsible for m_FreeSuballocationsBySize registration.
// NOTE(review): text garbled by extraction — original line numbers fused in;
// braces, `++nextItem` and the m_FreeCount decrement are missing; code left
// byte-identical.
8709 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8711 VMA_ASSERT(item != m_Suballocations.end());
8712 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8714 VmaSuballocationList::iterator nextItem = item;
8716 VMA_ASSERT(nextItem != m_Suballocations.end());
8717 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8719 item->size += nextItem->size;
8721 m_Suballocations.erase(nextItem);
// Converts a used suballocation into a free one and coalesces it with free
// neighbors on both sides. Updates m_SumFreeSize, registers the resulting free
// range in m_FreeSuballocationsBySize, and returns an iterator to it (prevItem
// when merged with the predecessor, otherwise suballocItem).
// NOTE(review): text garbled by extraction — original line numbers fused in;
// braces, iterator advances (`++nextItem`, `--prevItem`), the m_FreeCount
// update and the merged-with-prev return are missing; code left byte-identical.
8724 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8727 VmaSuballocation& suballoc = *suballocItem;
8728 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8729 suballoc.hAllocation = VK_NULL_HANDLE;
8733 m_SumFreeSize += suballoc.size;
// Decide whether the immediate neighbors are free and should be merged.
8736 bool mergeWithNext =
false;
8737 bool mergeWithPrev =
false;
8739 VmaSuballocationList::iterator nextItem = suballocItem;
8741 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8743 mergeWithNext =
true;
8746 VmaSuballocationList::iterator prevItem = suballocItem;
8747 if(suballocItem != m_Suballocations.begin())
8750 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8752 mergeWithPrev =
true;
// Neighbors must be unregistered from the size-sorted vector before their
// sizes change via merging, then the survivor is re-registered.
8758 UnregisterFreeSuballocation(nextItem);
8759 MergeFreeWithNext(suballocItem);
8764 UnregisterFreeSuballocation(prevItem);
8765 MergeFreeWithNext(prevItem);
8766 RegisterFreeSuballocation(prevItem);
8771 RegisterFreeSuballocation(suballocItem);
8772 return suballocItem;
// Inserts a free suballocation into m_FreeSuballocationsBySize, keeping the
// vector sorted by size — but only if the range is at least
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER (smaller ranges are not indexed).
// NOTE(review): text garbled by extraction — original line numbers fused in,
// braces missing; code left byte-identical.
8776 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8778 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8779 VMA_ASSERT(item->size > 0);
8783 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8785 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8787 if(m_FreeSuballocationsBySize.empty())
8789 m_FreeSuballocationsBySize.push_back(item);
8793 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from m_FreeSuballocationsBySize. Binary-searches
// to the first entry of equal-or-greater size, then scans the run of same-size
// entries for an exact iterator match (sizes can repeat). Ranges below the
// registration threshold were never indexed and are skipped. Asserts if the
// entry should exist but is not found.
// NOTE(review): text garbled by extraction — original line numbers fused in;
// braces, the search-key argument and `return` after removal are missing;
// code left byte-identical.
8801 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8803 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8804 VMA_ASSERT(item->size > 0);
8808 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8810 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8812 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8813 m_FreeSuballocationsBySize.data(),
8814 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8816 VmaSuballocationItemSizeLess());
8817 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8818 index < m_FreeSuballocationsBySize.size();
8821 if(m_FreeSuballocationsBySize[index] == item)
8823 VmaVectorRemove(m_FreeSuballocationsBySize, index);
8826 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8828 VMA_ASSERT(0 &&
"Not found.");
// Heuristic used (per the visible logic) to decide whether mixing buffer/image
// suballocation types in this block could violate bufferImageGranularity:
// scans all used suballocations tracking the minimum alignment and whether any
// adjacent type pair conflicts; inOutPrevSuballocType carries the last seen
// type across calls. Returns true when a conflict was found or every alignment
// already meets the granularity. Trivially no-conflict when granularity == 1
// or the block is empty.
// NOTE(review): text garbled by extraction — original line numbers fused in,
// braces and the early `return false` missing; code left byte-identical.
8834 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8835 VkDeviceSize bufferImageGranularity,
8836 VmaSuballocationType& inOutPrevSuballocType)
const 8838 if(bufferImageGranularity == 1 || IsEmpty())
8843 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8844 bool typeConflictFound =
false;
8845 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8846 it != m_Suballocations.cend();
8849 const VmaSuballocationType suballocType = it->type;
8850 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8852 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8853 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8855 typeConflictFound =
true;
8857 inOutPrevSuballocType = suballocType;
8861 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Constructor: initializes the linear (ring/stack) metadata with two empty
// suballocation vectors using the allocator's callbacks, the first vector
// selected, second-vector mode EMPTY, and all null-item counters zeroed.
// NOTE(review): text garbled by extraction — original line numbers fused in;
// code left byte-identical.
8867 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8868 VmaBlockMetadata(hAllocator),
8870 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8871 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8872 m_1stVectorIndex(0),
8873 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8874 m_1stNullItemsBeginCount(0),
8875 m_1stNullItemsMiddleCount(0),
8876 m_2ndNullItemsCount(0)
// Destructor: empty body in the visible source — members clean up via RAII.
8880 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes the metadata for a block of the given size: delegates to the base
// class, then marks the whole block as free (m_SumFreeSize = size).
8884 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8886 VmaBlockMetadata::Init(size);
8887 m_SumFreeSize = size;
// Full consistency check of the linear metadata:
//  - second vector emptiness must match SECOND_VECTOR_EMPTY mode;
//  - first/second vectors must not both start/end on null items beyond the
//    tracked null counters;
//  - every suballocation's freeness matches hAllocation == VK_NULL_HANDLE,
//    offsets are monotonically increasing with VMA_DEBUG_MARGIN spacing, and
//    each live allocation's recorded offset/size match the suballocation;
//  - null-item counters and m_SumFreeSize agree with what is actually stored.
// The three scan loops cover: 2nd vector in RING_BUFFER mode, then the 1st
// vector, then the 2nd vector (backwards) in DOUBLE_STACK mode.
// NOTE(review): text garbled by extraction — original line numbers fused in;
// braces, null-counter increments and the final `return true` are missing;
// code left byte-identical.
8890 bool VmaBlockMetadata_Linear::Validate()
const 8892 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8893 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8895 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8896 VMA_VALIDATE(!suballocations1st.empty() ||
8897 suballocations2nd.empty() ||
8898 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8900 if(!suballocations1st.empty())
8903 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8905 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8907 if(!suballocations2nd.empty())
8910 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
8913 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8914 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8916 VkDeviceSize sumUsedSize = 0;
8917 const size_t suballoc1stCount = suballocations1st.size();
8918 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: the 2nd vector occupies the space before the 1st's start.
8920 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8922 const size_t suballoc2ndCount = suballocations2nd.size();
8923 size_t nullItem2ndCount = 0;
8924 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8926 const VmaSuballocation& suballoc = suballocations2nd[i];
8927 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8929 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8930 VMA_VALIDATE(suballoc.offset >= offset);
8934 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8935 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8936 sumUsedSize += suballoc.size;
8943 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8946 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must be genuinely free placeholders.
8949 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8951 const VmaSuballocation& suballoc = suballocations1st[i];
8952 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8953 suballoc.hAllocation == VK_NULL_HANDLE);
8956 size_t nullItem1stCount = m_1stNullItemsBeginCount;
8958 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8960 const VmaSuballocation& suballoc = suballocations1st[i];
8961 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8963 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8964 VMA_VALIDATE(suballoc.offset >= offset);
8965 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8969 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8970 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8971 sumUsedSize += suballoc.size;
8978 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8980 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: the 2nd vector grows downward from the end of the block.
8982 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8984 const size_t suballoc2ndCount = suballocations2nd.size();
8985 size_t nullItem2ndCount = 0;
8986 for(
size_t i = suballoc2ndCount; i--; )
8988 const VmaSuballocation& suballoc = suballocations2nd[i];
8989 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8991 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8992 VMA_VALIDATE(suballoc.offset >= offset);
8996 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8997 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8998 sumUsedSize += suballoc.size;
9005 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9008 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
9011 VMA_VALIDATE(offset <= GetSize());
9012 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Number of live allocations = entries in both vectors minus the tracked
// null-item (placeholder) counts.
9017 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 9019 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
9020 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Largest contiguous free range, computed per second-vector mode:
//  - EMPTY: max of the gap before the first allocation and the gap after the last;
//  - RING_BUFFER: the gap between the 2nd vector's tail and the 1st vector's head;
//  - DOUBLE_STACK: the gap between the 1st vector's top and the 2nd vector's top.
// NOTE(review): text garbled by extraction — original line numbers fused in;
// braces, empty-block early-outs and the VMA_MAX wrapper around the EMPTY-case
// return are missing; code left byte-identical.
9023 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 9025 const VkDeviceSize size = GetSize();
9037 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9039 switch(m_2ndVectorMode)
9041 case SECOND_VECTOR_EMPTY:
9047 const size_t suballocations1stCount = suballocations1st.size();
9048 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
9049 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9050 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
9052 firstSuballoc.offset,
9053 size - (lastSuballoc.offset + lastSuballoc.size));
9057 case SECOND_VECTOR_RING_BUFFER:
9062 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9063 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
9064 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
9065 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
9069 case SECOND_VECTOR_DOUBLE_STACK:
9074 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9075 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9076 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9077 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Builds a VmaStatInfo for this linear block by sweeping the address space in
// offset order: first the 2nd vector (ring-buffer mode, occupying the low
// addresses), then the 1st vector up to where the double-stack's 2nd vector
// begins (or block end), then the 2nd vector backwards (double-stack mode).
// Null-handle entries are skipped; gaps between lastOffset and the next live
// allocation are counted as unused ranges.
// NOTE(review): text garbled by extraction — original line numbers fused in;
// braces, the VmaInitStatInfo/accumulation statements and loop epilogues are
// missing; code left byte-identical.
9087 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9089 const VkDeviceSize size = GetSize();
9090 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9091 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9092 const size_t suballoc1stCount = suballocations1st.size();
9093 const size_t suballoc2ndCount = suballocations2nd.size();
9104 VkDeviceSize lastOffset = 0;
// Pass 1: ring-buffer 2nd vector, covering [0, first 1st-vector offset).
9106 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9108 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9109 size_t nextAlloc2ndIndex = 0;
9110 while(lastOffset < freeSpace2ndTo1stEnd)
9113 while(nextAlloc2ndIndex < suballoc2ndCount &&
9114 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9116 ++nextAlloc2ndIndex;
9120 if(nextAlloc2ndIndex < suballoc2ndCount)
9122 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9125 if(lastOffset < suballoc.offset)
9128 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9142 lastOffset = suballoc.offset + suballoc.size;
9143 ++nextAlloc2ndIndex;
9149 if(lastOffset < freeSpace2ndTo1stEnd)
9151 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9159 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: 1st vector, up to the double-stack boundary or the end of the block.
9164 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9165 const VkDeviceSize freeSpace1stTo2ndEnd =
9166 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9167 while(lastOffset < freeSpace1stTo2ndEnd)
9170 while(nextAlloc1stIndex < suballoc1stCount &&
9171 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9173 ++nextAlloc1stIndex;
9177 if(nextAlloc1stIndex < suballoc1stCount)
9179 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9182 if(lastOffset < suballoc.offset)
9185 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9199 lastOffset = suballoc.offset + suballoc.size;
9200 ++nextAlloc1stIndex;
9206 if(lastOffset < freeSpace1stTo2ndEnd)
9208 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9216 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: double-stack 2nd vector, iterated from highest index (lowest offset).
9220 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9222 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9223 while(lastOffset < size)
9226 while(nextAlloc2ndIndex != SIZE_MAX &&
9227 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9229 --nextAlloc2ndIndex;
9233 if(nextAlloc2ndIndex != SIZE_MAX)
9235 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9238 if(lastOffset < suballoc.offset)
9241 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9255 lastOffset = suballoc.offset + suballoc.size;
9256 --nextAlloc2ndIndex;
9262 if(lastOffset < size)
9264 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this linear block's statistics into inoutStats using the same
// three-pass offset-order sweep as CalcAllocationStatInfo: ring-buffer 2nd
// vector, then 1st vector, then double-stack 2nd vector; gaps become unused
// ranges. Adds the block's total size to inoutStats.size up front.
// NOTE(review): text garbled by extraction — original line numbers fused in;
// braces and the inoutStats accumulation statements inside the loops are
// missing; code left byte-identical. Also note nextAlloc2ndIndex starts at
// m_1stNullItemsBeginCount here (a 1st-vector counter) — possibly a transcription
// artifact; TODO confirm against upstream, which starts it at 0.
9280 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9282 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9283 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9284 const VkDeviceSize size = GetSize();
9285 const size_t suballoc1stCount = suballocations1st.size();
9286 const size_t suballoc2ndCount = suballocations2nd.size();
9288 inoutStats.
size += size;
9290 VkDeviceSize lastOffset = 0;
9292 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9294 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9295 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9296 while(lastOffset < freeSpace2ndTo1stEnd)
9299 while(nextAlloc2ndIndex < suballoc2ndCount &&
9300 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9302 ++nextAlloc2ndIndex;
9306 if(nextAlloc2ndIndex < suballoc2ndCount)
9308 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9311 if(lastOffset < suballoc.offset)
9314 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9325 lastOffset = suballoc.offset + suballoc.size;
9326 ++nextAlloc2ndIndex;
9331 if(lastOffset < freeSpace2ndTo1stEnd)
9334 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9341 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector pass, bounded by the double-stack boundary or block end.
9346 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9347 const VkDeviceSize freeSpace1stTo2ndEnd =
9348 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9349 while(lastOffset < freeSpace1stTo2ndEnd)
9352 while(nextAlloc1stIndex < suballoc1stCount &&
9353 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9355 ++nextAlloc1stIndex;
9359 if(nextAlloc1stIndex < suballoc1stCount)
9361 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9364 if(lastOffset < suballoc.offset)
9367 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9378 lastOffset = suballoc.offset + suballoc.size;
9379 ++nextAlloc1stIndex;
9384 if(lastOffset < freeSpace1stTo2ndEnd)
9387 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9394 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack 2nd-vector pass, highest index first.
9398 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9400 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9401 while(lastOffset < size)
9404 while(nextAlloc2ndIndex != SIZE_MAX &&
9405 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9407 --nextAlloc2ndIndex;
9411 if(nextAlloc2ndIndex != SIZE_MAX)
9413 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9416 if(lastOffset < suballoc.offset)
9419 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9430 lastOffset = suballoc.offset + suballoc.size;
9431 --nextAlloc2ndIndex;
9436 if(lastOffset < size)
9439 const VkDeviceSize unusedRangeSize = size - lastOffset;
// --- VmaBlockMetadata_Linear::PrintDetailedMap -------------------------------
// Emits a detailed JSON description of this linear (ring-buffer / double-stack)
// block. Two passes over the suballocation vectors:
//   pass 1: count allocations and unused ranges and accumulate used bytes;
//   pass 2: replay the same walk, printing each allocation / unused range.
// Each pass visits, in address order: the ring-buffer prefix held in the 2nd
// vector (SECOND_VECTOR_RING_BUFFER), then the 1st vector, then — for
// SECOND_VECTOR_DOUBLE_STACK — the 2nd vector traversed back-to-front.
// NOTE(review): this extraction is elided (original line numbers are embedded
// and many lines/braces are missing); comments describe visible statements only.
9452 #if VMA_STATS_STRING_ENABLED 9453 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9455 const VkDeviceSize size = GetSize();
9456 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9457 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9458 const size_t suballoc1stCount = suballocations1st.size();
9459 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: gather counts/sizes needed by PrintDetailedMap_Begin.
9463 size_t unusedRangeCount = 0;
9464 VkDeviceSize usedBytes = 0;
9466 VkDeviceSize lastOffset = 0;
9468 size_t alloc2ndCount = 0;
// Ring-buffer mode: 2nd vector occupies [0, offset of first live 1st item).
9469 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9471 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9472 size_t nextAlloc2ndIndex = 0;
9473 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items; hAllocation == VK_NULL_HANDLE marks them.
9476 while(nextAlloc2ndIndex < suballoc2ndCount &&
9477 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9479 ++nextAlloc2ndIndex;
9483 if(nextAlloc2ndIndex < suballoc2ndCount)
9485 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// A gap before this allocation counts as one unused range (count elided here).
9488 if(lastOffset < suballoc.offset)
9497 usedBytes += suballoc.size;
9500 lastOffset = suballoc.offset + suballoc.size;
9501 ++nextAlloc2ndIndex;
// No more live 2nd items: trailing gap up to the 1st vector, then stop.
9506 if(lastOffset < freeSpace2ndTo1stEnd)
9513 lastOffset = freeSpace2ndTo1stEnd;
// Walk the 1st vector (live items start at m_1stNullItemsBeginCount).
9518 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9519 size_t alloc1stCount = 0;
// 1st vector space ends where the double-stack's 2nd vector begins, else at block size.
9520 const VkDeviceSize freeSpace1stTo2ndEnd =
9521 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9522 while(lastOffset < freeSpace1stTo2ndEnd)
9525 while(nextAlloc1stIndex < suballoc1stCount &&
9526 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9528 ++nextAlloc1stIndex;
9532 if(nextAlloc1stIndex < suballoc1stCount)
9534 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9537 if(lastOffset < suballoc.offset)
9546 usedBytes += suballoc.size;
9549 lastOffset = suballoc.offset + suballoc.size;
9550 ++nextAlloc1stIndex;
9555 if(lastOffset < size)
9562 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack mode: 2nd vector holds upper-address allocations; iterate it
// back-to-front so offsets come out ascending.
9566 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9568 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9569 while(lastOffset < size)
// SIZE_MAX is the "walked past index 0" sentinel for the reverse walk.
9572 while(nextAlloc2ndIndex != SIZE_MAX &&
9573 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9575 --nextAlloc2ndIndex;
9579 if(nextAlloc2ndIndex != SIZE_MAX)
9581 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9584 if(lastOffset < suballoc.offset)
9593 usedBytes += suballoc.size;
9596 lastOffset = suballoc.offset + suballoc.size;
9597 --nextAlloc2ndIndex;
9602 if(lastOffset < size)
// Totals gathered; open the JSON object.
9614 const VkDeviceSize unusedBytes = size - usedBytes;
9615 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// SECOND PASS: identical traversal, but now emit JSON for each range.
9620 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9622 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9623 size_t nextAlloc2ndIndex = 0;
9624 while(lastOffset < freeSpace2ndTo1stEnd)
9627 while(nextAlloc2ndIndex < suballoc2ndCount &&
9628 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9630 ++nextAlloc2ndIndex;
9634 if(nextAlloc2ndIndex < suballoc2ndCount)
9636 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9639 if(lastOffset < suballoc.offset)
9642 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9643 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9648 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9651 lastOffset = suballoc.offset + suballoc.size;
9652 ++nextAlloc2ndIndex;
9657 if(lastOffset < freeSpace2ndTo1stEnd)
9660 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9661 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9665 lastOffset = freeSpace2ndTo1stEnd;
9670 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9671 while(lastOffset < freeSpace1stTo2ndEnd)
9674 while(nextAlloc1stIndex < suballoc1stCount &&
9675 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9677 ++nextAlloc1stIndex;
9681 if(nextAlloc1stIndex < suballoc1stCount)
9683 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9686 if(lastOffset < suballoc.offset)
9689 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9690 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9695 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9698 lastOffset = suballoc.offset + suballoc.size;
9699 ++nextAlloc1stIndex;
9704 if(lastOffset < freeSpace1stTo2ndEnd)
9707 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9708 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9712 lastOffset = freeSpace1stTo2ndEnd;
9716 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9718 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9719 while(lastOffset < size)
9722 while(nextAlloc2ndIndex != SIZE_MAX &&
9723 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9725 --nextAlloc2ndIndex;
9729 if(nextAlloc2ndIndex != SIZE_MAX)
9731 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9734 if(lastOffset < suballoc.offset)
9737 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9738 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9743 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9746 lastOffset = suballoc.offset + suballoc.size;
9747 --nextAlloc2ndIndex;
9752 if(lastOffset < size)
9755 const VkDeviceSize unusedRangeSize = size - lastOffset;
9756 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
// Close the JSON object.
9765 PrintDetailedMap_End(json);
// --- VmaBlockMetadata_Linear::CreateAllocationRequest ------------------------
// Entry point for finding space in a linear block. Validates arguments, then
// dispatches to the upper-address (double-stack top) or lower-address variant
// based on `upperAddress` (parameter visible only in the ternary below — the
// declaration line is elided in this extraction). Returns true if a request
// was produced in *pAllocationRequest.
9767 #endif // #if VMA_STATS_STRING_ENABLED 9769 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9770 uint32_t currentFrameIndex,
9771 uint32_t frameInUseCount,
9772 VkDeviceSize bufferImageGranularity,
9773 VkDeviceSize allocSize,
9774 VkDeviceSize allocAlignment,
9776 VmaSuballocationType allocType,
9777 bool canMakeOtherLost,
9779 VmaAllocationRequest* pAllocationRequest)
// Preconditions: non-zero size, a concrete suballocation type, valid out-param.
9781 VMA_ASSERT(allocSize > 0);
9782 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9783 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9784 VMA_HEAVY_ASSERT(Validate());
9785 return upperAddress ?
9786 CreateAllocationRequest_UpperAddress(
9787 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9788 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
9789 CreateAllocationRequest_LowerAddress(
9790 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9791 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// --- CreateAllocationRequest_UpperAddress ------------------------------------
// Tries to place an allocation at the top of the block (double-stack upper
// side, growing downward into the 2nd suballocation vector). Fails if the
// block is already used as a ring buffer, if allocSize exceeds the block, or
// if the aligned candidate range would collide with the end of the 1st vector.
// NOTE(review): extraction is elided; several branches' bodies are missing.
9794 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9795 uint32_t currentFrameIndex,
9796 uint32_t frameInUseCount,
9797 VkDeviceSize bufferImageGranularity,
9798 VkDeviceSize allocSize,
9799 VkDeviceSize allocAlignment,
9800 VmaSuballocationType allocType,
9801 bool canMakeOtherLost,
9803 VmaAllocationRequest* pAllocationRequest)
9805 const VkDeviceSize size = GetSize();
9806 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9807 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocation is incompatible with ring-buffer usage.
9809 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9811 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9816 if(allocSize > size)
// Start just below the block end, or just below the lowest existing 2nd item.
9820 VkDeviceSize resultBaseOffset = size - allocSize;
9821 if(!suballocations2nd.empty())
9823 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9824 resultBaseOffset = lastSuballoc.offset - allocSize;
9825 if(allocSize > lastSuballoc.offset)
9832 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve the debug margin below the allocation, then align downward.
9835 if(VMA_DEBUG_MARGIN > 0)
9837 if(resultOffset < VMA_DEBUG_MARGIN)
9841 resultOffset -= VMA_DEBUG_MARGIN;
9845 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Buffer/image granularity: if a conflicting neighbor above shares a page,
// push the offset down to a granularity boundary.
9849 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9851 bool bufferImageGranularityConflict =
false;
9852 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9854 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9855 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9857 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9859 bufferImageGranularityConflict =
true;
9867 if(bufferImageGranularityConflict)
9869 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// The candidate must not overlap the end of the 1st (bottom) stack.
9874 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9875 suballocations1st.back().offset + suballocations1st.back().size :
9877 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check page-sharing conflicts against the top of the 1st stack.
9881 if(bufferImageGranularity > 1)
9883 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9885 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9886 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9888 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill out the request. Upper-address requests never make others lost.
9902 pAllocationRequest->offset = resultOffset;
9903 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9904 pAllocationRequest->sumItemSize = 0;
9906 pAllocationRequest->itemsToMakeLostCount = 0;
9907 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// --- CreateAllocationRequest_LowerAddress ------------------------------------
// Tries to place an allocation growing upward from low addresses. Two cases:
//  1) 2nd vector empty or double-stack: append at the end of the 1st vector,
//     bounded above by the 2nd stack (or block size).
//  2) 2nd vector empty or ring buffer: append at the end of the 2nd vector
//     (wrap-around), bounded by the first live item of the 1st vector;
//     optionally making lost allocations (canMakeOtherLost) to create room.
// NOTE(review): extraction is elided; several branch bodies are missing.
9914 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
9915 uint32_t currentFrameIndex,
9916 uint32_t frameInUseCount,
9917 VkDeviceSize bufferImageGranularity,
9918 VkDeviceSize allocSize,
9919 VkDeviceSize allocAlignment,
9920 VmaSuballocationType allocType,
9921 bool canMakeOtherLost,
9923 VmaAllocationRequest* pAllocationRequest)
9925 const VkDeviceSize size = GetSize();
9926 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9927 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Case 1: try to allocate at the end of the 1st vector.
9929 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9933 VkDeviceSize resultBaseOffset = 0;
9934 if(!suballocations1st.empty())
9936 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9937 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9941 VkDeviceSize resultOffset = resultBaseOffset;
// Leave debug margin before the allocation, then align upward.
9944 if(VMA_DEBUG_MARGIN > 0)
9946 resultOffset += VMA_DEBUG_MARGIN;
9950 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflict with the previous (lower) neighbor pushes us up to a
// granularity boundary.
9954 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9956 bool bufferImageGranularityConflict =
false;
9957 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9959 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9960 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9962 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9964 bufferImageGranularityConflict =
true;
9972 if(bufferImageGranularityConflict)
9974 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the upper stack, or at the block end.
9978 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9979 suballocations2nd.back().offset : size;
9982 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Check page-sharing conflicts against the upper stack before committing.
9986 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9988 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9990 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9991 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9993 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: request at the end of the 1st vector.
10007 pAllocationRequest->offset = resultOffset;
10008 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
10009 pAllocationRequest->sumItemSize = 0;
10011 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
10012 pAllocationRequest->itemsToMakeLostCount = 0;
// Case 2: wrap around — allocate at the end of the 2nd vector (ring buffer).
10019 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10021 VMA_ASSERT(!suballocations1st.empty());
10023 VkDeviceSize resultBaseOffset = 0;
10024 if(!suballocations2nd.empty())
10026 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10027 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10031 VkDeviceSize resultOffset = resultBaseOffset;
10034 if(VMA_DEBUG_MARGIN > 0)
10036 resultOffset += VMA_DEBUG_MARGIN;
10040 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
10044 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10046 bool bufferImageGranularityConflict =
false;
10047 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
10049 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
10050 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10052 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10054 bufferImageGranularityConflict =
true;
10062 if(bufferImageGranularityConflict)
10064 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10068 pAllocationRequest->itemsToMakeLostCount = 0;
10069 pAllocationRequest->sumItemSize = 0;
10070 size_t index1st = m_1stNullItemsBeginCount;
// Optionally consume ("make lost") 1st-vector allocations that stand in the
// way of the candidate range, counting them into the request.
10072 if(canMakeOtherLost)
10074 while(index1st < suballocations1st.size() &&
10075 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10078 const VmaSuballocation& suballoc = suballocations1st[index1st];
10079 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10085 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
// Only allocations flagged lost-capable and old enough (outside the
// frame-in-use window) may be sacrificed.
10086 if(suballoc.hAllocation->CanBecomeLost() &&
10087 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10089 ++pAllocationRequest->itemsToMakeLostCount;
10090 pAllocationRequest->sumItemSize += suballoc.size;
// Granularity may force losing additional items sharing the boundary page.
10102 if(bufferImageGranularity > 1)
10104 while(index1st < suballocations1st.size())
10106 const VmaSuballocation& suballoc = suballocations1st[index1st];
10107 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10109 if(suballoc.hAllocation != VK_NULL_HANDLE)
10112 if(suballoc.hAllocation->CanBecomeLost() &&
10113 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10115 ++pAllocationRequest->itemsToMakeLostCount;
10116 pAllocationRequest->sumItemSize += suballoc.size;
// Special unsupported case: everything in 1st was consumed yet the range
// still overruns the block end.
10134 if(index1st == suballocations1st.size() &&
10135 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10138 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// Commit if the range fits before the block end / the next surviving 1st item.
10143 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10144 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
10148 if(bufferImageGranularity > 1)
10150 for(
size_t nextSuballocIndex = index1st;
10151 nextSuballocIndex < suballocations1st.size();
10152 nextSuballocIndex++)
10154 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10155 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10157 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: request at the end of the 2nd vector. sumFreeSize measures the
// whole hole including items to be made lost, hence the sumItemSize term.
10171 pAllocationRequest->offset = resultOffset;
10172 pAllocationRequest->sumFreeSize =
10173 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10175 - pAllocationRequest->sumItemSize;
10176 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// --- MakeRequestedAllocationsLost --------------------------------------------
// Carries out the "make lost" plan recorded in a previously computed request:
// walks the 1st vector from its first live item (continuing into the 2nd
// vector in ring-buffer mode) and converts lost-capable allocations to free
// items until itemsToMakeLostCount of them have been lost. Updates
// m_SumFreeSize and the null-item counters, then compacts via CleanupAfterFree.
10185 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10186 uint32_t currentFrameIndex,
10187 uint32_t frameInUseCount,
10188 VmaAllocationRequest* pAllocationRequest)
// Nothing to do — trivially successful.
10190 if(pAllocationRequest->itemsToMakeLostCount == 0)
10195 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10198 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10199 size_t index = m_1stNullItemsBeginCount;
10200 size_t madeLostCount = 0;
10201 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// Exhausted the current vector: in ring-buffer mode continue in the 2nd.
10203 if(index == suballocations->size())
10207 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10209 suballocations = &AccessSuballocations2nd();
10213 VMA_ASSERT(!suballocations->empty());
10215 VmaSuballocation& suballoc = (*suballocations)[index];
10216 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10218 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10219 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10220 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Turn the item into a free hole and account for it.
10222 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10223 suballoc.hAllocation = VK_NULL_HANDLE;
10224 m_SumFreeSize += suballoc.size;
10225 if(suballocations == &AccessSuballocations1st())
10227 ++m_1stNullItemsMiddleCount;
10231 ++m_2ndNullItemsCount;
10243 CleanupAfterFree();
// --- MakeAllocationsLost -----------------------------------------------------
// Makes lost every allocation in this block that is lost-capable and whose
// last-use frame is outside the frame-in-use window (checked inside
// VmaAllocation::MakeLost). Scans both suballocation vectors, converting each
// lost item to a free hole and updating the free-size / null-item counters.
// Returns the number of allocations that became lost; compacts if any did.
10249 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10251 uint32_t lostAllocationCount = 0;
// 1st vector: live items start at m_1stNullItemsBeginCount.
10253 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10254 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10256 VmaSuballocation& suballoc = suballocations1st[i];
10257 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10258 suballoc.hAllocation->CanBecomeLost() &&
10259 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10261 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10262 suballoc.hAllocation = VK_NULL_HANDLE;
10263 ++m_1stNullItemsMiddleCount;
10264 m_SumFreeSize += suballoc.size;
10265 ++lostAllocationCount;
// 2nd vector: scan in full.
10269 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10270 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10272 VmaSuballocation& suballoc = suballocations2nd[i];
10273 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10274 suballoc.hAllocation->CanBecomeLost() &&
10275 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10277 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10278 suballoc.hAllocation = VK_NULL_HANDLE;
10279 ++m_2ndNullItemsCount;
10280 m_SumFreeSize += suballoc.size;
10281 ++lostAllocationCount;
// Compact vectors only if something actually changed.
10285 if(lostAllocationCount)
10287 CleanupAfterFree();
10290 return lostAllocationCount;
// --- CheckCorruption ---------------------------------------------------------
// Validates the debug-margin magic values written immediately before and after
// every live allocation in both suballocation vectors. pBlockData points at
// the mapped memory of this block. Returns VK_ERROR_VALIDATION_FAILED_EXT on
// the first corrupted margin (and asserts in debug builds).
10293 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10295 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10296 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10298 const VmaSuballocation& suballoc = suballocations1st[i];
10299 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Magic value sits VMA_DEBUG_MARGIN bytes before the allocation...
10301 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10303 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10304 return VK_ERROR_VALIDATION_FAILED_EXT;
// ...and another immediately after it.
10306 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10308 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10309 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same check for the 2nd vector.
10314 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10315 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10317 const VmaSuballocation& suballoc = suballocations2nd[i];
10318 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10320 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10322 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10323 return VK_ERROR_VALIDATION_FAILED_EXT;
10325 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10327 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10328 return VK_ERROR_VALIDATION_FAILED_EXT;
// --- Alloc -------------------------------------------------------------------
// Commits a previously computed allocation request into the metadata: appends
// the new suballocation to the vector dictated by request.type and updates
// the vector mode and m_SumFreeSize. (hAllocation parameter line is elided in
// this extraction but is referenced below.)
10336 void VmaBlockMetadata_Linear::Alloc(
10337 const VmaAllocationRequest& request,
10338 VmaSuballocationType type,
10339 VkDeviceSize allocSize,
10342 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10344 switch(request.type)
// Upper stack of a double-stack block: push onto the 2nd vector and switch
// the block into double-stack mode.
10346 case VmaAllocationRequestType::UpperAddress:
10348 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10349 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10350 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10351 suballocations2nd.push_back(newSuballoc);
10352 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Append past the current end of the 1st vector.
10355 case VmaAllocationRequestType::EndOf1st:
10357 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10359 VMA_ASSERT(suballocations1st.empty() ||
10360 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10362 VMA_ASSERT(request.offset + allocSize <= GetSize());
10364 suballocations1st.push_back(newSuballoc);
// Wrap-around append onto the 2nd vector (ring buffer).
10367 case VmaAllocationRequestType::EndOf2nd:
10369 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// New allocation must sit below the first live item of the 1st vector.
10371 VMA_ASSERT(!suballocations1st.empty() &&
10372 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10373 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10375 switch(m_2ndVectorMode)
10377 case SECOND_VECTOR_EMPTY:
// First wrap-around allocation converts the block to ring-buffer mode.
10379 VMA_ASSERT(suballocations2nd.empty());
10380 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10382 case SECOND_VECTOR_RING_BUFFER:
10384 VMA_ASSERT(!suballocations2nd.empty());
10386 case SECOND_VECTOR_DOUBLE_STACK:
10387 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10393 suballocations2nd.push_back(newSuballoc);
10397 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10400 m_SumFreeSize -= newSuballoc.size;
// Frees an allocation by delegating to FreeAtOffset with its stored offset.
10403 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10405 FreeAtOffset(allocation->GetOffset());
// --- FreeAtOffset ------------------------------------------------------------
// Frees the suballocation at `offset`. Fast paths: the first live item of the
// 1st vector, the last item of the 2nd vector (ring buffer / double stack),
// or the last item of the 1st vector. Otherwise a binary search locates the
// item in the sorted 1st vector, then — failing that — in the 2nd vector
// (sorted ascending in ring-buffer mode, descending in double-stack mode).
// Every path updates m_SumFreeSize and ends with CleanupAfterFree().
10408 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10410 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10411 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10413 if(!suballocations1st.empty())
// Fast path: freeing the oldest live item of the 1st vector.
10416 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10417 if(firstSuballoc.offset == offset)
10419 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10420 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10421 m_SumFreeSize += firstSuballoc.size;
10422 ++m_1stNullItemsBeginCount;
10423 CleanupAfterFree();
// Fast path: freeing the most recent item of the 2nd vector.
10429 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10430 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10432 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10433 if(lastSuballoc.offset == offset)
10435 m_SumFreeSize += lastSuballoc.size;
10436 suballocations2nd.pop_back();
10437 CleanupAfterFree();
// Fast path: freeing the most recent item of the 1st vector.
10442 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10444 VmaSuballocation& lastSuballoc = suballocations1st.back();
10445 if(lastSuballoc.offset == offset)
10447 m_SumFreeSize += lastSuballoc.size;
10448 suballocations1st.pop_back();
10449 CleanupAfterFree();
// Slow path: binary search the 1st vector (sorted by offset).
10456 VmaSuballocation refSuballoc;
10457 refSuballoc.offset = offset;
10459 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
10460 suballocations1st.begin() + m_1stNullItemsBeginCount,
10461 suballocations1st.end(),
10463 VmaSuballocationOffsetLess());
10464 if(it != suballocations1st.end())
10466 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10467 it->hAllocation = VK_NULL_HANDLE;
10468 ++m_1stNullItemsMiddleCount;
10469 m_SumFreeSize += it->size;
10470 CleanupAfterFree();
// Slow path: binary search the 2nd vector with the ordering that matches its mode.
10475 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
10478 VmaSuballocation refSuballoc;
10479 refSuballoc.offset = offset;
10481 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10482 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
10483 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
10484 if(it != suballocations2nd.end())
10486 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10487 it->hAllocation = VK_NULL_HANDLE;
10488 ++m_2ndNullItemsCount;
10489 m_SumFreeSize += it->size;
10490 CleanupAfterFree();
// Reaching here means the offset matched no suballocation — caller bug.
10495 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic: compact the 1st vector when it is non-trivially large (> 32
// items) and null (freed) items make up at least 3/5 of it — i.e.
// nulls * 2 >= nonNulls * 3.
10498 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10500 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10501 const size_t suballocCount = AccessSuballocations1st().size();
10502 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// --- CleanupAfterFree --------------------------------------------------------
// Housekeeping run after every free / make-lost: trims freed items from the
// edges of both vectors, optionally compacts the 1st vector
// (ShouldCompact1st), and when the 1st vector is exhausted in ring-buffer
// mode, swaps the roles of the two vectors (m_1stVectorIndex ^= 1).
// NOTE(review): extraction is elided — the empty-block early-out condition
// guarding the initial clear() is among the missing lines.
10505 void VmaBlockMetadata_Linear::CleanupAfterFree()
10507 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10508 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// (Presumably taken only when the whole block is free — guard line elided.)
10512 suballocations1st.clear();
10513 suballocations2nd.clear();
10514 m_1stNullItemsBeginCount = 0;
10515 m_1stNullItemsMiddleCount = 0;
10516 m_2ndNullItemsCount = 0;
10517 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10521 const size_t suballoc1stCount = suballocations1st.size();
10522 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10523 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the null prefix of the 1st vector over leading freed items.
10526 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10527 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10529 ++m_1stNullItemsBeginCount;
10530 --m_1stNullItemsMiddleCount;
// Pop freed items off the tail of the 1st vector.
10534 while(m_1stNullItemsMiddleCount > 0 &&
10535 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10537 --m_1stNullItemsMiddleCount;
10538 suballocations1st.pop_back();
// Pop freed items off the tail of the 2nd vector.
10542 while(m_2ndNullItemsCount > 0 &&
10543 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10545 --m_2ndNullItemsCount;
10546 suballocations2nd.pop_back();
// Remove freed items from the front of the 2nd vector.
10550 while(m_2ndNullItemsCount > 0 &&
10551 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10553 --m_2ndNullItemsCount;
10554 VmaVectorRemove(suballocations2nd, 0);
// Compact: slide live items of the 1st vector to the front, drop the rest.
10557 if(ShouldCompact1st())
10559 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10560 size_t srcIndex = m_1stNullItemsBeginCount;
10561 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10563 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10567 if(dstIndex != srcIndex)
10569 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10573 suballocations1st.resize(nonNullItemCount);
10574 m_1stNullItemsBeginCount = 0;
10575 m_1stNullItemsMiddleCount = 0;
// With the 2nd vector empty the block degenerates back to plain linear mode.
10579 if(suballocations2nd.empty())
10581 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector fully consumed: clear it and, in ring-buffer mode, promote the
// 2nd vector to become the new 1st by flipping m_1stVectorIndex.
10585 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10587 suballocations1st.clear();
10588 m_1stNullItemsBeginCount = 0;
10590 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10593 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10594 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10595 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10596 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10598 ++m_1stNullItemsBeginCount;
10599 --m_1stNullItemsMiddleCount;
10601 m_2ndNullItemsCount = 0;
10602 m_1stVectorIndex ^= 1;
10607 VMA_HEAVY_ASSERT(Validate());
// Buddy-allocator metadata constructor: zero counters and clear the per-level
// free lists. (Some initializer lines are elided in this extraction.)
10614 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10615 VmaBlockMetadata(hAllocator),
10617 m_AllocationCount(0),
10621 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively deletes the whole buddy tree starting at the root.
10624 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10626 DeleteNode(m_Root);
// --- Init (Buddy) ------------------------------------------------------------
// Initializes the buddy metadata for a block of `size` bytes. Usable size is
// rounded down to a power of two (the remainder is "unusable"); the level
// count is derived from MIN_NODE_SIZE/MAX_LEVELS; a single free root node
// covering the whole usable range seeds the level-0 free list.
10629 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10631 VmaBlockMetadata::Init(size);
10633 m_UsableSize = VmaPrevPow2(size);
10634 m_SumFreeSize = m_UsableSize;
// Count levels until nodes would drop below the minimum node size.
10638 while(m_LevelCount < MAX_LEVELS &&
10639 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
10644 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10645 rootNode->offset = 0;
10646 rootNode->type = Node::TYPE_FREE;
10647 rootNode->parent = VMA_NULL;
10648 rootNode->buddy = VMA_NULL;
10651 AddToFreeListFront(0, rootNode);
// --- Validate (Buddy) --------------------------------------------------------
// Debug invariant check: recursively validates the node tree, cross-checks
// the allocation count and free-size totals against the tree walk, and
// verifies every per-level free list is a well-formed doubly linked list of
// TYPE_FREE nodes (and empty for levels beyond m_LevelCount).
10654 bool VmaBlockMetadata_Buddy::Validate()
const 10657 ValidationContext ctx;
10658 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10660 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10662 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10663 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Free-list linkage checks per level.
10666 for(uint32_t level = 0; level < m_LevelCount; ++level)
10668 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10669 m_FreeList[level].front->free.prev == VMA_NULL);
10671 for(Node* node = m_FreeList[level].front;
10673 node = node->free.next)
10675 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10677 if(node->free.next == VMA_NULL)
10679 VMA_VALIDATE(m_FreeList[level].back == node);
10683 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels past m_LevelCount must be unused.
10689 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10691 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Largest free range = node size of the shallowest level with a non-empty
// free list (levels are ordered largest-node first).
10697 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10699 for(uint32_t level = 0; level < m_LevelCount; ++level)
10701 if(m_FreeList[level].front != VMA_NULL)
10703 return LevelToNodeSize(level);
// Fills outInfo with allocation statistics by walking the buddy tree; the
// unusable tail (block size minus power-of-two usable size) is accounted
// separately when present. (Stat-initialization lines are elided here.)
10709 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10711 const VkDeviceSize unusableSize = GetUnusableSize();
10722 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10724 if(unusableSize > 0)
// Accumulates this block's totals into pool-level statistics. The unusable
// tail is counted as unused space; further adjustments for unusableSize > 0
// are elided in this extraction.
10733 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10735 const VkDeviceSize unusableSize = GetUnusableSize();
10737 inoutStats.
size += GetSize();
10738 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10743 if(unusableSize > 0)
// --- PrintDetailedMap (Buddy) ------------------------------------------------
// JSON dump of the buddy block: computes stats, opens the map, recursively
// prints the node tree, reports the unusable tail as an unused range, and
// closes the map. (The PrintDetailedMap_Begin argument list is elided.)
10750 #if VMA_STATS_STRING_ENABLED 10752 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10756 CalcAllocationStatInfo(stat);
10758 PrintDetailedMap_Begin(
10764 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10766 const VkDeviceSize unusableSize = GetUnusableSize();
10767 if(unusableSize > 0)
10769 PrintDetailedMap_UnusedRange(json,
10774 PrintDetailedMap_End(json);
// --- CreateAllocationRequest (Buddy) -----------------------------------------
// Finds a free node for the request. Unknown/optimal-image types are padded
// to bufferImageGranularity (both size and alignment) so granularity
// conflicts cannot occur. Then scans levels from the smallest node that fits
// (targetLevel) upward toward level 0, taking the first free node whose
// offset satisfies the alignment; the chosen level is stashed in customData
// for Alloc() to split down from. Upper-address requests are not supported.
10777 #endif // #if VMA_STATS_STRING_ENABLED 10779 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10780 uint32_t currentFrameIndex,
10781 uint32_t frameInUseCount,
10782 VkDeviceSize bufferImageGranularity,
10783 VkDeviceSize allocSize,
10784 VkDeviceSize allocAlignment,
10786 VmaSuballocationType allocType,
10787 bool canMakeOtherLost,
10789 VmaAllocationRequest* pAllocationRequest)
10791 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Pad ambiguous allocation types so buddy nodes never share a granularity page
// with a conflicting resource type.
10795 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10796 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10797 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10799 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10800 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10803 if(allocSize > m_UsableSize)
// Search from the best-fit level toward larger nodes (level-- toward 0).
10808 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10809 for(uint32_t level = targetLevel + 1; level--; )
10811 for(Node* freeNode = m_FreeList[level].front;
10812 freeNode != VMA_NULL;
10813 freeNode = freeNode->free.next)
10815 if(freeNode->offset % allocAlignment == 0)
10817 pAllocationRequest->type = VmaAllocationRequestType::Normal;
10818 pAllocationRequest->offset = freeNode->offset;
10819 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10820 pAllocationRequest->sumItemSize = 0;
10821 pAllocationRequest->itemsToMakeLostCount = 0;
// customData smuggles the found level to Alloc().
10822 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm never plans to make other allocations lost, so the
// request succeeds iff it required losing nothing.
10831 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10832 uint32_t currentFrameIndex,
10833 uint32_t frameInUseCount,
10834 VmaAllocationRequest* pAllocationRequest)
10840 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost-allocation sweep for the buddy algorithm. Body elided in this
// extraction — presumably a no-op returning 0, since buddy requests never
// make allocations lost (see MakeRequestedAllocationsLost above); confirm
// against the full source.
10843 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// --- Alloc (Buddy) -----------------------------------------------------------
// Commits a Normal request: locates the free node at the level recorded in
// request.customData whose offset matches, splits it repeatedly (creating
// buddy child pairs) until reaching the level sized for allocSize, then marks
// the final node as an allocation and updates the counters.
10852 void VmaBlockMetadata_Buddy::Alloc(
10853 const VmaAllocationRequest& request,
10854 VmaSuballocationType type,
10855 VkDeviceSize allocSize,
10858 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
10860 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10861 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the free node with the requested offset on its free list.
10863 Node* currNode = m_FreeList[currLevel].front;
10864 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10865 while(currNode->offset != request.offset)
10867 currNode = currNode->free.next;
10868 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split the node down level by level until it is exactly targetLevel-sized.
10872 while(currLevel < targetLevel)
10876 RemoveFromFreeList(currLevel, currNode);
10878 const uint32_t childrenLevel = currLevel + 1;
// Create the buddy pair covering the two halves of currNode.
10881 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10882 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10884 leftChild->offset = currNode->offset;
10885 leftChild->type = Node::TYPE_FREE;
10886 leftChild->parent = currNode;
10887 leftChild->buddy = rightChild;
10889 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10890 rightChild->type = Node::TYPE_FREE;
10891 rightChild->parent = currNode;
10892 rightChild->buddy = leftChild;
10895 currNode->type = Node::TYPE_SPLIT;
10896 currNode->split.leftChild = leftChild;
// Left child is pushed last so it is at the front — the next iteration
// continues splitting it (it holds request.offset).
10899 AddToFreeListFront(childrenLevel, rightChild);
10900 AddToFreeListFront(childrenLevel, leftChild);
10905 currNode = m_FreeList[currLevel].front;
10914 VMA_ASSERT(currLevel == targetLevel &&
10915 currNode != VMA_NULL &&
10916 currNode->type == Node::TYPE_FREE);
10917 RemoveFromFreeList(currLevel, currNode);
// Convert the node into an allocation and account for it.
10920 currNode->type = Node::TYPE_ALLOCATION;
10921 currNode->allocation.alloc = hAllocation;
10923 ++m_AllocationCount;
10925 m_SumFreeSize -= allocSize;
// Recursively deletes a subtree: for a split node, delete both children
// (right child reached via the left child's buddy pointer), then the node.
10928 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10930 if(node->type == Node::TYPE_SPLIT)
10932 DeleteNode(node->split.leftChild->buddy);
10933 DeleteNode(node->split.leftChild);
10936 vma_delete(GetAllocationCallbacks(), node);
// --- ValidateNode ------------------------------------------------------------
// Recursive node-level invariant check used by Validate(): verifies parent /
// buddy linkage, accumulates per-type statistics into ctx, and recurses into
// children of split nodes. (The switch-on-type line is elided here; the case
// labels below belong to it.)
10939 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10941 VMA_VALIDATE(level < m_LevelCount);
10942 VMA_VALIDATE(curr->parent == parent);
// Only the root (parent == NULL) has no buddy; buddies must be mutual.
10943 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10944 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10947 case Node::TYPE_FREE:
10949 ctx.calculatedSumFreeSize += levelNodeSize;
10950 ++ctx.calculatedFreeCount;
10952 case Node::TYPE_ALLOCATION:
10953 ++ctx.calculatedAllocationCount;
// Internal fragmentation (node size minus allocation size) counts as free.
10954 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10955 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10957 case Node::TYPE_SPLIT:
10959 const uint32_t childrenLevel = level + 1;
10960 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10961 const Node*
const leftChild = curr->split.leftChild;
10962 VMA_VALIDATE(leftChild != VMA_NULL);
10963 VMA_VALIDATE(leftChild->offset == curr->offset);
10964 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10966 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10968 const Node*
const rightChild = leftChild->buddy;
10969 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10970 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10972 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest tree level whose node size still
// fits it, walking halving node sizes down from m_UsableSize (level 0).
// NOTE(review): the loop body's level increment (~orig. 10991) and the final
// "return level;" (~orig. 10995) were dropped by extraction — confirm against
// the upstream file before relying on this text.
10983 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10986 uint32_t level = 0;
10987 VkDeviceSize currLevelNodeSize = m_UsableSize;
10988 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
10989 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10992 currLevelNodeSize = nextLevelNodeSize;
10993 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at the given offset: descends from the root through
// TYPE_SPLIT nodes choosing left/right child by offset, marks the leaf free,
// then merges the node with its buddy bottom-up while both are free, finally
// pushing the surviving free node onto its level's free list.
// NOTE(review): level bookkeeping inside the loops (descent "++level",
// merge-loop "node = parent") was elided by extraction; kept verbatim.
10998 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
11001 Node* node = m_Root;
11002 VkDeviceSize nodeOffset = 0;
11003 uint32_t level = 0;
11004 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
11005 while(node->type == Node::TYPE_SPLIT)
11007 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
11008 if(offset < nodeOffset + nextLevelSize)
11010 node = node->split.leftChild;
11014 node = node->split.leftChild->buddy;
11015 nodeOffset += nextLevelSize;
11018 levelNodeSize = nextLevelSize;
// alloc may be VK_NULL_HANDLE for lost allocations; the node must match it
// otherwise.
11021 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
11022 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
11025 --m_AllocationCount;
11026 m_SumFreeSize += alloc->GetSize();
11028 node->type = Node::TYPE_FREE;
// Merge with buddy as long as both halves are free (except at the root).
11031 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
11033 RemoveFromFreeList(level, node->buddy);
11034 Node*
const parent = node->parent;
11036 vma_delete(GetAllocationCallbacks(), node->buddy);
11037 vma_delete(GetAllocationCallbacks(), node);
11038 parent->type = Node::TYPE_FREE;
11046 AddToFreeListFront(level, node);
// Recursively accumulates statistics for the subtree rooted at node into
// outInfo: free nodes count as unused ranges, allocations contribute their
// size (plus any level-size slack as an unused range), and split nodes
// recurse into both children at half the node size.
// NOTE(review): switch header, stat-update statements inside the cases and
// braces were elided by extraction; code lines are kept verbatim.
11049 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 11053 case Node::TYPE_FREE:
11059 case Node::TYPE_ALLOCATION:
11061 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11067 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11068 if(unusedRangeSize > 0)
11077 case Node::TYPE_SPLIT:
11079 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11080 const Node*
const leftChild = node->split.leftChild;
11081 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11082 const Node*
const rightChild = leftChild->buddy;
11083 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a free node onto the front of the doubly-linked free list for the
// given level. Handles the empty-list case (front == back == node) and the
// general case (relink prev/next and update the front pointer).
11091 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11093 VMA_ASSERT(node->type == Node::TYPE_FREE);
11096 Node*
const frontNode = m_FreeList[level].front;
11097 if(frontNode == VMA_NULL)
// List was empty: node becomes both front and back.
11099 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11100 node->free.prev = node->free.next = VMA_NULL;
11101 m_FreeList[level].front = m_FreeList[level].back = node;
// Non-empty list: splice node in before the current front.
11105 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11106 node->free.prev = VMA_NULL;
11107 node->free.next = frontNode;
11108 frontNode->free.prev = node;
11109 m_FreeList[level].front = node;
// Unlinks a node from the doubly-linked free list of the given level,
// updating the list's front/back pointers when the node was at either end.
11113 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11115 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Fix the backward link (or the list front if node was first).
11118 if(node->free.prev == VMA_NULL)
11120 VMA_ASSERT(m_FreeList[level].front == node);
11121 m_FreeList[level].front = node->free.next;
11125 Node*
const prevFreeNode = node->free.prev;
11126 VMA_ASSERT(prevFreeNode->free.next == node);
11127 prevFreeNode->free.next = node->free.next;
// Fix the forward link (or the list back if node was last).
11131 if(node->free.next == VMA_NULL)
11133 VMA_ASSERT(m_FreeList[level].back == node);
11134 m_FreeList[level].back = node->free.prev;
11138 Node*
const nextFreeNode = node->free.next;
11139 VMA_ASSERT(nextFreeNode->free.prev == node);
11140 nextFreeNode->free.prev = node->free.prev;
// Emits the JSON description of this subtree (stats-string builds only):
// free nodes print as unused ranges, allocations print themselves plus any
// trailing slack below the level node size, split nodes recurse into both
// children at half the node size.
#if VMA_STATS_STRING_ENABLED 11145 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 11149 case Node::TYPE_FREE:
11150 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11152 case Node::TYPE_ALLOCATION:
11154 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11155 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report the level-size slack after the allocation as unused.
11156 if(allocSize < levelNodeSize)
11158 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11162 case Node::TYPE_SPLIT:
11164 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11165 const Node*
const leftChild = node->split.leftChild;
11166 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11167 const Node*
const rightChild = leftChild->buddy;
11168 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor only zero-initializes members; the block becomes usable after
// Init() assigns real device memory and metadata.
11175 #endif // #if VMA_STATS_STRING_ENABLED 11181 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11182 m_pMetadata(VMA_NULL),
11183 m_MemoryTypeIndex(UINT32_MAX),
11185 m_hMemory(VK_NULL_HANDLE),
11187 m_pMappedData(VMA_NULL)
// Second-phase initialization: adopts an already-allocated VkDeviceMemory and
// creates the metadata object matching the requested algorithm (linear,
// buddy, or the default generic suballocator), then sizes the metadata.
// NOTE(review): the switch/if dispatch over `algorithm` (orig. 11207-11218)
// was partly elided by extraction; code lines are kept verbatim.
11191 void VmaDeviceMemoryBlock::Init(
11194 uint32_t newMemoryTypeIndex,
11195 VkDeviceMemory newMemory,
11196 VkDeviceSize newSize,
11198 uint32_t algorithm)
11200 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11202 m_hParentPool = hParentPool;
11203 m_MemoryTypeIndex = newMemoryTypeIndex;
11205 m_hMemory = newMemory;
11210 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11213 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11219 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11221 m_pMetadata->Init(newSize);
// Releases the block: requires that all suballocations were already freed,
// returns the VkDeviceMemory to the allocator, then deletes the metadata.
11224 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11228 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11230 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11231 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11232 m_hMemory = VK_NULL_HANDLE;
11234 vma_delete(allocator, m_pMetadata);
11235 m_pMetadata = VMA_NULL;
// Sanity-check: the block must hold real device memory of non-zero size;
// the rest of the validation is delegated to the metadata object.
11238 bool VmaDeviceMemoryBlock::Validate()
const 11240 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11241 (m_pMetadata->GetSize() != 0));
11243 return m_pMetadata->Validate();
// Temporarily maps the whole block and asks the metadata to verify the magic
// margins around every allocation; unmaps before returning the result.
11246 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11248 void* pData =
nullptr;
11249 VkResult res = Map(hAllocator, 1, &pData);
11250 if(res != VK_SUCCESS)
11255 res = m_pMetadata->CheckCorruption(pData);
11257 Unmap(hAllocator, 1);
// Reference-counted mapping of the whole block. If already mapped, just bumps
// the count and returns the cached pointer; otherwise calls vkMapMemory under
// the block mutex and caches the result in m_pMappedData.
11262 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
11269 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11270 if(m_MapCount != 0)
11272 m_MapCount += count;
11273 VMA_ASSERT(m_pMappedData != VMA_NULL);
11274 if(ppData != VMA_NULL)
11276 *ppData = m_pMappedData;
// First mapping: go to the driver (arguments at orig. 11284-11288 were
// elided by extraction).
11282 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11283 hAllocator->m_hDevice,
11289 if(result == VK_SUCCESS)
11291 if(ppData != VMA_NULL)
11293 *ppData = m_pMappedData;
11295 m_MapCount = count;
// Decrements the map reference count and calls vkUnmapMemory only when it
// reaches zero. Unbalanced unmaps trigger an assert instead of corrupting
// the count.
11301 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11308 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11309 if(m_MapCount >= count)
11311 m_MapCount -= count;
11312 if(m_MapCount == 0)
11314 m_pMappedData = VMA_NULL;
11315 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11320 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Corruption-detection support: maps the block and writes the magic sentinel
// value into the debug margins immediately before and after the allocation,
// then unmaps. Requires VMA_DEBUG_MARGIN/VMA_DEBUG_DETECT_CORRUPTION builds.
11324 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11326 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11327 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11330 VkResult res = Map(hAllocator, 1, &pData);
11331 if(res != VK_SUCCESS)
11336 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11337 VmaWriteMagicValue(pData, allocOffset + allocSize);
11339 Unmap(hAllocator, 1);
// Counterpart of WriteMagicValueAroundAllocation: maps the block and asserts
// that the magic sentinels before and after the (about-to-be-freed)
// allocation are intact, reporting out-of-bounds writes by the application.
11344 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11346 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11347 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11350 VkResult res = Map(hAllocator, 1, &pData);
11351 if(res != VK_SUCCESS)
11356 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11358 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11360 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11362 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11365 Unmap(hAllocator, 1);
// Binds a buffer to this block's memory at the allocation's offset. The block
// mutex serializes the vkBindBufferMemory call against concurrent map/unmap.
// NOTE(review): some parameter lines (orig. 11371-11374, 11381-11382) were
// elided by extraction; code lines are kept verbatim.
11370 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11375 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11376 hAllocation->GetBlock() ==
this);
11378 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11379 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11380 hAllocator->m_hDevice,
11383 hAllocation->GetOffset());
// Image analogue of BindBufferMemory: binds an image to this block's memory
// at the allocation's offset, under the block mutex.
// NOTE(review): some parameter lines (orig. 11387-11390, 11397-11398) were
// elided by extraction; code lines are kept verbatim.
11386 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11391 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11392 hAllocation->GetBlock() ==
this);
11394 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11395 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11396 hAllocator->m_hDevice,
11399 hAllocation->GetOffset());
// Fragments of the VmaStatInfo helper functions. The memset belongs to a
// stat-info initializer whose signature (orig. ~11402, presumably
// VmaInitStatInfo) was lost in extraction; VmaPostprocessCalcStatInfo's body
// (orig. 11424-11429, averaging min/max/avg fields) is likewise missing.
// TODO(review): confirm against the upstream file.
11404 memset(&outInfo, 0,
sizeof(outInfo));
11423 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the VmaPoolCreateInfo fields into the embedded
// VmaBlockVector. A zero blockSize falls back to the allocator's preferred
// block size; a non-zero blockSize marks the size as explicit.
// The destructor body (orig. 11452-11453) was elided by extraction.
11431 VmaPool_T::VmaPool_T(
11434 VkDeviceSize preferredBlockSize) :
11438 createInfo.memoryTypeIndex,
11439 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11440 createInfo.minBlockCount,
11441 createInfo.maxBlockCount,
11443 createInfo.frameInUseCount,
11445 createInfo.blockSize != 0,
11451 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores the configuration for one memory type's
// collection of VmaDeviceMemoryBlocks (size limits, granularity, frame-in-use
// count, algorithm) and creates the empty block list with the allocator's
// STL-compatible allocation callbacks.
11455 #if VMA_STATS_STRING_ENABLED 11457 #endif // #if VMA_STATS_STRING_ENABLED 11459 VmaBlockVector::VmaBlockVector(
11462 uint32_t memoryTypeIndex,
11463 VkDeviceSize preferredBlockSize,
11464 size_t minBlockCount,
11465 size_t maxBlockCount,
11466 VkDeviceSize bufferImageGranularity,
11467 uint32_t frameInUseCount,
11469 bool explicitBlockSize,
11470 uint32_t algorithm) :
11471 m_hAllocator(hAllocator),
11472 m_hParentPool(hParentPool),
11473 m_MemoryTypeIndex(memoryTypeIndex),
11474 m_PreferredBlockSize(preferredBlockSize),
11475 m_MinBlockCount(minBlockCount),
11476 m_MaxBlockCount(maxBlockCount),
11477 m_BufferImageGranularity(bufferImageGranularity),
11478 m_FrameInUseCount(frameInUseCount),
11479 m_IsCustomPool(isCustomPool),
11480 m_ExplicitBlockSize(explicitBlockSize),
11481 m_Algorithm(algorithm),
11482 m_HasEmptyBlock(false),
11483 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
// Destructor: destroys all remaining blocks in reverse order, releasing their
// device memory before deleting the block objects themselves.
11488 VmaBlockVector::~VmaBlockVector()
11490 for(
size_t i = m_Blocks.size(); i--; )
11492 m_Blocks[i]->Destroy(m_hAllocator);
11493 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks at the preferred size, stopping at the
// first failure (the error return inside the if, orig. ~11504, was elided).
11497 VkResult VmaBlockVector::CreateMinBlocks()
11499 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11501 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11502 if(res != VK_SUCCESS)
// Aggregates pool statistics across all blocks under a shared (read) lock;
// each block's metadata adds its own numbers into *pStats.
11510 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11512 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11514 const size_t blockCount = m_Blocks.size();
11523 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11525 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11526 VMA_ASSERT(pBlock);
11527 VMA_HEAVY_ASSERT(pBlock->Validate());
11528 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption detection is only possible when the debug margin/detection
// macros are enabled AND the memory type is HOST_VISIBLE|HOST_COHERENT,
// since the magic values must be written through a CPU mapping.
11532 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11534 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11535 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11536 (VMA_DEBUG_MARGIN > 0) &&
11538 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on make-lost retry attempts in AllocatePage.
11541 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates `allocationCount` pages under one write lock by calling
// AllocatePage per page. When corruption detection is active, size and
// alignment are rounded up so the magic margins stay aligned. On any failure
// all pages allocated so far are freed and the output array is zeroed,
// making the operation all-or-nothing.
11543 VkResult VmaBlockVector::Allocate(
11544 uint32_t currentFrameIndex,
11546 VkDeviceSize alignment,
11548 VmaSuballocationType suballocType,
11549 size_t allocationCount,
11553 VkResult res = VK_SUCCESS;
11555 if(IsCorruptionDetectionEnabled())
11557 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11558 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11562 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11563 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11565 res = AllocatePage(
11571 pAllocations + allocIndex);
11572 if(res != VK_SUCCESS)
// Roll back: free the pages that did succeed so the caller never sees a
// partial result.
11579 if(res != VK_SUCCESS)
11582 while(allocIndex--)
11584 Free(pAllocations[allocIndex]);
11586 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single page. Strategy, in order:
//   1. Try existing blocks (last block first, then forward or backward scans
//      depending on strategy) without making other allocations lost.
//   2. If allowed, create a new block - preferring progressively halved sizes
//      when the block size is not explicit - and allocate from it.
//   3. As a last resort (canMakeOtherLost), repeatedly search for the block
//      whose allocation request has the cheapest "cost" in lost allocations,
//      make those lost, and retry up to VMA_ALLOCATION_TRY_COUNT times.
// NOTE(review): many argument lists and early-out branches were elided by
// extraction (non-contiguous original line numbers); code kept verbatim.
11592 VkResult VmaBlockVector::AllocatePage(
11593 uint32_t currentFrameIndex,
11595 VkDeviceSize alignment,
11597 VmaSuballocationType suballocType,
11604 const bool canCreateNewBlock =
11606 (m_Blocks.size() < m_MaxBlockCount);
11613 canMakeOtherLost =
false;
// Upper-address allocation is only meaningful for the linear algorithm.
11617 if(isUpperAddress &&
11620 return VK_ERROR_FEATURE_NOT_PRESENT;
11634 return VK_ERROR_FEATURE_NOT_PRESENT;
// A request that cannot fit a preferred-size block even when empty fails
// immediately.
11638 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11640 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11648 if(!canMakeOtherLost || canCreateNewBlock)
// 1a. Fast path: the most recently added block.
11657 if(!m_Blocks.empty())
11659 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11660 VMA_ASSERT(pCurrBlock);
11661 VkResult res = AllocateFromBlock(
11671 if(res == VK_SUCCESS)
11673 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 1b. Forward scan over all existing blocks.
11683 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11685 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11686 VMA_ASSERT(pCurrBlock);
11687 VkResult res = AllocateFromBlock(
11697 if(res == VK_SUCCESS)
11699 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 1c. Backward scan (alternative strategy).
11707 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11709 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11710 VMA_ASSERT(pCurrBlock);
11711 VkResult res = AllocateFromBlock(
11721 if(res == VK_SUCCESS)
11723 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. Create a new block. With non-explicit sizes, start from the preferred
// size but halve it (up to 3 times) while it still dwarfs both the largest
// existing block and twice the requested size - heuristic to grow pools
// gradually.
11731 if(canCreateNewBlock)
11734 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11735 uint32_t newBlockSizeShift = 0;
11736 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
11738 if(!m_ExplicitBlockSize)
11741 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11742 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11744 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11745 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11747 newBlockSize = smallerNewBlockSize;
11748 ++newBlockSizeShift;
11757 size_t newBlockIndex = 0;
11758 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// If vkAllocateMemory failed, retry with progressively smaller blocks that
// still fit the request.
11760 if(!m_ExplicitBlockSize)
11762 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11764 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11765 if(smallerNewBlockSize >= size)
11767 newBlockSize = smallerNewBlockSize;
11768 ++newBlockSizeShift;
11769 res = CreateBlock(newBlockSize, &newBlockIndex);
11778 if(res == VK_SUCCESS)
11780 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11781 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11783 res = AllocateFromBlock(
11793 if(res == VK_SUCCESS)
11795 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11801 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Make-lost path: pick the request with the lowest cost (fewest bytes of
// other allocations sacrificed), make them lost, and allocate there.
11808 if(canMakeOtherLost)
11810 uint32_t tryIndex = 0;
11811 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11813 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11814 VmaAllocationRequest bestRequest = {};
11815 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
11821 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11823 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11824 VMA_ASSERT(pCurrBlock);
11825 VmaAllocationRequest currRequest = {};
11826 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11829 m_BufferImageGranularity,
11838 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11839 if(pBestRequestBlock == VMA_NULL ||
11840 currRequestCost < bestRequestCost)
11842 pBestRequestBlock = pCurrBlock;
11843 bestRequest = currRequest;
11844 bestRequestCost = currRequestCost;
// Cost 0 means nothing is lost - cannot do better, stop searching.
11846 if(bestRequestCost == 0)
11857 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11859 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11860 VMA_ASSERT(pCurrBlock);
11861 VmaAllocationRequest currRequest = {};
11862 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11865 m_BufferImageGranularity,
11874 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11875 if(pBestRequestBlock == VMA_NULL ||
11876 currRequestCost < bestRequestCost ||
11879 pBestRequestBlock = pCurrBlock;
11880 bestRequest = currRequest;
11881 bestRequestCost = currRequestCost;
11883 if(bestRequestCost == 0 ||
11893 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations require the block mapped up front.
11897 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11898 if(res != VK_SUCCESS)
11904 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11910 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11912 m_HasEmptyBlock =
false;
// Commit: construct the VmaAllocation object and register the
// suballocation in the block's metadata.
11915 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
11916 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
11917 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
11918 (*pAllocation)->InitBlockAllocation(
11920 bestRequest.offset,
11926 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11927 VMA_DEBUG_LOG(
" Returned from existing block");
11928 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11929 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11931 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11933 if(IsCorruptionDetectionEnabled())
11935 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11936 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Exhausted all retries: another thread kept winning the make-lost race.
11951 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11953 return VK_ERROR_TOO_MANY_OBJECTS;
11957 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation back to its block. Under the write lock: optionally
// validates the corruption-detection margins, drops any persistent mapping,
// frees the suballocation, and applies the "keep at most one empty block"
// policy (a newly emptied block is deleted only if an empty one already
// exists and the count stays above m_MinBlockCount). The actual VkDeviceMemory
// release happens after the lock is dropped to keep the critical section short.
11960 void VmaBlockVector::Free(
11963 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
11967 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11969 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11971 if(IsCorruptionDetectionEnabled())
11973 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11974 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
11977 if(hAllocation->IsPersistentMap())
11979 pBlock->Unmap(m_hAllocator, 1);
11982 pBlock->m_pMetadata->Free(hAllocation);
11983 VMA_HEAVY_ASSERT(pBlock->Validate());
11985 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
// Block became empty: delete it only if we already keep another empty block.
11988 if(pBlock->m_pMetadata->IsEmpty())
11991 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11993 pBlockToDelete = pBlock;
11999 m_HasEmptyBlock =
true;
// Block is non-empty but an empty one exists at the back: it can go now.
12004 else if(m_HasEmptyBlock)
12006 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
12007 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
12009 pBlockToDelete = pLastBlock;
12010 m_Blocks.pop_back();
12011 m_HasEmptyBlock =
false;
12015 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is deliberately performed outside the lock.
12020 if(pBlockToDelete != VMA_NULL)
12022 VMA_DEBUG_LOG(
" Deleted empty allocation");
12023 pBlockToDelete->Destroy(m_hAllocator);
12024 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, scanning backwards and
// stopping early once the preferred size is reached (no block can be bigger).
12028 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 12030 VkDeviceSize result = 0;
12031 for(
size_t i = m_Blocks.size(); i--; )
12033 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
12034 if(result >= m_PreferredBlockSize)
// Removes (but does not destroy) the given block from the vector by linear
// search; the early return after removal (orig. ~12049) was elided.
12042 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
12044 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12046 if(m_Blocks[blockIndex] == pBlock)
12048 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass keeping blocks ordered by ascending free space, so
// allocation scans hit the fullest blocks first. A single adjacent swap per
// call amortizes the sorting cost across many Free() calls.
12055 void VmaBlockVector::IncrementallySortBlocks()
12060 for(
size_t i = 1; i < m_Blocks.size(); ++i)
12062 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
12064 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts a lost-free allocation from one specific block: asks the metadata
// for a request (itemsToMakeLostCount must be 0 on this path), maps the block
// if the allocation is persistently mapped, then constructs the VmaAllocation,
// records it in the metadata, and applies debug fill / corruption-margin
// writes. Returns VK_ERROR_OUT_OF_DEVICE_MEMORY if no request fits.
// NOTE(review): several argument lines and the mapped/strategy parameters
// were elided by extraction; code lines are kept verbatim.
12071 VkResult VmaBlockVector::AllocateFromBlock(
12072 VmaDeviceMemoryBlock* pBlock,
12073 uint32_t currentFrameIndex,
12075 VkDeviceSize alignment,
12078 VmaSuballocationType suballocType,
12087 VmaAllocationRequest currRequest = {};
12088 if(pBlock->m_pMetadata->CreateAllocationRequest(
12091 m_BufferImageGranularity,
// This path never sacrifices other allocations.
12101 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
12105 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12106 if(res != VK_SUCCESS)
12113 if(pBlock->m_pMetadata->IsEmpty())
12115 m_HasEmptyBlock =
false;
12118 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12119 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12120 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12121 (*pAllocation)->InitBlockAllocation(
12123 currRequest.offset,
12129 VMA_HEAVY_ASSERT(pBlock->Validate());
12130 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12131 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12133 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12135 if(IsCorruptionDetectionEnabled())
12137 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12138 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
12142 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of the requested size, wraps it in a
// VmaDeviceMemoryBlock (Init selects the metadata algorithm), appends it to
// m_Blocks, and optionally reports its index. The error return after a failed
// AllocateVulkanMemory (orig. ~12152-12155) was elided by extraction.
12145 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12147 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12148 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12149 allocInfo.allocationSize = blockSize;
12150 VkDeviceMemory mem = VK_NULL_HANDLE;
12151 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12160 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12166 allocInfo.allocationSize,
12170 m_Blocks.push_back(pBlock);
12171 if(pNewBlockIndex != VMA_NULL)
12173 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes a list of defragmentation moves entirely on the CPU via memcpy
// between mapped blocks. Phases:
//   1. Flag every block that participates in any move.
//   2. Map each participating block (remembering which we mapped ourselves).
//   3. For each move: invalidate the source range if the memory type is
//      non-coherent, memcpy src->dst, refresh corruption-detection margins,
//      and flush the destination range if non-coherent.
//   4. Unmap only the blocks this function mapped, in reverse order.
12179 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12180 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12181 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12183 const size_t blockCount = m_Blocks.size();
12184 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12188 BLOCK_FLAG_USED = 0x00000001,
12189 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12197 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12198 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12199 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Phase 1: mark blocks touched by moves.
12202 const size_t moveCount = moves.size();
12203 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12205 const VmaDefragmentationMove& move = moves[moveIndex];
12206 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12207 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12210 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Phase 2: ensure every used block is mapped, tracking our own mappings.
12213 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12215 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12216 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12217 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12219 currBlockInfo.pMappedData = pBlock->GetMappedData();
12221 if(currBlockInfo.pMappedData == VMA_NULL)
12223 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12224 if(pDefragCtx->res == VK_SUCCESS)
12226 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Phase 3: perform the copies with cache maintenance for non-coherent memory.
12233 if(pDefragCtx->res == VK_SUCCESS)
12235 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12236 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12238 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12240 const VmaDefragmentationMove& move = moves[moveIndex];
12242 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12243 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12245 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range, aligned to nonCoherentAtomSize and clamped
// to the block size, before reading it through the mapping.
12250 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12251 memRange.memory = pSrcBlock->GetDeviceMemory();
12252 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12253 memRange.size = VMA_MIN(
12254 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12255 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12256 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12261 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12262 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12263 static_cast<size_t>(move.size));
12265 if(IsCorruptionDetectionEnabled())
12267 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12268 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range after writing, same alignment/clamping rules.
12274 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12275 memRange.memory = pDstBlock->GetDeviceMemory();
12276 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12277 memRange.size = VMA_MIN(
12278 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12279 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12280 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Phase 4: unmap blocks we mapped, in reverse order.
12287 for(
size_t blockIndex = blockCount; blockIndex--; )
12289 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12290 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12292 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12293 pBlock->Unmap(m_hAllocator, 1);
// Records GPU-side defragmentation: flags participating blocks, creates a
// temporary VkBuffer bound to each such block's whole memory, records
// vkCmdCopyBuffer for every move into the provided command buffer, and marks
// the context VK_NOT_READY because the copies only complete after the command
// buffer executes (buffers are destroyed later by the context).
12298 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12299 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12300 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12301 VkCommandBuffer commandBuffer)
12303 const size_t blockCount = m_Blocks.size();
12305 pDefragCtx->blockContexts.resize(blockCount);
12306 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Mark blocks that take part in any move.
12309 const size_t moveCount = moves.size();
12310 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12312 const VmaDefragmentationMove& move = moves[moveIndex];
12313 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12314 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12317 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Create a transfer buffer aliasing each participating block's full memory.
12321 VkBufferCreateInfo bufCreateInfo;
12322 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
12324 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12326 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12327 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12328 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12330 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12331 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12332 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12333 if(pDefragCtx->res == VK_SUCCESS)
12335 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12336 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Record one buffer-to-buffer copy per move.
12343 if(pDefragCtx->res == VK_SUCCESS)
12345 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12347 const VmaDefragmentationMove& move = moves[moveIndex];
12349 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12350 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12352 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12354 VkBufferCopy region = {
12358 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12359 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
// Copies complete only when the command buffer executes.
12364 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12366 pDefragCtx->res = VK_NOT_READY;
// Fragment of the post-defragmentation empty-block cleanup (the function
// signature at orig. ~12370, presumably VmaBlockVector::FreeEmptyBlocks
// taking a VmaDefragmentationStats*, was lost in extraction - TODO confirm).
// Walks blocks backwards, destroying empty ones above m_MinBlockCount while
// crediting their size to pDefragmentationStats->bytesFreed; the first empty
// block that must be kept sets m_HasEmptyBlock.
12372 m_HasEmptyBlock =
false;
12373 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12375 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12376 if(pBlock->m_pMetadata->IsEmpty())
12378 if(m_Blocks.size() > m_MinBlockCount)
12380 if(pDefragmentationStats != VMA_NULL)
12383 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12386 VmaVectorRemove(m_Blocks, blockIndex);
12387 pBlock->Destroy(m_hAllocator);
12388 vma_delete(m_hAllocator, pBlock);
12392 m_HasEmptyBlock =
true;
// Serializes this block vector as JSON (stats-string builds only) under a
// read lock. Custom pools emit MemoryTypeIndex/BlockSize/BlockCount plus
// optional FrameInUseCount and Algorithm; default pools emit only
// PreferredBlockSize. Then each block prints its own detailed map keyed by
// block id. (The custom-pool/default-pool branch header was elided by
// extraction; code lines are kept verbatim.)
12398 #if VMA_STATS_STRING_ENABLED 12400 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12402 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12404 json.BeginObject();
12408 json.WriteString(
"MemoryTypeIndex");
12409 json.WriteNumber(m_MemoryTypeIndex);
12411 json.WriteString(
"BlockSize");
12412 json.WriteNumber(m_PreferredBlockSize);
12414 json.WriteString(
"BlockCount");
12415 json.BeginObject(
true);
12416 if(m_MinBlockCount > 0)
12418 json.WriteString(
"Min");
12419 json.WriteNumber((uint64_t)m_MinBlockCount);
12421 if(m_MaxBlockCount < SIZE_MAX)
12423 json.WriteString(
"Max");
12424 json.WriteNumber((uint64_t)m_MaxBlockCount);
12426 json.WriteString(
"Cur");
12427 json.WriteNumber((uint64_t)m_Blocks.size());
12430 if(m_FrameInUseCount > 0)
12432 json.WriteString(
"FrameInUseCount");
12433 json.WriteNumber(m_FrameInUseCount);
12436 if(m_Algorithm != 0)
12438 json.WriteString(
"Algorithm");
12439 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
12444 json.WriteString(
"PreferredBlockSize");
12445 json.WriteNumber(m_PreferredBlockSize);
12448 json.WriteString(
"Blocks");
12449 json.BeginObject();
12450 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12452 json.BeginString();
12453 json.ContinueString(m_Blocks[i]->GetId());
12456 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation pass over this block vector. Decides between CPU (host-visible
// memory, moves done via memcpy elsewhere) and GPU (vkCmdCopyBuffer into commandBuffer)
// defragmentation, runs the context's algorithm to compute moves, deducts the moved
// bytes/allocations from the caller's remaining budgets (passed by reference), and applies
// the moves. Takes a write lock that is released later in DefragmentationEnd (pCtx->mutexLocked).
12463 #endif // #if VMA_STATS_STRING_ENABLED 12465 void VmaBlockVector::Defragment(
12466 class VmaBlockVectorDefragmentationContext* pCtx,
12468 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12469 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12470 VkCommandBuffer commandBuffer)
12472 pCtx->res = VK_SUCCESS;
12474 const VkMemoryPropertyFlags memPropFlags =
12475 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12476 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
// CPU path needs budget left (further condition dropped by extraction — likely isHostVisible).
12478 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
// GPU path: budget left, no corruption-detection margins, and memory type enabled for GPU defrag.
12480 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12481 !IsCorruptionDetectionEnabled() &&
12482 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
12485 if(canDefragmentOnCpu || canDefragmentOnGpu)
12487 bool defragmentOnGpu;
// If only one path is possible, take it; otherwise prefer GPU for device-local or integrated GPUs.
12489 if(canDefragmentOnGpu != canDefragmentOnCpu)
12491 defragmentOnGpu = canDefragmentOnGpu;
12496 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12497 m_hAllocator->IsIntegratedGpu();
// GPU copies must not overlap; CPU memmove-style copies may.
12500 bool overlappingMoveSupported = !defragmentOnGpu;
12502 if(m_hAllocator->m_UseMutex)
// Lock held across the whole defragmentation; unlocked in DefragmentationEnd.
12504 m_Mutex.LockWrite();
12505 pCtx->mutexLocked =
true;
12508 pCtx->Begin(overlappingMoveSupported);
12512 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12513 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12514 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12515 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
12516 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
12519 if(pStats != VMA_NULL)
// Charge the actual amount moved against the caller's remaining budget.
12521 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12522 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12525 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12526 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12527 if(defragmentOnGpu)
12529 maxGpuBytesToMove -= bytesMoved;
12530 maxGpuAllocationsToMove -= allocationsMoved;
12534 maxCpuBytesToMove -= bytesMoved;
12535 maxCpuAllocationsToMove -= allocationsMoved;
12539 if(pCtx->res >= VK_SUCCESS)
12541 if(defragmentOnGpu)
12543 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12547 ApplyDefragmentationMovesCpu(pCtx, moves);
// Tears down per-block defragmentation state: destroys the temporary VkBuffers created
// for GPU copies, frees now-empty blocks if the pass succeeded, and releases the write
// lock taken in Defragment (pCtx->mutexLocked).
12553 void VmaBlockVector::DefragmentationEnd(
12554 class VmaBlockVectorDefragmentationContext* pCtx,
// Reverse loop over block contexts, destroying each temporary buffer if one was created.
12558 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12560 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12561 if(blockCtx.hBuffer)
12563 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12564 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12568 if(pCtx->res >= VK_SUCCESS)
12570 FreeEmptyBlocks(pStats);
12573 if(pCtx->mutexLocked)
12575 VMA_ASSERT(m_hAllocator->m_UseMutex);
12576 m_Mutex.UnlockWrite();
// Sums the allocation counts of all blocks' metadata. (The `result` declaration and
// return were dropped by extraction.)
12580 size_t VmaBlockVector::CalcAllocationCount()
const 12583 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12585 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Returns whether any block could suffer a bufferImageGranularity conflict if
// allocations were repacked. Trivially false when granularity is 1 (no padding needed).
// Only valid for the default (generic) algorithm — see the VMA_ASSERT below.
12590 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12592 if(m_BufferImageGranularity == 1)
// lastSuballocType threads suballocation-type adjacency state across blocks.
12596 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12597 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12599 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
12600 VMA_ASSERT(m_Algorithm == 0);
// Cast is safe under the assert above: algorithm 0 uses generic metadata.
12601 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12602 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks allocations in all blocks as lost relative to currentFrameIndex and
// m_FrameInUseCount; optionally reports how many were lost via pLostAllocationCount.
// Takes a write lock because block metadata is mutated.
12610 void VmaBlockVector::MakePoolAllocationsLost(
12611 uint32_t currentFrameIndex,
12612 size_t* pLostAllocationCount)
12614 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12615 size_t lostAllocationCount = 0;
12616 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12618 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12619 VMA_ASSERT(pBlock);
12620 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Output parameter is optional.
12622 if(pLostAllocationCount != VMA_NULL)
12624 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection margins in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is disabled; otherwise checks
// each block under a read lock and propagates the first non-success result (the
// propagation/return lines were dropped by extraction).
12628 VkResult VmaBlockVector::CheckCorruption()
12630 if(!IsCorruptionDetectionEnabled())
12632 return VK_ERROR_FEATURE_NOT_PRESENT;
12635 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12636 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12638 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12639 VMA_ASSERT(pBlock);
12640 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12641 if(res != VK_SUCCESS)
// Accumulates this block vector's per-block statistics into pStats: each block's stat
// info is added to the grand total, to the entry for this memory type, and to the entry
// for the owning memory heap. Read lock is sufficient (no metadata mutation).
12649 void VmaBlockVector::AddStats(
VmaStats* pStats)
12651 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12652 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12654 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12656 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12658 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12659 VMA_ASSERT(pBlock);
12660 VMA_HEAVY_ASSERT(pBlock->Validate());
// allocationStatInfo declaration was dropped by extraction; it is filled per block here.
12662 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12663 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12664 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12665 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: snapshots the block vector into per-block BlockInfo records (remembering
// each block's original index) and sorts them by block pointer so AddAllocation can
// binary-search by owning block.
12672 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12674 VmaBlockVector* pBlockVector,
12675 uint32_t currentFrameIndex,
12676 bool overlappingMoveSupported) :
12677 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12678 m_AllocationCount(0),
12679 m_AllAllocations(false),
12681 m_AllocationsMoved(0),
12682 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12685 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12686 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12688 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
// m_OriginalBlockIndex lets moves be reported against the block vector's real indices
// even after m_Blocks is re-sorted.
12689 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12690 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12691 m_Blocks.push_back(pBlockInfo);
// Sorted by block pointer for VmaBinaryFindFirstNotLess in AddAllocation.
12695 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Destructor: frees the BlockInfo records allocated with vma_new in the constructor.
12698 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12700 for(
size_t i = m_Blocks.size(); i--; )
12702 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers a single allocation as a defragmentation candidate. Lost allocations are
// skipped; otherwise the allocation is filed under its owning block's BlockInfo, found
// by binary search over the pointer-sorted m_Blocks.
12706 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12709 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12711 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12712 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12713 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
// pChanged (may be null) is stored so the caller can learn if this allocation moved.
12715 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12716 (*it)->m_Allocations.push_back(allocInfo);
12723 ++m_AllocationCount;
// One round of the generic algorithm: repeatedly takes the last allocation of the last
// block (allocations are pre-sorted by descending offset) and tries to re-place it into
// an earlier block/offset via CreateAllocationRequest + MoveMakesSense, recording a
// VmaDefragmentationMove and updating metadata on success. Stops when the byte or
// allocation budget would be exceeded. (Several control-flow lines — returns, else
// branches, loop closers — were dropped by extraction.)
12727 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12728 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12729 VkDeviceSize maxBytesToMove,
12730 uint32_t maxAllocationsToMove)
12732 if(m_Blocks.empty())
12745 size_t srcBlockMinIndex = 0;
// Scan source candidates from the last block backwards; SIZE_MAX sentinel forces the
// first iteration of the while-loop below to pick the block's last allocation.
12758 size_t srcBlockIndex = m_Blocks.size() - 1;
12759 size_t srcAllocIndex = SIZE_MAX;
12765 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12767 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// Reached the lower bound of blocks that may serve as a source: round is done.
12770 if(srcBlockIndex == srcBlockMinIndex)
12777 srcAllocIndex = SIZE_MAX;
12782 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12786 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12787 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12789 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12790 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12791 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12792 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source block itself.
12795 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12797 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12798 VmaAllocationRequest dstAllocRequest;
12799 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12800 m_CurrentFrameIndex,
12801 m_pBlockVector->GetFrameInUseCount(),
12802 m_pBlockVector->GetBufferImageGranularity(),
12809 &dstAllocRequest) &&
// Only accept placements that actually improve packing (see MoveMakesSense).
12811 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12813 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: stop the round before exceeding either cap.
12816 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12817 (m_BytesMoved + size > maxBytesToMove))
12822 VmaDefragmentationMove move;
12823 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12824 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12825 move.srcOffset = srcOffset;
12826 move.dstOffset = dstAllocRequest.offset;
12828 moves.push_back(move);
// Commit: allocate at destination, free at source, repoint the allocation handle.
12830 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12834 allocInfo.m_hAllocation);
12835 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12837 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12839 if(allocInfo.m_pChanged != VMA_NULL)
12841 *allocInfo.m_pChanged = VK_TRUE;
12844 ++m_AllocationsMoved;
12845 m_BytesMoved += size;
12847 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the next source candidate (previous allocation or previous block).
12855 if(srcAllocIndex > 0)
12861 if(srcBlockIndex > 0)
12864 srcAllocIndex = SIZE_MAX;
// Counts blocks flagged as containing non-movable allocations. (The counter declaration,
// its increment and the return were dropped by extraction.)
12874 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12877 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12879 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm. Prepares per-block allocation lists (all
// suballocations when m_AllAllocations is set, otherwise only those added via
// AddAllocation), sorts allocations by descending offset and blocks by destination
// preference, then runs up to two DefragmentRound passes within the given budgets.
12887 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12888 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12889 VkDeviceSize maxBytesToMove,
12890 uint32_t maxAllocationsToMove)
// Nothing to do when no allocations were registered and "all" mode is off.
12892 if(!m_AllAllocations && m_AllocationCount == 0)
12897 const size_t blockCount = m_Blocks.size();
12898 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12900 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
12902 if(m_AllAllocations)
// "All" mode: enumerate every used suballocation directly from generic metadata.
12904 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12905 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12906 it != pMetadata->m_Suballocations.end();
12909 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12911 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12912 pBlockInfo->m_Allocations.push_back(allocInfo);
12917 pBlockInfo->CalcHasNonMovableAllocations();
// Descending offset order so DefragmentRound pops from the block's tail first.
12921 pBlockInfo->SortAllocationsByOffsetDescending();
12927 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Two rounds: a second pass can move allocations freed up by the first.
12930 const uint32_t roundCount = 2;
12933 VkResult result = VK_SUCCESS;
12934 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12936 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
// Heuristic: a move "makes sense" only if it strictly improves packing — to an earlier
// block, or to a lower offset within the same block. (The return statements for each
// branch were dropped by extraction.)
12942 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12943 size_t dstBlockIndex, VkDeviceSize dstOffset,
12944 size_t srcBlockIndex, VkDeviceSize srcOffset)
12946 if(dstBlockIndex < srcBlockIndex)
12950 if(dstBlockIndex > srcBlockIndex)
12954 if(dstOffset < srcOffset)
// Constructor of the fast (linear repacking) algorithm. Asserts VMA_DEBUG_MARGIN == 0
// because the fast path rewrites metadata wholesale and cannot preserve debug margins.
12964 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12966 VmaBlockVector* pBlockVector,
12967 uint32_t currentFrameIndex,
12968 bool overlappingMoveSupported) :
12969 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12970 m_OverlappingMoveSupported(overlappingMoveSupported),
12971 m_AllocationCount(0),
12972 m_AllAllocations(false),
12974 m_AllocationsMoved(0),
12975 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
12977 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Empty destructor — m_BlockInfos cleans itself up.
12981 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast algorithm: compacts all allocations toward the front of the block list in a
// single linear sweep. Blocks are processed in ascending free-size order; each source
// suballocation is placed either into a previously registered free gap (FreeSpaceDatabase)
// or at the running destination cursor (dstOffset in the current destination block).
// Metadata is rewritten directly (Preprocess/PostprocessMetadata), so this requires
// generic block metadata. (Extraction dropped various braces/else/return lines.)
12985 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12986 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12987 VkDeviceSize maxBytesToMove,
12988 uint32_t maxAllocationsToMove)
// Fast mode only works when it sees every allocation in the vector.
12990 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12992 const size_t blockCount = m_pBlockVector->GetBlockCount();
12993 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Strips free suballocations from metadata so only used ones remain during the sweep.
12998 PreprocessMetadata();
13002 m_BlockInfos.resize(blockCount);
13003 for(
size_t i = 0; i < blockCount; ++i)
13005 m_BlockInfos[i].origBlockIndex = i;
// Sort block order by ascending sum of free size: fullest blocks become destinations first.
13008 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
13009 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
13010 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Tracks gaps skipped over (alignment holes, overlap skips) for later reuse.
13015 FreeSpaceDatabase freeSpaceDb;
// Destination cursor: block index in sorted order plus byte offset within that block.
13017 size_t dstBlockInfoIndex = 0;
13018 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13019 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13020 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13021 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
13022 VkDeviceSize dstOffset = 0;
13025 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
13027 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
13028 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
13029 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
13030 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
13031 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
13033 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
13034 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
13035 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Budget exhausted: stop the sweep (the `end = true` line was dropped by extraction).
13036 if(m_AllocationsMoved == maxAllocationsToMove ||
13037 m_BytesMoved + srcAllocSize > maxBytesToMove)
13042 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// Case 1: the allocation fits into a previously recorded free gap.
13045 size_t freeSpaceInfoIndex;
13046 VkDeviceSize dstAllocOffset;
13047 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
13048 freeSpaceInfoIndex, dstAllocOffset))
13050 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
13051 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
13052 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Case 1a: gap is in the same block — move within the block (ChangeOffset only).
13055 if(freeSpaceInfoIndex == srcBlockInfoIndex)
13057 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13061 VmaSuballocation suballoc = *srcSuballocIt;
13062 suballoc.offset = dstAllocOffset;
13063 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
13064 m_BytesMoved += srcAllocSize;
13065 ++m_AllocationsMoved;
// Erase-and-advance: keep a copy of the next iterator before erasing.
13067 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13069 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13070 srcSuballocIt = nextSuballocIt;
13072 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13074 VmaDefragmentationMove move = {
13075 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13076 srcAllocOffset, dstAllocOffset,
13078 moves.push_back(move);
// Case 1b: gap is in an earlier block — move across blocks (ChangeBlockAllocation).
13085 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13087 VmaSuballocation suballoc = *srcSuballocIt;
13088 suballoc.offset = dstAllocOffset;
13089 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13090 m_BytesMoved += srcAllocSize;
13091 ++m_AllocationsMoved;
13093 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13095 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13096 srcSuballocIt = nextSuballocIt;
13098 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13100 VmaDefragmentationMove move = {
13101 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13102 srcAllocOffset, dstAllocOffset,
13104 moves.push_back(move);
// Case 2: no reusable gap — place at the aligned destination cursor.
13109 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// If it doesn't fit in the remaining space of the current destination block, register
// the leftover tail as free space and advance to the next destination block.
13112 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13113 dstAllocOffset + srcAllocSize > dstBlockSize)
13116 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13118 ++dstBlockInfoIndex;
13119 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13120 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13121 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13122 dstBlockSize = pDstMetadata->GetSize();
13124 dstAllocOffset = 0;
// Case 2a: destination is the same block as the source.
13128 if(dstBlockInfoIndex == srcBlockInfoIndex)
13130 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13132 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
13134 bool skipOver = overlap;
13135 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: skip the overlapping move when the gain (offset delta) is tiny
// relative to the allocation size (< 1/64) — not worth the copy.
13139 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Skipping: record the gap before the allocation and leave it in place.
13144 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13146 dstOffset = srcAllocOffset + srcAllocSize;
// Moving within the same block: just change the offset.
13152 srcSuballocIt->offset = dstAllocOffset;
13153 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
13154 dstOffset = dstAllocOffset + srcAllocSize;
13155 m_BytesMoved += srcAllocSize;
13156 ++m_AllocationsMoved;
13158 VmaDefragmentationMove move = {
13159 srcOrigBlockIndex, dstOrigBlockIndex,
13160 srcAllocOffset, dstAllocOffset,
13162 moves.push_back(move);
// Case 2b: destination is an earlier block — append to its suballocation list.
13170 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13171 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13173 VmaSuballocation suballoc = *srcSuballocIt;
13174 suballoc.offset = dstAllocOffset;
13175 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13176 dstOffset = dstAllocOffset + srcAllocSize;
13177 m_BytesMoved += srcAllocSize;
13178 ++m_AllocationsMoved;
13180 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13182 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13183 srcSuballocIt = nextSuballocIt;
// Destination cursor only ever advances, so push_back keeps the list offset-sorted.
13185 pDstMetadata->m_Suballocations.push_back(suballoc);
13187 VmaDefragmentationMove move = {
13188 srcOrigBlockIndex, dstOrigBlockIndex,
13189 srcAllocOffset, dstAllocOffset,
13191 moves.push_back(move);
13197 m_BlockInfos.clear();
// Rebuilds free suballocations and free-size bookkeeping after the sweep.
13199 PostprocessMetadata();
// Prepares every block's generic metadata for the fast sweep: resets free-space
// bookkeeping (free count, sum of free size, by-size index) and erases all FREE
// suballocations, leaving only used ones in the lists.
13204 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13206 const size_t blockCount = m_pBlockVector->GetBlockCount();
13207 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13209 VmaBlockMetadata_Generic*
const pMetadata =
13210 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13211 pMetadata->m_FreeCount = 0;
// Treated as fully free for now; PostprocessMetadata subtracts used sizes back out.
13212 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13213 pMetadata->m_FreeSuballocationsBySize.clear();
13214 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13215 it != pMetadata->m_Suballocations.end(); )
13217 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
// Erase-and-advance idiom (the ++nextIt line was dropped by extraction).
13219 VmaSuballocationList::iterator nextIt = it;
13221 pMetadata->m_Suballocations.erase(it);
// Rebuilds valid metadata after the sweep: re-inserts FREE suballocations into the gaps
// between used ones (and the trailing gap), recomputes m_FreeCount/m_SumFreeSize,
// re-registers free chunks in the by-size index, and re-sorts that index.
13232 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13234 const size_t blockCount = m_pBlockVector->GetBlockCount();
13235 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13237 VmaBlockMetadata_Generic*
const pMetadata =
13238 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13239 const VkDeviceSize blockSize = pMetadata->GetSize();
// Block emptied by the sweep: one FREE suballocation covering the whole block.
13242 if(pMetadata->m_Suballocations.empty())
13244 pMetadata->m_FreeCount = 1;
13246 VmaSuballocation suballoc = {
13250 VMA_SUBALLOCATION_TYPE_FREE };
13251 pMetadata->m_Suballocations.push_back(suballoc);
13252 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Non-empty block: walk used suballocations in offset order, inserting FREE entries
// into each gap before them.
13257 VkDeviceSize offset = 0;
13258 VmaSuballocationList::iterator it;
13259 for(it = pMetadata->m_Suballocations.begin();
13260 it != pMetadata->m_Suballocations.end();
// Invariants after the sweep: no FREE entries remain and offsets are non-decreasing.
13263 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13264 VMA_ASSERT(it->offset >= offset);
13267 if(it->offset > offset)
13269 ++pMetadata->m_FreeCount;
13270 const VkDeviceSize freeSize = it->offset - offset;
13271 VmaSuballocation suballoc = {
13275 VMA_SUBALLOCATION_TYPE_FREE };
13276 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// Only gaps of registrable size go into the by-size index.
13277 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13279 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13283 pMetadata->m_SumFreeSize -= it->size;
13284 offset = it->offset + it->size;
// Trailing gap after the last used suballocation.
13288 if(offset < blockSize)
13290 ++pMetadata->m_FreeCount;
13291 const VkDeviceSize freeSize = blockSize - offset;
13292 VmaSuballocation suballoc = {
13296 VMA_SUBALLOCATION_TYPE_FREE };
13297 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13298 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): uses `>` here vs `>=` for the preceding-gap case above — possibly an
// upstream inconsistency; confirm against the original source before changing.
13299 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13301 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
// Re-sort the by-size index (the VMA_SORT call line was dropped by extraction).
13306 pMetadata->m_FreeSuballocationsBySize.begin(),
13307 pMetadata->m_FreeSuballocationsBySize.end(),
13308 VmaSuballocationItemSizeLess());
13311 VMA_HEAVY_ASSERT(pMetadata->Validate());
// Inserts a suballocation into a block's list keeping offset order: linearly scans for
// the first entry not below suballoc.offset and inserts before it. (The loop's advance
// and break lines were dropped by extraction.)
13315 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13318 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13319 while(it != pMetadata->m_Suballocations.end())
13321 if(it->offset < suballoc.offset)
13326 pMetadata->m_Suballocations.insert(it, suballoc);
// Constructor: per-block-vector defragmentation context. Stores the owning allocator,
// optional custom pool, target block vector and frame index; the algorithm object is
// created lazily in Begin().
13332 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13335 VmaBlockVector* pBlockVector,
13336 uint32_t currFrameIndex) :
13338 mutexLocked(false),
13339 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13340 m_hAllocator(hAllocator),
13341 m_hCustomPool(hCustomPool),
13342 m_pBlockVector(pBlockVector),
13343 m_CurrFrameIndex(currFrameIndex),
13344 m_pAlgorithm(VMA_NULL),
13345 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13346 m_AllAllocations(false)
// Destructor: frees the algorithm object created in Begin() (vma_delete handles null).
13350 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13352 vma_delete(m_hAllocator, m_pAlgorithm);
// Queues an allocation (plus its optional "changed" output flag) for the algorithm;
// actually forwarded to the algorithm in Begin().
13355 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13357 AllocInfo info = { hAlloc, pChanged };
13358 m_Allocations.push_back(info);
// Chooses and instantiates the defragmentation algorithm. The fast algorithm is used
// when all allocations are covered, no debug margin is configured, and no
// buffer-image-granularity conflicts are possible; otherwise the generic one. Queued
// allocations (or AddAll) are then handed to the algorithm.
13361 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13363 const bool allAllocations = m_AllAllocations ||
13364 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
// Fast path preconditions (an extra condition line, likely allAllocations, was dropped
// by extraction).
13376 if(VMA_DEBUG_MARGIN == 0 &&
13378 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13380 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13381 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13385 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13386 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13391 m_pAlgorithm->AddAll();
13395 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13397 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Constructor of the top-level defragmentation context: stores allocator/frame index and
// zeroes the fixed-size array of per-memory-type (default pool) contexts; custom pool
// contexts live in a growable vector.
13405 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13407 uint32_t currFrameIndex,
13410 m_hAllocator(hAllocator),
13411 m_CurrFrameIndex(currFrameIndex),
13414 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
13416 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Destructor: finalizes every per-block-vector context (DefragmentationEnd releases
// buffers/locks and frees empty blocks) and deletes it — first all custom-pool contexts,
// then the per-memory-type default-pool contexts (which may be null).
13419 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13421 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13423 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13424 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13425 vma_delete(m_hAllocator, pBlockVectorCtx);
13427 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13429 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13430 if(pBlockVectorCtx)
13432 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13433 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation. Only pools using the default
// algorithm (GetAlgorithm() == 0) are eligible. Reuses an existing context for the same
// pool if one was already created, otherwise creates one, then marks it AddAll().
13438 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13440 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13442 VmaPool pool = pPools[poolIndex];
13445 if(pool->m_BlockVector.GetAlgorithm() == 0)
13447 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Linear search for an existing context for this pool.
13449 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13451 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13453 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13458 if(!pBlockVectorDefragCtx)
13460 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13463 &pool->m_BlockVector,
13465 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13468 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Only block-type, non-lost
// allocations are eligible. Routes each allocation to the context of its owning custom
// pool (if any, and if that pool uses the default algorithm) or to the per-memory-type
// default-pool context, creating contexts on demand.
13473 void VmaDefragmentationContext_T::AddAllocations(
13474 uint32_t allocationCount,
13476 VkBool32* pAllocationsChanged)
13479 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13482 VMA_ASSERT(hAlloc);
// Dedicated allocations and lost allocations cannot be defragmented.
13484 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13486 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13488 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13490 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation belongs to a custom pool.
13492 if(hAllocPool != VK_NULL_HANDLE)
13495 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13497 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13499 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13501 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13505 if(!pBlockVectorDefragCtx)
13507 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13510 &hAllocPool->m_BlockVector,
13512 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to a default pool: one context per memory type, cached in the array.
13519 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13520 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13521 if(!pBlockVectorDefragCtx)
13523 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13526 m_hAllocator->m_pBlockVectors[memTypeIndex],
13528 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13532 if(pBlockVectorDefragCtx)
// Per-allocation "changed" output flag is optional (pAllocationsChanged may be null).
13534 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13535 &pAllocationsChanged[allocIndex] : VMA_NULL;
13536 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Executes defragmentation over all registered block-vector contexts:
// first the default per-memory-type contexts, then the custom-pool contexts.
// CPU and GPU move budgets (bytes / allocation counts) limit the work; with
// no command buffer, GPU budgets are forced to zero so only CPU moves happen.
// Stops early as soon as any context reports a result < VK_SUCCESS.
// NOTE(review): interior lines (braces, some call arguments) are missing
// from this excerpt.
13542 VkResult VmaDefragmentationContext_T::Defragment(
13543 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13544 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// Without a command buffer GPU-side copies are impossible; disable them.
13552 if(commandBuffer == VK_NULL_HANDLE)
13554 maxGpuBytesToMove = 0;
13555 maxGpuAllocationsToMove = 0;
13558 VkResult res = VK_SUCCESS;
// Pass 1: default block vectors, one per memory type.
13561 for(uint32_t memTypeIndex = 0;
13562 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13565 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13566 if(pBlockVectorCtx)
13568 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13569 pBlockVectorCtx->GetBlockVector()->Defragment(
13572 maxCpuBytesToMove, maxCpuAllocationsToMove,
13573 maxGpuBytesToMove, maxGpuAllocationsToMove,
13575 if(pBlockVectorCtx->res != VK_SUCCESS)
13577 res = pBlockVectorCtx->res;
// Pass 2: custom pools that were registered via AddPools/AddAllocations.
13583 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13584 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13587 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13588 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13589 pBlockVectorCtx->GetBlockVector()->Defragment(
13592 maxCpuBytesToMove, maxCpuAllocationsToMove,
13593 maxGpuBytesToMove, maxGpuAllocationsToMove,
13595 if(pBlockVectorCtx->res != VK_SUCCESS)
13597 res = pBlockVectorCtx->res;
// Recorder of VMA API calls to a CSV file; compiled only when
// VMA_RECORDING_ENABLED (uses Win32 QueryPerformanceCounter/fopen_s).
// Default ctor leaves the start counter at a sentinel until Init() runs.
13607 #if VMA_RECORDING_ENABLED 13609 VmaRecorder::VmaRecorder() :
13614 m_StartCounter(INT64_MAX)
// NOTE(review): signature line is missing from this excerpt — presumably
// VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex);
// confirm against the full source. Captures settings, starts the QPC clock,
// opens the output file for binary write, and emits the two CSV header lines
// (file type tag and format version "1,5").
13620 m_UseMutex = useMutex;
13621 m_Flags = settings.
flags;
// High-resolution clock baseline for the per-call timestamps.
13623 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13624 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13627 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
// Failure to open the recording file aborts allocator initialization.
13630 return VK_ERROR_INITIALIZATION_FAILED;
13634 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13635 fprintf(m_File,
"%s\n",
"1,5");
// Closes the recording file if Init() opened one (close call is outside
// this excerpt).
13640 VmaRecorder::~VmaRecorder()
13642 if(m_File != VMA_NULL)
// Appends a "vmaCreateAllocator" CSV row (threadId,time,frameIndex) under the
// file mutex.
13648 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13650 CallParams callParams;
13651 GetBasicParams(callParams);
13653 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13654 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends a "vmaDestroyAllocator" CSV row under the file mutex.
13658 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13660 CallParams callParams;
13661 GetBasicParams(callParams);
13663 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13664 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// NOTE(review): the signature line is missing from this excerpt — this is
// presumably RecordCreatePool(uint32_t frameIndex, const VmaPoolCreateInfo&,
// VmaPool); confirm against the full source. Appends a "vmaCreatePool" row
// with the pool parameters and handle.
13670 CallParams callParams;
13671 GetBasicParams(callParams);
13673 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13674 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDestroyPool" row with the pool handle.
13685 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13687 CallParams callParams;
13688 GetBasicParams(callParams);
13690 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13691 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaAllocateMemory" row: memory requirements, create-info fields,
// resulting allocation pointer, and the (possibly stringified) user data.
13696 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13697 const VkMemoryRequirements& vkMemReq,
13701 CallParams callParams;
13702 GetBasicParams(callParams);
13704 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// UserDataString renders pUserData either as a string or as a pointer,
// depending on the allocation flags.
13705 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13706 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13708 vkMemReq.alignment,
13709 vkMemReq.memoryTypeBits,
13717 userDataStr.GetString());
// Appends a "vmaAllocateMemoryPages" row; the variable-length list of
// resulting allocation pointers is emitted via PrintPointerList, then the
// user-data string terminates the row.
13721 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13722 const VkMemoryRequirements& vkMemReq,
13724 uint64_t allocationCount,
13727 CallParams callParams;
13728 GetBasicParams(callParams);
13730 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13731 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13732 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13734 vkMemReq.alignment,
13735 vkMemReq.memoryTypeBits,
13742 PrintPointerList(allocationCount, pAllocations);
13743 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Appends a "vmaAllocateMemoryForBuffer" row, including the dedicated-
// allocation hints reported by the driver (required/preferred, encoded 0/1).
13747 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13748 const VkMemoryRequirements& vkMemReq,
13749 bool requiresDedicatedAllocation,
13750 bool prefersDedicatedAllocation,
13754 CallParams callParams;
13755 GetBasicParams(callParams);
13757 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13758 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13759 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13761 vkMemReq.alignment,
13762 vkMemReq.memoryTypeBits,
13763 requiresDedicatedAllocation ? 1 : 0,
13764 prefersDedicatedAllocation ? 1 : 0,
13772 userDataStr.GetString());
// Image counterpart of RecordAllocateMemoryForBuffer: appends a
// "vmaAllocateMemoryForImage" row with the same dedicated-allocation hints.
13776 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13777 const VkMemoryRequirements& vkMemReq,
13778 bool requiresDedicatedAllocation,
13779 bool prefersDedicatedAllocation,
13783 CallParams callParams;
13784 GetBasicParams(callParams);
13786 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13787 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13788 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13790 vkMemReq.alignment,
13791 vkMemReq.memoryTypeBits,
13792 requiresDedicatedAllocation ? 1 : 0,
13793 prefersDedicatedAllocation ? 1 : 0,
13801 userDataStr.GetString());
// Appends a "vmaFreeMemory" row with the allocation handle.
13805 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13808 CallParams callParams;
13809 GetBasicParams(callParams);
13811 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13812 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaFreeMemoryPages" row followed by the list of freed
// allocation pointers.
13817 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13818 uint64_t allocationCount,
13821 CallParams callParams;
13822 GetBasicParams(callParams);
13824 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13825 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13826 PrintPointerList(allocationCount, pAllocations);
13827 fprintf(m_File,
"\n");
// Appends a "vmaResizeAllocation" row with the allocation handle and the
// requested new size.
13831 void VmaRecorder::RecordResizeAllocation(
13832 uint32_t frameIndex,
13834 VkDeviceSize newSize)
13836 CallParams callParams;
13837 GetBasicParams(callParams);
13839 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13840 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13841 allocation, newSize);
// Appends a "vmaSetAllocationUserData" row; user data is rendered via
// UserDataString (string or pointer form).
13845 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13847 const void* pUserData)
13849 CallParams callParams;
13850 GetBasicParams(callParams);
13852 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13853 UserDataString userDataStr(
13856 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13858 userDataStr.GetString());
// Appends a "vmaCreateLostAllocation" row with the allocation handle.
13862 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13865 CallParams callParams;
13866 GetBasicParams(callParams);
13868 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13869 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaMapMemory" row with the allocation handle.
13874 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13877 CallParams callParams;
13878 GetBasicParams(callParams);
13880 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13881 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaUnmapMemory" row with the allocation handle.
13886 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13889 CallParams callParams;
13890 GetBasicParams(callParams);
13892 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13893 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaFlushAllocation" row: allocation handle, offset and size.
13898 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13899 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13901 CallParams callParams;
13902 GetBasicParams(callParams);
13904 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13905 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaInvalidateAllocation" row: allocation handle, offset, size.
13912 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13913 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13915 CallParams callParams;
13916 GetBasicParams(callParams);
13918 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13919 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaCreateBuffer" row: the VkBufferCreateInfo fields, the
// allocation create-info fields, the pool handle, and the user data string.
13926 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13927 const VkBufferCreateInfo& bufCreateInfo,
13931 CallParams callParams;
13932 GetBasicParams(callParams);
13934 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13935 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13936 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13937 bufCreateInfo.flags,
13938 bufCreateInfo.size,
13939 bufCreateInfo.usage,
13940 bufCreateInfo.sharingMode,
13941 allocCreateInfo.
flags,
13942 allocCreateInfo.
usage,
13946 allocCreateInfo.
pool,
13948 userDataStr.GetString());
// Appends a "vmaCreateImage" row: all VkImageCreateInfo fields (type, format,
// extent, mips, layers, samples, tiling, usage, sharing, initial layout),
// the allocation create-info fields, pool handle, and user data string.
13952 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13953 const VkImageCreateInfo& imageCreateInfo,
13957 CallParams callParams;
13958 GetBasicParams(callParams);
13960 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13961 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13962 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13963 imageCreateInfo.flags,
13964 imageCreateInfo.imageType,
13965 imageCreateInfo.format,
13966 imageCreateInfo.extent.width,
13967 imageCreateInfo.extent.height,
13968 imageCreateInfo.extent.depth,
13969 imageCreateInfo.mipLevels,
13970 imageCreateInfo.arrayLayers,
13971 imageCreateInfo.samples,
13972 imageCreateInfo.tiling,
13973 imageCreateInfo.usage,
13974 imageCreateInfo.sharingMode,
13975 imageCreateInfo.initialLayout,
13976 allocCreateInfo.
flags,
13977 allocCreateInfo.
usage,
13981 allocCreateInfo.
pool,
13983 userDataStr.GetString());
// Appends a "vmaDestroyBuffer" row with the allocation handle.
13987 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13990 CallParams callParams;
13991 GetBasicParams(callParams);
13993 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13994 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDestroyImage" row with the allocation handle.
13999 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
14002 CallParams callParams;
14003 GetBasicParams(callParams);
14005 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14006 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaTouchAllocation" row with the allocation handle.
14011 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
14014 CallParams callParams;
14015 GetBasicParams(callParams);
14017 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14018 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaGetAllocationInfo" row with the allocation handle.
14023 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
14026 CallParams callParams;
14027 GetBasicParams(callParams);
14029 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14030 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaMakePoolAllocationsLost" row with the pool handle.
14035 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
14038 CallParams callParams;
14039 GetBasicParams(callParams);
14041 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14042 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDefragmentationBegin" row: flags, then two pointer lists
// (allocations / pools — the PrintPointerList calls fall between the visible
// fprintf calls in the full source), then the budgets and context pointers.
// NOTE(review): interior lines are missing from this excerpt.
14047 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
14051 CallParams callParams;
14052 GetBasicParams(callParams);
14054 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14055 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
14058 fprintf(m_File,
",");
14060 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Appends a "vmaDefragmentationEnd" row with the defragmentation context.
14070 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
14073 CallParams callParams;
14074 GetBasicParams(callParams);
14076 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14077 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// NOTE(review): the constructor signature is missing from this excerpt —
// presumably UserDataString(VmaAllocationCreateFlags, const void* pUserData).
// If user data is a string (per the flags branch not visible here), use it
// directly; otherwise format the raw pointer into the local buffer.
14084 if(pUserData != VMA_NULL)
14088 m_Str = (
const char*)pUserData;
14092 sprintf_s(m_PtrStr,
"%p", pUserData);
// Dumps a "Config,Begin"/"Config,End" section to the recording file:
// physical-device identity and limits, every memory heap and type, the
// dedicated-allocation extension state, and the compile-time VMA_DEBUG_*
// macro values in effect for this build.
14102 void VmaRecorder::WriteConfiguration(
14103 const VkPhysicalDeviceProperties& devProps,
14104 const VkPhysicalDeviceMemoryProperties& memProps,
14105 bool dedicatedAllocationExtensionEnabled)
14107 fprintf(m_File,
"Config,Begin\n");
// Device identity.
14109 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14110 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14111 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14112 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14113 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14114 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits that influence allocation behavior.
14116 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14117 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14118 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps and types as reported by the driver.
14120 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14121 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
14123 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
14124 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
14126 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14127 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
14129 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
14130 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
14133 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Build-time configuration so the replayer can reproduce behavior.
14135 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14136 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14137 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14138 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14139 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14140 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14141 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14142 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14143 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14145 fprintf(m_File,
"Config,End\n");
// Fills the per-call CSV columns: Win32 thread id and seconds elapsed since
// Init(), computed from QueryPerformanceCounter against the stored baseline.
14148 void VmaRecorder::GetBasicParams(CallParams& outParams)
14150 outParams.threadId = GetCurrentThreadId();
14152 LARGE_INTEGER counter;
14153 QueryPerformanceCounter(&counter);
14154 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Writes `count` allocation handles as space-separated %p values (first one
// without a leading space). Assumes the file mutex is already held by the
// calling Record* function.
14157 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
14161 fprintf(m_File,
"%p", pItems[0]);
14162 for(uint64_t i = 1; i < count; ++i)
14164 fprintf(m_File,
" %p", pItems[i]);
// Flushes buffered recording output to disk (body not visible in this
// excerpt — presumably fflush(m_File), gated on the flush-after-call flag).
14169 void VmaRecorder::Flush()
// Pool allocator for VmaAllocation_T objects; 1024 is the items-per-block
// capacity passed to the underlying VmaPoolAllocator.
14177 #endif // #if VMA_RECORDING_ENABLED 14182 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14183 m_Allocator(pAllocationCallbacks, 1024)
// NOTE(review): signature missing from excerpt — this is the body of
// VmaAllocationObjectAllocator::Allocate(); confirm against full source.
// Thread-safe pop from the object pool.
14189 VmaMutexLock mutexLock(m_Mutex);
14190 return m_Allocator.Alloc();
// Thread-safe return of an allocation object to the pool.
14193 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14195 VmaMutexLock mutexLock(m_Mutex);
14196 m_Allocator.Free(hAlloc);
// NOTE(review): the constructor signature and several initializer/body lines
// are missing from this excerpt — presumably
// VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo).
// Visible work: copy create-info fields, zero internal state, fetch device
// properties, apply per-heap size limits, create one VmaBlockVector and one
// dedicated-allocation list per memory type, and optionally start a
// VmaRecorder.
14205 m_hDevice(pCreateInfo->device),
14206 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
// Fall back to empty callbacks so m_AllocationCallbacks is always usable.
14207 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14208 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14209 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14210 m_PreferredLargeHeapBlockSize(0),
14211 m_PhysicalDevice(pCreateInfo->physicalDevice),
14212 m_CurrentFrameIndex(0),
14213 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
14214 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14217 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32 magic values into the debug margin,
// so the margin must be a multiple of sizeof(uint32_t).
14220 if(VMA_DEBUG_DETECT_CORRUPTION)
14223 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Dedicated-allocation flag requested but support compiled out.
14228 #if !(VMA_DEDICATED_ALLOCATION) 14231 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14235 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14236 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14237 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14239 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14240 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE means "no limit" for a heap until pHeapSizeLimit overrides it.
14242 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14244 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
14255 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14256 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14258 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14259 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14260 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14261 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply caller-imposed heap size limits, also shrinking the reported heap
// size so block-size heuristics respect the limit.
14268 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14270 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14271 if(limit != VK_WHOLE_SIZE)
14273 m_HeapSizeLimit[heapIndex] = limit;
14274 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14276 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + one dedicated-allocation list per memory type.
14282 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14284 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14286 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14290 preferredBlockSize,
14293 GetBufferImageGranularity(),
14300 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14307 VkResult res = VK_SUCCESS;
// Optional call recording: only available when VMA_RECORDING_ENABLED.
14312 #if VMA_RECORDING_ENABLED 14313 m_pRecorder = vma_new(
this, VmaRecorder)();
14315 if(res != VK_SUCCESS)
14319 m_pRecorder->WriteConfiguration(
14320 m_PhysicalDeviceProperties,
14322 m_UseKhrDedicatedAllocation);
14323 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14325 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14326 return VK_ERROR_FEATURE_NOT_PRESENT;
// Tears down the allocator: stop/delete the recorder, verify all pools and
// dedicated allocations were released by the user, then delete the per-type
// dedicated-allocation lists and block vectors in reverse order.
14333 VmaAllocator_T::~VmaAllocator_T()
14335 #if VMA_RECORDING_ENABLED 14336 if(m_pRecorder != VMA_NULL)
14338 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14339 vma_delete(
this, m_pRecorder);
// All custom pools must be destroyed before the allocator.
14343 VMA_ASSERT(m_Pools.empty());
14345 for(
size_t i = GetMemoryTypeCount(); i--; )
14347 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14349 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14352 vma_delete(
this, m_pDedicatedAllocations[i]);
14353 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions in three stages:
//   1. When statically linked (VMA_STATIC_VULKAN_FUNCTIONS == 1), take the
//      global Vulkan entry points, fetching the KHR dedicated-allocation
//      pair via vkGetDeviceProcAddr when that extension is in use.
//   2. Overwrite with any non-null pointers the user supplied.
//   3. Assert every required pointer ended up non-null.
14357 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14359 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14360 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14361 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14362 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14363 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14364 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14365 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14366 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14367 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14368 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14369 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14370 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14371 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14372 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14373 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14374 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14375 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14376 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// Extension functions have no static prototypes; resolve at runtime.
14377 #if VMA_DEDICATED_ALLOCATION 14378 if(m_UseKhrDedicatedAllocation)
14380 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14381 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14382 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14383 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User-supplied pointers take precedence over statically-resolved ones.
14385 #endif // #if VMA_DEDICATED_ALLOCATION 14386 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14388 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14389 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14391 if(pVulkanFunctions != VMA_NULL)
14393 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14394 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14395 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14396 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14397 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14398 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14399 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14400 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14401 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14402 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14403 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14404 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14405 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14406 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14407 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14408 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14409 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14410 #if VMA_DEDICATED_ALLOCATION 14411 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14412 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: everything the allocator calls must be resolved.
14416 #undef VMA_COPY_IF_NOT_NULL 14420 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14421 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14422 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14423 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14424 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14425 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14426 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14427 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14428 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14429 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14430 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14431 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14432 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14433 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14434 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14435 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14436 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14437 #if VMA_DEDICATED_ALLOCATION 14438 if(m_UseKhrDedicatedAllocation)
14440 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14441 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14446 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14448 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14449 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14450 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14451 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates from a specific memory type: prefers the type's default block
// vector, but goes straight to a dedicated VkDeviceMemory allocation when
// forced (VMA_DEBUG_ALWAYS_DEDICATED_MEMORY), requested, or when the size
// exceeds half the preferred block size; falls back to dedicated memory if
// the block-vector allocation fails.
// NOTE(review): interior lines (braces, several call arguments, flag checks)
// are missing from this excerpt.
14454 VkResult VmaAllocator_T::AllocateMemoryOfType(
14456 VkDeviceSize alignment,
14457 bool dedicatedAllocation,
14458 VkBuffer dedicatedBuffer,
14459 VkImage dedicatedImage,
14461 uint32_t memTypeIndex,
14462 VmaSuballocationType suballocType,
14463 size_t allocationCount,
14466 VMA_ASSERT(pAllocations != VMA_NULL);
14467 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Mapping flags are meaningless on non-HOST_VISIBLE memory; cleared here.
14473 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14478 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14479 VMA_ASSERT(blockVector);
14481 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
14482 bool preferDedicatedMemory =
14483 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14484 dedicatedAllocation ||
// Large requests waste block space; give them their own VkDeviceMemory.
14486 size > preferredBlockSize / 2;
// Dedicated path (custom pools never use dedicated memory).
14488 if(preferDedicatedMemory &&
14490 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids creating new VkDeviceMemory.
14499 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14503 return AllocateDedicatedMemory(
// Normal path: suballocate from the block vector.
14518 VkResult res = blockVector->Allocate(
14519 m_CurrentFrameIndex.load(),
14526 if(res == VK_SUCCESS)
14534 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block-vector allocation failed: retry as dedicated memory.
14538 res = AllocateDedicatedMemory(
14544 finalCreateInfo.pUserData,
14549 if(res == VK_SUCCESS)
14552 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14558 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates `allocationCount` dedicated VkDeviceMemory objects, one per
// allocation, chaining VkMemoryDedicatedAllocateInfoKHR when the KHR
// dedicated-allocation extension is active and a buffer/image is given.
// On success, registers the allocations in the sorted per-type dedicated
// list; on partial failure, frees everything allocated so far and zeroes
// the output array.
// NOTE(review): interior lines (braces, some arguments) are missing from
// this excerpt.
14565 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14567 VmaSuballocationType suballocType,
14568 uint32_t memTypeIndex,
14570 bool isUserDataString,
14572 VkBuffer dedicatedBuffer,
14573 VkImage dedicatedImage,
14574 size_t allocationCount,
14577 VMA_ASSERT(allocationCount > 0 && pAllocations);
14579 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14580 allocInfo.memoryTypeIndex = memTypeIndex;
14581 allocInfo.allocationSize = size;
// Chain the dedicated-allocation struct for exactly one of buffer/image.
14583 #if VMA_DEDICATED_ALLOCATION 14584 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14585 if(m_UseKhrDedicatedAllocation)
14587 if(dedicatedBuffer != VK_NULL_HANDLE)
14589 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14590 dedicatedAllocInfo.buffer = dedicatedBuffer;
14591 allocInfo.pNext = &dedicatedAllocInfo;
14593 else if(dedicatedImage != VK_NULL_HANDLE)
14595 dedicatedAllocInfo.image = dedicatedImage;
14596 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate each page; stop at the first failure.
14599 #endif // #if VMA_DEDICATED_ALLOCATION 14602 VkResult res = VK_SUCCESS;
14603 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14605 res = AllocateDedicatedMemoryPage(
14613 pAllocations + allocIndex);
14614 if(res != VK_SUCCESS)
14620 if(res == VK_SUCCESS)
// Register all new allocations under the per-type write lock.
14624 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14625 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14626 VMA_ASSERT(pDedicatedAllocations);
14627 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14629 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14633 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure rollback: free the pages allocated before the failure.
14638 while(allocIndex--)
14641 VkDeviceMemory hMemory = currAlloc->GetMemory();
14653 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14655 currAlloc->SetUserData(
this, VMA_NULL);
14657 m_AllocationObjectAllocator.Free(currAlloc);
14660 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory, optionally maps it
// persistently, and wraps it in a VmaAllocation_T initialized as a
// dedicated allocation. On map failure the fresh memory is freed again.
// NOTE(review): interior lines (braces, some vkMapMemory arguments) are
// missing from this excerpt.
14666 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14668 VmaSuballocationType suballocType,
14669 uint32_t memTypeIndex,
14670 const VkMemoryAllocateInfo& allocInfo,
14672 bool isUserDataString,
14676 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14677 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14680 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistent mapping requested (VMA_ALLOCATION_CREATE_MAPPED_BIT path).
14684 void* pMappedData = VMA_NULL;
14687 res = (*m_VulkanFunctions.vkMapMemory)(
14696 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Don't leak the device memory if mapping failed.
14697 FreeVulkanMemory(memTypeIndex, size, hMemory);
14702 *pAllocation = m_AllocationObjectAllocator.Allocate();
14703 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
14704 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14705 (*pAllocation)->SetUserData(
this, pUserData);
// Optionally fill new memory with a debug pattern to catch use of
// uninitialized data.
14706 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14708 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// active, uses vkGetBufferMemoryRequirements2KHR and also reports the
// driver's dedicated-allocation hints; otherwise falls back to the core
// function and reports both hints as false.
14714 void VmaAllocator_T::GetBufferMemoryRequirements(
14716 VkMemoryRequirements& memReq,
14717 bool& requiresDedicatedAllocation,
14718 bool& prefersDedicatedAllocation)
const 14720 #if VMA_DEDICATED_ALLOCATION 14721 if(m_UseKhrDedicatedAllocation)
14723 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14724 memReqInfo.buffer = hBuffer;
14726 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
// Chain the dedicated-requirements struct to receive the hints.
14728 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14729 memReq2.pNext = &memDedicatedReq;
14731 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14733 memReq = memReq2.memoryRequirements;
14734 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14735 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Core (non-extension) fallback path.
14738 #endif // #if VMA_DEDICATED_ALLOCATION 14740 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14741 requiresDedicatedAllocation =
false;
14742 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with a chained dedicated-requirements
// struct when the extension is active; otherwise the core function with
// both hints reported false.
14746 void VmaAllocator_T::GetImageMemoryRequirements(
14748 VkMemoryRequirements& memReq,
14749 bool& requiresDedicatedAllocation,
14750 bool& prefersDedicatedAllocation)
const 14752 #if VMA_DEDICATED_ALLOCATION 14753 if(m_UseKhrDedicatedAllocation)
14755 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14756 memReqInfo.image = hImage;
14758 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14760 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14761 memReq2.pNext = &memDedicatedReq;
14763 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14765 memReq = memReq2.memoryRequirements;
14766 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14767 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Core (non-extension) fallback path.
14770 #endif // #if VMA_DEDICATED_ALLOCATION 14772 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14773 requiresDedicatedAllocation =
false;
14774 prefersDedicatedAllocation =
false;
// Central allocation entry point. Validates the create-info flag combinations,
// then either routes the request to a custom pool's block vector or picks a
// memory type (FindMemoryTypeIndex, presumably — the call line is missing from
// this extract) and calls AllocateMemoryOfType, excluding failed memory types
// from memoryTypeBits and retrying until success or no candidates remain.
// NOTE(review): many condition/brace lines (flag checks, the res declaration,
// the retry loop header) are missing from this extract — confirm control flow
// against the upstream file before editing.
14778 VkResult VmaAllocator_T::AllocateMemory(
14779 const VkMemoryRequirements& vkMemReq,
14780 bool requiresDedicatedAllocation,
14781 bool prefersDedicatedAllocation,
14782 VkBuffer dedicatedBuffer,
14783 VkImage dedicatedImage,
14785 VmaSuballocationType suballocType,
14786 size_t allocationCount,
// Pre-clear the output array so callers see null handles on failure.
14789 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
14791 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
// Zero-size allocations are rejected outright.
14793 if(vkMemReq.size == 0)
14795 return VK_ERROR_VALIDATION_FAILED_EXT;
// Invalid flag combination: DEDICATED_MEMORY + NEVER_ALLOCATE.
14800 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14801 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Invalid flag combination: MAPPED + CAN_BECOME_LOST.
14806 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14807 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Dedicated allocation required by the driver: NEVER_ALLOCATE and custom
// pools are both incompatible with that.
14809 if(requiresDedicatedAllocation)
14813 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14814 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14816 if(createInfo.
pool != VK_NULL_HANDLE)
14818 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14819 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14822 if((createInfo.
pool != VK_NULL_HANDLE) &&
14825 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14826 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool path: alignment is the max of the resource's requirement and
// the memory type's minimum, then the pool's block vector allocates.
14829 if(createInfo.
pool != VK_NULL_HANDLE)
14831 const VkDeviceSize alignmentForPool = VMA_MAX(
14832 vkMemReq.alignment,
14833 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14838 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14843 return createInfo.
pool->m_BlockVector.Allocate(
14844 m_CurrentFrameIndex.load(),
// Default path: choose among the memory types allowed by the requirements.
14855 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14856 uint32_t memTypeIndex = UINT32_MAX;
14858 if(res == VK_SUCCESS)
14860 VkDeviceSize alignmentForMemType = VMA_MAX(
14861 vkMemReq.alignment,
14862 GetMemoryTypeMinAlignment(memTypeIndex));
14864 res = AllocateMemoryOfType(
14866 alignmentForMemType,
14867 requiresDedicatedAllocation || prefersDedicatedAllocation,
14876 if(res == VK_SUCCESS)
// Allocation from this memory type failed: exclude it and retry with
// the remaining candidate types.
14886 memoryTypeBits &= ~(1u << memTypeIndex);
14889 if(res == VK_SUCCESS)
14891 alignmentForMemType = VMA_MAX(
14892 vkMemReq.alignment,
14893 GetMemoryTypeMinAlignment(memTypeIndex));
14895 res = AllocateMemoryOfType(
14897 alignmentForMemType,
14898 requiresDedicatedAllocation || prefersDedicatedAllocation,
14907 if(res == VK_SUCCESS)
// No suitable memory type could satisfy the request.
14917 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations in reverse order. For each non-null, non-lost
// allocation: optionally fills it with the "destroyed" debug pattern, returns
// block allocations to their owning block vector (custom pool's or the default
// per-memory-type one), frees dedicated allocations directly, then clears user
// data and destroys/recycles the allocation object.
14928 void VmaAllocator_T::FreeMemory(
14929 size_t allocationCount,
14932 VMA_ASSERT(pAllocations);
// Reverse iteration over the input array.
14934 for(
size_t allocIndex = allocationCount; allocIndex--; )
14938 if(allocation != VK_NULL_HANDLE)
// TouchAllocation returns false for lost allocations; only live ones
// are filled/freed through the vectors below.
14940 if(TouchAllocation(allocation))
14942 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14944 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED)
14947 switch(allocation->GetType())
14949 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14951 VmaBlockVector* pBlockVector = VMA_NULL;
14952 VmaPool hPool = allocation->GetBlock()->GetParentPool();
14953 if(hPool != VK_NULL_HANDLE)
// Allocation came from a custom pool.
14955 pBlockVector = &hPool->m_BlockVector;
// Otherwise it came from the default block vector of its memory type.
14959 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14960 pBlockVector = m_pBlockVectors[memTypeIndex];
14962 pBlockVector->Free(allocation);
14965 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14966 FreeDedicatedMemory(allocation);
// Final teardown of the allocation object itself.
14973 allocation->SetUserData(
this, VMA_NULL);
14974 allocation->Dtor();
14975 m_AllocationObjectAllocator.Free(allocation);
// Attempts to resize an existing allocation in place.
// Returns VK_ERROR_VALIDATION_FAILED_EXT for size 0 or lost allocations,
// VK_SUCCESS trivially when the size is unchanged, and otherwise delegates to
// the block metadata; dedicated allocations cannot be resized.
14980 VkResult VmaAllocator_T::ResizeAllocation(
14982 VkDeviceSize newSize)
14984 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14986 return VK_ERROR_VALIDATION_FAILED_EXT;
// No-op resize succeeds immediately.
14988 if(newSize == alloc->GetSize())
14993 switch(alloc->GetType())
14995 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// In-place resize of a dedicated VkDeviceMemory is not supported.
14996 return VK_ERROR_FEATURE_NOT_PRESENT;
14997 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Metadata decides whether the suballocation can grow/shrink in place.
14998 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
15000 alloc->ChangeSize(newSize);
15001 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
15006 return VK_ERROR_OUT_OF_POOL_MEMORY;
15010 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates allocator-wide statistics into *pStats: initializes all stat
// slots, then accumulates from default block vectors, custom pools, and
// dedicated allocations, and finally post-processes (averages etc.) every slot.
15014 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Initialize total, per-memory-type and per-heap StatInfo slots.
15017 InitStatInfo(pStats->
total);
15018 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
15020 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
15024 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15026 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15027 VMA_ASSERT(pBlockVector);
15028 pBlockVector->AddStats(pStats);
// Custom pools, under a shared (read) lock on the pool list.
15033 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15034 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15036 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type, each under its own read lock.
15041 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15043 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
15044 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15045 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15046 VMA_ASSERT(pDedicatedAllocVector);
15047 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
15050 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
// Each dedicated allocation contributes to total, its type and its heap.
15051 VmaAddStatInfo(pStats->
total, allocationStatInfo);
15052 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
15053 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/aggregates for every filled slot.
15058 VmaPostprocessCalcStatInfo(pStats->
total);
15059 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
15060 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
15061 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
15062 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (4098 == 0x1002).
15065 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Creates a defragmentation context, registers the requested allocations with
// it and starts the defragmentation pass. If Defragment() finishes immediately
// (anything other than VK_NOT_READY), the context is destroyed right away and
// *pContext is reset to null; otherwise the caller must later call
// DefragmentationEnd to release it.
15067 VkResult VmaAllocator_T::DefragmentationBegin(
15077 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
15078 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
15081 (*pContext)->AddAllocations(
15084 VkResult res = (*pContext)->Defragment(
// VK_NOT_READY means work remains (e.g. GPU copies pending); keep the context.
15089 if(res != VK_NOT_READY)
15091 vma_delete(
this, *pContext);
15092 *pContext = VMA_NULL;
// Destroys a defragmentation context previously returned by
// DefragmentationBegin.
15098 VkResult VmaAllocator_T::DefragmentationEnd(
15101 vma_delete(
this, context);
// Body of VmaAllocator_T::GetAllocationInfo (the signature line is missing
// from this extract). Fills *pAllocationInfo for hAllocation.
// For allocations that can become lost, a compare-exchange loop touches the
// last-use frame index; a lost allocation reports null-ish values (offset 0,
// no device memory), a live one reports its real parameters.
15107 if(hAllocation->CanBecomeLost())
15113 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15114 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Allocation already lost: report placeholder info.
15117 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15121 pAllocationInfo->
offset = 0;
15122 pAllocationInfo->
size = hAllocation->GetSize();
15124 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report real parameters.
15127 else if(localLastUseFrameIndex == localCurrFrameIndex)
15129 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15130 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15131 pAllocationInfo->
offset = hAllocation->GetOffset();
15132 pAllocationInfo->
size = hAllocation->GetSize();
15134 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise: CAS the last-use frame index up to the current frame and retry.
15139 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15141 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path. When stats-string support is on, the frame index is still
// advanced via the same CAS loop (used for recording/statistics).
15148 #if VMA_STATS_STRING_ENABLED 15149 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15150 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15153 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15154 if(localLastUseFrameIndex == localCurrFrameIndex)
15160 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15162 localLastUseFrameIndex = localCurrFrameIndex;
// Report the allocation's current parameters.
15168 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15169 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15170 pAllocationInfo->
offset = hAllocation->GetOffset();
15171 pAllocationInfo->
size = hAllocation->GetSize();
15172 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15173 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks hAllocation as used in the current frame and reports whether it is
// still valid. For lost-capable allocations: returns false if already lost,
// otherwise advances the last-use frame index with a CAS retry loop.
// For ordinary allocations the (missing-from-extract) fallthrough returns true.
15177 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
15180 if(hAllocation->CanBecomeLost())
15182 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15183 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost — caller must treat the allocation as invalid.
15186 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Already touched this frame — nothing to update.
15190 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS the frame index forward; on failure another thread moved it, retry.
15196 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15198 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: with stats-string enabled the frame index is still advanced
// via the same CAS pattern.
15205 #if VMA_STATS_STRING_ENABLED 15206 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15207 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15210 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15211 if(localLastUseFrameIndex == localCurrFrameIndex)
15217 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15219 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature and create-info validation
// lines are missing from this extract). Builds a VmaPool_T with the preferred
// block size computed for its memory type, pre-creates the minimum number of
// blocks, and registers the pool (with a fresh id) in the sorted pool list
// under a write lock. On CreateMinBlocks failure the pool is deleted.
15231 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
15241 return VK_ERROR_INITIALIZATION_FAILED;
15244 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15246 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
15248 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15249 if(res != VK_SUCCESS)
// Roll back: destroy the partially constructed pool.
15251 vma_delete(
this, *pPool);
// Register the new pool; the list is kept sorted for VmaVectorRemoveSorted.
15258 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15259 (*pPool)->SetId(m_NextPoolId++);
15260 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Unregisters a pool from the allocator (write lock) and destroys it.
15266 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15270 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15271 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15272 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15275 vma_delete(
this, pool);
// Body of GetPoolStats (signature missing from extract): delegates to the
// pool's block vector.
15280 pool->m_BlockVector.GetPoolStats(pPoolStats);
15283 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15285 m_CurrentFrameIndex.store(frameIndex);
// Marks lost-capable allocations in the given pool as lost for the current
// frame; delegates to the pool's block vector. pLostAllocationCount (optional,
// presumably may be null — confirm in VmaBlockVector::MakePoolAllocationsLost)
// receives the number of allocations made lost.
15288 void VmaAllocator_T::MakePoolAllocationsLost(
15290 size_t* pLostAllocationCount)
15292 hPool->m_BlockVector.MakePoolAllocationsLost(
15293 m_CurrentFrameIndex.load(),
15294 pLostAllocationCount);
15297 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15299 return hPool->m_BlockVector.CheckCorruption();
// Checks corruption in all default block vectors (filtered by memoryTypeBits)
// and in all custom pools whose memory type is in memoryTypeBits.
// Starts from VK_ERROR_FEATURE_NOT_PRESENT and upgrades to VK_SUCCESS once any
// vector actually supports and passes the check.
15302 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15304 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default block vectors, one per memory type.
15307 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15309 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15311 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15312 VMA_ASSERT(pBlockVector);
15313 VkResult localRes = pBlockVector->CheckCorruption();
// FEATURE_NOT_PRESENT is neutral; SUCCESS upgrades the aggregate result.
15316 case VK_ERROR_FEATURE_NOT_PRESENT:
15319 finalRes = VK_SUCCESS;
// Custom pools, under a read lock on the pool list.
15329 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15330 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15332 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15334 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15337 case VK_ERROR_FEATURE_NOT_PRESENT:
15340 finalRes = VK_SUCCESS;
15352 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15354 *pAllocation = m_AllocationObjectAllocator.Allocate();
15355 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15356 (*pAllocation)->InitLost();
// Calls vkAllocateMemory, honoring the optional per-heap size limit.
// When a heap limit is configured (!= VK_WHOLE_SIZE) the remaining budget is
// checked and decremented under m_HeapSizeLimitMutex; exceeding it fails with
// VK_ERROR_OUT_OF_DEVICE_MEMORY without calling the driver. On success the
// user's pfnAllocate device-memory callback is invoked.
15359 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15361 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Limited-heap path: budget check + allocation under the limit mutex.
15364 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15366 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15367 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15369 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15370 if(res == VK_SUCCESS)
// Consume budget only after the driver call succeeded.
15372 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Request would exceed the configured heap limit.
15377 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited-heap path: call the driver directly.
15382 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user's allocate callback, if installed.
15385 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15387 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Counterpart of AllocateVulkanMemory: invokes the user's free callback,
// frees the device memory, and returns the size to the heap budget.
15393 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15395 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15397 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15400 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Restore budget for limited heaps.
15402 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15403 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15405 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15406 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation into host address space and returns the pointer in
// *ppData. Lost-capable allocations cannot be mapped. Block allocations map
// the whole block (ref-counted) and offset the returned pointer; dedicated
// allocations delegate to the allocation object.
15410 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15412 if(hAllocation->CanBecomeLost())
15414 return VK_ERROR_MEMORY_MAP_FAILED;
15417 switch(hAllocation->GetType())
15419 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15421 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15422 char *pBytes = VMA_NULL;
// Map the owning block (ref count 1); the block tracks nested mappings.
15423 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15424 if(res == VK_SUCCESS)
// Return a pointer adjusted to this suballocation's offset and record
// the map on the allocation for balanced Unmap calls.
15426 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15427 hAllocation->BlockAllocMap();
15431 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15432 return hAllocation->DedicatedAllocMap(
this, ppData);
15435 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (signature line missing from this extract):
// reverses Map for either allocation type.
15441 switch(hAllocation->GetType())
15443 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15445 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15446 hAllocation->BlockAllocUnmap();
15447 pBlock->Unmap(
this, 1);
15450 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15451 hAllocation->DedicatedAllocUnmap(
this);
// Binds hBuffer to the memory backing hAllocation.
// Dedicated allocations bind directly via vkBindBufferMemory; block
// allocations go through the block so the suballocation offset is applied
// (and, presumably, the bind is serialized per block — confirm in
// VmaDeviceMemoryBlock::BindBufferMemory).
15458 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
15460 VkResult res = VK_SUCCESS;
15461 switch(hAllocation->GetType())
15463 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15464 res = GetVulkanFunctions().vkBindBufferMemory(
15467 hAllocation->GetMemory(),
15470 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15472 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15473 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15474 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory.
15483 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
15485 VkResult res = VK_SUCCESS;
15486 switch(hAllocation->GetType())
15488 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15489 res = GetVulkanFunctions().vkBindImageMemory(
15492 hAllocation->GetMemory(),
15495 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15497 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15498 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15499 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a range of the allocation's memory, but only for
// non-coherent memory types (coherent memory needs no explicit flush).
// The range is expanded to nonCoherentAtomSize boundaries as required by the
// Vulkan spec and clamped to allocation/block bounds, then passed to
// vkFlushMappedMemoryRanges or vkInvalidateMappedMemoryRanges.
15508 void VmaAllocator_T::FlushOrInvalidateAllocation(
15510 VkDeviceSize offset, VkDeviceSize size,
15511 VMA_CACHE_OPERATION op)
15513 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
// Coherent types and empty ranges are a no-op.
15514 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15516 const VkDeviceSize allocationSize = hAllocation->GetSize();
15517 VMA_ASSERT(offset <= allocationSize);
15519 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15521 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15522 memRange.memory = hAllocation->GetMemory();
15524 switch(hAllocation->GetType())
15526 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Dedicated: align offset down, then align size up and clamp to the
// allocation's end.
15527 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15528 if(size == VK_WHOLE_SIZE)
15530 memRange.size = allocationSize - memRange.offset;
15534 VMA_ASSERT(offset + size <= allocationSize);
15535 memRange.size = VMA_MIN(
15536 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15537 allocationSize - memRange.offset);
15541 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block: compute the aligned range relative to the suballocation first...
15544 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15545 if(size == VK_WHOLE_SIZE)
15547 size = allocationSize - offset;
15551 VMA_ASSERT(offset + size <= allocationSize);
15553 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// ...then translate by the suballocation's offset within the block and
// clamp to the block's size. The offset is already atom-aligned.
15556 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15557 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15558 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15559 memRange.offset += allocationOffset;
15560 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch the actual cache operation.
15571 case VMA_CACHE_FLUSH:
15572 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15574 case VMA_CACHE_INVALIDATE:
15575 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated allocation: unregisters it from the per-memory-type
// dedicated-allocation list (write lock), then releases the VkDeviceMemory
// via FreeVulkanMemory. Unmapping before free is presumably handled between
// lines 15597 and 15609, which are missing from this extract — confirm
// against upstream.
15584 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15586 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15588 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15590 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15591 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15592 VMA_ASSERT(pDedicatedAllocations);
15593 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15594 VMA_ASSERT(success);
15597 VkDeviceMemory hMemory = allocation->GetMemory();
15609 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15611 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Determines which memory types can host the dummy transfer buffer used by
// GPU defragmentation: creates a temporary buffer (never bound), queries its
// memory requirements, and destroys it. Returns 0 if buffer creation fails.
15614 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const 15616 VkBufferCreateInfo dummyBufCreateInfo;
15617 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
15619 uint32_t memoryTypeBits = 0;
// Temporary buffer, used only for the requirements query.
15622 VkBuffer buf = VK_NULL_HANDLE;
15623 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
15624 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
15625 if(res == VK_SUCCESS)
15628 VkMemoryRequirements memReq;
15629 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
15630 memoryTypeBits = memReq.memoryTypeBits;
// The buffer was never bound; destroy it immediately.
15633 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
15636 return memoryTypeBits;
// Debug helper: fills the allocation's memory with a byte pattern (e.g. the
// "created"/"destroyed" markers). Only acts when debug-initialization is on,
// the allocation cannot become lost, and its memory type is HOST_VISIBLE.
// Maps, memsets, flushes, and unmaps; asserts if mapping fails.
15639 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15641 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15642 !hAllocation->CanBecomeLost() &&
15643 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15645 void* pData = VMA_NULL;
15646 VkResult res = Map(hAllocation, &pData);
15647 if(res == VK_SUCCESS)
15649 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush so the pattern is visible even on non-coherent memory.
15650 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15651 Unmap(hAllocation);
// Mapping failed — the debug feature cannot work for this allocation.
15655 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
15660 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
15662 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
15663 if(memoryTypeBits == UINT32_MAX)
15665 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
15666 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
15668 return memoryTypeBits;
// Writes the detailed allocator map as JSON: a "DedicatedAllocations" object
// (per memory type), a "DefaultPools" object (per non-empty default block
// vector), and a "Pools" object (per custom pool, keyed by pool id).
// Section headers are emitted lazily, only once something non-empty is found.
15671 #if VMA_STATS_STRING_ENABLED 15673 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15675 bool dedicatedAllocationsStarted =
false;
15676 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15678 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15679 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15680 VMA_ASSERT(pDedicatedAllocVector);
15681 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object on first non-empty type.
15683 if(dedicatedAllocationsStarted ==
false)
15685 dedicatedAllocationsStarted =
true;
15686 json.WriteString(
"DedicatedAllocations");
15687 json.BeginObject();
// Key: "Type <memTypeIndex>".
15690 json.BeginString(
"Type ");
15691 json.ContinueString(memTypeIndex);
15696 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15698 json.BeginObject(
true);
15700 hAlloc->PrintParameters(json);
15707 if(dedicatedAllocationsStarted)
// Default per-memory-type block vectors.
15713 bool allocationsStarted =
false;
15714 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15716 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15718 if(allocationsStarted ==
false)
15720 allocationsStarted =
true;
15721 json.WriteString(
"DefaultPools");
15722 json.BeginObject();
15725 json.BeginString(
"Type ");
15726 json.ContinueString(memTypeIndex);
15729 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15732 if(allocationsStarted)
// Custom pools, keyed by pool id, under a read lock.
15740 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15741 const size_t poolCount = m_Pools.size();
15744 json.WriteString(
"Pools");
15745 json.BeginObject();
15746 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15748 json.BeginString();
15749 json.ContinueString(m_Pools[poolIndex]->GetId());
15752 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// ---- Public C API wrappers (vmaCreateAllocator .. vmaFreeStatsString) ----
// NOTE(review): the function signature lines were dropped by this extract;
// each fragment below is the body of the correspondingly-named public entry
// point. Confirm signatures against the header declarations above.
// vmaCreateAllocator: validate args, construct the allocator, run Init.
15759 #endif // #if VMA_STATS_STRING_ENABLED 15768 VMA_ASSERT(pCreateInfo && pAllocator);
15769 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15771 return (*pAllocator)->Init(pCreateInfo);
// vmaDestroyAllocator: copy the callbacks out first — they are needed to
// free the allocator object itself.
15777 if(allocator != VK_NULL_HANDLE)
15779 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15780 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15781 vma_delete(&allocationCallbacks, allocator);
// vmaGetPhysicalDeviceProperties: expose the cached device properties.
15787 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15789 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15790 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties: expose the cached memory properties.
15795 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15797 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15798 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties: bounds-checked lookup of one type's flags.
15803 uint32_t memoryTypeIndex,
15804 VkMemoryPropertyFlags* pFlags)
15806 VMA_ASSERT(allocator && pFlags);
15807 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15808 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex: VMA_FRAME_INDEX_LOST is reserved and rejected.
15813 uint32_t frameIndex)
15815 VMA_ASSERT(allocator);
15816 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15818 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15820 allocator->SetCurrentFrameIndex(frameIndex);
// vmaCalculateStats: forwards to the allocator implementation.
15827 VMA_ASSERT(allocator && pStats);
15828 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15829 allocator->CalculateStats(pStats);
// vmaBuildStatsString: renders the full statistics JSON document into a
// newly allocated, NUL-terminated string returned via *ppStatsString.
15832 #if VMA_STATS_STRING_ENABLED 15836 char** ppStatsString,
15837 VkBool32 detailedMap)
15839 VMA_ASSERT(allocator && ppStatsString);
15840 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15842 VmaStringBuilder sb(allocator);
15844 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15845 json.BeginObject();
15848 allocator->CalculateStats(&stats);
// Global totals first.
15850 json.WriteString(
"Total");
15851 VmaPrintStatInfo(json, stats.
total);
// Then one object per memory heap...
15853 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15855 json.BeginString(
"Heap ");
15856 json.ContinueString(heapIndex);
15858 json.BeginObject();
15860 json.WriteString(
"Size");
15861 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15863 json.WriteString(
"Flags");
15864 json.BeginArray(
true);
15865 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15867 json.WriteString(
"DEVICE_LOCAL");
15873 json.WriteString(
"Stats");
15874 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// ...with each memory type nested under its heap.
15877 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15879 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15881 json.BeginString(
"Type ");
15882 json.ContinueString(typeIndex);
15885 json.BeginObject();
15887 json.WriteString(
"Flags");
15888 json.BeginArray(
true);
15889 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15890 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15892 json.WriteString(
"DEVICE_LOCAL");
15894 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15896 json.WriteString(
"HOST_VISIBLE");
15898 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15900 json.WriteString(
"HOST_COHERENT");
15902 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15904 json.WriteString(
"HOST_CACHED");
15906 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15908 json.WriteString(
"LAZILY_ALLOCATED");
15914 json.WriteString(
"Stats");
15915 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional detailed map of every pool/block/allocation.
15924 if(detailedMap == VK_TRUE)
15926 allocator->PrintDetailedMap(json);
// Copy the builder's contents into a NUL-terminated heap string; the
// caller releases it with vmaFreeStatsString.
15932 const size_t len = sb.GetLength();
15933 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15936 memcpy(pChars, sb.GetData(), len);
15938 pChars[len] =
'\0';
15939 *ppStatsString = pChars;
// vmaFreeStatsString: frees a string produced by vmaBuildStatsString;
// the array length is strlen + 1 for the terminating NUL.
15944 char* pStatsString)
15946 if(pStatsString != VMA_NULL)
15948 VMA_ASSERT(allocator);
15949 size_t len = strlen(pStatsString);
15950 vma_delete_array(allocator, pStatsString, len + 1);
15954 #endif // #if VMA_STATS_STRING_ENABLED 15961 uint32_t memoryTypeBits,
15963 uint32_t* pMemoryTypeIndex)
15965 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15966 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15967 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15974 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15975 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15978 switch(pAllocationCreateInfo->
usage)
15983 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15985 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15989 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15992 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15993 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15995 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15999 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
16000 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
16006 *pMemoryTypeIndex = UINT32_MAX;
16007 uint32_t minCost = UINT32_MAX;
16008 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
16009 memTypeIndex < allocator->GetMemoryTypeCount();
16010 ++memTypeIndex, memTypeBit <<= 1)
16013 if((memTypeBit & memoryTypeBits) != 0)
16015 const VkMemoryPropertyFlags currFlags =
16016 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
16018 if((requiredFlags & ~currFlags) == 0)
16021 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
16023 if(currCost < minCost)
16025 *pMemoryTypeIndex = memTypeIndex;
16030 minCost = currCost;
16035 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
16040 const VkBufferCreateInfo* pBufferCreateInfo,
16042 uint32_t* pMemoryTypeIndex)
16044 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16045 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
16046 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16047 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16049 const VkDevice hDev = allocator->m_hDevice;
16050 VkBuffer hBuffer = VK_NULL_HANDLE;
16051 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
16052 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
16053 if(res == VK_SUCCESS)
16055 VkMemoryRequirements memReq = {};
16056 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
16057 hDev, hBuffer, &memReq);
16061 memReq.memoryTypeBits,
16062 pAllocationCreateInfo,
16065 allocator->GetVulkanFunctions().vkDestroyBuffer(
16066 hDev, hBuffer, allocator->GetAllocationCallbacks());
16073 const VkImageCreateInfo* pImageCreateInfo,
16075 uint32_t* pMemoryTypeIndex)
16077 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16078 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
16079 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16080 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16082 const VkDevice hDev = allocator->m_hDevice;
16083 VkImage hImage = VK_NULL_HANDLE;
16084 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
16085 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
16086 if(res == VK_SUCCESS)
16088 VkMemoryRequirements memReq = {};
16089 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
16090 hDev, hImage, &memReq);
16094 memReq.memoryTypeBits,
16095 pAllocationCreateInfo,
16098 allocator->GetVulkanFunctions().vkDestroyImage(
16099 hDev, hImage, allocator->GetAllocationCallbacks());
16109 VMA_ASSERT(allocator && pCreateInfo && pPool);
16111 VMA_DEBUG_LOG(
"vmaCreatePool");
16113 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16115 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
16117 #if VMA_RECORDING_ENABLED 16118 if(allocator->GetRecorder() != VMA_NULL)
16120 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
16131 VMA_ASSERT(allocator);
16133 if(pool == VK_NULL_HANDLE)
16138 VMA_DEBUG_LOG(
"vmaDestroyPool");
16140 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16142 #if VMA_RECORDING_ENABLED 16143 if(allocator->GetRecorder() != VMA_NULL)
16145 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
16149 allocator->DestroyPool(pool);
16157 VMA_ASSERT(allocator && pool && pPoolStats);
16159 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16161 allocator->GetPoolStats(pool, pPoolStats);
16167 size_t* pLostAllocationCount)
16169 VMA_ASSERT(allocator && pool);
16171 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16173 #if VMA_RECORDING_ENABLED 16174 if(allocator->GetRecorder() != VMA_NULL)
16176 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
16180 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
16185 VMA_ASSERT(allocator && pool);
16187 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16189 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
16191 return allocator->CheckPoolCorruption(pool);
16196 const VkMemoryRequirements* pVkMemoryRequirements,
16201 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16203 VMA_DEBUG_LOG(
"vmaAllocateMemory");
16205 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16207 VkResult result = allocator->AllocateMemory(
16208 *pVkMemoryRequirements,
16214 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16218 #if VMA_RECORDING_ENABLED 16219 if(allocator->GetRecorder() != VMA_NULL)
16221 allocator->GetRecorder()->RecordAllocateMemory(
16222 allocator->GetCurrentFrameIndex(),
16223 *pVkMemoryRequirements,
16229 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16231 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16239 const VkMemoryRequirements* pVkMemoryRequirements,
16241 size_t allocationCount,
16245 if(allocationCount == 0)
16250 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16252 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
16254 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16256 VkResult result = allocator->AllocateMemory(
16257 *pVkMemoryRequirements,
16263 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16267 #if VMA_RECORDING_ENABLED 16268 if(allocator->GetRecorder() != VMA_NULL)
16270 allocator->GetRecorder()->RecordAllocateMemoryPages(
16271 allocator->GetCurrentFrameIndex(),
16272 *pVkMemoryRequirements,
16274 (uint64_t)allocationCount,
16279 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16281 for(
size_t i = 0; i < allocationCount; ++i)
16283 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16297 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16299 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16301 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16303 VkMemoryRequirements vkMemReq = {};
16304 bool requiresDedicatedAllocation =
false;
16305 bool prefersDedicatedAllocation =
false;
16306 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16307 requiresDedicatedAllocation,
16308 prefersDedicatedAllocation);
16310 VkResult result = allocator->AllocateMemory(
16312 requiresDedicatedAllocation,
16313 prefersDedicatedAllocation,
16317 VMA_SUBALLOCATION_TYPE_BUFFER,
16321 #if VMA_RECORDING_ENABLED 16322 if(allocator->GetRecorder() != VMA_NULL)
16324 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16325 allocator->GetCurrentFrameIndex(),
16327 requiresDedicatedAllocation,
16328 prefersDedicatedAllocation,
16334 if(pAllocationInfo && result == VK_SUCCESS)
16336 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16349 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16351 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16353 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16355 VkMemoryRequirements vkMemReq = {};
16356 bool requiresDedicatedAllocation =
false;
16357 bool prefersDedicatedAllocation =
false;
16358 allocator->GetImageMemoryRequirements(image, vkMemReq,
16359 requiresDedicatedAllocation, prefersDedicatedAllocation);
16361 VkResult result = allocator->AllocateMemory(
16363 requiresDedicatedAllocation,
16364 prefersDedicatedAllocation,
16368 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16372 #if VMA_RECORDING_ENABLED 16373 if(allocator->GetRecorder() != VMA_NULL)
16375 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16376 allocator->GetCurrentFrameIndex(),
16378 requiresDedicatedAllocation,
16379 prefersDedicatedAllocation,
16385 if(pAllocationInfo && result == VK_SUCCESS)
16387 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16397 VMA_ASSERT(allocator);
16399 if(allocation == VK_NULL_HANDLE)
16404 VMA_DEBUG_LOG(
"vmaFreeMemory");
16406 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16408 #if VMA_RECORDING_ENABLED 16409 if(allocator->GetRecorder() != VMA_NULL)
16411 allocator->GetRecorder()->RecordFreeMemory(
16412 allocator->GetCurrentFrameIndex(),
16417 allocator->FreeMemory(
16424 size_t allocationCount,
16427 if(allocationCount == 0)
16432 VMA_ASSERT(allocator);
16434 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16436 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16438 #if VMA_RECORDING_ENABLED 16439 if(allocator->GetRecorder() != VMA_NULL)
16441 allocator->GetRecorder()->RecordFreeMemoryPages(
16442 allocator->GetCurrentFrameIndex(),
16443 (uint64_t)allocationCount,
16448 allocator->FreeMemory(allocationCount, pAllocations);
16454 VkDeviceSize newSize)
16456 VMA_ASSERT(allocator && allocation);
16458 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16460 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16462 #if VMA_RECORDING_ENABLED 16463 if(allocator->GetRecorder() != VMA_NULL)
16465 allocator->GetRecorder()->RecordResizeAllocation(
16466 allocator->GetCurrentFrameIndex(),
16472 return allocator->ResizeAllocation(allocation, newSize);
16480 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16482 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16484 #if VMA_RECORDING_ENABLED 16485 if(allocator->GetRecorder() != VMA_NULL)
16487 allocator->GetRecorder()->RecordGetAllocationInfo(
16488 allocator->GetCurrentFrameIndex(),
16493 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16500 VMA_ASSERT(allocator && allocation);
16502 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16504 #if VMA_RECORDING_ENABLED 16505 if(allocator->GetRecorder() != VMA_NULL)
16507 allocator->GetRecorder()->RecordTouchAllocation(
16508 allocator->GetCurrentFrameIndex(),
16513 return allocator->TouchAllocation(allocation);
16521 VMA_ASSERT(allocator && allocation);
16523 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16525 allocation->SetUserData(allocator, pUserData);
16527 #if VMA_RECORDING_ENABLED 16528 if(allocator->GetRecorder() != VMA_NULL)
16530 allocator->GetRecorder()->RecordSetAllocationUserData(
16531 allocator->GetCurrentFrameIndex(),
16542 VMA_ASSERT(allocator && pAllocation);
16544 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16546 allocator->CreateLostAllocation(pAllocation);
16548 #if VMA_RECORDING_ENABLED 16549 if(allocator->GetRecorder() != VMA_NULL)
16551 allocator->GetRecorder()->RecordCreateLostAllocation(
16552 allocator->GetCurrentFrameIndex(),
16563 VMA_ASSERT(allocator && allocation && ppData);
16565 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16567 VkResult res = allocator->Map(allocation, ppData);
16569 #if VMA_RECORDING_ENABLED 16570 if(allocator->GetRecorder() != VMA_NULL)
16572 allocator->GetRecorder()->RecordMapMemory(
16573 allocator->GetCurrentFrameIndex(),
16585 VMA_ASSERT(allocator && allocation);
16587 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16589 #if VMA_RECORDING_ENABLED 16590 if(allocator->GetRecorder() != VMA_NULL)
16592 allocator->GetRecorder()->RecordUnmapMemory(
16593 allocator->GetCurrentFrameIndex(),
16598 allocator->Unmap(allocation);
16603 VMA_ASSERT(allocator && allocation);
16605 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16607 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16609 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16611 #if VMA_RECORDING_ENABLED 16612 if(allocator->GetRecorder() != VMA_NULL)
16614 allocator->GetRecorder()->RecordFlushAllocation(
16615 allocator->GetCurrentFrameIndex(),
16616 allocation, offset, size);
16623 VMA_ASSERT(allocator && allocation);
16625 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16627 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16629 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16631 #if VMA_RECORDING_ENABLED 16632 if(allocator->GetRecorder() != VMA_NULL)
16634 allocator->GetRecorder()->RecordInvalidateAllocation(
16635 allocator->GetCurrentFrameIndex(),
16636 allocation, offset, size);
16643 VMA_ASSERT(allocator);
16645 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16647 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16649 return allocator->CheckCorruption(memoryTypeBits);
16655 size_t allocationCount,
16656 VkBool32* pAllocationsChanged,
16666 if(pDefragmentationInfo != VMA_NULL)
16680 if(res == VK_NOT_READY)
16693 VMA_ASSERT(allocator && pInfo && pContext);
16704 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16706 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16708 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16710 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16712 #if VMA_RECORDING_ENABLED 16713 if(allocator->GetRecorder() != VMA_NULL)
16715 allocator->GetRecorder()->RecordDefragmentationBegin(
16716 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16727 VMA_ASSERT(allocator);
16729 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16731 if(context != VK_NULL_HANDLE)
16733 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16735 #if VMA_RECORDING_ENABLED 16736 if(allocator->GetRecorder() != VMA_NULL)
16738 allocator->GetRecorder()->RecordDefragmentationEnd(
16739 allocator->GetCurrentFrameIndex(), context);
16743 return allocator->DefragmentationEnd(context);
16756 VMA_ASSERT(allocator && allocation && buffer);
16758 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16760 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16762 return allocator->BindBufferMemory(allocation, buffer);
16770 VMA_ASSERT(allocator && allocation && image);
16772 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16774 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16776 return allocator->BindImageMemory(allocation, image);
16781 const VkBufferCreateInfo* pBufferCreateInfo,
16787 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16789 if(pBufferCreateInfo->size == 0)
16791 return VK_ERROR_VALIDATION_FAILED_EXT;
16794 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16796 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16798 *pBuffer = VK_NULL_HANDLE;
16799 *pAllocation = VK_NULL_HANDLE;
16802 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16803 allocator->m_hDevice,
16805 allocator->GetAllocationCallbacks(),
16810 VkMemoryRequirements vkMemReq = {};
16811 bool requiresDedicatedAllocation =
false;
16812 bool prefersDedicatedAllocation =
false;
16813 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16814 requiresDedicatedAllocation, prefersDedicatedAllocation);
16818 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16820 VMA_ASSERT(vkMemReq.alignment %
16821 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16823 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16825 VMA_ASSERT(vkMemReq.alignment %
16826 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16828 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16830 VMA_ASSERT(vkMemReq.alignment %
16831 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16835 res = allocator->AllocateMemory(
16837 requiresDedicatedAllocation,
16838 prefersDedicatedAllocation,
16841 *pAllocationCreateInfo,
16842 VMA_SUBALLOCATION_TYPE_BUFFER,
16846 #if VMA_RECORDING_ENABLED 16847 if(allocator->GetRecorder() != VMA_NULL)
16849 allocator->GetRecorder()->RecordCreateBuffer(
16850 allocator->GetCurrentFrameIndex(),
16851 *pBufferCreateInfo,
16852 *pAllocationCreateInfo,
16862 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16867 #if VMA_STATS_STRING_ENABLED 16868 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16870 if(pAllocationInfo != VMA_NULL)
16872 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16877 allocator->FreeMemory(
16880 *pAllocation = VK_NULL_HANDLE;
16881 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16882 *pBuffer = VK_NULL_HANDLE;
16885 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16886 *pBuffer = VK_NULL_HANDLE;
16897 VMA_ASSERT(allocator);
16899 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16904 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16906 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16908 #if VMA_RECORDING_ENABLED 16909 if(allocator->GetRecorder() != VMA_NULL)
16911 allocator->GetRecorder()->RecordDestroyBuffer(
16912 allocator->GetCurrentFrameIndex(),
16917 if(buffer != VK_NULL_HANDLE)
16919 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16922 if(allocation != VK_NULL_HANDLE)
16924 allocator->FreeMemory(
16932 const VkImageCreateInfo* pImageCreateInfo,
16938 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16940 if(pImageCreateInfo->extent.width == 0 ||
16941 pImageCreateInfo->extent.height == 0 ||
16942 pImageCreateInfo->extent.depth == 0 ||
16943 pImageCreateInfo->mipLevels == 0 ||
16944 pImageCreateInfo->arrayLayers == 0)
16946 return VK_ERROR_VALIDATION_FAILED_EXT;
16949 VMA_DEBUG_LOG(
"vmaCreateImage");
16951 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16953 *pImage = VK_NULL_HANDLE;
16954 *pAllocation = VK_NULL_HANDLE;
16957 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16958 allocator->m_hDevice,
16960 allocator->GetAllocationCallbacks(),
16964 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16965 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16966 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16969 VkMemoryRequirements vkMemReq = {};
16970 bool requiresDedicatedAllocation =
false;
16971 bool prefersDedicatedAllocation =
false;
16972 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16973 requiresDedicatedAllocation, prefersDedicatedAllocation);
16975 res = allocator->AllocateMemory(
16977 requiresDedicatedAllocation,
16978 prefersDedicatedAllocation,
16981 *pAllocationCreateInfo,
16986 #if VMA_RECORDING_ENABLED 16987 if(allocator->GetRecorder() != VMA_NULL)
16989 allocator->GetRecorder()->RecordCreateImage(
16990 allocator->GetCurrentFrameIndex(),
16992 *pAllocationCreateInfo,
17002 res = allocator->BindImageMemory(*pAllocation, *pImage);
17007 #if VMA_STATS_STRING_ENABLED 17008 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
17010 if(pAllocationInfo != VMA_NULL)
17012 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17017 allocator->FreeMemory(
17020 *pAllocation = VK_NULL_HANDLE;
17021 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17022 *pImage = VK_NULL_HANDLE;
17025 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17026 *pImage = VK_NULL_HANDLE;
17037 VMA_ASSERT(allocator);
17039 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
17044 VMA_DEBUG_LOG(
"vmaDestroyImage");
17046 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17048 #if VMA_RECORDING_ENABLED 17049 if(allocator->GetRecorder() != VMA_NULL)
17051 allocator->GetRecorder()->RecordDestroyImage(
17052 allocator->GetCurrentFrameIndex(),
17057 if(image != VK_NULL_HANDLE)
17059 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
17061 if(allocation != VK_NULL_HANDLE)
17063 allocator->FreeMemory(
17069 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1786
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2086
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1822
-
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2875
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1844
+
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2897
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
Definition: vk_mem_alloc.h:1796
-
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2395
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1776
+
Definition: vk_mem_alloc.h:1818
+
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2417
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1798
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:2026
-
Definition: vk_mem_alloc.h:2130
-
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2828
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1768
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2495
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1819
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2911
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2284
-
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1663
+
Definition: vk_mem_alloc.h:2048
+
Definition: vk_mem_alloc.h:2152
+
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2850
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1790
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2517
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1841
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2933
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2306
+
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1685
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2376
-
Definition: vk_mem_alloc.h:2101
-
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2831
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1757
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2183
-
Definition: vk_mem_alloc.h:2053
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1831
-
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2312
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2398
+
Definition: vk_mem_alloc.h:2123
+
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2853
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1779
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2205
+
Definition: vk_mem_alloc.h:2075
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1853
+
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2334
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1885
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1816
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1907
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1838
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2057
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2079
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1957
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1773
-
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2865
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1956
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2915
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1979
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1795
+
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2887
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1978
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2937
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1848
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1966
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2923
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2167
-
Definition: vk_mem_alloc.h:2125
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2906
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1774
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1699
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1870
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1988
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2945
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2189
+
Definition: vk_mem_alloc.h:2147
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2928
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1796
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1721
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1825
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1847
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2326
-
Definition: vk_mem_alloc.h:2320
-
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1780
-
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1892
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2505
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2348
+
Definition: vk_mem_alloc.h:2342
+
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1802
+
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1914
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2527
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1769
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1791
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
-
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1794
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2204
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2346
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2382
+
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1816
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2226
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2368
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2404
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1755
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2329
+
Definition: vk_mem_alloc.h:1777
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2351
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2880
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:2004
+
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2902
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:2026
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2840
+
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2862
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2901
+
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2923
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2919
-
Definition: vk_mem_alloc.h:2043
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2191
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1772
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2941
+
Definition: vk_mem_alloc.h:2065
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2213
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1794
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1962
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1705
-
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2819
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1984
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1727
+
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2841
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
Definition: vk_mem_alloc.h:2817
-
Definition: vk_mem_alloc.h:2151
-
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2846
+
Definition: vk_mem_alloc.h:2839
+
Definition: vk_mem_alloc.h:2173
+
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2868
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1726
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1748
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1798
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1731
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2921
+
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1820
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1753
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2943
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2178
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2392
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2200
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2414
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1765
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1945
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2341
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1718
-
Definition: vk_mem_alloc.h:2316
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1787
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1967
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2363
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1740
+
Definition: vk_mem_alloc.h:2338
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2108
+
Definition: vk_mem_alloc.h:2130
Represents Opaque object that represents started defragmentation process.
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1958
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1722
-
Definition: vk_mem_alloc.h:2141
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2332
-
Definition: vk_mem_alloc.h:2052
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1771
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1980
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1744
+
Definition: vk_mem_alloc.h:2163
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2354
+
Definition: vk_mem_alloc.h:2074
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1793
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2173
-
Definition: vk_mem_alloc.h:2164
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2195
+
Definition: vk_mem_alloc.h:2186
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1948
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1767
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2354
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1834
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2385
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2162
-
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2870
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2197
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1970
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1789
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2376
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1856
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2407
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2184
+
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2892
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2219
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1873
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1964
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2088
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1957
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1895
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1986
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2110
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1979
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1778
-
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1804
-
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2816
-
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2894
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1720
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1777
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1800
+
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1826
+
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2838
+
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2916
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1742
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1799
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2368
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1770
-
Definition: vk_mem_alloc.h:2119
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2390
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1792
+
Definition: vk_mem_alloc.h:2141
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1812
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2519
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1828
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1957
+
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1834
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2541
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1850
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1979
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1954
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1976
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2373
-
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2825
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2395
+
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2847
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
-
Definition: vk_mem_alloc.h:2134
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2500
-
Definition: vk_mem_alloc.h:2148
-
Definition: vk_mem_alloc.h:2160
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2917
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1763
+
Definition: vk_mem_alloc.h:2156
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2522
+
Definition: vk_mem_alloc.h:2170
+
Definition: vk_mem_alloc.h:2182
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2939
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1785
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1952
-
Definition: vk_mem_alloc.h:2009
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2322
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1974
+
Definition: vk_mem_alloc.h:2031
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2344
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1801
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1950
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1775
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1779
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2075
-
Definition: vk_mem_alloc.h:2155
-
Definition: vk_mem_alloc.h:2036
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2514
+
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1823
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1972
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1797
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1801
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2097
+
Definition: vk_mem_alloc.h:2177
+
Definition: vk_mem_alloc.h:2058
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2536
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1753
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1775
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1766
-
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2301
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1788
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2323
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2481
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2503
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2145
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2266
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1958
+
Definition: vk_mem_alloc.h:2167
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2288
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1980
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
-
Definition: vk_mem_alloc.h:2114
-
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1788
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1965
+
Definition: vk_mem_alloc.h:2136
+
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1810
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1987
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2379
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1958
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2401
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1980
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
-
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2885
+
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2907
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2486
-
uint32_t poolCount
Number of pools in pPools array.
Definition: vk_mem_alloc.h:2849
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2508
+
uint32_t poolCount
Number of pools in pPools array.
Definition: vk_mem_alloc.h:2871