23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1397 #include <vulkan/vulkan.h> 1399 #if !defined(VMA_DEDICATED_ALLOCATION) 1400 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1401 #define VMA_DEDICATED_ALLOCATION 1 1403 #define VMA_DEDICATED_ALLOCATION 0 1421 uint32_t memoryType,
1422 VkDeviceMemory memory,
1427 uint32_t memoryType,
1428 VkDeviceMemory memory,
1500 #if VMA_DEDICATED_ALLOCATION 1501 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1502 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1523 #ifndef VMA_RECORDING_ENABLED 1525 #define VMA_RECORDING_ENABLED 1 1527 #define VMA_RECORDING_ENABLED 0 1640 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1648 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1658 uint32_t memoryTypeIndex,
1659 VkMemoryPropertyFlags* pFlags);
1671 uint32_t frameIndex);
1704 #define VMA_STATS_STRING_ENABLED 1 1706 #if VMA_STATS_STRING_ENABLED 1713 char** ppStatsString,
1714 VkBool32 detailedMap);
1718 char* pStatsString);
1720 #endif // #if VMA_STATS_STRING_ENABLED 1949 uint32_t memoryTypeBits,
1951 uint32_t* pMemoryTypeIndex);
1967 const VkBufferCreateInfo* pBufferCreateInfo,
1969 uint32_t* pMemoryTypeIndex);
1985 const VkImageCreateInfo* pImageCreateInfo,
1987 uint32_t* pMemoryTypeIndex);
2159 size_t* pLostAllocationCount);
2258 const VkMemoryRequirements* pVkMemoryRequirements,
2568 size_t allocationCount,
2569 VkBool32* pAllocationsChanged,
2635 const VkBufferCreateInfo* pBufferCreateInfo,
2660 const VkImageCreateInfo* pImageCreateInfo,
2686 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2689 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2690 #define VMA_IMPLEMENTATION 2693 #ifdef VMA_IMPLEMENTATION 2694 #undef VMA_IMPLEMENTATION 2716 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2717 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2729 #if VMA_USE_STL_CONTAINERS 2730 #define VMA_USE_STL_VECTOR 1 2731 #define VMA_USE_STL_UNORDERED_MAP 1 2732 #define VMA_USE_STL_LIST 1 2735 #if VMA_USE_STL_VECTOR 2739 #if VMA_USE_STL_UNORDERED_MAP 2740 #include <unordered_map> 2743 #if VMA_USE_STL_LIST 2752 #include <algorithm> 2758 #define VMA_NULL nullptr 2761 #if defined(__APPLE__) || defined(__ANDROID__) 2763 void *aligned_alloc(
size_t alignment,
    size_t size)
{
    // Emulation of C11 aligned_alloc() via posix_memalign() for platforms
    // that lack it (Apple, Android). posix_memalign requires the alignment
    // to be at least sizeof(void*) and a power of two, so clamp it up first.
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    // Allocation failed (or alignment invalid): mirror aligned_alloc's
    // failure contract by returning null.
    return VMA_NULL;
}
2786 #define VMA_ASSERT(expr) assert(expr) 2788 #define VMA_ASSERT(expr) 2794 #ifndef VMA_HEAVY_ASSERT 2796 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2798 #define VMA_HEAVY_ASSERT(expr) 2802 #ifndef VMA_ALIGN_OF 2803 #define VMA_ALIGN_OF(type) (__alignof(type)) 2806 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2808 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2810 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2814 #ifndef VMA_SYSTEM_FREE 2816 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2818 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2823 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2827 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2831 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2835 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2838 #ifndef VMA_DEBUG_LOG 2839 #define VMA_DEBUG_LOG(format, ...) 2849 #if VMA_STATS_STRING_ENABLED 2850 static inline void VmaUint32ToStr(
char* outStr, size_t strLen, uint32_t num)
{
    // Formats num as decimal text into outStr (capacity strLen bytes,
    // always NUL-terminated by snprintf).
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
// Formats num as decimal text into outStr (capacity strLen, NUL-terminated).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}

// Formats ptr using the implementation-defined "%p" representation into
// outStr (capacity strLen, NUL-terminated).
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
2870 void Lock() { m_Mutex.lock(); }
2871 void Unlock() { m_Mutex.unlock(); }
2875 #define VMA_MUTEX VmaMutex 2886 #ifndef VMA_ATOMIC_UINT32 2887 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2890 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2895 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2898 #ifndef VMA_DEBUG_ALIGNMENT 2903 #define VMA_DEBUG_ALIGNMENT (1) 2906 #ifndef VMA_DEBUG_MARGIN 2911 #define VMA_DEBUG_MARGIN (0) 2914 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2919 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2922 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2928 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2931 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2936 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2939 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2944 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2947 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2948 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2952 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2953 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2957 #ifndef VMA_CLASS_NO_COPY 2958 #define VMA_CLASS_NO_COPY(className) \ 2960 className(const className&) = delete; \ 2961 className& operator=(const className&) = delete; 2964 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2967 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
2969 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
2970 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
2976 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2977 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of set bits in v (population count), computed with the
// classic parallel bit-summing technique — no hardware popcount dependency.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);      // sum adjacent bit pairs
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333); // sum nibbles
    c = ((c >> 4) + c) & 0x0F0F0F0F;                // sum bytes
    c = ((c >> 8) + c) & 0x00FF00FF;                // sum 16-bit halves
    c = ((c >> 16) + c) & 0x0000FFFF;               // final 32-bit sum
    return c;
}
// Rounds val up to the nearest multiple of align. Requires align > 0;
// align does not have to be a power of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Rounds val down to the nearest multiple of align. Requires align > 0.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with rounding to nearest integer (ties round up). Requires y > 0.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true if x is a power of two. NOTE: also returns true for x == 0
// (x & (x-1) is 0 then) — callers rely on this only for nonzero inputs.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}

// Returns the smallest power of two greater than or equal to v.
// Bit-smearing: propagate the highest set bit into all lower positions of
// (v-1), then add one. For v == 0 the result is 0.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns the largest power of two less than or equal to v (for v > 0).
// Smears the highest set bit downward, then isolates it with v ^ (v >> 1).
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
3071 static inline bool VmaStrIsEmpty(
const char* pStr)
3073 return pStr == VMA_NULL || *pStr ==
'\0';
3076 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition used by VmaQuickSort: the last element of [beg, end)
// is the pivot. After the call, elements < pivot precede it. Returns the
// iterator where the pivot ended up.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
3117 template<
typename Iterator,
typename Compare>
3118 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3122 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3123 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3124 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3128 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3130 #endif // #ifndef VMA_SORT 3139 static inline bool VmaBlocksOnSamePage(
3140 VkDeviceSize resourceAOffset,
3141 VkDeviceSize resourceASize,
3142 VkDeviceSize resourceBOffset,
3143 VkDeviceSize pageSize)
3145 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3146 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3147 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3148 VkDeviceSize resourceBStart = resourceBOffset;
3149 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3150 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation inside a memory block. Used by
// VmaIsBufferImageGranularityConflict to decide whether two neighboring
// suballocations need bufferImageGranularity separation.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // resource kind not known
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image, tiling not known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // forces 32-bit storage
};
3170 static inline bool VmaIsBufferImageGranularityConflict(
3171 VmaSuballocationType suballocType1,
3172 VmaSuballocationType suballocType2)
3174 if(suballocType1 > suballocType2)
3176 VMA_SWAP(suballocType1, suballocType2);
3179 switch(suballocType1)
3181 case VMA_SUBALLOCATION_TYPE_FREE:
3183 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3185 case VMA_SUBALLOCATION_TYPE_BUFFER:
3187 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3188 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3189 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3191 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3192 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3193 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3194 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3196 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3197 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3205 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3207 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3208 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3212 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3214 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3219 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3221 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3222 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3226 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3228 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3240 VMA_CLASS_NO_COPY(VmaMutexLock)
3242 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3243 m_pMutex(useMutex ? &mutex : VMA_NULL)
3260 VMA_MUTEX* m_pMutex;
3263 #if VMA_DEBUG_GLOBAL_MUTEX 3264 static VMA_MUTEX gDebugGlobalMutex;
3265 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3267 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3271 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search, equivalent to std::lower_bound: returns an iterator to the
// first element of [beg, end) that is NOT less than key, or end if all
// elements are less. cmp(a, b) must implement a strict weak ordering "a < b".
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3304 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3306 if((pAllocationCallbacks != VMA_NULL) &&
3307 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3309 return (*pAllocationCallbacks->pfnAllocation)(
3310 pAllocationCallbacks->pUserData,
3313 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3317 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3321 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3323 if((pAllocationCallbacks != VMA_NULL) &&
3324 (pAllocationCallbacks->pfnFree != VMA_NULL))
3326 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3330 VMA_SYSTEM_FREE(ptr);
3334 template<
typename T>
3335 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3337 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3340 template<
typename T>
3341 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3343 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3346 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3348 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3350 template<
typename T>
3351 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3354 VmaFree(pAllocationCallbacks, ptr);
3357 template<
typename T>
3358 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3362 for(
size_t i = count; i--; )
3366 VmaFree(pAllocationCallbacks, ptr);
3371 template<
typename T>
3372 class VmaStlAllocator
3375 const VkAllocationCallbacks*
const m_pCallbacks;
3376 typedef T value_type;
3378 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3379 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3381 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3382 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3384 template<
typename U>
3385 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3387 return m_pCallbacks == rhs.m_pCallbacks;
3389 template<
typename U>
3390 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3392 return m_pCallbacks != rhs.m_pCallbacks;
3395 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3398 #if VMA_USE_STL_VECTOR 3400 #define VmaVector std::vector 3402 template<
typename T,
typename allocatorT>
3403 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3405 vec.insert(vec.begin() + index, item);
// Removes the element at index, shifting later elements down
// (std::vector backend).
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
3414 #else // #if VMA_USE_STL_VECTOR 3419 template<
typename T,
typename AllocatorT>
3423 typedef T value_type;
3425 VmaVector(
const AllocatorT& allocator) :
3426 m_Allocator(allocator),
3433 VmaVector(
size_t count,
const AllocatorT& allocator) :
3434 m_Allocator(allocator),
3435 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3441 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3442 m_Allocator(src.m_Allocator),
3443 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3444 m_Count(src.m_Count),
3445 m_Capacity(src.m_Count)
3449 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3455 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3458 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3462 resize(rhs.m_Count);
3465 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3471 bool empty()
const {
return m_Count == 0; }
3472 size_t size()
const {
return m_Count; }
3473 T* data() {
return m_pArray; }
3474 const T* data()
const {
return m_pArray; }
3476 T& operator[](
size_t index)
3478 VMA_HEAVY_ASSERT(index < m_Count);
3479 return m_pArray[index];
3481 const T& operator[](
size_t index)
const 3483 VMA_HEAVY_ASSERT(index < m_Count);
3484 return m_pArray[index];
3489 VMA_HEAVY_ASSERT(m_Count > 0);
3492 const T& front()
const 3494 VMA_HEAVY_ASSERT(m_Count > 0);
3499 VMA_HEAVY_ASSERT(m_Count > 0);
3500 return m_pArray[m_Count - 1];
3502 const T& back()
const 3504 VMA_HEAVY_ASSERT(m_Count > 0);
3505 return m_pArray[m_Count - 1];
3508 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3510 newCapacity = VMA_MAX(newCapacity, m_Count);
3512 if((newCapacity < m_Capacity) && !freeMemory)
3514 newCapacity = m_Capacity;
3517 if(newCapacity != m_Capacity)
3519 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3522 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3524 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3525 m_Capacity = newCapacity;
3526 m_pArray = newArray;
3530 void resize(
size_t newCount,
bool freeMemory =
false)
3532 size_t newCapacity = m_Capacity;
3533 if(newCount > m_Capacity)
3535 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3539 newCapacity = newCount;
3542 if(newCapacity != m_Capacity)
3544 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3545 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3546 if(elementsToCopy != 0)
3548 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3550 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3551 m_Capacity = newCapacity;
3552 m_pArray = newArray;
3558 void clear(
bool freeMemory =
false)
3560 resize(0, freeMemory);
3563 void insert(
size_t index,
const T& src)
3565 VMA_HEAVY_ASSERT(index <= m_Count);
3566 const size_t oldCount = size();
3567 resize(oldCount + 1);
3568 if(index < oldCount)
3570 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3572 m_pArray[index] = src;
3575 void remove(
size_t index)
3577 VMA_HEAVY_ASSERT(index < m_Count);
3578 const size_t oldCount = size();
3579 if(index < oldCount - 1)
3581 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3583 resize(oldCount - 1);
3586 void push_back(
const T& src)
3588 const size_t newIndex = size();
3589 resize(newIndex + 1);
3590 m_pArray[newIndex] = src;
3595 VMA_HEAVY_ASSERT(m_Count > 0);
3599 void push_front(
const T& src)
3606 VMA_HEAVY_ASSERT(m_Count > 0);
3610 typedef T* iterator;
3612 iterator begin() {
return m_pArray; }
3613 iterator end() {
return m_pArray + m_Count; }
3616 AllocatorT m_Allocator;
3622 template<
typename T,
typename allocatorT>
3623 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3625 vec.insert(index, item);
3628 template<
typename T,
typename allocatorT>
3629 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3634 #endif // #if VMA_USE_STL_VECTOR 3636 template<
typename CmpLess,
typename VectorT>
3637 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3639 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3641 vector.data() + vector.size(),
3643 CmpLess()) - vector.data();
3644 VmaVectorInsert(vector, indexToInsert, value);
3645 return indexToInsert;
3648 template<
typename CmpLess,
typename VectorT>
3649 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3652 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3657 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3659 size_t indexToRemove = it - vector.begin();
3660 VmaVectorRemove(vector, indexToRemove);
3666 template<
typename CmpLess,
typename IterT,
typename KeyT>
3667 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3670 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3671 beg, end, value, comparator);
3673 (!comparator(*it, value) && !comparator(value, *it)))
3688 template<
typename T>
3689 class VmaPoolAllocator
3691 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3693 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3694 ~VmaPoolAllocator();
3702 uint32_t NextFreeIndex;
3709 uint32_t FirstFreeIndex;
3712 const VkAllocationCallbacks* m_pAllocationCallbacks;
3713 size_t m_ItemsPerBlock;
3714 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3716 ItemBlock& CreateNewBlock();
3719 template<
typename T>
3720 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3721 m_pAllocationCallbacks(pAllocationCallbacks),
3722 m_ItemsPerBlock(itemsPerBlock),
3723 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3725 VMA_ASSERT(itemsPerBlock > 0);
3728 template<
typename T>
3729 VmaPoolAllocator<T>::~VmaPoolAllocator()
3734 template<
typename T>
3735 void VmaPoolAllocator<T>::Clear()
3737 for(
size_t i = m_ItemBlocks.size(); i--; )
3738 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3739 m_ItemBlocks.clear();
3742 template<
typename T>
3743 T* VmaPoolAllocator<T>::Alloc()
3745 for(
size_t i = m_ItemBlocks.size(); i--; )
3747 ItemBlock& block = m_ItemBlocks[i];
3749 if(block.FirstFreeIndex != UINT32_MAX)
3751 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3752 block.FirstFreeIndex = pItem->NextFreeIndex;
3753 return &pItem->Value;
3758 ItemBlock& newBlock = CreateNewBlock();
3759 Item*
const pItem = &newBlock.pItems[0];
3760 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3761 return &pItem->Value;
3764 template<
typename T>
3765 void VmaPoolAllocator<T>::Free(T* ptr)
3768 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3770 ItemBlock& block = m_ItemBlocks[i];
3774 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3777 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3779 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3780 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3781 block.FirstFreeIndex = index;
3785 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3788 template<
typename T>
3789 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3791 ItemBlock newBlock = {
3792 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3794 m_ItemBlocks.push_back(newBlock);
3797 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3798 newBlock.pItems[i].NextFreeIndex = i + 1;
3799 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3800 return m_ItemBlocks.back();
3806 #if VMA_USE_STL_LIST 3808 #define VmaList std::list 3810 #else // #if VMA_USE_STL_LIST 3812 template<
typename T>
3821 template<
typename T>
3824 VMA_CLASS_NO_COPY(VmaRawList)
3826 typedef VmaListItem<T> ItemType;
3828 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3832 size_t GetCount()
const {
return m_Count; }
3833 bool IsEmpty()
const {
return m_Count == 0; }
3835 ItemType* Front() {
return m_pFront; }
3836 const ItemType* Front()
const {
return m_pFront; }
3837 ItemType* Back() {
return m_pBack; }
3838 const ItemType* Back()
const {
return m_pBack; }
3840 ItemType* PushBack();
3841 ItemType* PushFront();
3842 ItemType* PushBack(
const T& value);
3843 ItemType* PushFront(
const T& value);
3848 ItemType* InsertBefore(ItemType* pItem);
3850 ItemType* InsertAfter(ItemType* pItem);
3852 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3853 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3855 void Remove(ItemType* pItem);
3858 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3859 VmaPoolAllocator<ItemType> m_ItemAllocator;
3865 template<
typename T>
3866 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3867 m_pAllocationCallbacks(pAllocationCallbacks),
3868 m_ItemAllocator(pAllocationCallbacks, 128),
3875 template<
typename T>
3876 VmaRawList<T>::~VmaRawList()
3882 template<
typename T>
3883 void VmaRawList<T>::Clear()
3885 if(IsEmpty() ==
false)
3887 ItemType* pItem = m_pBack;
3888 while(pItem != VMA_NULL)
3890 ItemType*
const pPrevItem = pItem->pPrev;
3891 m_ItemAllocator.Free(pItem);
3894 m_pFront = VMA_NULL;
3900 template<
typename T>
3901 VmaListItem<T>* VmaRawList<T>::PushBack()
3903 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3904 pNewItem->pNext = VMA_NULL;
3907 pNewItem->pPrev = VMA_NULL;
3908 m_pFront = pNewItem;
3914 pNewItem->pPrev = m_pBack;
3915 m_pBack->pNext = pNewItem;
3922 template<
typename T>
3923 VmaListItem<T>* VmaRawList<T>::PushFront()
3925 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3926 pNewItem->pPrev = VMA_NULL;
3929 pNewItem->pNext = VMA_NULL;
3930 m_pFront = pNewItem;
3936 pNewItem->pNext = m_pFront;
3937 m_pFront->pPrev = pNewItem;
3938 m_pFront = pNewItem;
3944 template<
typename T>
3945 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3947 ItemType*
const pNewItem = PushBack();
3948 pNewItem->Value = value;
3952 template<
typename T>
3953 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3955 ItemType*
const pNewItem = PushFront();
3956 pNewItem->Value = value;
3960 template<
typename T>
3961 void VmaRawList<T>::PopBack()
3963 VMA_HEAVY_ASSERT(m_Count > 0);
3964 ItemType*
const pBackItem = m_pBack;
3965 ItemType*
const pPrevItem = pBackItem->pPrev;
3966 if(pPrevItem != VMA_NULL)
3968 pPrevItem->pNext = VMA_NULL;
3970 m_pBack = pPrevItem;
3971 m_ItemAllocator.Free(pBackItem);
3975 template<
typename T>
3976 void VmaRawList<T>::PopFront()
3978 VMA_HEAVY_ASSERT(m_Count > 0);
3979 ItemType*
const pFrontItem = m_pFront;
3980 ItemType*
const pNextItem = pFrontItem->pNext;
3981 if(pNextItem != VMA_NULL)
3983 pNextItem->pPrev = VMA_NULL;
3985 m_pFront = pNextItem;
3986 m_ItemAllocator.Free(pFrontItem);
3990 template<
typename T>
3991 void VmaRawList<T>::Remove(ItemType* pItem)
3993 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3994 VMA_HEAVY_ASSERT(m_Count > 0);
3996 if(pItem->pPrev != VMA_NULL)
3998 pItem->pPrev->pNext = pItem->pNext;
4002 VMA_HEAVY_ASSERT(m_pFront == pItem);
4003 m_pFront = pItem->pNext;
4006 if(pItem->pNext != VMA_NULL)
4008 pItem->pNext->pPrev = pItem->pPrev;
4012 VMA_HEAVY_ASSERT(m_pBack == pItem);
4013 m_pBack = pItem->pPrev;
4016 m_ItemAllocator.Free(pItem);
4020 template<
typename T>
4021 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4023 if(pItem != VMA_NULL)
4025 ItemType*
const prevItem = pItem->pPrev;
4026 ItemType*
const newItem = m_ItemAllocator.Alloc();
4027 newItem->pPrev = prevItem;
4028 newItem->pNext = pItem;
4029 pItem->pPrev = newItem;
4030 if(prevItem != VMA_NULL)
4032 prevItem->pNext = newItem;
4036 VMA_HEAVY_ASSERT(m_pFront == pItem);
4046 template<
typename T>
4047 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4049 if(pItem != VMA_NULL)
4051 ItemType*
const nextItem = pItem->pNext;
4052 ItemType*
const newItem = m_ItemAllocator.Alloc();
4053 newItem->pNext = nextItem;
4054 newItem->pPrev = pItem;
4055 pItem->pNext = newItem;
4056 if(nextItem != VMA_NULL)
4058 nextItem->pPrev = newItem;
4062 VMA_HEAVY_ASSERT(m_pBack == pItem);
4072 template<
typename T>
4073 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4075 ItemType*
const newItem = InsertBefore(pItem);
4076 newItem->Value = value;
4080 template<
typename T>
4081 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4083 ItemType*
const newItem = InsertAfter(pItem);
4084 newItem->Value = value;
4088 template<
typename T,
typename AllocatorT>
4091 VMA_CLASS_NO_COPY(VmaList)
4102 T& operator*()
const 4104 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4105 return m_pItem->Value;
4107 T* operator->()
const 4109 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4110 return &m_pItem->Value;
4113 iterator& operator++()
4115 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4116 m_pItem = m_pItem->pNext;
4119 iterator& operator--()
4121 if(m_pItem != VMA_NULL)
4123 m_pItem = m_pItem->pPrev;
4127 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4128 m_pItem = m_pList->Back();
4133 iterator operator++(
int)
4135 iterator result = *
this;
4139 iterator operator--(
int)
4141 iterator result = *
this;
4146 bool operator==(
const iterator& rhs)
const 4148 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4149 return m_pItem == rhs.m_pItem;
4151 bool operator!=(
const iterator& rhs)
const 4153 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4154 return m_pItem != rhs.m_pItem;
4158 VmaRawList<T>* m_pList;
4159 VmaListItem<T>* m_pItem;
4161 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4167 friend class VmaList<T, AllocatorT>;
4170 class const_iterator
4179 const_iterator(
const iterator& src) :
4180 m_pList(src.m_pList),
4181 m_pItem(src.m_pItem)
4185 const T& operator*()
const 4187 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4188 return m_pItem->Value;
4190 const T* operator->()
const 4192 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4193 return &m_pItem->Value;
4196 const_iterator& operator++()
4198 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4199 m_pItem = m_pItem->pNext;
4202 const_iterator& operator--()
4204 if(m_pItem != VMA_NULL)
4206 m_pItem = m_pItem->pPrev;
4210 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4211 m_pItem = m_pList->Back();
4216 const_iterator operator++(
int)
4218 const_iterator result = *
this;
4222 const_iterator operator--(
int)
4224 const_iterator result = *
this;
4229 bool operator==(
const const_iterator& rhs)
const 4231 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4232 return m_pItem == rhs.m_pItem;
4234 bool operator!=(
const const_iterator& rhs)
const 4236 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4237 return m_pItem != rhs.m_pItem;
4241 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4247 const VmaRawList<T>* m_pList;
4248 const VmaListItem<T>* m_pItem;
4250 friend class VmaList<T, AllocatorT>;
4253 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4255 bool empty()
const {
return m_RawList.IsEmpty(); }
4256 size_t size()
const {
return m_RawList.GetCount(); }
4258 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4259 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4261 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4262 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4264 void clear() { m_RawList.Clear(); }
4265 void push_back(
const T& value) { m_RawList.PushBack(value); }
4266 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4267 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4270 VmaRawList<T> m_RawList;
4273 #endif // #if VMA_USE_STL_LIST 4281 #if VMA_USE_STL_UNORDERED_MAP 4283 #define VmaPair std::pair 4285 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4286 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4288 #else // #if VMA_USE_STL_UNORDERED_MAP 4290 template<
typename T1,
typename T2>
4296 VmaPair() : first(), second() { }
4297 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4303 template<
typename KeyT,
typename ValueT>
4307 typedef VmaPair<KeyT, ValueT> PairType;
4308 typedef PairType* iterator;
4310 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4312 iterator begin() {
return m_Vector.begin(); }
4313 iterator end() {
return m_Vector.end(); }
4315 void insert(
const PairType& pair);
4316 iterator find(
const KeyT& key);
4317 void erase(iterator it);
4320 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4323 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4325 template<
typename FirstT,
typename SecondT>
4326 struct VmaPairFirstLess
4328 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4330 return lhs.first < rhs.first;
4332 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4334 return lhs.first < rhsFirst;
4338 template<
typename KeyT,
typename ValueT>
4339 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4341 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4343 m_Vector.data() + m_Vector.size(),
4345 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4346 VmaVectorInsert(m_Vector, indexToInsert, pair);
4349 template<
typename KeyT,
typename ValueT>
4350 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4352 PairType* it = VmaBinaryFindFirstNotLess(
4354 m_Vector.data() + m_Vector.size(),
4356 VmaPairFirstLess<KeyT, ValueT>());
4357 if((it != m_Vector.end()) && (it->first == key))
4363 return m_Vector.end();
4367 template<
typename KeyT,
typename ValueT>
4368 void VmaMap<KeyT, ValueT>::erase(iterator it)
4370 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4373 #endif // #if VMA_USE_STL_UNORDERED_MAP 4379 class VmaDeviceMemoryBlock;
4381 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4383 struct VmaAllocation_T
4385 VMA_CLASS_NO_COPY(VmaAllocation_T)
4387 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4391 FLAG_USER_DATA_STRING = 0x01,
4395 enum ALLOCATION_TYPE
4397 ALLOCATION_TYPE_NONE,
4398 ALLOCATION_TYPE_BLOCK,
4399 ALLOCATION_TYPE_DEDICATED,
4402 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4405 m_pUserData(VMA_NULL),
4406 m_LastUseFrameIndex(currentFrameIndex),
4407 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4408 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4410 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4412 #if VMA_STATS_STRING_ENABLED 4413 m_CreationFrameIndex = currentFrameIndex;
4414 m_BufferImageUsage = 0;
4420 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4423 VMA_ASSERT(m_pUserData == VMA_NULL);
4426 void InitBlockAllocation(
4428 VmaDeviceMemoryBlock* block,
4429 VkDeviceSize offset,
4430 VkDeviceSize alignment,
4432 VmaSuballocationType suballocationType,
4436 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4437 VMA_ASSERT(block != VMA_NULL);
4438 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4439 m_Alignment = alignment;
4441 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4442 m_SuballocationType = (uint8_t)suballocationType;
4443 m_BlockAllocation.m_hPool = hPool;
4444 m_BlockAllocation.m_Block = block;
4445 m_BlockAllocation.m_Offset = offset;
4446 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4451 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4452 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4453 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4454 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4455 m_BlockAllocation.m_Block = VMA_NULL;
4456 m_BlockAllocation.m_Offset = 0;
4457 m_BlockAllocation.m_CanBecomeLost =
true;
4460 void ChangeBlockAllocation(
4462 VmaDeviceMemoryBlock* block,
4463 VkDeviceSize offset);
4466 void InitDedicatedAllocation(
4467 uint32_t memoryTypeIndex,
4468 VkDeviceMemory hMemory,
4469 VmaSuballocationType suballocationType,
4473 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4474 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4475 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4478 m_SuballocationType = (uint8_t)suballocationType;
4479 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4480 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4481 m_DedicatedAllocation.m_hMemory = hMemory;
4482 m_DedicatedAllocation.m_pMappedData = pMappedData;
4485 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4486 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4487 VkDeviceSize GetSize()
const {
return m_Size; }
4488 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4489 void* GetUserData()
const {
return m_pUserData; }
4490 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4491 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4493 VmaDeviceMemoryBlock* GetBlock()
const 4495 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4496 return m_BlockAllocation.m_Block;
4498 VkDeviceSize GetOffset()
const;
4499 VkDeviceMemory GetMemory()
const;
4500 uint32_t GetMemoryTypeIndex()
const;
4501 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4502 void* GetMappedData()
const;
4503 bool CanBecomeLost()
const;
4506 uint32_t GetLastUseFrameIndex()
const 4508 return m_LastUseFrameIndex.load();
4510 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4512 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4522 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4524 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4526 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4537 void BlockAllocMap();
4538 void BlockAllocUnmap();
4539 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4542 #if VMA_STATS_STRING_ENABLED 4543 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4544 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4546 void InitBufferImageUsage(uint32_t bufferImageUsage)
4548 VMA_ASSERT(m_BufferImageUsage == 0);
4549 m_BufferImageUsage = bufferImageUsage;
4552 void PrintParameters(
class VmaJsonWriter& json)
const;
4556 VkDeviceSize m_Alignment;
4557 VkDeviceSize m_Size;
4559 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4561 uint8_t m_SuballocationType;
4568 struct BlockAllocation
4571 VmaDeviceMemoryBlock* m_Block;
4572 VkDeviceSize m_Offset;
4573 bool m_CanBecomeLost;
4577 struct DedicatedAllocation
4579 uint32_t m_MemoryTypeIndex;
4580 VkDeviceMemory m_hMemory;
4581 void* m_pMappedData;
4587 BlockAllocation m_BlockAllocation;
4589 DedicatedAllocation m_DedicatedAllocation;
4592 #if VMA_STATS_STRING_ENABLED 4593 uint32_t m_CreationFrameIndex;
4594 uint32_t m_BufferImageUsage;
4604 struct VmaSuballocation
4606 VkDeviceSize offset;
4609 VmaSuballocationType type;
4613 struct VmaSuballocationOffsetLess
4615 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4617 return lhs.offset < rhs.offset;
4620 struct VmaSuballocationOffsetGreater
4622 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4624 return lhs.offset > rhs.offset;
4628 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4631 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4646 struct VmaAllocationRequest
4648 VkDeviceSize offset;
4649 VkDeviceSize sumFreeSize;
4650 VkDeviceSize sumItemSize;
4651 VmaSuballocationList::iterator item;
4652 size_t itemsToMakeLostCount;
4655 VkDeviceSize CalcCost()
const 4657 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4665 class VmaBlockMetadata
4669 virtual ~VmaBlockMetadata() { }
4670 virtual void Init(VkDeviceSize size) { m_Size = size; }
4673 virtual bool Validate()
const = 0;
4674 VkDeviceSize GetSize()
const {
return m_Size; }
4675 virtual size_t GetAllocationCount()
const = 0;
4676 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4677 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4679 virtual bool IsEmpty()
const = 0;
4681 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4683 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4685 #if VMA_STATS_STRING_ENABLED 4686 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
4692 virtual bool CreateAllocationRequest(
4693 uint32_t currentFrameIndex,
4694 uint32_t frameInUseCount,
4695 VkDeviceSize bufferImageGranularity,
4696 VkDeviceSize allocSize,
4697 VkDeviceSize allocAlignment,
4699 VmaSuballocationType allocType,
4700 bool canMakeOtherLost,
4702 VmaAllocationRequest* pAllocationRequest) = 0;
4704 virtual bool MakeRequestedAllocationsLost(
4705 uint32_t currentFrameIndex,
4706 uint32_t frameInUseCount,
4707 VmaAllocationRequest* pAllocationRequest) = 0;
4709 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4711 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
4715 const VmaAllocationRequest& request,
4716 VmaSuballocationType type,
4717 VkDeviceSize allocSize,
4723 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
4726 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
4728 #if VMA_STATS_STRING_ENABLED 4729 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4730 VkDeviceSize unusedBytes,
4731 size_t allocationCount,
4732 size_t unusedRangeCount)
const;
4733 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4734 VkDeviceSize offset,
4736 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4737 VkDeviceSize offset,
4738 VkDeviceSize size)
const;
4739 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4743 VkDeviceSize m_Size;
4744 const VkAllocationCallbacks* m_pAllocationCallbacks;
4747 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 4748 VMA_ASSERT(0 && "Validation failed: " ## #cond); \ 4752 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4754 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4757 virtual ~VmaBlockMetadata_Generic();
4758 virtual void Init(VkDeviceSize size);
4760 virtual bool Validate()
const;
4761 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4762 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4763 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4764 virtual bool IsEmpty()
const;
4766 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4767 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4769 #if VMA_STATS_STRING_ENABLED 4770 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4773 virtual bool CreateAllocationRequest(
4774 uint32_t currentFrameIndex,
4775 uint32_t frameInUseCount,
4776 VkDeviceSize bufferImageGranularity,
4777 VkDeviceSize allocSize,
4778 VkDeviceSize allocAlignment,
4780 VmaSuballocationType allocType,
4781 bool canMakeOtherLost,
4783 VmaAllocationRequest* pAllocationRequest);
4785 virtual bool MakeRequestedAllocationsLost(
4786 uint32_t currentFrameIndex,
4787 uint32_t frameInUseCount,
4788 VmaAllocationRequest* pAllocationRequest);
4790 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4792 virtual VkResult CheckCorruption(
const void* pBlockData);
4795 const VmaAllocationRequest& request,
4796 VmaSuballocationType type,
4797 VkDeviceSize allocSize,
4802 virtual void FreeAtOffset(VkDeviceSize offset);
4805 uint32_t m_FreeCount;
4806 VkDeviceSize m_SumFreeSize;
4807 VmaSuballocationList m_Suballocations;
4810 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4812 bool ValidateFreeSuballocationList()
const;
4816 bool CheckAllocation(
4817 uint32_t currentFrameIndex,
4818 uint32_t frameInUseCount,
4819 VkDeviceSize bufferImageGranularity,
4820 VkDeviceSize allocSize,
4821 VkDeviceSize allocAlignment,
4822 VmaSuballocationType allocType,
4823 VmaSuballocationList::const_iterator suballocItem,
4824 bool canMakeOtherLost,
4825 VkDeviceSize* pOffset,
4826 size_t* itemsToMakeLostCount,
4827 VkDeviceSize* pSumFreeSize,
4828 VkDeviceSize* pSumItemSize)
const;
4830 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4834 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4837 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4840 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4921 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
4923 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
4926 virtual ~VmaBlockMetadata_Linear();
4927 virtual void Init(VkDeviceSize size);
4929 virtual bool Validate()
const;
4930 virtual size_t GetAllocationCount()
const;
4931 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4932 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4933 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
4935 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4936 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4938 #if VMA_STATS_STRING_ENABLED 4939 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4942 virtual bool CreateAllocationRequest(
4943 uint32_t currentFrameIndex,
4944 uint32_t frameInUseCount,
4945 VkDeviceSize bufferImageGranularity,
4946 VkDeviceSize allocSize,
4947 VkDeviceSize allocAlignment,
4949 VmaSuballocationType allocType,
4950 bool canMakeOtherLost,
4952 VmaAllocationRequest* pAllocationRequest);
4954 virtual bool MakeRequestedAllocationsLost(
4955 uint32_t currentFrameIndex,
4956 uint32_t frameInUseCount,
4957 VmaAllocationRequest* pAllocationRequest);
4959 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4961 virtual VkResult CheckCorruption(
const void* pBlockData);
4964 const VmaAllocationRequest& request,
4965 VmaSuballocationType type,
4966 VkDeviceSize allocSize,
4971 virtual void FreeAtOffset(VkDeviceSize offset);
4981 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
4983 enum SECOND_VECTOR_MODE
4985 SECOND_VECTOR_EMPTY,
4990 SECOND_VECTOR_RING_BUFFER,
4996 SECOND_VECTOR_DOUBLE_STACK,
4999 VkDeviceSize m_SumFreeSize;
5000 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5001 uint32_t m_1stVectorIndex;
5002 SECOND_VECTOR_MODE m_2ndVectorMode;
5004 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5005 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5006 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5007 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5010 size_t m_1stNullItemsBeginCount;
5012 size_t m_1stNullItemsMiddleCount;
5014 size_t m_2ndNullItemsCount;
5016 bool ShouldCompact1st()
const;
5017 void CleanupAfterFree();
5031 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5033 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5036 virtual ~VmaBlockMetadata_Buddy();
5037 virtual void Init(VkDeviceSize size);
5039 virtual bool Validate()
const;
5040 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5041 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5042 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5043 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5045 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5046 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5048 #if VMA_STATS_STRING_ENABLED 5049 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5052 virtual bool CreateAllocationRequest(
5053 uint32_t currentFrameIndex,
5054 uint32_t frameInUseCount,
5055 VkDeviceSize bufferImageGranularity,
5056 VkDeviceSize allocSize,
5057 VkDeviceSize allocAlignment,
5059 VmaSuballocationType allocType,
5060 bool canMakeOtherLost,
5062 VmaAllocationRequest* pAllocationRequest);
5064 virtual bool MakeRequestedAllocationsLost(
5065 uint32_t currentFrameIndex,
5066 uint32_t frameInUseCount,
5067 VmaAllocationRequest* pAllocationRequest);
5069 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5071 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5074 const VmaAllocationRequest& request,
5075 VmaSuballocationType type,
5076 VkDeviceSize allocSize,
5080 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5081 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5084 static const VkDeviceSize MIN_NODE_SIZE = 32;
5085 static const size_t MAX_LEVELS = 30;
5087 struct ValidationContext
5089 size_t calculatedAllocationCount;
5090 size_t calculatedFreeCount;
5091 VkDeviceSize calculatedSumFreeSize;
5093 ValidationContext() :
5094 calculatedAllocationCount(0),
5095 calculatedFreeCount(0),
5096 calculatedSumFreeSize(0) { }
5101 VkDeviceSize offset;
5131 VkDeviceSize m_UsableSize;
5132 uint32_t m_LevelCount;
5138 } m_FreeList[MAX_LEVELS];
5140 size_t m_AllocationCount;
5144 VkDeviceSize m_SumFreeSize;
5146 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5147 void DeleteNode(Node* node);
5148 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5149 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5150 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5152 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5153 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5157 void AddToFreeListFront(uint32_t level, Node* node);
5161 void RemoveFromFreeList(uint32_t level, Node* node);
5163 #if VMA_STATS_STRING_ENABLED 5164 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5174 class VmaDeviceMemoryBlock
5176 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5178 VmaBlockMetadata* m_pMetadata;
5182 ~VmaDeviceMemoryBlock()
5184 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5185 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5191 uint32_t newMemoryTypeIndex,
5192 VkDeviceMemory newMemory,
5193 VkDeviceSize newSize,
5195 uint32_t algorithm);
5199 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5200 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5201 uint32_t GetId()
const {
return m_Id; }
5202 void* GetMappedData()
const {
return m_pMappedData; }
5205 bool Validate()
const;
5210 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5213 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5214 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5216 VkResult BindBufferMemory(
5220 VkResult BindImageMemory(
5226 uint32_t m_MemoryTypeIndex;
5228 VkDeviceMemory m_hMemory;
5233 uint32_t m_MapCount;
5234 void* m_pMappedData;
5237 struct VmaPointerLess
5239 bool operator()(
const void* lhs,
const void* rhs)
const 5245 class VmaDefragmentator;
5253 struct VmaBlockVector
5255 VMA_CLASS_NO_COPY(VmaBlockVector)
5259 uint32_t memoryTypeIndex,
5260 VkDeviceSize preferredBlockSize,
5261 size_t minBlockCount,
5262 size_t maxBlockCount,
5263 VkDeviceSize bufferImageGranularity,
5264 uint32_t frameInUseCount,
5266 bool explicitBlockSize,
5267 uint32_t algorithm);
5270 VkResult CreateMinBlocks();
5272 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5273 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5274 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5275 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5276 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5280 bool IsEmpty()
const {
return m_Blocks.empty(); }
5281 bool IsCorruptionDetectionEnabled()
const;
5285 uint32_t currentFrameIndex,
5287 VkDeviceSize alignment,
5289 VmaSuballocationType suballocType,
5298 #if VMA_STATS_STRING_ENABLED 5299 void PrintDetailedMap(
class VmaJsonWriter& json);
5302 void MakePoolAllocationsLost(
5303 uint32_t currentFrameIndex,
5304 size_t* pLostAllocationCount);
5305 VkResult CheckCorruption();
5307 VmaDefragmentator* EnsureDefragmentator(
5309 uint32_t currentFrameIndex);
5311 VkResult Defragment(
5313 VkDeviceSize& maxBytesToMove,
5314 uint32_t& maxAllocationsToMove);
5316 void DestroyDefragmentator();
5319 friend class VmaDefragmentator;
5322 const uint32_t m_MemoryTypeIndex;
5323 const VkDeviceSize m_PreferredBlockSize;
5324 const size_t m_MinBlockCount;
5325 const size_t m_MaxBlockCount;
5326 const VkDeviceSize m_BufferImageGranularity;
5327 const uint32_t m_FrameInUseCount;
5328 const bool m_IsCustomPool;
5329 const bool m_ExplicitBlockSize;
5330 const uint32_t m_Algorithm;
5331 bool m_HasEmptyBlock;
5334 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5338 VmaDefragmentator* m_pDefragmentator;
5339 uint32_t m_NextBlockId;
5341 VkDeviceSize CalcMaxBlockSize()
const;
5344 void Remove(VmaDeviceMemoryBlock* pBlock);
5348 void IncrementallySortBlocks();
5351 VkResult AllocateFromBlock(
5352 VmaDeviceMemoryBlock* pBlock,
5354 uint32_t currentFrameIndex,
5356 VkDeviceSize alignment,
5359 VmaSuballocationType suballocType,
5363 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5368 VMA_CLASS_NO_COPY(VmaPool_T)
5370 VmaBlockVector m_BlockVector;
5375 VkDeviceSize preferredBlockSize);
5378 uint32_t GetId()
const {
return m_Id; }
5379 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5381 #if VMA_STATS_STRING_ENABLED 5389 class VmaDefragmentator
5391 VMA_CLASS_NO_COPY(VmaDefragmentator)
5394 VmaBlockVector*
const m_pBlockVector;
5395 uint32_t m_CurrentFrameIndex;
5396 VkDeviceSize m_BytesMoved;
5397 uint32_t m_AllocationsMoved;
5399 struct AllocationInfo
5402 VkBool32* m_pChanged;
5405 m_hAllocation(VK_NULL_HANDLE),
5406 m_pChanged(VMA_NULL)
5411 struct AllocationInfoSizeGreater
5413 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5415 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5420 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5424 VmaDeviceMemoryBlock* m_pBlock;
5425 bool m_HasNonMovableAllocations;
5426 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5428 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5430 m_HasNonMovableAllocations(true),
5431 m_Allocations(pAllocationCallbacks),
5432 m_pMappedDataForDefragmentation(VMA_NULL)
5436 void CalcHasNonMovableAllocations()
5438 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5439 const size_t defragmentAllocCount = m_Allocations.size();
5440 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5443 void SortAllocationsBySizeDescecnding()
5445 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5448 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5453 void* m_pMappedDataForDefragmentation;
5456 struct BlockPointerLess
5458 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5460 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5462 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5464 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
5470 struct BlockInfoCompareMoveDestination
5472 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5474 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5478 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5482 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5490 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5491 BlockInfoVector m_Blocks;
5493 VkResult DefragmentRound(
5494 VkDeviceSize maxBytesToMove,
5495 uint32_t maxAllocationsToMove);
5497 static bool MoveMakesSense(
5498 size_t dstBlockIndex, VkDeviceSize dstOffset,
5499 size_t srcBlockIndex, VkDeviceSize srcOffset);
5504 VmaBlockVector* pBlockVector,
5505 uint32_t currentFrameIndex);
5507 ~VmaDefragmentator();
5509 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5510 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5512 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5514 VkResult Defragment(
5515 VkDeviceSize maxBytesToMove,
5516 uint32_t maxAllocationsToMove);
5519 #if VMA_RECORDING_ENABLED 5526 void WriteConfiguration(
5527 const VkPhysicalDeviceProperties& devProps,
5528 const VkPhysicalDeviceMemoryProperties& memProps,
5529 bool dedicatedAllocationExtensionEnabled);
5532 void RecordCreateAllocator(uint32_t frameIndex);
5533 void RecordDestroyAllocator(uint32_t frameIndex);
5534 void RecordCreatePool(uint32_t frameIndex,
5537 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5538 void RecordAllocateMemory(uint32_t frameIndex,
5539 const VkMemoryRequirements& vkMemReq,
5542 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5543 const VkMemoryRequirements& vkMemReq,
5544 bool requiresDedicatedAllocation,
5545 bool prefersDedicatedAllocation,
5548 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5549 const VkMemoryRequirements& vkMemReq,
5550 bool requiresDedicatedAllocation,
5551 bool prefersDedicatedAllocation,
5554 void RecordFreeMemory(uint32_t frameIndex,
5556 void RecordSetAllocationUserData(uint32_t frameIndex,
5558 const void* pUserData);
5559 void RecordCreateLostAllocation(uint32_t frameIndex,
5561 void RecordMapMemory(uint32_t frameIndex,
5563 void RecordUnmapMemory(uint32_t frameIndex,
5565 void RecordFlushAllocation(uint32_t frameIndex,
5566 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5567 void RecordInvalidateAllocation(uint32_t frameIndex,
5568 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5569 void RecordCreateBuffer(uint32_t frameIndex,
5570 const VkBufferCreateInfo& bufCreateInfo,
5573 void RecordCreateImage(uint32_t frameIndex,
5574 const VkImageCreateInfo& imageCreateInfo,
5577 void RecordDestroyBuffer(uint32_t frameIndex,
5579 void RecordDestroyImage(uint32_t frameIndex,
5581 void RecordTouchAllocation(uint32_t frameIndex,
5583 void RecordGetAllocationInfo(uint32_t frameIndex,
5585 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
5595 class UserDataString
5599 const char* GetString()
const {
return m_Str; }
5609 VMA_MUTEX m_FileMutex;
5611 int64_t m_StartCounter;
5613 void GetBasicParams(CallParams& outParams);
5617 #endif // #if VMA_RECORDING_ENABLED 5620 struct VmaAllocator_T
5622 VMA_CLASS_NO_COPY(VmaAllocator_T)
5625 bool m_UseKhrDedicatedAllocation;
5627 bool m_AllocationCallbacksSpecified;
5628 VkAllocationCallbacks m_AllocationCallbacks;
5632 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5633 VMA_MUTEX m_HeapSizeLimitMutex;
5635 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5636 VkPhysicalDeviceMemoryProperties m_MemProps;
5639 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
5642 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5643 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5644 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
5650 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5652 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5656 return m_VulkanFunctions;
5659 VkDeviceSize GetBufferImageGranularity()
const 5662 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5663 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5666 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5667 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5669 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5671 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5672 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
5675 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5677 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5678 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5681 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5683 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5684 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5685 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5688 bool IsIntegratedGpu()
const 5690 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5693 #if VMA_RECORDING_ENABLED 5694 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
5697 void GetBufferMemoryRequirements(
5699 VkMemoryRequirements& memReq,
5700 bool& requiresDedicatedAllocation,
5701 bool& prefersDedicatedAllocation)
const;
5702 void GetImageMemoryRequirements(
5704 VkMemoryRequirements& memReq,
5705 bool& requiresDedicatedAllocation,
5706 bool& prefersDedicatedAllocation)
const;
5709 VkResult AllocateMemory(
5710 const VkMemoryRequirements& vkMemReq,
5711 bool requiresDedicatedAllocation,
5712 bool prefersDedicatedAllocation,
5713 VkBuffer dedicatedBuffer,
5714 VkImage dedicatedImage,
5716 VmaSuballocationType suballocType,
5722 void CalculateStats(
VmaStats* pStats);
5724 #if VMA_STATS_STRING_ENABLED 5725 void PrintDetailedMap(
class VmaJsonWriter& json);
5728 VkResult Defragment(
5730 size_t allocationCount,
5731 VkBool32* pAllocationsChanged,
5739 void DestroyPool(
VmaPool pool);
5742 void SetCurrentFrameIndex(uint32_t frameIndex);
5743 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5745 void MakePoolAllocationsLost(
5747 size_t* pLostAllocationCount);
5748 VkResult CheckPoolCorruption(
VmaPool hPool);
5749 VkResult CheckCorruption(uint32_t memoryTypeBits);
5753 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5754 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5759 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5760 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5762 void FlushOrInvalidateAllocation(
5764 VkDeviceSize offset, VkDeviceSize size,
5765 VMA_CACHE_OPERATION op);
5767 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5770 VkDeviceSize m_PreferredLargeHeapBlockSize;
5772 VkPhysicalDevice m_PhysicalDevice;
5773 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
5775 VMA_MUTEX m_PoolsMutex;
5777 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5778 uint32_t m_NextPoolId;
5782 #if VMA_RECORDING_ENABLED 5783 VmaRecorder* m_pRecorder;
5788 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5790 VkResult AllocateMemoryOfType(
5792 VkDeviceSize alignment,
5793 bool dedicatedAllocation,
5794 VkBuffer dedicatedBuffer,
5795 VkImage dedicatedImage,
5797 uint32_t memTypeIndex,
5798 VmaSuballocationType suballocType,
5802 VkResult AllocateDedicatedMemory(
5804 VmaSuballocationType suballocType,
5805 uint32_t memTypeIndex,
5807 bool isUserDataString,
5809 VkBuffer dedicatedBuffer,
5810 VkImage dedicatedImage,
5820 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5822 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5825 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5827 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
5830 template<
typename T>
5833 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
5836 template<
typename T>
5837 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5839 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
5842 template<
typename T>
5843 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5848 VmaFree(hAllocator, ptr);
5852 template<
typename T>
5853 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5857 for(
size_t i = count; i--; )
5859 VmaFree(hAllocator, ptr);
5866 #if VMA_STATS_STRING_ENABLED 5868 class VmaStringBuilder
5871 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5872 size_t GetLength()
const {
return m_Data.size(); }
5873 const char* GetData()
const {
return m_Data.data(); }
5875 void Add(
char ch) { m_Data.push_back(ch); }
5876 void Add(
const char* pStr);
5877 void AddNewLine() { Add(
'\n'); }
5878 void AddNumber(uint32_t num);
5879 void AddNumber(uint64_t num);
5880 void AddPointer(
const void* ptr);
5883 VmaVector< char, VmaStlAllocator<char> > m_Data;
5886 void VmaStringBuilder::Add(
const char* pStr)
5888 const size_t strLen = strlen(pStr);
5891 const size_t oldCount = m_Data.size();
5892 m_Data.resize(oldCount + strLen);
5893 memcpy(m_Data.data() + oldCount, pStr, strLen);
5897 void VmaStringBuilder::AddNumber(uint32_t num)
5900 VmaUint32ToStr(buf,
sizeof(buf), num);
5904 void VmaStringBuilder::AddNumber(uint64_t num)
5907 VmaUint64ToStr(buf,
sizeof(buf), num);
5911 void VmaStringBuilder::AddPointer(
const void* ptr)
5914 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Declaration of VmaJsonWriter: a small push-style JSON emitter that writes
// into a VmaStringBuilder. Nesting is tracked on an explicit stack so that
// commas, colons and indentation can be inserted automatically.
// Non-copyable (VMA_CLASS_NO_COPY). NOTE(review): the `class` header line,
// access specifiers, ~VmaJsonWriter/EndObject/EndArray/WriteNull decls and
// the StackItem struct header were dropped by the extraction.
5918 #endif // #if VMA_STATS_STRING_ENABLED 5923 #if VMA_STATS_STRING_ENABLED 5927 VMA_CLASS_NO_COPY(VmaJsonWriter)
5929 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// Begin/End a JSON object or array; singleLine suppresses newlines/indent
// inside that collection.
5932 void BeginObject(
bool singleLine =
false);
5935 void BeginArray(
bool singleLine =
false);
// Write a complete quoted string value (or key, inside an object).
5938 void WriteString(
const char* pStr);
// Begin/Continue/End let callers build one string value from pieces.
5939 void BeginString(
const char* pStr = VMA_NULL);
5940 void ContinueString(
const char* pStr);
5941 void ContinueString(uint32_t n);
5942 void ContinueString(uint64_t n);
5943 void ContinueString_Pointer(
const void* ptr);
5944 void EndString(
const char* pStr = VMA_NULL);
// Unquoted scalar values.
5946 void WriteNumber(uint32_t n);
5947 void WriteNumber(uint64_t n);
5948 void WriteBool(
bool b);
// Indentation unit used by WriteIndent (defined out of line).
5952 static const char*
const INDENT;
// Per-nesting-level bookkeeping: collection kind, how many values emitted
// so far (objects alternate key/value on even/odd counts), and line mode.
5954 enum COLLECTION_TYPE
5956 COLLECTION_TYPE_OBJECT,
5957 COLLECTION_TYPE_ARRAY,
5961 COLLECTION_TYPE type;
5962 uint32_t valueCount;
5963 bool singleLineMode;
5966 VmaStringBuilder& m_SB;
5967 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
5968 bool m_InsideString;
// Emit separators/indent before a value; isString distinguishes keys.
5970 void BeginValue(
bool isString);
5971 void WriteIndent(
bool oneLess =
false);
// VmaJsonWriter definitions: INDENT constant, ctor/dtor, and the collection
// open/close methods. NOTE(review): braces, the m_SB(sb) init, StackItem
// locals, BeginValue/WriteIndent calls and the '{'/'}'/'['/']' emission and
// pop_back lines are missing from this extract — comments only.
// Two-space indentation per nesting level.
5974 const char*
const VmaJsonWriter::INDENT =
"  ";
// Constructor: stack storage uses the caller's allocation callbacks;
// starts outside any string.
5976 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
5978 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
5979 m_InsideString(false)
// Destructor asserts balanced usage: no open string, no open collections.
5983 VmaJsonWriter::~VmaJsonWriter()
5985 VMA_ASSERT(!m_InsideString);
5986 VMA_ASSERT(m_Stack.empty());
// Open a JSON object and push its StackItem (valueCount starts at 0).
5989 void VmaJsonWriter::BeginObject(
bool singleLine)
5991 VMA_ASSERT(!m_InsideString);
5997 item.type = COLLECTION_TYPE_OBJECT;
5998 item.valueCount = 0;
5999 item.singleLineMode = singleLine;
6000 m_Stack.push_back(item);
// Close the innermost collection, which must be an object.
6003 void VmaJsonWriter::EndObject()
6005 VMA_ASSERT(!m_InsideString);
6010 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Open a JSON array; mirrors BeginObject.
6014 void VmaJsonWriter::BeginArray(
bool singleLine)
6016 VMA_ASSERT(!m_InsideString);
6022 item.type = COLLECTION_TYPE_ARRAY;
6023 item.valueCount = 0;
6024 item.singleLineMode = singleLine;
6025 m_Stack.push_back(item);
// Close the innermost collection, which must be an array.
6028 void VmaJsonWriter::EndArray()
6030 VMA_ASSERT(!m_InsideString);
6035 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// VmaJsonWriter value-emission methods. NOTE(review): the extract drops the
// per-character escaping switch inside ContinueString (original lines
// 6064-6095), quote emission, m_SB.AddNumber calls, comma/colon insertion in
// BeginValue, and the indent loop body — comments only, code untouched.
// Write a complete quoted string: Begin + End in one call.
6039 void VmaJsonWriter::WriteString(
const char* pStr)
// Start a quoted string value; optional initial fragment.
6045 void VmaJsonWriter::BeginString(
const char* pStr)
6047 VMA_ASSERT(!m_InsideString);
6051 m_InsideString =
true;
6052 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6054 ContinueString(pStr);
// Append raw characters to the open string, escaping per character
// (escape logic is in the dropped 6064-6095 range). Unsupported characters
// trigger the assert below.
6058 void VmaJsonWriter::ContinueString(
const char* pStr)
6060 VMA_ASSERT(m_InsideString);
6062 const size_t strLen = strlen(pStr);
6063 for(
size_t i = 0; i < strLen; ++i)
6096 VMA_ASSERT(0 &&
"Character not currently supported.");
// Append formatted numbers/pointer text inside an open string.
6102 void VmaJsonWriter::ContinueString(uint32_t n)
6104 VMA_ASSERT(m_InsideString);
6108 void VmaJsonWriter::ContinueString(uint64_t n)
6110 VMA_ASSERT(m_InsideString);
6114 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6116 VMA_ASSERT(m_InsideString);
6117 m_SB.AddPointer(ptr);
// Close the open string, optionally appending a final fragment first.
6120 void VmaJsonWriter::EndString(
const char* pStr)
6122 VMA_ASSERT(m_InsideString);
6123 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6125 ContinueString(pStr);
6128 m_InsideString =
false;
// Unquoted scalars — must not be called while a string is open.
6131 void VmaJsonWriter::WriteNumber(uint32_t n)
6133 VMA_ASSERT(!m_InsideString);
6138 void VmaJsonWriter::WriteNumber(uint64_t n)
6140 VMA_ASSERT(!m_InsideString);
6145 void VmaJsonWriter::WriteBool(
bool b)
6147 VMA_ASSERT(!m_InsideString);
6149 m_SB.Add(b ?
"true" :
"false");
6152 void VmaJsonWriter::WriteNull()
6154 VMA_ASSERT(!m_InsideString);
// Pre-value bookkeeping: inside an object, even valueCounts are keys (must
// be strings — asserted), odd are values; also where commas/newlines go.
6159 void VmaJsonWriter::BeginValue(
bool isString)
6161 if(!m_Stack.empty())
6163 StackItem& currItem = m_Stack.back();
6164 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6165 currItem.valueCount % 2 == 0)
6167 VMA_ASSERT(isString);
6170 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6171 currItem.valueCount % 2 != 0)
6175 else if(currItem.valueCount > 0)
6184 ++currItem.valueCount;
// Emit newline + one INDENT per stack level (oneLess drops a level, used
// when closing a collection); skipped entirely in single-line mode.
6188 void VmaJsonWriter::WriteIndent(
bool oneLess)
6190 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6194 size_t count = m_Stack.size();
6195 if(count > 0 && oneLess)
6199 for(
size_t i = 0; i < count; ++i)
// VmaAllocation_T basic accessors and mutators. NOTE(review): switch
// headers (`switch(m_Type)`), default branches, braces and some else-arms
// are missing from this extract — comments only, code untouched.
// Set the allocation's user data. In string mode the previous string is
// freed and the new one is deep-copied (NUL included) via vma_new_array;
// otherwise the raw pointer is stored as-is.
6206 #endif // #if VMA_STATS_STRING_ENABLED 6210 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
6212 if(IsUserDataString())
// Guard against being handed our own stored string (would be freed below).
6214 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
6216 FreeUserDataString(hAllocator);
6218 if(pUserData != VMA_NULL)
6220 const char*
const newStrSrc = (
char*)pUserData;
6221 const size_t newStrLen = strlen(newStrSrc);
6222 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
// +1 copies the terminating NUL along with the characters.
6223 memcpy(newStrDst, newStrSrc, newStrLen + 1);
6224 m_pUserData = newStrDst;
6229 m_pUserData = pUserData;
// Move a block-type allocation to another block/offset (defragmentation
// path). If persistently mapped, carries the map refcount over: unmap the
// old block, map the new one the same number of times.
6233 void VmaAllocation_T::ChangeBlockAllocation(
6235 VmaDeviceMemoryBlock* block,
6236 VkDeviceSize offset)
6238 VMA_ASSERT(block != VMA_NULL);
6239 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6242 if(block != m_BlockAllocation.m_Block)
// Strip the persistent-map flag bit to get the plain map count.
6244 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
6245 if(IsPersistentMap())
6247 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
6248 block->Map(hAllocator, mapRefCount, VMA_NULL);
6251 m_BlockAllocation.m_Block = block;
6252 m_BlockAllocation.m_Offset = offset;
// Offset within the device memory: block suballocations report their
// offset; dedicated allocations own whole VkDeviceMemory (offset 0 branch
// is in a dropped line — confirm against full file).
6255 VkDeviceSize VmaAllocation_T::GetOffset()
const 6259 case ALLOCATION_TYPE_BLOCK:
6260 return m_BlockAllocation.m_Offset;
6261 case ALLOCATION_TYPE_DEDICATED:
// Underlying VkDeviceMemory handle for either allocation type.
6269 VkDeviceMemory VmaAllocation_T::GetMemory()
const 6273 case ALLOCATION_TYPE_BLOCK:
6274 return m_BlockAllocation.m_Block->GetDeviceMemory();
6275 case ALLOCATION_TYPE_DEDICATED:
6276 return m_DedicatedAllocation.m_hMemory;
6279 return VK_NULL_HANDLE;
// Vulkan memory type index, per allocation type.
6283 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 6287 case ALLOCATION_TYPE_BLOCK:
6288 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
6289 case ALLOCATION_TYPE_DEDICATED:
6290 return m_DedicatedAllocation.m_MemoryTypeIndex;
// CPU pointer when mapped: block-backed allocations add their offset to the
// block's mapped base; dedicated ones return their own mapped pointer.
6297 void* VmaAllocation_T::GetMappedData()
const 6301 case ALLOCATION_TYPE_BLOCK:
6304 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
6305 VMA_ASSERT(pBlockData != VMA_NULL);
6306 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
6313 case ALLOCATION_TYPE_DEDICATED:
// Mapped pointer and map count must agree (both set or both zero).
6314 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
6315 return m_DedicatedAllocation.m_pMappedData;
// Only block suballocations can be "lost"; dedicated never can.
6322 bool VmaAllocation_T::CanBecomeLost()
const 6326 case ALLOCATION_TYPE_BLOCK:
6327 return m_BlockAllocation.m_CanBecomeLost;
6328 case ALLOCATION_TYPE_DEDICATED:
// Owning custom pool — meaningful only for block allocations.
6336 VmaPool VmaAllocation_T::GetPool()
const 6338 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6339 return m_BlockAllocation.m_hPool;
// Try to mark this allocation lost: already-lost and recently-used (within
// frameInUseCount frames) allocations are rejected; otherwise the last-use
// frame index is CAS'd to VMA_FRAME_INDEX_LOST (retry loop body is in
// dropped lines).
6342 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6344 VMA_ASSERT(CanBecomeLost());
6350 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6353 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6358 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6364 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// VmaAllocation_T: JSON dump, user-data string release, and map/unmap
// refcounting. NOTE(review): the name-array initializers, braces, refcount
// ++/-- lines and some else-branches are missing from this extract.
// Human-readable names indexed by VmaSuballocationType (initializer list is
// in dropped lines).
6374 #if VMA_STATS_STRING_ENABLED 6377 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Emit this allocation's properties as JSON key/value pairs into an
// already-open object: Type, Size, optional UserData (quoted string in
// string mode, pointer otherwise), frame indices, and buffer/image Usage
// flags when nonzero.
6386 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6388 json.WriteString(
"Type");
6389 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6391 json.WriteString(
"Size");
6392 json.WriteNumber(m_Size);
6394 if(m_pUserData != VMA_NULL)
6396 json.WriteString(
"UserData");
6398 if(IsUserDataString())
6399 json.WriteString((
const char*)m_pUserData);
6404 json.ContinueString_Pointer(m_pUserData);
6409 json.WriteString(
"CreationFrameIndex");
6410 json.WriteNumber(m_CreationFrameIndex);
6412 json.WriteString(
"LastUseFrameIndex");
6413 json.WriteNumber(GetLastUseFrameIndex());
6415 if(m_BufferImageUsage != 0)
6417 json.WriteString(
"Usage");
6418 json.WriteNumber(m_BufferImageUsage);
// Free the deep-copied user-data string (allocated in SetUserData with
// strlen+1 chars) and null the pointer. Only valid in string mode.
6424 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6426 VMA_ASSERT(IsUserDataString());
6427 if(m_pUserData != VMA_NULL)
6429 char*
const oldStr = (
char*)m_pUserData;
6430 const size_t oldStrLen = strlen(oldStr);
// Length must match the original allocation: strlen + 1 for the NUL.
6431 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6432 m_pUserData = VMA_NULL;
// Map refcount for block allocations: low 7 bits count maps (persistent-map
// flag masked out); 0x7F is the ceiling.
6436 void VmaAllocation_T::BlockAllocMap()
6438 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6440 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6446 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrement the map refcount; unmapping an unmapped allocation asserts.
6450 void VmaAllocation_T::BlockAllocUnmap()
6452 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6454 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6460 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Map a dedicated allocation. If already mapped, just hand back the cached
// pointer (refcount bump is in a dropped line); first map goes through the
// dispatched vkMapMemory and caches the result on success.
6464 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6466 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6470 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6472 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6473 *ppData = m_DedicatedAllocation.m_pMappedData;
6479 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6480 return VK_ERROR_MEMORY_MAP_FAILED;
6485 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6486 hAllocator->m_hDevice,
6487 m_DedicatedAllocation.m_hMemory,
6492 if(result == VK_SUCCESS)
6494 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmap a dedicated allocation: when the refcount reaches zero (decrement
// is in a dropped line) the cached pointer is cleared and vkUnmapMemory is
// called through the dispatch table.
6501 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6503 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6505 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6510 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6511 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6512 hAllocator->m_hDevice,
6513 m_DedicatedAllocation.m_hMemory);
6518 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serialize one VmaStatInfo as a JSON object: scalar counters plus nested
// Min/Avg/Max objects for allocation and unused-range sizes.
// NOTE(review): the WriteNumber(stat.*) argument lines and BeginObject/
// EndObject framing are missing from this extract — comments only.
6522 #if VMA_STATS_STRING_ENABLED 6524 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6528 json.WriteString(
"Blocks");
6531 json.WriteString(
"Allocations");
6534 json.WriteString(
"UnusedRanges");
6537 json.WriteString(
"UsedBytes");
6540 json.WriteString(
"UnusedBytes");
// Single-line sub-object with allocation-size statistics.
6545 json.WriteString(
"AllocationSize");
6546 json.BeginObject(
true);
6547 json.WriteString(
"Min");
6549 json.WriteString(
"Avg");
6551 json.WriteString(
"Max");
// Single-line sub-object with unused-range-size statistics.
6558 json.WriteString(
"UnusedRangeSize");
6559 json.BeginObject(
true);
6560 json.WriteString(
"Min");
6562 json.WriteString(
"Avg");
6564 json.WriteString(
"Max");
// Comparator ordering free-suballocation list iterators by the pointed-to
// suballocation's size. The iterator/VkDeviceSize overload enables binary
// search of m_FreeSuballocationsBySize by a raw size key.
// NOTE(review): the `bool operator()` header lines are missing here.
6572 #endif // #if VMA_STATS_STRING_ENABLED 6574 struct VmaSuballocationItemSizeLess
6577 const VmaSuballocationList::iterator lhs,
6578 const VmaSuballocationList::iterator rhs)
const 6580 return lhs->size < rhs->size;
6583 const VmaSuballocationList::iterator lhs,
6584 VkDeviceSize rhsSize)
const 6586 return lhs->size < rhsSize;
// VmaBlockMetadata base: ctor caches the allocation callbacks; the
// PrintDetailedMap_* helpers emit the common JSON framing shared by all
// metadata implementations. NOTE(review): m_Size init and BeginObject/
// BeginArray framing lines are missing from this extract.
6594 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
6596 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
// Open the detailed-map JSON: block totals, allocation/unused-range counts,
// then the "Suballocations" key whose array the caller fills next.
6600 #if VMA_STATS_STRING_ENABLED 6602 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6603 VkDeviceSize unusedBytes,
6604 size_t allocationCount,
6605 size_t unusedRangeCount)
const 6609 json.WriteString(
"TotalBytes");
6610 json.WriteNumber(GetSize());
6612 json.WriteString(
"UnusedBytes");
6613 json.WriteNumber(unusedBytes);
6615 json.WriteString(
"Allocations");
6616 json.WriteNumber((uint64_t)allocationCount);
6618 json.WriteString(
"UnusedRanges");
6619 json.WriteNumber((uint64_t)unusedRangeCount);
6621 json.WriteString(
"Suballocations");
// Detailed-map element writers. NOTE(review): the hAllocation parameter
// line, EndObject calls and PrintDetailedMap_End's body are missing.
// One used suballocation: single-line object with Offset plus the
// allocation's own parameters (delegated to PrintParameters).
6625 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6626 VkDeviceSize offset,
6629 json.BeginObject(
true);
6631 json.WriteString(
"Offset");
6632 json.WriteNumber(offset);
6634 hAllocation->PrintParameters(json);
// One free range: single-line object with Offset, Type = FREE, and Size.
6639 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6640 VkDeviceSize offset,
6641 VkDeviceSize size)
const 6643 json.BeginObject(
true);
6645 json.WriteString(
"Offset");
6646 json.WriteNumber(offset);
6648 json.WriteString(
"Type");
6649 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6651 json.WriteString(
"Size");
6652 json.WriteNumber(size);
// Close the framing opened by PrintDetailedMap_Begin.
6657 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
// VmaBlockMetadata_Generic: free-list based metadata. Both containers use
// the allocator's callbacks. NOTE(review): m_FreeCount/m_SumFreeSize member
// initializers and the dtor body are in dropped lines.
const 6663 #endif // #if VMA_STATS_STRING_ENABLED 6668 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
6669 VmaBlockMetadata(hAllocator),
6672 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6673 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6677 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initialize metadata for a block of `size` bytes: the whole block becomes
// one FREE suballocation, registered in the by-size vector.
6681 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6683 VmaBlockMetadata::Init(size);
6686 m_SumFreeSize = size;
6688 VmaSuballocation suballoc = {};
6689 suballoc.offset = 0;
6690 suballoc.size = size;
6691 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6692 suballoc.hAllocation = VK_NULL_HANDLE;
// Whole-block free range is always big enough to be registered by size.
6694 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
6695 m_Suballocations.push_back(suballoc);
6696 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6698 m_FreeSuballocationsBySize.push_back(suballocItem);
// Invariant checker: walks the suballocation list recomputing offsets, free
// counts and free bytes, then cross-checks against cached members and the
// sorted by-size vector. NOTE(review): loop increments, some braces and
// else-arms are in dropped lines — comments only.
6701 bool VmaBlockMetadata_Generic::Validate()
const 6703 VMA_VALIDATE(!m_Suballocations.empty());
// Running expected offset of the next suballocation.
6706 VkDeviceSize calculatedOffset = 0;
6708 uint32_t calculatedFreeCount = 0;
6710 VkDeviceSize calculatedSumFreeSize = 0;
// How many free ranges are large enough to appear in the by-size vector.
6713 size_t freeSuballocationsToRegister = 0;
6715 bool prevFree =
false;
6717 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6718 suballocItem != m_Suballocations.cend();
6721 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must be contiguous and ordered.
6724 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
6726 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free ranges must have been merged.
6728 VMA_VALIDATE(!prevFree || !currFree);
6730 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
6734 calculatedSumFreeSize += subAlloc.size;
6735 ++calculatedFreeCount;
6736 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6738 ++freeSuballocationsToRegister;
// Free ranges must at least cover the debug margin.
6742 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
// Used ranges: the allocation object must agree on offset and size.
6746 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
6747 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With debug margins every allocation must be preceded by a free range.
6750 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
6753 calculatedOffset += subAlloc.size;
6754 prevFree = currFree;
// The by-size vector must hold exactly the registrable free ranges, sorted
// ascending by size.
6759 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
6761 VkDeviceSize lastSize = 0;
6762 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6764 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6767 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6769 VMA_VALIDATE(suballocItem->size >= lastSize);
6771 lastSize = suballocItem->size;
// Cached aggregates must match the recomputed ones.
6775 VMA_VALIDATE(ValidateFreeSuballocationList());
6776 VMA_VALIDATE(calculatedOffset == GetSize());
6777 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
6778 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range = last element of the ascending by-size vector;
// 0 when there are no registered free ranges (return in dropped line).
6783 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6785 if(!m_FreeSuballocationsBySize.empty())
6787 return m_FreeSuballocationsBySize.back()->size;
// Empty block == exactly one suballocation and it is free.
6795 bool VmaBlockMetadata_Generic::IsEmpty()
const 6797 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Statistics and JSON dump for the generic metadata. NOTE(review): the
// stat-field assignments, loop increments and several argument lines are
// in dropped lines — comments only, code untouched.
// Fill a VmaStatInfo by walking all suballocations, splitting used vs free.
6800 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6804 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6816 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6817 suballocItem != m_Suballocations.cend();
6820 const VmaSuballocation& suballoc = *suballocItem;
6821 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulate this block's totals into pool-level statistics.
6834 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6836 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6838 inoutStats.
size += GetSize();
// Emit the block's detailed map: header (unused bytes, counts), then one
// JSON object per suballocation — unused-range or allocation form.
6845 #if VMA_STATS_STRING_ENABLED 6847 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6849 PrintDetailedMap_Begin(json,
6851 m_Suballocations.size() - (size_t)m_FreeCount,
6855 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6856 suballocItem != m_Suballocations.cend();
6857 ++suballocItem, ++i)
6859 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6861 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6865 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6869 PrintDetailedMap_End(json);
// Find space for a new allocation. Strategy: (1) quick reject if total free
// space can't fit; (2) binary-search the size-sorted free list for the first
// big-enough range and probe forward with CheckAllocation (best-fit); a
// second reverse loop handles the worst-fit path; (3) optionally scan all
// suballocations considering making lost allocations free, keeping the
// cheapest candidate by CalcCost. Returns true iff a request was produced.
// NOTE(review): several CheckAllocation argument lines, the strategy
// if/else framing and return statements are in dropped lines.
6872 #endif // #if VMA_STATS_STRING_ENABLED 6874 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6875 uint32_t currentFrameIndex,
6876 uint32_t frameInUseCount,
6877 VkDeviceSize bufferImageGranularity,
6878 VkDeviceSize allocSize,
6879 VkDeviceSize allocAlignment,
6881 VmaSuballocationType allocType,
6882 bool canMakeOtherLost,
6884 VmaAllocationRequest* pAllocationRequest)
6886 VMA_ASSERT(allocSize > 0);
6887 VMA_ASSERT(!upperAddress);
6888 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6889 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6890 VMA_HEAVY_ASSERT(Validate());
// Early out: without making others lost, total free bytes (minus both debug
// margins) must cover the request.
6893 if(canMakeOtherLost ==
false &&
6894 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6900 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6901 if(freeSuballocCount > 0)
// Best-fit: first free range >= allocSize + 2*margin, then scan upward.
6906 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6907 m_FreeSuballocationsBySize.data(),
6908 m_FreeSuballocationsBySize.data() + freeSuballocCount,
6909 allocSize + 2 * VMA_DEBUG_MARGIN,
6910 VmaSuballocationItemSizeLess());
6911 size_t index = it - m_FreeSuballocationsBySize.data();
6912 for(; index < freeSuballocCount; ++index)
6917 bufferImageGranularity,
6921 m_FreeSuballocationsBySize[index],
6923 &pAllocationRequest->offset,
6924 &pAllocationRequest->itemsToMakeLostCount,
6925 &pAllocationRequest->sumFreeSize,
6926 &pAllocationRequest->sumItemSize))
6928 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternate path: iterate free ranges largest-to-smallest.
6936 for(
size_t index = freeSuballocCount; index--; )
6941 bufferImageGranularity,
6945 m_FreeSuballocationsBySize[index],
6947 &pAllocationRequest->offset,
6948 &pAllocationRequest->itemsToMakeLostCount,
6949 &pAllocationRequest->sumFreeSize,
6950 &pAllocationRequest->sumItemSize))
6952 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Lost-allocation path: brute-force every candidate start, tracking the
// cheapest request (CalcCost compares bytes of allocations to sacrifice).
6959 if(canMakeOtherLost)
6963 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
6964 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
6966 VmaAllocationRequest tmpAllocRequest = {};
6967 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
6968 suballocIt != m_Suballocations.end();
// Candidate starts: free ranges, or allocations that can become lost.
6971 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
6972 suballocIt->hAllocation->CanBecomeLost())
6977 bufferImageGranularity,
6983 &tmpAllocRequest.offset,
6984 &tmpAllocRequest.itemsToMakeLostCount,
6985 &tmpAllocRequest.sumFreeSize,
6986 &tmpAllocRequest.sumItemSize))
6988 tmpAllocRequest.item = suballocIt;
6990 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
6993 *pAllocationRequest = tmpAllocRequest;
// Success if any candidate was recorded (sumItemSize left at VK_WHOLE_SIZE
// means none was).
6999 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Lost-allocation execution and corruption checking. NOTE(review): loop
// increments, failure returns and braces are in dropped lines.
// Carry out the sacrifices a CreateAllocationRequest planned: starting at
// the request's item, skip free ranges and MakeLost each following
// allocation until itemsToMakeLostCount reaches zero; freeing merges ranges
// so the request item is re-fetched from FreeSuballocation's return.
7008 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7009 uint32_t currentFrameIndex,
7010 uint32_t frameInUseCount,
7011 VmaAllocationRequest* pAllocationRequest)
7013 while(pAllocationRequest->itemsToMakeLostCount > 0)
7015 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7017 ++pAllocationRequest->item;
7019 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7020 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7021 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7022 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7024 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7025 --pAllocationRequest->itemsToMakeLostCount;
// On success the request must now point at a free range.
7033 VMA_HEAVY_ASSERT(Validate());
7034 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7035 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Make every eligible allocation in the block lost; returns how many were
// freed. FreeSuballocation returns the (possibly merged) iterator.
7040 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7042 uint32_t lostAllocationCount = 0;
7043 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7044 it != m_Suballocations.end();
7047 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7048 it->hAllocation->CanBecomeLost() &&
7049 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7051 it = FreeSuballocation(it);
7052 ++lostAllocationCount;
7055 return lostAllocationCount;
// Debug-margin corruption check: every used suballocation must have intact
// magic values in the margins before and after it in the mapped block data.
7058 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7060 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7061 it != m_Suballocations.end();
7064 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7066 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7068 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7069 return VK_ERROR_VALIDATION_FAILED_EXT;
7071 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7073 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7074 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commit/free paths. NOTE(review): parameter lines (hAllocation), ++next,
// m_FreeCount adjustments for paddingEnd, loop increments and early returns
// are in dropped lines — comments only, code untouched.
// Commit a planned request: the chosen free range is split into optional
// paddingBegin / the allocation / optional paddingEnd. Padding pieces are
// re-inserted as FREE suballocations and (re)registered by size; aggregate
// counters are adjusted at the end.
7082 void VmaBlockMetadata_Generic::Alloc(
7083 const VmaAllocationRequest& request,
7084 VmaSuballocationType type,
7085 VkDeviceSize allocSize,
7089 VMA_ASSERT(!upperAddress);
7090 VMA_ASSERT(request.item != m_Suballocations.end());
7091 VmaSuballocation& suballoc = *request.item;
7093 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7095 VMA_ASSERT(request.offset >= suballoc.offset);
7096 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7097 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7098 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The range leaves the free list before being repurposed.
7102 UnregisterFreeSuballocation(request.item);
7104 suballoc.offset = request.offset;
7105 suballoc.size = allocSize;
7106 suballoc.type = type;
7107 suballoc.hAllocation = hAllocation;
// Trailing free fragment, inserted after the allocation.
7112 VmaSuballocation paddingSuballoc = {};
7113 paddingSuballoc.offset = request.offset + allocSize;
7114 paddingSuballoc.size = paddingEnd;
7115 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7116 VmaSuballocationList::iterator next = request.item;
7118 const VmaSuballocationList::iterator paddingEndItem =
7119 m_Suballocations.insert(next, paddingSuballoc);
7120 RegisterFreeSuballocation(paddingEndItem);
// Leading free fragment, inserted before the allocation.
7126 VmaSuballocation paddingSuballoc = {};
7127 paddingSuballoc.offset = request.offset - paddingBegin;
7128 paddingSuballoc.size = paddingBegin;
7129 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7130 const VmaSuballocationList::iterator paddingBeginItem =
7131 m_Suballocations.insert(request.item, paddingSuballoc);
7132 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; padding pieces add back (increments in dropped
// lines); free bytes shrink by exactly the allocation size.
7136 m_FreeCount = m_FreeCount - 1;
7137 if(paddingBegin > 0)
7145 m_SumFreeSize -= allocSize;
// Free by allocation handle: linear scan for the matching suballocation.
// Asserts if the handle is not in this block.
7148 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7150 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7151 suballocItem != m_Suballocations.end();
7154 VmaSuballocation& suballoc = *suballocItem;
7155 if(suballoc.hAllocation == allocation)
7157 FreeSuballocation(suballocItem);
7158 VMA_HEAVY_ASSERT(Validate());
7162 VMA_ASSERT(0 &&
"Not found!");
// Free by offset: same scan keyed on suballoc.offset.
7165 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7167 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7168 suballocItem != m_Suballocations.end();
7171 VmaSuballocation& suballoc = *suballocItem;
7172 if(suballoc.offset == offset)
7174 FreeSuballocation(suballocItem);
7178 VMA_ASSERT(0 &&
"Not found!");
// Check the by-size vector alone: every entry must be FREE, at least the
// registration threshold in size, and sorted ascending.
7181 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 7183 VkDeviceSize lastSize = 0;
7184 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7186 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7188 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
7189 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7190 VMA_VALIDATE(it->size >= lastSize);
7191 lastSize = it->size;
// Test whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem. Two major branches: canMakeOtherLost
// (may consume following suballocations, counting lost candidates and
// their byte cost) and the plain free-range fit. Both apply the debug
// margin, alignment, and bufferImageGranularity rules (an allocation on the
// same "page" as a conflicting neighbor type must be pushed/rejected).
// Outputs: *pOffset, *itemsToMakeLostCount, *pSumFreeSize, *pSumItemSize.
// NOTE(review): many framing lines (returns, else-arms, iterator
// increments, final `return true`) were dropped by the extraction — the
// visible statement order is preserved untouched.
7196 bool VmaBlockMetadata_Generic::CheckAllocation(
7197 uint32_t currentFrameIndex,
7198 uint32_t frameInUseCount,
7199 VkDeviceSize bufferImageGranularity,
7200 VkDeviceSize allocSize,
7201 VkDeviceSize allocAlignment,
7202 VmaSuballocationType allocType,
7203 VmaSuballocationList::const_iterator suballocItem,
7204 bool canMakeOtherLost,
7205 VkDeviceSize* pOffset,
7206 size_t* itemsToMakeLostCount,
7207 VkDeviceSize* pSumFreeSize,
7208 VkDeviceSize* pSumItemSize)
const 7210 VMA_ASSERT(allocSize > 0);
7211 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7212 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7213 VMA_ASSERT(pOffset != VMA_NULL);
7215 *itemsToMakeLostCount = 0;
// ---- Branch 1: may make other allocations lost ----
7219 if(canMakeOtherLost)
7221 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7223 *pSumFreeSize = suballocItem->size;
// Starting on a used range: it must itself be losable and stale
// (unused for more than frameInUseCount frames).
7227 if(suballocItem->hAllocation->CanBecomeLost() &&
7228 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7230 ++*itemsToMakeLostCount;
7231 *pSumItemSize = suballocItem->size;
// Remaining bytes from this start must cover the request at all.
7240 if(GetSize() - suballocItem->offset < allocSize)
7246 *pOffset = suballocItem->offset;
// Leave room for the front debug margin, then align.
7249 if(VMA_DEBUG_MARGIN > 0)
7251 *pOffset += VMA_DEBUG_MARGIN;
7255 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Granularity: if any previous neighbor on the same page has a
// conflicting type, bump the offset up to the next granularity boundary.
7259 if(bufferImageGranularity > 1)
7261 bool bufferImageGranularityConflict =
false;
7262 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7263 while(prevSuballocItem != m_Suballocations.cbegin())
7266 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7267 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7269 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7271 bufferImageGranularityConflict =
true;
7279 if(bufferImageGranularityConflict)
7281 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed us past this suballocation entirely — not viable.
7287 if(*pOffset >= suballocItem->offset + suballocItem->size)
7293 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7296 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7298 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
7300 if(suballocItem->offset + totalSize > GetSize())
// Consume following suballocations until totalSize is covered, summing
// free bytes and counting/sizing losable allocations along the way.
7307 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7308 if(totalSize > suballocItem->size)
7310 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7311 while(remainingSize > 0)
7314 if(lastSuballocItem == m_Suballocations.cend())
7318 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7320 *pSumFreeSize += lastSuballocItem->size;
7324 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7325 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7326 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7328 ++*itemsToMakeLostCount;
7329 *pSumItemSize += lastSuballocItem->size;
7336 remainingSize = (lastSuballocItem->size < remainingSize) ?
7337 remainingSize - lastSuballocItem->size : 0;
// Granularity against following neighbors: conflicting allocations on
// our end page must also be losable (and get counted) or the spot fails.
7343 if(bufferImageGranularity > 1)
7345 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7347 while(nextSuballocItem != m_Suballocations.cend())
7349 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7350 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7352 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7354 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7355 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7356 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7358 ++*itemsToMakeLostCount;
// ---- Branch 2: plain placement inside a single free range ----
7377 const VmaSuballocation& suballoc = *suballocItem;
7378 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7380 *pSumFreeSize = suballoc.size;
// Quick size reject before any alignment math.
7383 if(suballoc.size < allocSize)
7389 *pOffset = suballoc.offset;
7392 if(VMA_DEBUG_MARGIN > 0)
7394 *pOffset += VMA_DEBUG_MARGIN;
7398 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same previous-neighbor granularity adjustment as in branch 1.
7402 if(bufferImageGranularity > 1)
7404 bool bufferImageGranularityConflict =
false;
7405 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7406 while(prevSuballocItem != m_Suballocations.cbegin())
7409 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7410 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7412 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7414 bufferImageGranularityConflict =
true;
7422 if(bufferImageGranularityConflict)
7424 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7429 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7432 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Padding + allocation + end margin must fit inside this free range.
7435 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Following-neighbor granularity check (no lost-counting in this branch).
7442 if(bufferImageGranularity > 1)
7444 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7446 while(nextSuballocItem != m_Suballocations.cend())
7448 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7449 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7451 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Free-range maintenance. NOTE(review): ++nextItem / --prevItem advance
// lines, counter updates and if(mergeWith*) headers are in dropped lines.
// Merge a free item with the free item immediately after it: sizes are
// summed, free-range count drops by one, the next node is erased.
7470 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7472 VMA_ASSERT(item != m_Suballocations.end());
7473 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7475 VmaSuballocationList::iterator nextItem = item;
7477 VMA_ASSERT(nextItem != m_Suballocations.end());
7478 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7480 item->size += nextItem->size;
7482 m_Suballocations.erase(nextItem);
// Turn a used suballocation into a free one, coalescing with free
// neighbors on either side, and (re)register the final merged range in the
// by-size vector. Returns the iterator of the resulting free range.
7485 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7488 VmaSuballocation& suballoc = *suballocItem;
7489 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7490 suballoc.hAllocation = VK_NULL_HANDLE;
7494 m_SumFreeSize += suballoc.size;
// Decide merges by inspecting both neighbors before mutating anything.
7497 bool mergeWithNext =
false;
7498 bool mergeWithPrev =
false;
7500 VmaSuballocationList::iterator nextItem = suballocItem;
7502 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7504 mergeWithNext =
true;
7507 VmaSuballocationList::iterator prevItem = suballocItem;
7508 if(suballocItem != m_Suballocations.begin())
7511 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7513 mergeWithPrev =
true;
// Neighbors leave the by-size vector before their sizes change.
7519 UnregisterFreeSuballocation(nextItem);
7520 MergeFreeWithNext(suballocItem);
7525 UnregisterFreeSuballocation(prevItem);
7526 MergeFreeWithNext(prevItem);
7527 RegisterFreeSuballocation(prevItem);
7532 RegisterFreeSuballocation(suballocItem);
7533 return suballocItem;
// By-size vector registration. Only free ranges at or above
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are tracked; smaller ones
// exist only in the main list. NOTE(review): the binary-search size key
// argument and loop-increment lines are in dropped lines.
// Insert `item` keeping the vector sorted ascending by size.
7537 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7539 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7540 VMA_ASSERT(item->size > 0);
7544 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7546 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7548 if(m_FreeSuballocationsBySize.empty())
7550 m_FreeSuballocationsBySize.push_back(item);
7554 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Remove `item`: binary-search to the first entry of equal size, then scan
// forward through the equal-size run for the exact iterator. Asserts if the
// item is missing or the run is exhausted.
7562 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7564 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7565 VMA_ASSERT(item->size > 0);
7569 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7571 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7573 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7574 m_FreeSuballocationsBySize.data(),
7575 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7577 VmaSuballocationItemSizeLess());
7578 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7579 index < m_FreeSuballocationsBySize.size();
7582 if(m_FreeSuballocationsBySize[index] == item)
7584 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run, otherwise the item was never there.
7587 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7589 VMA_ASSERT(0 &&
"Not found.");
// VmaBlockMetadata_Linear: linear/ring-buffer metadata using two
// suballocation vectors (double-buffered via m_1stVectorIndex) plus counts
// of null (freed-in-place) items. Starts with an empty second vector.
// NOTE(review): m_SumFreeSize init and the dtor body are in dropped lines.
7598 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7599 VmaBlockMetadata(hAllocator),
7601 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7602 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7603 m_1stVectorIndex(0),
7604 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7605 m_1stNullItemsBeginCount(0),
7606 m_1stNullItemsMiddleCount(0),
7607 m_2ndNullItemsCount(0)
7611 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initialize for a block of `size` bytes: everything starts free.
7615 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7617 VmaBlockMetadata::Init(size);
7618 m_SumFreeSize = size;
// Consistency check of the linear allocator's internal state: verifies the
// two suballocation vectors, null-item counters, offset monotonicity, and
// that m_SumFreeSize matches block size minus the sum of used sizes.
// NOTE(review): brace lines and some VMA_VALIDATE/else lines were dropped by
// extraction (gaps in the fused numbering) — structure below is partial.
7621 bool VmaBlockMetadata_Linear::Validate()
const 7623 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7624 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector must be empty exactly when the mode says it is unused.
7626 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
// A ring buffer cannot have a non-empty 2nd vector with an empty 1st vector.
7627 VMA_VALIDATE(!suballocations1st.empty() ||
7628 suballocations2nd.empty() ||
7629 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
7631 if(!suballocations1st.empty())
// First non-null item and the last item of the 1st vector must be real
// allocations (null items are only allowed at the beginning and middle).
7634 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
7636 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
7638 if(!suballocations2nd.empty())
7641 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
7644 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7645 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7647 VkDeviceSize sumUsedSize = 0;
7648 const size_t suballoc1stCount = suballocations1st.size();
7649 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: the 2nd vector occupies the low end of the block and is
// walked first, checking each item against the running offset.
7651 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7653 const size_t suballoc2ndCount = suballocations2nd.size();
7654 size_t nullItem2ndCount = 0;
7655 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7657 const VmaSuballocation& suballoc = suballocations2nd[i];
7658 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// FREE type and null handle must agree.
7660 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7661 VMA_VALIDATE(suballoc.offset >= offset);
7665 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7666 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7667 sumUsedSize += suballoc.size;
7674 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7677 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading run of the 1st vector must consist solely of null (freed) items.
7680 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7682 const VmaSuballocation& suballoc = suballocations1st[i];
7683 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7684 suballoc.hAllocation == VK_NULL_HANDLE);
7687 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the remainder of the 1st vector, accumulating used size and counting
// null (middle) items.
7689 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7691 const VmaSuballocation& suballoc = suballocations1st[i];
7692 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7694 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7695 VMA_VALIDATE(suballoc.offset >= offset);
7696 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7700 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7701 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7702 sumUsedSize += suballoc.size;
7709 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7711 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: the 2nd vector grows downward from the top of the
// block, so it is iterated in reverse to keep offsets increasing.
7713 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7715 const size_t suballoc2ndCount = suballocations2nd.size();
7716 size_t nullItem2ndCount = 0;
7717 for(
size_t i = suballoc2ndCount; i--; )
7719 const VmaSuballocation& suballoc = suballocations2nd[i];
7720 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7722 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7723 VMA_VALIDATE(suballoc.offset >= offset);
7727 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7728 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7729 sumUsedSize += suballoc.size;
7736 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7739 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Final invariants: total walked extent fits the block, and the cached free
// size agrees with the recomputed value.
7742 VMA_VALIDATE(offset <= GetSize());
7743 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
7748 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7750 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7751 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free range, computed per
// 2nd-vector mode from the positions of the first/last suballocations.
// NOTE(review): the fused numbering jumps 7781 -> 7783, so the head of the
// return expression in the SECOND_VECTOR_EMPTY case (the enclosing
// max-of-two call) was dropped by extraction — verify against the original.
7754 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7756 const VkDeviceSize size = GetSize();
7768 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7770 switch(m_2ndVectorMode)
// Only the 1st vector is in use: free space is before its first item and
// after its last item.
7772 case SECOND_VECTOR_EMPTY:
7778 const size_t suballocations1stCount = suballocations1st.size();
7779 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
7780 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
7781 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
7783 firstSuballoc.offset,
7784 size - (lastSuballoc.offset + lastSuballoc.size);
// Ring buffer: the free gap lies between the end of the 2nd vector (low end)
// and the first item of the 1st vector.
7788 case SECOND_VECTOR_RING_BUFFER:
7793 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7794 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
7795 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
7796 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: the free gap lies between the end of the 1st vector (bottom
// stack) and the top item of the 2nd vector (top stack, growing downward).
7800 case SECOND_VECTOR_DOUBLE_STACK:
7805 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7806 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
7807 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
7808 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills a VmaStatInfo with allocation/unused-range statistics by walking all
// three possible regions in address order: ring-buffer part of the 2nd
// vector (low end), the 1st vector, and the double-stack part of the 2nd
// vector (high end, iterated in reverse).
// NOTE(review): the statistic-accumulation statements between the visible
// lines (e.g. between 7859 and 7873) were dropped by extraction — only the
// scan skeleton remains; verify details against the original file.
7818 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7820 const VkDeviceSize size = GetSize();
7821 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7822 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7823 const size_t suballoc1stCount = suballocations1st.size();
7824 const size_t suballoc2ndCount = suballocations2nd.size();
7835 VkDeviceSize lastOffset = 0;
// Region 1: ring-buffer part of the 2nd vector, from offset 0 up to the
// first item of the 1st vector.
7837 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7839 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7840 size_t nextAlloc2ndIndex = 0;
7841 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) placeholders.
7844 while(nextAlloc2ndIndex < suballoc2ndCount &&
7845 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7847 ++nextAlloc2ndIndex;
7851 if(nextAlloc2ndIndex < suballoc2ndCount)
7853 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
7856 if(lastOffset < suballoc.offset)
7859 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7873 lastOffset = suballoc.offset + suballoc.size;
7874 ++nextAlloc2ndIndex;
// Trailing free space up to the start of the 1st vector.
7880 if(lastOffset < freeSpace2ndTo1stEnd)
7882 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7890 lastOffset = freeSpace2ndTo1stEnd;
// Region 2: the 1st vector, up to either the block end or the bottom of the
// double stack.
7895 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7896 const VkDeviceSize freeSpace1stTo2ndEnd =
7897 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7898 while(lastOffset < freeSpace1stTo2ndEnd)
7901 while(nextAlloc1stIndex < suballoc1stCount &&
7902 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
7904 ++nextAlloc1stIndex;
7908 if(nextAlloc1stIndex < suballoc1stCount)
7910 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7913 if(lastOffset < suballoc.offset)
7916 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7930 lastOffset = suballoc.offset + suballoc.size;
7931 ++nextAlloc1stIndex;
7937 if(lastOffset < freeSpace1stTo2ndEnd)
7939 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7947 lastOffset = freeSpace1stTo2ndEnd;
// Region 3: double-stack part of the 2nd vector, iterated from the back
// (lowest offset of the top stack) toward index 0 (block end).
7951 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7953 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7954 while(lastOffset < size)
// SIZE_MAX is the wrap-around sentinel for "ran past index 0".
7957 while(nextAlloc2ndIndex != SIZE_MAX &&
7958 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7960 --nextAlloc2ndIndex;
7964 if(nextAlloc2ndIndex != SIZE_MAX)
7966 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7969 if(lastOffset < suballoc.offset)
7972 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7986 lastOffset = suballoc.offset + suballoc.size;
7987 --nextAlloc2ndIndex;
7993 if(lastOffset < size)
7995 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's statistics into a VmaPoolStats, scanning the same
// three address-ordered regions as CalcAllocationStatInfo.
8011 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8013 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8014 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8015 const VkDeviceSize size = GetSize();
8016 const size_t suballoc1stCount = suballocations1st.size();
8017 const size_t suballoc2ndCount = suballocations2nd.size();
8019 inoutStats.
size += size;
8021 VkDeviceSize lastOffset = 0;
// Region 1: ring-buffer part of the 2nd vector (low end of the block).
8023 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8025 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): this starts the scan of suballocations2nd at
// m_1stNullItemsBeginCount, but that counter belongs to the 1st vector; the
// equivalent loop in CalcAllocationStatInfo starts at 0. Looks like a
// copy-paste bug — confirm against upstream before changing.
8026 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8027 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) placeholders.
8030 while(nextAlloc2ndIndex < suballoc2ndCount &&
8031 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8033 ++nextAlloc2ndIndex;
8037 if(nextAlloc2ndIndex < suballoc2ndCount)
8039 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8042 if(lastOffset < suballoc.offset)
8045 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8056 lastOffset = suballoc.offset + suballoc.size;
8057 ++nextAlloc2ndIndex;
8062 if(lastOffset < freeSpace2ndTo1stEnd)
8065 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8072 lastOffset = freeSpace2ndTo1stEnd;
// Region 2: the 1st vector, bounded by the block end or the bottom of the
// double stack.
8077 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8078 const VkDeviceSize freeSpace1stTo2ndEnd =
8079 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8080 while(lastOffset < freeSpace1stTo2ndEnd)
8083 while(nextAlloc1stIndex < suballoc1stCount &&
8084 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8086 ++nextAlloc1stIndex;
8090 if(nextAlloc1stIndex < suballoc1stCount)
8092 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8095 if(lastOffset < suballoc.offset)
8098 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8109 lastOffset = suballoc.offset + suballoc.size;
8110 ++nextAlloc1stIndex;
8115 if(lastOffset < freeSpace1stTo2ndEnd)
8118 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8125 lastOffset = freeSpace1stTo2ndEnd;
// Region 3: double-stack part of the 2nd vector, iterated in reverse;
// SIZE_MAX marks wrap-around past index 0.
8129 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8131 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8132 while(lastOffset < size)
8135 while(nextAlloc2ndIndex != SIZE_MAX &&
8136 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8138 --nextAlloc2ndIndex;
8142 if(nextAlloc2ndIndex != SIZE_MAX)
8144 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8147 if(lastOffset < suballoc.offset)
8150 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8161 lastOffset = suballoc.offset + suballoc.size;
8162 --nextAlloc2ndIndex;
8167 if(lastOffset < size)
8170 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Writes a detailed JSON map of this block. Two passes over the same three
// address-ordered regions: pass 1 counts allocations/unused ranges and sums
// used bytes (needed by PrintDetailedMap_Begin), pass 2 emits each
// allocation and unused range via the PrintDetailedMap_* helpers.
8183 #if VMA_STATS_STRING_ENABLED 8184 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 8186 const VkDeviceSize size = GetSize();
8187 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8188 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8189 const size_t suballoc1stCount = suballocations1st.size();
8190 const size_t suballoc2ndCount = suballocations2nd.size();
// --- Pass 1: counting only ---
8194 size_t unusedRangeCount = 0;
8195 VkDeviceSize usedBytes = 0;
8197 VkDeviceSize lastOffset = 0;
8199 size_t alloc2ndCount = 0;
8200 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8202 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8203 size_t nextAlloc2ndIndex = 0;
8204 while(lastOffset < freeSpace2ndTo1stEnd)
8207 while(nextAlloc2ndIndex < suballoc2ndCount &&
8208 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8210 ++nextAlloc2ndIndex;
8214 if(nextAlloc2ndIndex < suballoc2ndCount)
8216 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8219 if(lastOffset < suballoc.offset)
8228 usedBytes += suballoc.size;
8231 lastOffset = suballoc.offset + suballoc.size;
8232 ++nextAlloc2ndIndex;
8237 if(lastOffset < freeSpace2ndTo1stEnd)
8244 lastOffset = freeSpace2ndTo1stEnd;
8249 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8250 size_t alloc1stCount = 0;
8251 const VkDeviceSize freeSpace1stTo2ndEnd =
8252 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8253 while(lastOffset < freeSpace1stTo2ndEnd)
8256 while(nextAlloc1stIndex < suballoc1stCount &&
8257 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8259 ++nextAlloc1stIndex;
8263 if(nextAlloc1stIndex < suballoc1stCount)
8265 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8268 if(lastOffset < suballoc.offset)
8277 usedBytes += suballoc.size;
8280 lastOffset = suballoc.offset + suballoc.size;
8281 ++nextAlloc1stIndex;
// NOTE(review): this bound uses `size`, but the matching check in pass 2
// (fused line 8435) uses `freeSpace1stTo2ndEnd`. In double-stack mode the
// two differ, so the counted unusedRangeCount can disagree with what pass 2
// prints — likely a bug; confirm against upstream.
8286 if(lastOffset < size)
8293 lastOffset = freeSpace1stTo2ndEnd;
8297 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8299 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8300 while(lastOffset < size)
8303 while(nextAlloc2ndIndex != SIZE_MAX &&
8304 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8306 --nextAlloc2ndIndex;
8310 if(nextAlloc2ndIndex != SIZE_MAX)
8312 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8315 if(lastOffset < suballoc.offset)
8324 usedBytes += suballoc.size;
8327 lastOffset = suballoc.offset + suballoc.size;
8328 --nextAlloc2ndIndex;
8333 if(lastOffset < size)
// --- Pass 2: emit JSON using the totals from pass 1 ---
8345 const VkDeviceSize unusedBytes = size - usedBytes;
8346 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
8351 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8353 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8354 size_t nextAlloc2ndIndex = 0;
8355 while(lastOffset < freeSpace2ndTo1stEnd)
8358 while(nextAlloc2ndIndex < suballoc2ndCount &&
8359 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8361 ++nextAlloc2ndIndex;
8365 if(nextAlloc2ndIndex < suballoc2ndCount)
8367 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8370 if(lastOffset < suballoc.offset)
8373 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8374 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8379 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8382 lastOffset = suballoc.offset + suballoc.size;
8383 ++nextAlloc2ndIndex;
8388 if(lastOffset < freeSpace2ndTo1stEnd)
8391 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8392 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8396 lastOffset = freeSpace2ndTo1stEnd;
8401 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8402 while(lastOffset < freeSpace1stTo2ndEnd)
8405 while(nextAlloc1stIndex < suballoc1stCount &&
8406 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8408 ++nextAlloc1stIndex;
8412 if(nextAlloc1stIndex < suballoc1stCount)
8414 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8417 if(lastOffset < suballoc.offset)
8420 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8421 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8426 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8429 lastOffset = suballoc.offset + suballoc.size;
8430 ++nextAlloc1stIndex;
8435 if(lastOffset < freeSpace1stTo2ndEnd)
8438 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8439 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8443 lastOffset = freeSpace1stTo2ndEnd;
8447 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8449 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8450 while(lastOffset < size)
8453 while(nextAlloc2ndIndex != SIZE_MAX &&
8454 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8456 --nextAlloc2ndIndex;
8460 if(nextAlloc2ndIndex != SIZE_MAX)
8462 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8465 if(lastOffset < suballoc.offset)
8468 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8469 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8474 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8477 lastOffset = suballoc.offset + suballoc.size;
8478 --nextAlloc2ndIndex;
8483 if(lastOffset < size)
8486 const VkDeviceSize unusedRangeSize = size - lastOffset;
8487 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8496 PrintDetailedMap_End(json);
// Tries to find space for a new allocation in the linear block. Three
// strategies, chosen by the upper-address flag (parameter line dropped by
// extraction) and the current 2nd-vector mode:
//   1. Allocate at the top of the block, growing the double stack downward.
//   2. Append after the last item of the 1st vector (simple linear growth).
//   3. Wrap around as a ring buffer after the 2nd vector, optionally making
//      existing lost-capable allocations lost (canMakeOtherLost).
// Returns via pAllocationRequest; true on success.
8498 #endif // #if VMA_STATS_STRING_ENABLED 8500 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8501 uint32_t currentFrameIndex,
8502 uint32_t frameInUseCount,
8503 VkDeviceSize bufferImageGranularity,
8504 VkDeviceSize allocSize,
8505 VkDeviceSize allocAlignment,
8507 VmaSuballocationType allocType,
8508 bool canMakeOtherLost,
8510 VmaAllocationRequest* pAllocationRequest)
8512 VMA_ASSERT(allocSize > 0);
8513 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8514 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8515 VMA_HEAVY_ASSERT(Validate());
8517 const VkDeviceSize size = GetSize();
8518 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8519 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// --- Strategy 1: upper address (double stack) ---
// Ring-buffer and double-stack usage are mutually exclusive per block.
8523 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8525 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8530 if(allocSize > size)
// Candidate offset: just below the block end, or below the last (lowest)
// 2nd-vector item.
8534 VkDeviceSize resultBaseOffset = size - allocSize;
8535 if(!suballocations2nd.empty())
8537 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8538 resultBaseOffset = lastSuballoc.offset - allocSize;
8539 if(allocSize > lastSuballoc.offset)
8546 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin, then align DOWN because this stack grows downward.
8549 if(VMA_DEBUG_MARGIN > 0)
8551 if(resultOffset < VMA_DEBUG_MARGIN)
8555 resultOffset -= VMA_DEBUG_MARGIN;
8559 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Respect bufferImageGranularity against neighboring 2nd-vector items.
8563 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8565 bool bufferImageGranularityConflict =
false;
8566 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8568 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8569 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8571 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8573 bufferImageGranularityConflict =
true;
8581 if(bufferImageGranularityConflict)
8583 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Fits only if it does not collide with the end of the 1st vector.
8588 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8589 suballocations1st.back().offset + suballocations1st.back().size :
8591 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
8595 if(bufferImageGranularity > 1)
8597 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8599 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8600 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8602 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
8616 pAllocationRequest->offset = resultOffset;
8617 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8618 pAllocationRequest->sumItemSize = 0;
8620 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy 2: append at the end of the 1st vector ---
8626 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8630 VkDeviceSize resultBaseOffset = 0;
8631 if(!suballocations1st.empty())
8633 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8634 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8638 VkDeviceSize resultOffset = resultBaseOffset;
8641 if(VMA_DEBUG_MARGIN > 0)
8643 resultOffset += VMA_DEBUG_MARGIN;
8647 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8651 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8653 bool bufferImageGranularityConflict =
false;
8654 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8656 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8657 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8659 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8661 bufferImageGranularityConflict =
true;
8669 if(bufferImageGranularityConflict)
8671 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the block end, or at the bottom of the double stack.
8675 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8676 suballocations2nd.back().offset : size;
8679 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
8683 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8685 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8687 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8688 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8690 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8704 pAllocationRequest->offset = resultOffset;
8705 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8706 pAllocationRequest->sumItemSize = 0;
8708 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy 3: wrap around as ring buffer, after the 2nd vector ---
8715 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8717 VMA_ASSERT(!suballocations1st.empty());
8719 VkDeviceSize resultBaseOffset = 0;
8720 if(!suballocations2nd.empty())
8722 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8723 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8727 VkDeviceSize resultOffset = resultBaseOffset;
8730 if(VMA_DEBUG_MARGIN > 0)
8732 resultOffset += VMA_DEBUG_MARGIN;
8736 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8740 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8742 bool bufferImageGranularityConflict =
false;
8743 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8745 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8746 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8748 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8750 bufferImageGranularityConflict =
true;
8758 if(bufferImageGranularityConflict)
8760 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8764 pAllocationRequest->itemsToMakeLostCount = 0;
8765 pAllocationRequest->sumItemSize = 0;
8766 size_t index1st = m_1stNullItemsBeginCount;
// Optionally count 1st-vector allocations that overlap the candidate range
// and can be made lost (old enough per frameInUseCount).
8768 if(canMakeOtherLost)
8770 while(index1st < suballocations1st.size() &&
8771 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8774 const VmaSuballocation& suballoc = suballocations1st[index1st];
8775 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
8781 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8782 if(suballoc.hAllocation->CanBecomeLost() &&
8783 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8785 ++pAllocationRequest->itemsToMakeLostCount;
8786 pAllocationRequest->sumItemSize += suballoc.size;
// Also lose granularity-page neighbors if they conflict.
8798 if(bufferImageGranularity > 1)
8800 while(index1st < suballocations1st.size())
8802 const VmaSuballocation& suballoc = suballocations1st[index1st];
8803 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
8805 if(suballoc.hAllocation != VK_NULL_HANDLE)
8808 if(suballoc.hAllocation->CanBecomeLost() &&
8809 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8811 ++pAllocationRequest->itemsToMakeLostCount;
8812 pAllocationRequest->sumItemSize += suballoc.size;
// Success if the candidate range ends before the block end (no survivors
// ahead) or before the next surviving 1st-vector allocation.
8831 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
8832 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
8836 if(bufferImageGranularity > 1)
8838 for(
size_t nextSuballocIndex = index1st;
8839 nextSuballocIndex < suballocations1st.size();
8840 nextSuballocIndex++)
8842 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
8843 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8845 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8859 pAllocationRequest->offset = resultOffset;
8860 pAllocationRequest->sumFreeSize =
8861 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
8863 - pAllocationRequest->sumItemSize;
// Makes the allocations counted by a prior CreateAllocationRequest
// (itemsToMakeLostCount) actually lost, walking the 1st vector from the
// first non-null item. This path is only valid in empty/ring-buffer mode.
// NOTE(review): the tail of this function (failure branch, loop counter
// increment, cleanup, return) was dropped by extraction — gaps after fused
// line 8901; verify against the original file.
8873 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
8874 uint32_t currentFrameIndex,
8875 uint32_t frameInUseCount,
8876 VmaAllocationRequest* pAllocationRequest)
// Nothing to lose: trivially successful.
8878 if(pAllocationRequest->itemsToMakeLostCount == 0)
8883 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
8885 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8886 size_t index1st = m_1stNullItemsBeginCount;
8887 size_t madeLostCount = 0;
8888 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
8890 VMA_ASSERT(index1st < suballocations1st.size());
8891 VmaSuballocation& suballoc = suballocations1st[index1st];
8892 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8894 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8895 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
8896 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// On success the slot becomes a null (free) middle item and its size
// returns to the free pool.
8898 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8899 suballoc.hAllocation = VK_NULL_HANDLE;
8900 m_SumFreeSize += suballoc.size;
8901 ++m_1stNullItemsMiddleCount;
// Makes every lost-capable, sufficiently old allocation in both vectors
// lost; returns how many were lost. Freed slots become null items and their
// counters/free size are updated accordingly.
8918 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8920 uint32_t lostAllocationCount = 0;
// 1st vector: only items after the leading null run can be live.
8922 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8923 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8925 VmaSuballocation& suballoc = suballocations1st[i];
8926 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8927 suballoc.hAllocation->CanBecomeLost() &&
8928 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8930 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8931 suballoc.hAllocation = VK_NULL_HANDLE;
8932 ++m_1stNullItemsMiddleCount;
8933 m_SumFreeSize += suballoc.size;
8934 ++lostAllocationCount;
// 2nd vector: scan all items.
8938 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8939 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8941 VmaSuballocation& suballoc = suballocations2nd[i];
8942 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8943 suballoc.hAllocation->CanBecomeLost() &&
8944 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8946 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8947 suballoc.hAllocation = VK_NULL_HANDLE;
8948 ++m_2ndNullItemsCount;
8949 ++lostAllocationCount;
// If anything was lost, compaction/cleanup follows (body dropped by
// extraction — gap 8949 -> 8953/8958).
8953 if(lostAllocationCount)
8958 return lostAllocationCount;
// Checks the magic-value guard bytes written around every live allocation in
// both vectors (VMA_DEBUG_MARGIN before, and immediately after each one).
// Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard.
// NOTE(review): the final success return was dropped by extraction (lines
// after fused 8996); verify against the original file.
8961 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
8963 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8964 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8966 const VmaSuballocation& suballoc = suballocations1st[i];
8967 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard before the allocation (inside the debug margin).
8969 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
8971 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8972 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard after the allocation.
8974 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8976 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8977 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector.
8982 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8983 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8985 const VmaSuballocation& suballoc = suballocations2nd[i];
8986 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8988 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
8990 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8991 return VK_ERROR_VALIDATION_FAILED_EXT;
8993 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8995 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8996 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: records the new
// suballocation in the appropriate vector and switches the 2nd-vector mode
// when needed. Decreases m_SumFreeSize by the allocated size.
// NOTE(review): the hAllocation parameter line and the upper-address branch
// condition were dropped by extraction (gaps 9007 -> 9011 and 9011 -> 9015);
// verify against the original file.
9004 void VmaBlockMetadata_Linear::Alloc(
9005 const VmaAllocationRequest& request,
9006 VmaSuballocationType type,
9007 VkDeviceSize allocSize,
9011 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address allocation goes into the 2nd vector as a double stack; this
// is incompatible with ring-buffer usage.
9015 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
9016 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9017 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9018 suballocations2nd.push_back(newSuballoc);
9019 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
9023 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// First allocation in an empty block goes to the 1st vector.
9026 if(suballocations1st.empty())
9028 suballocations1st.push_back(newSuballoc);
// Offset past the current end of the 1st vector: plain append.
9033 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
9036 VMA_ASSERT(request.offset + allocSize <= GetSize());
9037 suballocations1st.push_back(newSuballoc);
// Offset before the first 1st-vector item: wrap around -> ring buffer in
// the 2nd vector.
9040 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
9042 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9044 switch(m_2ndVectorMode)
9046 case SECOND_VECTOR_EMPTY:
// First wrapped allocation: enter ring-buffer mode.
9048 VMA_ASSERT(suballocations2nd.empty());
9049 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
9051 case SECOND_VECTOR_RING_BUFFER:
9053 VMA_ASSERT(!suballocations2nd.empty());
9055 case SECOND_VECTOR_DOUBLE_STACK:
9056 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
9062 suballocations2nd.push_back(newSuballoc);
// Anything else means the request does not match the block's state.
9066 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
9071 m_SumFreeSize -= newSuballoc.size;
9074 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
9076 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at the given offset. Fast paths: first item of the
// 1st vector and last item of either vector; otherwise a sorted binary
// search in the middle of the 1st vector, then the 2nd vector.
// NOTE(review): the CleanupAfterFree()/return lines that end each successful
// branch were dropped by extraction (gaps in the fused numbering).
9079 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
9081 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9082 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9084 if(!suballocations1st.empty())
// Fast path: freeing the first (oldest) non-null item of the 1st vector —
// it just becomes part of the leading null run.
9087 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9088 if(firstSuballoc.offset == offset)
9090 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9091 firstSuballoc.hAllocation = VK_NULL_HANDLE;
9092 m_SumFreeSize += firstSuballoc.size;
9093 ++m_1stNullItemsBeginCount;
// Fast path: freeing the most recent item of the 2nd vector (top of stack /
// newest ring-buffer entry) — pop it outright.
9100 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
9101 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9103 VmaSuballocation& lastSuballoc = suballocations2nd.back();
9104 if(lastSuballoc.offset == offset)
9106 m_SumFreeSize += lastSuballoc.size;
9107 suballocations2nd.pop_back();
// Fast path: freeing the last item of the 1st vector when no 2nd vector
// exists — pop it outright.
9113 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
9115 VmaSuballocation& lastSuballoc = suballocations1st.back();
9116 if(lastSuballoc.offset == offset)
9118 m_SumFreeSize += lastSuballoc.size;
9119 suballocations1st.pop_back();
// General case: binary search by offset among the live items of the 1st
// vector (sorted ascending by offset).
9127 VmaSuballocation refSuballoc;
9128 refSuballoc.offset = offset;
9130 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
9131 suballocations1st.begin() + m_1stNullItemsBeginCount,
9132 suballocations1st.end(),
9134 if(it != suballocations1st.end())
// Found: mark the slot as a null middle item; memory returns to the pool.
9136 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9137 it->hAllocation = VK_NULL_HANDLE;
9138 ++m_1stNullItemsMiddleCount;
9139 m_SumFreeSize += it->size;
9145 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
// 2nd vector: sorted ascending in ring-buffer mode, descending in
// double-stack mode, so the comparator is chosen accordingly.
9148 VmaSuballocation refSuballoc;
9149 refSuballoc.offset = offset;
9151 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
9152 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
9153 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
9154 if(it != suballocations2nd.end())
9156 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9157 it->hAllocation = VK_NULL_HANDLE;
9158 ++m_2ndNullItemsCount;
9159 m_SumFreeSize += it->size;
// No suballocation matched the offset — caller passed a bad allocation.
9165 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic deciding whether the 1st suballocation vector is worth compacting
// (see CleanupAfterFree). True when the vector is non-trivial (> 32 entries)
// and freed "null" items outnumber live items by at least 3:2, i.e. at least
// ~60% of the entries are dead.
9168 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 9170 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9171 const size_t suballocCount = AccessSuballocations1st().size();
9172 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping after a Free(): trims dead (null) entries from both
// suballocation vectors, optionally compacts the 1st vector, and if the 1st
// vector has been fully drained, promotes the 2nd (ring-buffer) vector to
// become the new 1st vector. NOTE(review): several interior lines of this
// function are elided in this view (e.g. the IsEmpty() fast-path condition
// guarding the full reset below) — confirm against the full source.
9175 void VmaBlockMetadata_Linear::CleanupAfterFree()
9177 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9178 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Full reset path: both vectors dropped, all counters zeroed.
9182 suballocations1st.clear();
9183 suballocations2nd.clear();
9184 m_1stNullItemsBeginCount = 0;
9185 m_1stNullItemsMiddleCount = 0;
9186 m_2ndNullItemsCount = 0;
9187 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9191 const size_t suballoc1stCount = suballocations1st.size();
9192 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9193 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Advance the begin-null region over any freed items at the front of the
// 1st vector, reclassifying them from "middle" nulls to "begin" nulls.
9196 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9197 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9199 ++m_1stNullItemsBeginCount;
9200 --m_1stNullItemsMiddleCount;
// Pop freed items off the back of the 1st vector.
9204 while(m_1stNullItemsMiddleCount > 0 &&
9205 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9207 --m_1stNullItemsMiddleCount;
9208 suballocations1st.pop_back();
// Pop freed items off the back of the 2nd vector.
9212 while(m_2ndNullItemsCount > 0 &&
9213 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9215 --m_2ndNullItemsCount;
9216 suballocations2nd.pop_back();
9219 if(ShouldCompact1st())
// Compact in place: slide every live item left over the dead ones,
// then shrink the vector to just the live entries.
9221 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9222 size_t srcIndex = m_1stNullItemsBeginCount;
9223 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
// Skip over dead entries. NOTE(review): the ++srcIndex advances inside
// this loop are elided in this view.
9225 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9229 if(dstIndex != srcIndex)
9231 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9235 suballocations1st.resize(nonNullItemCount);
9236 m_1stNullItemsBeginCount = 0;
9237 m_1stNullItemsMiddleCount = 0;
9241 if(suballocations2nd.empty())
9243 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector holds no live items at all.
9247 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9249 suballocations1st.clear();
9250 m_1stNullItemsBeginCount = 0;
9252 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// Swap roles: the ring-buffer 2nd vector becomes the new 1st vector.
9255 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9256 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
// Re-derive the begin-null run for the promoted vector.
9257 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9258 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9260 ++m_1stNullItemsBeginCount;
9261 --m_1stNullItemsMiddleCount;
9263 m_2ndNullItemsCount = 0;
// Flips which internal vector AccessSuballocations1st()/2nd() return.
9264 m_1stVectorIndex ^= 1;
9269 VMA_HEAVY_ASSERT(Validate());
// Constructor: zero-initializes counters and the per-level free lists.
// Further member initializers are elided in this view.
9276 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
9277 VmaBlockMetadata(hAllocator),
9279 m_AllocationCount(0),
// All free-list heads/tails start as null.
9283 memset(m_FreeList, 0,
sizeof(m_FreeList));
9286 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
// Initializes buddy metadata for a block of `size` bytes. The usable size is
// rounded down to a power of two (buddy allocator requirement); the level
// count is derived from it, and a single free root node covering the whole
// usable range is created.
9291 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
9293 VmaBlockMetadata::Init(size);
// Buddy trees require power-of-two sizes; the remainder is "unusable".
9295 m_UsableSize = VmaPrevPow2(size);
9296 m_SumFreeSize = m_UsableSize;
// Count levels until nodes would drop below the minimum node size.
// NOTE(review): the ++m_LevelCount increment inside this loop is elided.
9300 while(m_LevelCount < MAX_LEVELS &&
9301 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Root node: free, spans the whole usable range, no parent/buddy.
9306 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
9307 rootNode->offset = 0;
9308 rootNode->type = Node::TYPE_FREE;
9309 rootNode->parent = VMA_NULL;
9310 rootNode->buddy = VMA_NULL;
9313 AddToFreeListFront(0, rootNode);
// Debug validation of the whole buddy structure: recursively checks the node
// tree, cross-checks the accumulated counters, and verifies every per-level
// free list is a consistent doubly-linked list of TYPE_FREE nodes.
9316 bool VmaBlockMetadata_Buddy::Validate()
const 9319 ValidationContext ctx;
9320 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
9322 VMA_VALIDATE(
false &&
"ValidateNode failed.");
// Counters maintained incrementally must match the recount.
9324 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9325 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
9328 for(uint32_t level = 0; level < m_LevelCount; ++level)
// Head of the list has no predecessor.
9330 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9331 m_FreeList[level].front->free.prev == VMA_NULL);
9333 for(Node* node = m_FreeList[level].front;
9335 node = node->free.next)
9337 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9339 if(node->free.next == VMA_NULL)
// Tail pointer must agree with the last node reached.
9341 VMA_VALIDATE(m_FreeList[level].back == node);
9345 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond the configured count must stay empty.
9351 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9353 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Returns the size of the largest free node: the first non-empty free list,
// scanning from the largest level (level 0), determines the answer.
9359 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 9361 for(uint32_t level = 0; level < m_LevelCount; ++level)
9363 if(m_FreeList[level].front != VMA_NULL)
9365 return LevelToNodeSize(level);
// Fills `outInfo` by walking the node tree; the unusable tail (size not
// covered by the power-of-two usable size) is accounted for separately.
// The initialization of outInfo and the unusable-range accounting lines are
// elided in this view.
9371 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9373 const VkDeviceSize unusableSize = GetUnusableSize();
9384 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
9386 if(unusableSize > 0)
// Accumulates this block's totals into pool-level statistics. The unusable
// tail is counted as unused space; additional counter updates are elided in
// this view.
9395 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 9397 const VkDeviceSize unusableSize = GetUnusableSize();
9399 inoutStats.
size += GetSize();
9400 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
9405 if(unusableSize > 0)
// Emits a JSON description of this buddy block: overall stats, then a
// recursive dump of the node tree, then the unusable tail as an unused range.
9412 #if VMA_STATS_STRING_ENABLED 9414 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 9418 CalcAllocationStatInfo(stat);
// Arguments of PrintDetailedMap_Begin are elided in this view.
9420 PrintDetailedMap_Begin(
9426 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9428 const VkDeviceSize unusableSize = GetUnusableSize();
9429 if(unusableSize > 0)
9431 PrintDetailedMap_UnusedRange(json,
9436 PrintDetailedMap_End(json);
9439 #endif // #if VMA_STATS_STRING_ENABLED 9441 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
// Tries to find space for an allocation in this buddy block. On success the
// chosen level is stashed in pAllocationRequest->customData for Alloc() to
// reuse. Lost-allocation support is not implemented by the buddy algorithm
// (itemsToMakeLostCount is always 0).
9442 uint32_t currentFrameIndex,
9443 uint32_t frameInUseCount,
9444 VkDeviceSize bufferImageGranularity,
9445 VkDeviceSize allocSize,
9446 VkDeviceSize allocAlignment,
9448 VmaSuballocationType allocType,
9449 bool canMakeOtherLost,
9451 VmaAllocationRequest* pAllocationRequest)
9453 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservatively widen alignment/size to the buffer-image granularity for
// allocation types whose content kind is unknown or optimal-tiled, to avoid
// aliasing hazards with neighboring suballocations.
9457 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9458 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9459 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9461 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
9462 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
9465 if(allocSize > m_UsableSize)
// Scan from the target level up toward level 0 (larger nodes), taking the
// first suitably aligned free node.
9470 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9471 for(uint32_t level = targetLevel + 1; level--; )
9473 for(Node* freeNode = m_FreeList[level].front;
9474 freeNode != VMA_NULL;
9475 freeNode = freeNode->free.next)
9477 if(freeNode->offset % allocAlignment == 0)
9479 pAllocationRequest->offset = freeNode->offset;
9480 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
9481 pAllocationRequest->sumItemSize = 0;
9482 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember which level the node was found at.
9483 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm never produces requests that require making other
// allocations lost, so this succeeds iff no items were requested to be lost.
9492 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
9493 uint32_t currentFrameIndex,
9494 uint32_t frameInUseCount,
9495 VmaAllocationRequest* pAllocationRequest)
9501 return pAllocationRequest->itemsToMakeLostCount == 0;
9504 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits an allocation previously found by CreateAllocationRequest: locates
// the free node at the level stored in request.customData, splits it
// repeatedly until reaching the target level, then marks the final node as
// allocated and updates counters.
9513 void VmaBlockMetadata_Buddy::Alloc(
9514 const VmaAllocationRequest& request,
9515 VmaSuballocationType type,
9516 VkDeviceSize allocSize,
9520 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9521 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the free node at request.offset on that level's free list.
9523 Node* currNode = m_FreeList[currLevel].front;
9524 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9525 while(currNode->offset != request.offset)
9527 currNode = currNode->free.next;
9528 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split the node level by level until it matches the requested size.
9532 while(currLevel < targetLevel)
// The node being split leaves the free list; its two children join it.
9536 RemoveFromFreeList(currLevel, currNode);
9538 const uint32_t childrenLevel = currLevel + 1;
9541 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
9542 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
9544 leftChild->offset = currNode->offset;
9545 leftChild->type = Node::TYPE_FREE;
9546 leftChild->parent = currNode;
9547 leftChild->buddy = rightChild;
9549 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9550 rightChild->type = Node::TYPE_FREE;
9551 rightChild->parent = currNode;
9552 rightChild->buddy = leftChild;
9555 currNode->type = Node::TYPE_SPLIT;
9556 currNode->split.leftChild = leftChild;
// Push right then left so the left child ends up at the front.
9559 AddToFreeListFront(childrenLevel, rightChild);
9560 AddToFreeListFront(childrenLevel, leftChild);
// NOTE(review): the ++currLevel step and the offset-based child selection
// between here and the next line are elided in this view.
9565 currNode = m_FreeList[currLevel].front;
9574 VMA_ASSERT(currLevel == targetLevel &&
9575 currNode != VMA_NULL &&
9576 currNode->type == Node::TYPE_FREE);
9577 RemoveFromFreeList(currLevel, currNode);
9580 currNode->type = Node::TYPE_ALLOCATION;
9581 currNode->allocation.alloc = hAllocation;
9583 ++m_AllocationCount;
9585 m_SumFreeSize -= allocSize;
// Recursively deletes a node subtree: for a split node, both children
// (right child reached via the left child's buddy pointer) are deleted
// before the node itself.
9588 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
9590 if(node->type == Node::TYPE_SPLIT)
9592 DeleteNode(node->split.leftChild->buddy);
9593 DeleteNode(node->split.leftChild);
9596 vma_delete(GetAllocationCallbacks(), node);
// Recursive consistency check for one node: verifies parent/buddy linkage,
// accumulates allocation/free statistics into `ctx`, and recurses into both
// children of a split node. The switch(curr->type) header is elided in this
// view.
9599 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 9601 VMA_VALIDATE(level < m_LevelCount);
9602 VMA_VALIDATE(curr->parent == parent);
// Only the root (parent == null) has no buddy.
9603 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9604 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9607 case Node::TYPE_FREE:
9609 ctx.calculatedSumFreeSize += levelNodeSize;
9610 ++ctx.calculatedFreeCount;
9612 case Node::TYPE_ALLOCATION:
9613 ++ctx.calculatedAllocationCount;
// Internal fragmentation (node size minus actual allocation size)
// still counts as free space.
9614 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
9615 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
9617 case Node::TYPE_SPLIT:
9619 const uint32_t childrenLevel = level + 1;
9620 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
9621 const Node*
const leftChild = curr->split.leftChild;
9622 VMA_VALIDATE(leftChild != VMA_NULL);
// Left child starts at the parent's offset; right child half a node later.
9623 VMA_VALIDATE(leftChild->offset == curr->offset);
9624 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
9626 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
9628 const Node*
const rightChild = leftChild->buddy;
9629 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9630 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
9632 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest level whose node size still fits it:
// descends while the next (halved) level would still hold allocSize.
// NOTE(review): the `level` variable declaration, its increment, and the
// return statement are elided in this view.
9643 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 9647 VkDeviceSize currLevelNodeSize = m_UsableSize;
9648 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
9649 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
9652 currLevelNodeSize = nextLevelNodeSize;
9653 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at `offset`: walks the tree from the root following
// split nodes toward the offset, marks the found node free, then merges it
// with its buddy repeatedly while the buddy is also free.
9658 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
9661 Node* node = m_Root;
9662 VkDeviceSize nodeOffset = 0;
9664 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend: pick left child if the offset falls in the first half,
// otherwise the right child (left child's buddy).
9665 while(node->type == Node::TYPE_SPLIT)
9667 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
9668 if(offset < nodeOffset + nextLevelSize)
9670 node = node->split.leftChild;
9674 node = node->split.leftChild->buddy;
9675 nodeOffset += nextLevelSize;
// NOTE(review): the ++level step in this loop is elided in this view.
9678 levelNodeSize = nextLevelSize;
9681 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
9682 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
9685 --m_AllocationCount;
9686 m_SumFreeSize += alloc->GetSize();
9688 node->type = Node::TYPE_FREE;
// Merge upward: while the buddy is also free, collapse both children back
// into the parent, which becomes the new free node one level up.
9691 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
9693 RemoveFromFreeList(level, node->buddy);
9694 Node*
const parent = node->parent;
9696 vma_delete(GetAllocationCallbacks(), node->buddy);
9697 vma_delete(GetAllocationCallbacks(), node);
9698 parent->type = Node::TYPE_FREE;
// NOTE(review): node = parent / --level steps are elided in this view.
9706 AddToFreeListFront(level, node);
// Recursive helper of CalcAllocationStatInfo: accumulates per-node stats.
// Free nodes count as unused ranges; allocated nodes contribute their
// allocation size plus any internal-fragmentation remainder; split nodes
// recurse into both children. The switch header and the individual counter
// updates are elided in this view.
9709 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 9713 case Node::TYPE_FREE:
9719 case Node::TYPE_ALLOCATION:
9721 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Padding between the allocation's size and the node's size is
// reported as an unused range.
9727 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
9728 if(unusedRangeSize > 0)
9737 case Node::TYPE_SPLIT:
9739 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9740 const Node*
const leftChild = node->split.leftChild;
9741 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
9742 const Node*
const rightChild = leftChild->buddy;
9743 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a free node onto the front of the given level's doubly-linked free
// list, updating front/back pointers as needed.
9751 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
9753 VMA_ASSERT(node->type == Node::TYPE_FREE);
9756 Node*
const frontNode = m_FreeList[level].front;
9757 if(frontNode == VMA_NULL)
// List was empty: node becomes both front and back.
9759 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9760 node->free.prev = node->free.next = VMA_NULL;
9761 m_FreeList[level].front = m_FreeList[level].back = node;
// Non-empty list: link node before the current front.
9765 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9766 node->free.prev = VMA_NULL;
9767 node->free.next = frontNode;
9768 frontNode->free.prev = node;
9769 m_FreeList[level].front = node;
// Unlinks a node from the given level's doubly-linked free list, fixing up
// front/back pointers when the node is at either end.
9773 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
9775 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Fix the predecessor link (or the list's front pointer).
9778 if(node->free.prev == VMA_NULL)
9780 VMA_ASSERT(m_FreeList[level].front == node);
9781 m_FreeList[level].front = node->free.next;
9785 Node*
const prevFreeNode = node->free.prev;
9786 VMA_ASSERT(prevFreeNode->free.next == node);
9787 prevFreeNode->free.next = node->free.next;
// Fix the successor link (or the list's back pointer).
9791 if(node->free.next == VMA_NULL)
9793 VMA_ASSERT(m_FreeList[level].back == node);
9794 m_FreeList[level].back = node->free.prev;
9798 Node*
const nextFreeNode = node->free.next;
9799 VMA_ASSERT(nextFreeNode->free.prev == node);
9800 nextFreeNode->free.prev = node->free.prev;
// Recursive helper of PrintDetailedMap: writes one node's contribution to the
// JSON dump. Free nodes become unused ranges; allocated nodes report the
// allocation plus any internal-fragmentation padding; split nodes recurse
// into both children. The switch header is elided in this view.
9804 #if VMA_STATS_STRING_ENABLED 9805 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 9809 case Node::TYPE_FREE:
9810 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
9812 case Node::TYPE_ALLOCATION:
9814 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
9815 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report padding after the allocation within this node, if any.
9816 if(allocSize < levelNodeSize)
9818 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
9822 case Node::TYPE_SPLIT:
9824 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9825 const Node*
const leftChild = node->split.leftChild;
9826 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
9827 const Node*
const rightChild = leftChild->buddy;
9828 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
9835 #endif // #if VMA_STATS_STRING_ENABLED 9841 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
// Constructor: members start empty/invalid; real setup happens in Init().
VmaAllocator hAllocator) :
9842 m_pMetadata(VMA_NULL),
9843 m_MemoryTypeIndex(UINT32_MAX),
9845 m_hMemory(VK_NULL_HANDLE),
9847 m_pMappedData(VMA_NULL)
// Attaches a freshly allocated VkDeviceMemory to this block and creates the
// metadata object matching the chosen algorithm (linear, buddy, or the
// default generic allocator). The algorithm switch lines are elided in this
// view. Must be called exactly once: m_hMemory must still be null.
9851 void VmaDeviceMemoryBlock::Init(
9853 uint32_t newMemoryTypeIndex,
9854 VkDeviceMemory newMemory,
9855 VkDeviceSize newSize,
9859 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
9861 m_MemoryTypeIndex = newMemoryTypeIndex;
9863 m_hMemory = newMemory;
9868 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
9871 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
// Default algorithm.
9877 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
9879 m_pMetadata->Init(newSize);
// Releases the block's VkDeviceMemory and metadata. The block must be empty:
// all suballocations freed beforehand.
9882 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
9886 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
9888 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
// Returns the memory to the allocator (which calls vkFreeMemory).
9889 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
9890 m_hMemory = VK_NULL_HANDLE;
9892 vma_delete(allocator, m_pMetadata);
9893 m_pMetadata = VMA_NULL;
// Sanity check: the block must hold live memory of nonzero size, and the
// metadata must validate.
9896 bool VmaDeviceMemoryBlock::Validate()
const 9898 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
9899 (m_pMetadata->GetSize() != 0));
9901 return m_pMetadata->Validate();
// Maps the block, asks the metadata to validate the debug margins around all
// allocations, then unmaps. Returns the mapping error if Map() fails.
9904 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
9906 void* pData =
nullptr;
9907 VkResult res = Map(hAllocator, 1, &pData);
9908 if(res != VK_SUCCESS)
9913 res = m_pMetadata->CheckCorruption(pData);
// Balance the Map() above.
9915 Unmap(hAllocator, 1);
// Reference-counted map of the whole block. Under the block mutex: if already
// mapped, just bump m_MapCount and hand back the cached pointer; otherwise
// call vkMapMemory (some arguments elided in this view) and cache the result.
// `ppData` may be null when the caller only wants the side effect.
9920 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
9927 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: reuse the existing mapping.
9930 m_MapCount += count;
9931 VMA_ASSERT(m_pMappedData != VMA_NULL);
9932 if(ppData != VMA_NULL)
9934 *ppData = m_pMappedData;
// First mapping: call into Vulkan.
9940 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9941 hAllocator->m_hDevice,
9947 if(result == VK_SUCCESS)
9949 if(ppData != VMA_NULL)
9951 *ppData = m_pMappedData;
// Reference-counted unmap, the counterpart of Map(). Under the block mutex:
// decrements m_MapCount by `count` and calls vkUnmapMemory only when the
// count reaches zero (that condition's line is elided in this view).
// Asserts on unbalanced unmaps.
9959 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
9966 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9967 if(m_MapCount >= count)
9969 m_MapCount -= count;
9972 m_pMappedData = VMA_NULL;
9973 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
9978 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Corruption-detection support: maps the block and writes magic sentinel
// values into the debug margins immediately before and after the given
// allocation range. Only meaningful when VMA_DEBUG_MARGIN and
// VMA_DEBUG_DETECT_CORRUPTION are enabled.
9982 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
9984 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
9985 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
9988 VkResult res = Map(hAllocator, 1, &pData);
9989 if(res != VK_SUCCESS)
// Sentinels go into the margin before the allocation and right after it.
9994 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
9995 VmaWriteMagicValue(pData, allocOffset + allocSize);
9997 Unmap(hAllocator, 1);
// Counterpart of WriteMagicValueAroundAllocation: maps the block and checks
// that the magic sentinels around the allocation are intact, asserting with
// a diagnostic message if either margin was overwritten.
10002 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10004 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10005 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10008 VkResult res = Map(hAllocator, 1, &pData);
10009 if(res != VK_SUCCESS)
// A damaged leading margin means something wrote before the allocation.
10014 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
10016 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
10018 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
10020 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
10023 Unmap(hAllocator, 1);
// Binds a VkBuffer to this block's memory at the allocation's offset.
// Serialized on the block mutex because vkBindBufferMemory must not race
// with other bind/map calls on the same VkDeviceMemory. Some parameter lines
// are elided in this view.
10028 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
10033 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10034 hAllocation->GetBlock() ==
this);
10036 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10037 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
10038 hAllocator->m_hDevice,
10041 hAllocation->GetOffset());
// Binds a VkImage to this block's memory at the allocation's offset.
// Mirrors BindBufferMemory; serialized on the same block mutex. Some
// parameter lines are elided in this view.
10044 VkResult VmaDeviceMemoryBlock::BindImageMemory(
10049 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10050 hAllocation->GetBlock() ==
this);
10052 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10053 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
10054 hAllocator->m_hDevice,
10057 hAllocation->GetOffset());
// Fragment of a stat-info initialization helper: zero-fills a VmaStatInfo.
// The enclosing function signature is elided in this view.
10062 memset(&outInfo, 0,
sizeof(outInfo));
// Post-processing of accumulated statistics (e.g. computing averages —
// body elided in this view; TODO confirm against the full source).
10081 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the user's VmaPoolCreateInfo into the embedded
// VmaBlockVector. A zero blockSize means "use the allocator's preferred
// block size", and in that case the vector is told the size is not explicit.
10089 VmaPool_T::VmaPool_T(
10092 VkDeviceSize preferredBlockSize) :
10095 createInfo.memoryTypeIndex,
10096 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
10097 createInfo.minBlockCount,
10098 createInfo.maxBlockCount,
10100 createInfo.frameInUseCount,
// explicitBlockSize: true only when the user pinned a block size.
10102 createInfo.blockSize != 0,
10108 VmaPool_T::~VmaPool_T()
10112 #if VMA_STATS_STRING_ENABLED 10114 #endif // #if VMA_STATS_STRING_ENABLED 10116 VmaBlockVector::VmaBlockVector(
// Constructor: copies the configuration into members; the block list starts
// empty and uses the allocator's allocation callbacks. Some parameters and
// initializers are elided in this view.
10118 uint32_t memoryTypeIndex,
10119 VkDeviceSize preferredBlockSize,
10120 size_t minBlockCount,
10121 size_t maxBlockCount,
10122 VkDeviceSize bufferImageGranularity,
10123 uint32_t frameInUseCount,
10125 bool explicitBlockSize,
10126 uint32_t algorithm) :
10127 m_hAllocator(hAllocator),
10128 m_MemoryTypeIndex(memoryTypeIndex),
10129 m_PreferredBlockSize(preferredBlockSize),
10130 m_MinBlockCount(minBlockCount),
10131 m_MaxBlockCount(maxBlockCount),
10132 m_BufferImageGranularity(bufferImageGranularity),
10133 m_FrameInUseCount(frameInUseCount),
10134 m_IsCustomPool(isCustomPool),
10135 m_ExplicitBlockSize(explicitBlockSize),
10136 m_Algorithm(algorithm),
10137 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
10138 m_HasEmptyBlock(false),
10139 m_pDefragmentator(VMA_NULL),
// Destructor: destroys and deletes every owned block, iterating backward.
// Any active defragmentator must have been destroyed first.
10144 VmaBlockVector::~VmaBlockVector()
10146 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
10148 for(
size_t i = m_Blocks.size(); i--; )
10150 m_Blocks[i]->Destroy(m_hAllocator);
10151 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, bailing out on
// the first failure (the error-return lines are elided in this view).
10155 VkResult VmaBlockVector::CreateMinBlocks()
10157 for(
size_t i = 0; i < m_MinBlockCount; ++i)
10159 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
10160 if(res != VK_SUCCESS)
// Accumulates pool statistics across all blocks, under the vector mutex.
// The zero-initialization of *pStats is elided in this view.
10168 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
10170 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10172 const size_t blockCount = m_Blocks.size();
10181 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10183 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10184 VMA_ASSERT(pBlock);
10185 VMA_HEAVY_ASSERT(pBlock->Validate());
10186 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption detection requires the debug-margin machinery to be compiled in
// AND the memory type to be both HOST_VISIBLE and HOST_COHERENT, since the
// magic values are written/read through a CPU mapping.
10190 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 10192 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
10193 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
10194 (VMA_DEBUG_MARGIN > 0) &&
10195 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
10198 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Main allocation routine for this block vector. Strategy (several condition
// and argument lines are elided in this view):
//   1. Try the existing blocks (last block first, then forward or backward
//      scan depending on strategy).
//   2. If allowed, create a new block — preferring progressively smaller
//      sizes when the preferred size fails and no explicit size was set.
//   3. As a last resort, if canMakeOtherLost, repeatedly pick the cheapest
//      request that evicts "lost" allocations, up to VMA_ALLOCATION_TRY_COUNT
//      attempts.
// Everything after the early-outs runs under the vector mutex.
10200 VkResult VmaBlockVector::Allocate(
10202 uint32_t currentFrameIndex,
10204 VkDeviceSize alignment,
10206 VmaSuballocationType suballocType,
10213 const bool canCreateNewBlock =
10215 (m_Blocks.size() < m_MaxBlockCount);
10222 canMakeOtherLost =
false;
// Upper-address allocations are only valid with the linear algorithm
// (the second condition here is elided in this view).
10226 if(isUpperAddress &&
10229 return VK_ERROR_FEATURE_NOT_PRESENT;
10243 return VK_ERROR_FEATURE_NOT_PRESENT;
// An allocation larger than a whole block (plus both debug margins)
// can never fit.
10247 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
10249 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10252 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10259 if(!canMakeOtherLost || canCreateNewBlock)
// Fast path: try the most recently used (last) block first.
10268 if(!m_Blocks.empty())
10270 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
10271 VMA_ASSERT(pCurrBlock);
10272 VkResult res = AllocateFromBlock(
10283 if(res == VK_SUCCESS)
10285 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// Forward scan over all existing blocks.
10295 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10297 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10298 VMA_ASSERT(pCurrBlock);
10299 VkResult res = AllocateFromBlock(
10310 if(res == VK_SUCCESS)
10312 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// Backward scan variant (used under a different allocation strategy —
// the selecting condition is elided in this view).
10320 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10322 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10323 VMA_ASSERT(pCurrBlock);
10324 VkResult res = AllocateFromBlock(
10335 if(res == VK_SUCCESS)
10337 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// No existing block worked: try to create a new one.
10345 if(canCreateNewBlock)
10348 VkDeviceSize newBlockSize = m_PreferredBlockSize;
10349 uint32_t newBlockSizeShift = 0;
10350 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
10352 if(!m_ExplicitBlockSize)
// Heuristic: start with a smaller block than preferred if all
// existing blocks are smaller and the allocation still fits with
// room to spare (at most 3 halvings).
10355 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
10356 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
10358 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10359 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
10361 newBlockSize = smallerNewBlockSize;
10362 ++newBlockSizeShift;
10371 size_t newBlockIndex = 0;
10372 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device-memory failure, retry with progressively halved sizes
// as long as the allocation still fits.
10374 if(!m_ExplicitBlockSize)
10376 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
10378 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10379 if(smallerNewBlockSize >= size)
10381 newBlockSize = smallerNewBlockSize;
10382 ++newBlockSizeShift;
10383 res = CreateBlock(newBlockSize, &newBlockIndex);
10392 if(res == VK_SUCCESS)
10394 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
10395 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
10397 res = AllocateFromBlock(
10408 if(res == VK_SUCCESS)
10410 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
// Allocation didn't fit even in a brand-new block.
10416 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Last resort: evict lost allocations to make room.
10423 if(canMakeOtherLost)
10425 uint32_t tryIndex = 0;
10426 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
10428 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
10429 VmaAllocationRequest bestRequest = {};
10430 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: pick the request with the lowest eviction cost.
10436 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10438 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10439 VMA_ASSERT(pCurrBlock);
10440 VmaAllocationRequest currRequest = {};
10441 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10444 m_BufferImageGranularity,
10453 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10454 if(pBestRequestBlock == VMA_NULL ||
10455 currRequestCost < bestRequestCost)
10457 pBestRequestBlock = pCurrBlock;
10458 bestRequest = currRequest;
10459 bestRequestCost = currRequestCost;
// Cost 0 means nothing gets evicted — can't do better.
10461 if(bestRequestCost == 0)
// Backward scan variant (strategy-dependent; condition elided).
10472 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10474 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10475 VMA_ASSERT(pCurrBlock);
10476 VmaAllocationRequest currRequest = {};
10477 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10480 m_BufferImageGranularity,
10489 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10490 if(pBestRequestBlock == VMA_NULL ||
10491 currRequestCost < bestRequestCost ||
10494 pBestRequestBlock = pCurrBlock;
10495 bestRequest = currRequest;
10496 bestRequestCost = currRequestCost;
10498 if(bestRequestCost == 0 ||
10508 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations need the block mapped up front
// (the selecting condition is elided in this view).
10512 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
10513 if(res != VK_SUCCESS)
// Eviction may fail if frames advanced meanwhile; then retry.
10519 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
10525 if(pBestRequestBlock->m_pMetadata->IsEmpty())
10527 m_HasEmptyBlock =
false;
10530 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10531 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
10532 (*pAllocation)->InitBlockAllocation(
10535 bestRequest.offset,
10541 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
10542 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
10543 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
10544 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10546 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10548 if(IsCorruptionDetectionEnabled())
10550 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
10551 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Retry budget exhausted.
10566 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
10568 return VK_ERROR_TOO_MANY_OBJECTS;
10572 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation back into its block. Under the mutex: validates debug
// margins, unmaps persistently-mapped allocations, frees the suballocation,
// and manages the "keep at most one empty block" policy — the actual Vulkan
// memory release (Destroy) is deliberately done after the mutex is dropped.
10575 void VmaBlockVector::Free(
10578 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock (closing brace elided in this view).
10582 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10584 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
10586 if(IsCorruptionDetectionEnabled())
10588 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
10589 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Balance the persistent Map() done at allocation time.
10592 if(hAllocation->IsPersistentMap())
10594 pBlock->Unmap(m_hAllocator, 1);
10597 pBlock->m_pMetadata->Free(hAllocation);
10598 VMA_HEAVY_ASSERT(pBlock->Validate());
10600 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
10603 if(pBlock->m_pMetadata->IsEmpty())
// Already have one empty block cached — this one can be deleted
// (removal from m_Blocks is elided in this view).
10606 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
10608 pBlockToDelete = pBlock;
// Otherwise keep this block as the single cached empty block.
10614 m_HasEmptyBlock =
true;
// Block is not empty, but there may be a stale empty block at the end.
10619 else if(m_HasEmptyBlock)
10621 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
10622 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
10624 pBlockToDelete = pLastBlock;
10625 m_Blocks.pop_back();
10626 m_HasEmptyBlock =
false;
10630 IncrementallySortBlocks();
// Outside the lock: destroying VkDeviceMemory may be slow.
10635 if(pBlockToDelete != VMA_NULL)
10637 VMA_DEBUG_LOG(
" Deleted empty allocation");
10638 pBlockToDelete->Destroy(m_hAllocator);
10639 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the largest existing block size, scanning backward and stopping
// early once a block at least as large as the preferred size is seen
// (the break and return statements are elided in this view).
10643 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 10645 VkDeviceSize result = 0;
10646 for(
size_t i = m_Blocks.size(); i--; )
10648 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
10649 if(result >= m_PreferredBlockSize)
// Removes (but does not destroy) the given block from m_Blocks by linear
// search. Used by the defragmentation path.
10657 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
10659 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10661 if(m_Blocks[blockIndex] == pBlock)
10663 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass keeping m_Blocks roughly ordered by increasing free
// space, so allocation scans hit fuller blocks first. Called after each Free;
// full sorting is amortized across many calls.
10670 void VmaBlockVector::IncrementallySortBlocks()
10675 for(
size_t i = 1; i < m_Blocks.size(); ++i)
10677 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
10679 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts the whole allocation sequence within one specific block: request,
// optional map (for persistently-mapped allocations), commit, init the
// VmaAllocation, then debug fill / magic-value writes. Several argument and
// condition lines are elided in this view. Returns
// VK_ERROR_OUT_OF_DEVICE_MEMORY when the block has no room.
10686 VkResult VmaBlockVector::AllocateFromBlock(
10687 VmaDeviceMemoryBlock* pBlock,
10689 uint32_t currentFrameIndex,
10691 VkDeviceSize alignment,
10694 VmaSuballocationType suballocType,
10703 VmaAllocationRequest currRequest = {};
10704 if(pBlock->m_pMetadata->CreateAllocationRequest(
10707 m_BufferImageGranularity,
// This path never evicts other allocations.
10717 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Map now if the allocation is persistently mapped (condition elided).
10721 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
10722 if(res != VK_SUCCESS)
// The block is about to hold an allocation, so it is no longer empty.
10729 if(pBlock->m_pMetadata->IsEmpty())
10731 m_HasEmptyBlock =
false;
10734 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10735 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
10736 (*pAllocation)->InitBlockAllocation(
10739 currRequest.offset,
10745 VMA_HEAVY_ASSERT(pBlock->Validate());
10746 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
10747 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10749 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10751 if(IsCorruptionDetectionEnabled())
10753 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
10754 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
10758 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of `blockSize` for this vector's memory
// type, wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and
// optionally reports its index. The error-return path after a failed
// AllocateVulkanMemory and some Init() arguments are elided in this view.
10761 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
10763 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
10764 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
10765 allocInfo.allocationSize = blockSize;
10766 VkDeviceMemory mem = VK_NULL_HANDLE;
10767 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
10776 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
10781 allocInfo.allocationSize,
10785 m_Blocks.push_back(pBlock);
10786 if(pNewBlockIndex != VMA_NULL)
10788 *pNewBlockIndex = m_Blocks.size() - 1;
10794 #if VMA_STATS_STRING_ENABLED 10796 void VmaBlockVector::PrintDetailedMap(
// Writes a JSON object describing this block vector. Custom pools get
// MemoryTypeIndex/BlockSize/BlockCount/FrameInUseCount/Algorithm fields;
// the default pools report PreferredBlockSize instead (the pool-kind
// condition lines are elided in this view). Then every block is dumped
// keyed by its id. Serialized on the vector mutex.
class VmaJsonWriter& json)
10798 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10800 json.BeginObject();
// Custom-pool branch.
10804 json.WriteString(
"MemoryTypeIndex");
10805 json.WriteNumber(m_MemoryTypeIndex);
10807 json.WriteString(
"BlockSize");
10808 json.WriteNumber(m_PreferredBlockSize);
10810 json.WriteString(
"BlockCount");
10811 json.BeginObject(
true);
10812 if(m_MinBlockCount > 0)
10814 json.WriteString(
"Min");
10815 json.WriteNumber((uint64_t)m_MinBlockCount);
10817 if(m_MaxBlockCount < SIZE_MAX)
10819 json.WriteString(
"Max");
10820 json.WriteNumber((uint64_t)m_MaxBlockCount);
10822 json.WriteString(
"Cur");
10823 json.WriteNumber((uint64_t)m_Blocks.size());
10826 if(m_FrameInUseCount > 0)
10828 json.WriteString(
"FrameInUseCount");
10829 json.WriteNumber(m_FrameInUseCount);
10832 if(m_Algorithm != 0)
10834 json.WriteString(
"Algorithm");
10835 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default-pool branch.
10840 json.WriteString(
"PreferredBlockSize");
10841 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed dumps, keyed by block id.
10844 json.WriteString(
"Blocks");
10845 json.BeginObject();
10846 for(
size_t i = 0; i < m_Blocks.size(); ++i)
10848 json.BeginString();
10849 json.ContinueString(m_Blocks[i]->GetId());
10852 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// VmaBlockVector::EnsureDefragmentator — lazily creates the per-vector
// VmaDefragmentator on first use and returns it.
10859 #endif // #if VMA_STATS_STRING_ENABLED 10861 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
10863 uint32_t currentFrameIndex)
10865 if(m_pDefragmentator == VMA_NULL)
10867 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
10870 currentFrameIndex);
10873 return m_pDefragmentator;
// VmaBlockVector::Defragment — runs the defragmentator under the vector's mutex,
// updates the caller's remaining move budget (maxBytesToMove/maxAllocationsToMove
// are in-out), accumulates stats, and frees blocks emptied by the moves.
10876 VkResult VmaBlockVector::Defragment(
10878 VkDeviceSize& maxBytesToMove,
10879 uint32_t& maxAllocationsToMove)
// No defragmentator was ever requested for this vector: nothing to do.
10881 if(m_pDefragmentator == VMA_NULL)
10886 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10889 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Report what was moved and subtract it from the caller's budget.
10892 if(pDefragmentationStats != VMA_NULL)
10894 const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
10895 const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
10896 pDefragmentationStats->
bytesMoved += bytesMoved;
10898 VMA_ASSERT(bytesMoved <= maxBytesToMove);
10899 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
10900 maxBytesToMove -= bytesMoved;
10901 maxAllocationsToMove -= allocationsMoved;
// Re-scan blocks back-to-front: destroy empty blocks above m_MinBlockCount and
// recompute the m_HasEmptyBlock flag.
10905 m_HasEmptyBlock =
false;
10906 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10908 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
10909 if(pBlock->m_pMetadata->IsEmpty())
10911 if(m_Blocks.size() > m_MinBlockCount)
10913 if(pDefragmentationStats != VMA_NULL)
10916 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
10919 VmaVectorRemove(m_Blocks, blockIndex);
10920 pBlock->Destroy(m_hAllocator);
10921 vma_delete(m_hAllocator, pBlock);
// An empty block was kept (min count reached): remember it.
10925 m_HasEmptyBlock =
true;
// VmaBlockVector::DestroyDefragmentator — deletes the lazily created defragmentator,
// if any, and resets the pointer. Safe to call when none exists.
10933 void VmaBlockVector::DestroyDefragmentator()
10935 if(m_pDefragmentator != VMA_NULL)
10937 vma_delete(m_hAllocator, m_pDefragmentator);
10938 m_pDefragmentator = VMA_NULL;
// VmaBlockVector::MakePoolAllocationsLost — marks allocations in every block as lost
// (subject to currentFrameIndex and m_FrameInUseCount), under the vector's mutex.
// Optionally reports the total number of allocations that were made lost.
10942 void VmaBlockVector::MakePoolAllocationsLost(
10943 uint32_t currentFrameIndex,
10944 size_t* pLostAllocationCount)
10946 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10947 size_t lostAllocationCount = 0;
10948 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10950 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10951 VMA_ASSERT(pBlock);
10952 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// The out-parameter is optional.
10954 if(pLostAllocationCount != VMA_NULL)
10956 *pLostAllocationCount = lostAllocationCount;
// VmaBlockVector::CheckCorruption — validates the magic values of every block.
// Returns VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not enabled
// for this memory type; otherwise propagates the first per-block failure.
10960 VkResult VmaBlockVector::CheckCorruption()
10962 if(!IsCorruptionDetectionEnabled())
10964 return VK_ERROR_FEATURE_NOT_PRESENT;
10967 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10968 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10970 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10971 VMA_ASSERT(pBlock);
10972 VkResult res = pBlock->CheckCorruption(m_hAllocator);
10973 if(res != VK_SUCCESS)
// VmaBlockVector::AddStats — folds per-block allocation statistics into the global
// VmaStats totals as well as the per-memory-type and per-heap buckets.
10981 void VmaBlockVector::AddStats(
VmaStats* pStats)
10983 const uint32_t memTypeIndex = m_MemoryTypeIndex;
10984 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
10986 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10988 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10990 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10991 VMA_ASSERT(pBlock);
10992 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each block's stat info is added to all three aggregation levels.
10994 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
10995 VmaAddStatInfo(pStats->
total, allocationStatInfo);
10996 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
10997 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// VmaDefragmentator constructor — binds the defragmentator to one allocator and one
// block vector, starting with empty allocation/block work lists. Only the default
// (generic) block-vector algorithm supports defragmentation, hence the assert.
11004 VmaDefragmentator::VmaDefragmentator(
11006 VmaBlockVector* pBlockVector,
11007 uint32_t currentFrameIndex) :
11008 m_hAllocator(hAllocator),
11009 m_pBlockVector(pBlockVector),
11010 m_CurrentFrameIndex(currentFrameIndex),
11012 m_AllocationsMoved(0),
11013 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
11014 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
11016 VMA_ASSERT(pBlockVector->GetAlgorithm() == 0);
// Destructor — releases the per-block BlockInfo objects owned by m_Blocks.
11019 VmaDefragmentator::~VmaDefragmentator()
11021 for(
size_t i = m_Blocks.size(); i--; )
11023 vma_delete(m_hAllocator, m_Blocks[i]);
// VmaDefragmentator::AddAllocation — queues one allocation as a candidate for
// defragmentation; pChanged (may be null) will be set if the allocation is moved.
11027 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
11029 AllocationInfo allocInfo;
11030 allocInfo.m_hAllocation = hAlloc;
11031 allocInfo.m_pChanged = pChanged;
11032 m_Allocations.push_back(allocInfo);
// BlockInfo::EnsureMapping — returns a CPU pointer to the block's memory, mapping
// it on demand. Three cases, in order: already mapped for defragmentation; the block
// is persistently mapped by someone else; otherwise map it now (this mapping is
// tracked in m_pMappedDataForDefragmentation so Unmap() can undo it).
11035 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
11038 if(m_pMappedDataForDefragmentation)
11040 *ppMappedData = m_pMappedDataForDefragmentation;
11045 if(m_pBlock->GetMappedData())
11047 *ppMappedData = m_pBlock->GetMappedData();
11052 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
11053 *ppMappedData = m_pMappedDataForDefragmentation;
// BlockInfo::Unmap — releases only a mapping that EnsureMapping created here;
// a pre-existing persistent mapping is left untouched.
11057 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
11059 if(m_pMappedDataForDefragmentation != VMA_NULL)
11061 m_pBlock->Unmap(hAllocator, 1);
// VmaDefragmentator::DefragmentRound — performs one pass of moves: walks source
// allocations from the last block backwards and tries to re-place each one into an
// earlier (or same) block at a lower offset, memcpy-ing the data through mapped
// pointers and updating metadata. Stops with VK_INCOMPLETE when the byte/count
// budget would be exceeded.
// NOTE(review): interior lines of this function are missing from this extract;
// comments cover only the visible statements.
11065 VkResult VmaDefragmentator::DefragmentRound(
11066 VkDeviceSize maxBytesToMove,
11067 uint32_t maxAllocationsToMove)
11069 if(m_Blocks.empty())
// Cursor over (block, allocation) pairs, starting at the very last allocation.
11074 size_t srcBlockIndex = m_Blocks.size() - 1;
11075 size_t srcAllocIndex = SIZE_MAX;
// Advance the cursor to the next existing source allocation (moving to previous
// blocks as the current one is exhausted).
11081 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
11083 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
11086 if(srcBlockIndex == 0)
11093 srcAllocIndex = SIZE_MAX;
11098 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
11102 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
11103 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
// Properties of the allocation being considered for a move.
11105 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
11106 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
11107 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
11108 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try each destination block from the front up to and including the source block.
11111 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
11113 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
11114 VmaAllocationRequest dstAllocRequest;
11115 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
11116 m_CurrentFrameIndex,
11117 m_pBlockVector->GetFrameInUseCount(),
11118 m_pBlockVector->GetBufferImageGranularity(),
11125 &dstAllocRequest) &&
11127 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
11129 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: stop the round before exceeding the caller's limits.
11132 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
11133 (m_BytesMoved + size > maxBytesToMove))
11135 return VK_INCOMPLETE;
// Map both blocks so the payload can be copied on the CPU.
11138 void* pDstMappedData = VMA_NULL;
11139 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
11140 if(res != VK_SUCCESS)
11145 void* pSrcMappedData = VMA_NULL;
11146 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
11147 if(res != VK_SUCCESS)
// Copy the allocation's bytes to the destination offset.
11154 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
11155 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
11156 static_cast<size_t>(size));
// Re-write debug margins around the new location when margins are enabled.
11158 if(VMA_DEBUG_MARGIN > 0)
11160 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
11161 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Commit: allocate at destination, free at source, retarget the allocation object.
11164 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
11169 allocInfo.m_hAllocation);
11170 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
11172 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
11174 if(allocInfo.m_pChanged != VMA_NULL)
11176 *allocInfo.m_pChanged = VK_TRUE;
11179 ++m_AllocationsMoved;
11180 m_BytesMoved += size;
11182 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the cursor to the previous allocation / previous block.
11190 if(srcAllocIndex > 0)
11196 if(srcBlockIndex > 0)
11199 srcAllocIndex = SIZE_MAX;
// VmaDefragmentator::Defragment — top-level driver: builds per-block info, sorts
// blocks, distributes queued allocations into their owning blocks, sorts allocations
// by size, then runs DefragmentRound up to twice, finally unmapping what was mapped.
11209 VkResult VmaDefragmentator::Defragment(
11210 VkDeviceSize maxBytesToMove,
11211 uint32_t maxAllocationsToMove)
11213 if(m_Allocations.empty())
// Create one BlockInfo per block of the owning block vector.
11219 const size_t blockCount = m_pBlockVector->m_Blocks.size();
11220 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11222 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
11223 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
11224 m_Blocks.push_back(pBlockInfo);
// Sort block infos by block pointer so allocations can be binary-searched into them.
11228 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move each queued (non-lost) allocation into its owning block's list.
11231 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
11233 AllocationInfo& allocInfo = m_Allocations[blockIndex];
11235 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11237 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
11238 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
11239 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
11241 (*it)->m_Allocations.push_back(allocInfo);
11249 m_Allocations.clear();
// Precompute per-block flags and sort each block's allocations largest-first.
11251 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11253 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
11254 pBlockInfo->CalcHasNonMovableAllocations();
11255 pBlockInfo->SortAllocationsBySizeDescecnding();
// Order blocks so that preferred move destinations come first.
11259 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Execute at most 2 rounds; stop early on VK_INCOMPLETE or error.
11262 VkResult result = VK_SUCCESS;
11263 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
11265 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any mappings created during the rounds.
11269 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11271 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// VmaDefragmentator::MoveMakesSense — decides whether moving an allocation from
// (srcBlockIndex, srcOffset) to (dstBlockIndex, dstOffset) improves packing.
// NOTE(review): the return statements are missing from this extract; the visible
// comparisons suggest "earlier block, or same block at lower offset, makes sense"
// — confirm against the full source.
11277 bool VmaDefragmentator::MoveMakesSense(
11278 size_t dstBlockIndex, VkDeviceSize dstOffset,
11279 size_t srcBlockIndex, VkDeviceSize srcOffset)
11281 if(dstBlockIndex < srcBlockIndex)
11285 if(dstBlockIndex > srcBlockIndex)
11289 if(dstOffset < srcOffset)
// VmaRecorder — CSV call-recording facility (compiled only when
// VMA_RECORDING_ENABLED). This span covers the constructor, the Init fragment,
// the destructor, and the simplest Record* methods. The recording file format is
// one CSV line per call: threadId, time, frameIndex, functionName, args...
// NOTE(review): fopen_s/QueryPerformanceCounter/GetCurrentThreadId imply this
// recorder is Windows-only in this version — confirm against the full source.
11299 #if VMA_RECORDING_ENABLED 11301 VmaRecorder::VmaRecorder() :
11306 m_StartCounter(INT64_MAX)
// Init fragment: store settings and open the output file for binary write.
11312 m_UseMutex = useMutex;
11313 m_Flags = settings.
flags;
// High-resolution timestamps are relative to m_StartCounter, scaled by m_Freq.
11315 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
11316 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
11319 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
11322 return VK_ERROR_INITIALIZATION_FAILED;
// File header: magic line plus format version "1,3".
11326 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
11327 fprintf(m_File,
"%s\n",
"1,3");
// Destructor closes the file if it was opened.
11332 VmaRecorder::~VmaRecorder()
11334 if(m_File != VMA_NULL)
// Each Record* method: capture thread/time, lock the file mutex, append one line.
11340 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
11342 CallParams callParams;
11343 GetBasicParams(callParams);
11345 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11346 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
11350 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
11352 CallParams callParams;
11353 GetBasicParams(callParams);
11355 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11356 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// RecordCreatePool fragment (its signature line is missing from this extract).
11362 CallParams callParams;
11363 GetBasicParams(callParams);
11365 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11366 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
11377 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
11379 CallParams callParams;
11380 GetBasicParams(callParams);
11382 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11383 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Recording of the three vmaAllocateMemory* entry points. Each serializes the
// VkMemoryRequirements, the (stringified) user data, and — for the buffer/image
// variants — the dedicated-allocation preference flags.
11388 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
11389 const VkMemoryRequirements& vkMemReq,
11393 CallParams callParams;
11394 GetBasicParams(callParams);
11396 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// UserDataString renders pUserData either as a string or a pointer, per flags.
11397 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11398 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11400 vkMemReq.alignment,
11401 vkMemReq.memoryTypeBits,
11409 userDataStr.GetString());
11413 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
11414 const VkMemoryRequirements& vkMemReq,
11415 bool requiresDedicatedAllocation,
11416 bool prefersDedicatedAllocation,
11420 CallParams callParams;
11421 GetBasicParams(callParams);
11423 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11424 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11425 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11427 vkMemReq.alignment,
11428 vkMemReq.memoryTypeBits,
// Dedicated-allocation hints are recorded as 0/1 flags.
11429 requiresDedicatedAllocation ? 1 : 0,
11430 prefersDedicatedAllocation ? 1 : 0,
11438 userDataStr.GetString());
11442 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
11443 const VkMemoryRequirements& vkMemReq,
11444 bool requiresDedicatedAllocation,
11445 bool prefersDedicatedAllocation,
11449 CallParams callParams;
11450 GetBasicParams(callParams);
11452 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11453 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11454 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11456 vkMemReq.alignment,
11457 vkMemReq.memoryTypeBits,
11458 requiresDedicatedAllocation ? 1 : 0,
11459 prefersDedicatedAllocation ? 1 : 0,
11467 userDataStr.GetString());
// Recording of allocation-lifecycle calls: free, set-user-data, create-lost,
// map/unmap, flush/invalidate. All follow the same pattern: timestamp, file lock,
// single CSV line with the allocation handle (and extra args where relevant).
11471 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
11474 CallParams callParams;
11475 GetBasicParams(callParams);
11477 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11478 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11483 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
11485 const void* pUserData)
11487 CallParams callParams;
11488 GetBasicParams(callParams);
11490 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11491 UserDataString userDataStr(
11494 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11496 userDataStr.GetString());
11500 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
11503 CallParams callParams;
11504 GetBasicParams(callParams);
11506 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11507 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11512 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
11515 CallParams callParams;
11516 GetBasicParams(callParams);
11518 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11519 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11524 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
11527 CallParams callParams;
11528 GetBasicParams(callParams);
11530 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11531 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Flush/invalidate also record the byte range (offset, size).
11536 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
11537 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11539 CallParams callParams;
11540 GetBasicParams(callParams);
11542 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11543 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
11550 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
11551 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11553 CallParams callParams;
11554 GetBasicParams(callParams);
11556 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11557 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Recording of vmaCreateBuffer / vmaCreateImage: serializes the full Vk*CreateInfo
// fields plus the VmaAllocationCreateInfo (flags, usage, pool, user data).
11564 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
11565 const VkBufferCreateInfo& bufCreateInfo,
11569 CallParams callParams;
11570 GetBasicParams(callParams);
11572 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11573 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11574 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11575 bufCreateInfo.flags,
11576 bufCreateInfo.size,
11577 bufCreateInfo.usage,
11578 bufCreateInfo.sharingMode,
11579 allocCreateInfo.
flags,
11580 allocCreateInfo.
usage,
11584 allocCreateInfo.
pool,
11586 userDataStr.GetString());
11590 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
11591 const VkImageCreateInfo& imageCreateInfo,
11595 CallParams callParams;
11596 GetBasicParams(callParams);
11598 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11599 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11600 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11601 imageCreateInfo.flags,
11602 imageCreateInfo.imageType,
11603 imageCreateInfo.format,
11604 imageCreateInfo.extent.width,
11605 imageCreateInfo.extent.height,
11606 imageCreateInfo.extent.depth,
11607 imageCreateInfo.mipLevels,
11608 imageCreateInfo.arrayLayers,
11609 imageCreateInfo.samples,
11610 imageCreateInfo.tiling,
11611 imageCreateInfo.usage,
11612 imageCreateInfo.sharingMode,
11613 imageCreateInfo.initialLayout,
11614 allocCreateInfo.
flags,
11615 allocCreateInfo.
usage,
11619 allocCreateInfo.
pool,
11621 userDataStr.GetString());
// Recording of destroy/touch/query calls (single allocation-handle argument each),
// plus the visible fragment of UserDataString's constructor.
11625 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
11628 CallParams callParams;
11629 GetBasicParams(callParams);
11631 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11632 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
11637 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
11640 CallParams callParams;
11641 GetBasicParams(callParams);
11643 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11644 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
11649 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
11652 CallParams callParams;
11653 GetBasicParams(callParams);
11655 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11656 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11661 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
11664 CallParams callParams;
11665 GetBasicParams(callParams);
11667 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11668 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
11673 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
11676 CallParams callParams;
11677 GetBasicParams(callParams);
11679 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11680 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// UserDataString ctor fragment: if user data is a string, reference it directly;
// otherwise format the raw pointer value into the fixed-size buffer.
11687 if(pUserData != VMA_NULL)
11691 m_Str = (
const char*)pUserData;
11695 sprintf_s(m_PtrStr,
"%p", pUserData);
// VmaRecorder::WriteConfiguration — writes a "Config,Begin".."Config,End" section
// to the recording file describing the physical device, its memory heaps/types,
// the dedicated-allocation extension state, and the VMA debug-macro settings, so a
// replay tool can reproduce the environment.
11705 void VmaRecorder::WriteConfiguration(
11706 const VkPhysicalDeviceProperties& devProps,
11707 const VkPhysicalDeviceMemoryProperties& memProps,
11708 bool dedicatedAllocationExtensionEnabled)
11710 fprintf(m_File,
"Config,Begin\n");
// Physical-device identity.
11712 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
11713 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
11714 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
11715 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
11716 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
11717 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits relevant to allocation behavior.
11719 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
11720 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
11721 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps and types.
11723 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
11724 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
11726 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
11727 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
11729 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
11730 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
11732 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
11733 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
// Extension and compile-time macro configuration.
11736 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
11738 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
11739 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
11740 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
11741 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
11742 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
11743 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
11744 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
11745 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
11746 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
11748 fprintf(m_File,
"Config,End\n");
// VmaRecorder::GetBasicParams — fills CallParams with the calling thread id and a
// seconds-since-start timestamp derived from QueryPerformanceCounter (Windows API).
11751 void VmaRecorder::GetBasicParams(CallParams& outParams)
11753 outParams.threadId = GetCurrentThreadId();
11755 LARGE_INTEGER counter;
11756 QueryPerformanceCounter(&counter);
11757 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// VmaRecorder::Flush — body not visible in this extract.
11760 void VmaRecorder::Flush()
// VmaAllocator_T constructor — initializes the allocator from VmaAllocatorCreateInfo:
// copies callbacks/handles, zeroes cached state, imports Vulkan function pointers,
// queries device properties, applies optional per-heap size limits, creates one
// default VmaBlockVector and dedicated-allocation list per memory type, and
// optionally starts the call recorder.
// NOTE(review): many interior lines are missing from this extract.
11768 #endif // #if VMA_RECORDING_ENABLED 11776 m_hDevice(pCreateInfo->device),
11777 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
11778 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
11779 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
11780 m_PreferredLargeHeapBlockSize(0),
11781 m_PhysicalDevice(pCreateInfo->physicalDevice),
11782 m_CurrentFrameIndex(0),
11783 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
11786 ,m_pRecorder(VMA_NULL)
// Corruption detection requires the debug margin to hold whole uint32 magic values.
11789 if(VMA_DEBUG_DETECT_CORRUPTION)
11792 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Requesting the dedicated-allocation feature while it is compiled out is an error.
11797 #if !(VMA_DEDICATED_ALLOCATION) 11800 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
// Zero all cached tables before filling them in.
11804 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
11805 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
11806 memset(&m_MemProps, 0,
sizeof(m_MemProps));
11808 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
11809 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
11811 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
11813 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device properties through the (possibly user-supplied) function pointers.
11824 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
11825 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Sanity: these values must be powers of two for the alignment math used elsewhere.
11827 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
11828 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
11829 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
11830 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply optional caller-imposed per-heap size limits, clamping the reported heap size.
11837 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
11839 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
11840 if(limit != VK_WHOLE_SIZE)
11842 m_HeapSizeLimit[heapIndex] = limit;
11843 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
11845 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + dedicated-allocation list per memory type.
11851 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11853 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
11855 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
11858 preferredBlockSize,
11861 GetBufferImageGranularity(),
11868 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
11875 VkResult res = VK_SUCCESS;
// Optional call recording: only available when VMA_RECORDING_ENABLED is 1.
11880 #if VMA_RECORDING_ENABLED 11881 m_pRecorder = vma_new(
this, VmaRecorder)();
11883 if(res != VK_SUCCESS)
11887 m_pRecorder->WriteConfiguration(
11888 m_PhysicalDeviceProperties,
11890 m_UseKhrDedicatedAllocation);
11891 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
// Else-branch: recording requested but compiled out.
11893 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
11894 return VK_ERROR_FEATURE_NOT_PRESENT;
// VmaAllocator_T destructor — records and tears down the recorder (if enabled),
// asserts all user pools were destroyed, then deletes the per-memory-type
// dedicated-allocation lists and block vectors in reverse order.
11901 VmaAllocator_T::~VmaAllocator_T()
11903 #if VMA_RECORDING_ENABLED 11904 if(m_pRecorder != VMA_NULL)
11906 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
11907 vma_delete(
this, m_pRecorder);
// Destroying the allocator with live pools is a usage error.
11911 VMA_ASSERT(m_Pools.empty());
11913 for(
size_t i = GetMemoryTypeCount(); i--; )
11915 vma_delete(
this, m_pDedicatedAllocations[i]);
11916 vma_delete(
this, m_pBlockVectors[i]);
// VmaAllocator_T::ImportVulkanFunctions — fills m_VulkanFunctions in three stages:
// (1) statically-linked Vulkan entry points when VMA_STATIC_VULKAN_FUNCTIONS == 1,
//     including vkGetDeviceProcAddr lookups for the KHR dedicated-allocation pair;
// (2) any pointers the user supplied via pVulkanFunctions (overriding stage 1);
// (3) asserts that every required pointer ended up non-null.
11920 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
11922 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11923 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
11924 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
11925 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
11926 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
11927 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
11928 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
11929 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
11930 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
11931 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
11932 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
11933 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
11934 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
11935 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
11936 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
11937 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
11938 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// The KHR extension entry points are device-level and must be fetched at runtime.
11939 #if VMA_DEDICATED_ALLOCATION 11940 if(m_UseKhrDedicatedAllocation)
11942 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
11943 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
11944 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
11945 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Stage 2: user-supplied overrides, copied only when non-null.
11947 #endif // #if VMA_DEDICATED_ALLOCATION 11948 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11950 #define VMA_COPY_IF_NOT_NULL(funcName) \ 11951 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 11953 if(pVulkanFunctions != VMA_NULL)
11955 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
11956 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
11957 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
11958 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
11959 VMA_COPY_IF_NOT_NULL(vkMapMemory);
11960 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
11961 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
11962 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
11963 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
11964 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
11965 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
11966 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
11967 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
11968 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
11969 VMA_COPY_IF_NOT_NULL(vkCreateImage);
11970 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
11971 #if VMA_DEDICATED_ALLOCATION 11972 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
11973 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Stage 3: validate the final table — any null here is a configuration error.
11977 #undef VMA_COPY_IF_NOT_NULL 11981 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
11982 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
11983 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
11984 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
11985 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
11986 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
11987 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
11988 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
11989 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
11990 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
11991 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
11992 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
11993 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
11994 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
11995 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
11996 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
// The KHR pair is required only when the extension is actually in use.
11997 #if VMA_DEDICATED_ALLOCATION 11998 if(m_UseKhrDedicatedAllocation)
12000 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
12001 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
12006 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
12008 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12009 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
12010 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
12011 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// VmaAllocator_T::AllocateMemoryOfType — allocates from one specific memory type:
// first decides whether a dedicated VkDeviceMemory is preferred (forced by debug
// macro, requested by the caller, or size > half the preferred block size), then
// tries the type's block vector, falling back to a dedicated allocation.
// NOTE(review): interior lines are missing from this extract.
12014 VkResult VmaAllocator_T::AllocateMemoryOfType(
12016 VkDeviceSize alignment,
12017 bool dedicatedAllocation,
12018 VkBuffer dedicatedBuffer,
12019 VkImage dedicatedImage,
12021 uint32_t memTypeIndex,
12022 VmaSuballocationType suballocType,
12025 VMA_ASSERT(pAllocation != VMA_NULL);
12026 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Visible check: mapping-related flags make no sense on non-HOST_VISIBLE memory.
12032 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12037 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
12038 VMA_ASSERT(blockVector);
// Decide whether to prefer a dedicated allocation over sub-allocating a block.
12040 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
12041 bool preferDedicatedMemory =
12042 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
12043 dedicatedAllocation ||
12045 size > preferredBlockSize / 2;
// Dedicated path is only taken for the default pool (no custom pool handle).
12047 if(preferDedicatedMemory &&
12049 finalCreateInfo.
pool == VK_NULL_HANDLE)
12058 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12062 return AllocateDedicatedMemory(
// Main path: sub-allocate from the block vector.
12076 VkResult res = blockVector->Allocate(
12078 m_CurrentFrameIndex.load(),
12084 if(res == VK_SUCCESS)
12092 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block-vector allocation failed, try a dedicated allocation.
12096 res = AllocateDedicatedMemory(
12102 finalCreateInfo.pUserData,
12106 if(res == VK_SUCCESS)
12109 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
12115 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Performs a dedicated (one VkDeviceMemory per allocation) allocation: builds
// VkMemoryAllocateInfo (chaining VkMemoryDedicatedAllocateInfoKHR when the
// KHR_dedicated_allocation path is enabled), allocates, optionally maps, then
// registers the new allocation in m_pDedicatedAllocations[memTypeIndex].
// NOTE(review): extraction gaps — some parameters/statements are not visible.
12122 VkResult VmaAllocator_T::AllocateDedicatedMemory(
12124 VmaSuballocationType suballocType,
12125 uint32_t memTypeIndex,
12127 bool isUserDataString,
12129 VkBuffer dedicatedBuffer,
12130 VkImage dedicatedImage,
12133 VMA_ASSERT(pAllocation);
12135 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12136 allocInfo.memoryTypeIndex = memTypeIndex;
12137 allocInfo.allocationSize = size;
// Chain the dedicated-allocation info for exactly one of buffer/image.
12139 #if VMA_DEDICATED_ALLOCATION 12140 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
12141 if(m_UseKhrDedicatedAllocation)
12143 if(dedicatedBuffer != VK_NULL_HANDLE)
12145 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
12146 dedicatedAllocInfo.buffer = dedicatedBuffer;
12147 allocInfo.pNext = &dedicatedAllocInfo;
12149 else if(dedicatedImage != VK_NULL_HANDLE)
12151 dedicatedAllocInfo.image = dedicatedImage;
12152 allocInfo.pNext = &dedicatedAllocInfo;
12155 #endif // #if VMA_DEDICATED_ALLOCATION 12158 VkDeviceMemory hMemory = VK_NULL_HANDLE;
12159 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
12162 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping; on map failure the fresh memory is released again.
12166 void* pMappedData = VMA_NULL;
12169 res = (*m_VulkanFunctions.vkMapMemory)(
12178 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
12179 FreeVulkanMemory(memTypeIndex, size, hMemory);
12184 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
12185 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
12186 (*pAllocation)->SetUserData(
this, pUserData);
12187 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12189 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register in the per-memory-type dedicated-allocation list under its mutex.
12194 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12195 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12196 VMA_ASSERT(pDedicatedAllocations);
12197 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
12200 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With KHR_dedicated_allocation enabled
// it uses vkGetBufferMemoryRequirements2KHR and also reports whether a dedicated
// allocation is required/preferred; otherwise it falls back to the core function
// and reports false for both flags.
12205 void VmaAllocator_T::GetBufferMemoryRequirements(
12207 VkMemoryRequirements& memReq,
12208 bool& requiresDedicatedAllocation,
12209 bool& prefersDedicatedAllocation)
const 12211 #if VMA_DEDICATED_ALLOCATION 12212 if(m_UseKhrDedicatedAllocation)
12214 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
12215 memReqInfo.buffer = hBuffer;
12217 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12219 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12220 memReq2.pNext = &memDedicatedReq;
12222 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12224 memReq = memReq2.memoryRequirements;
12225 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12226 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: core vkGetBufferMemoryRequirements, no dedicated-alloc hints.
12229 #endif // #if VMA_DEDICATED_ALLOCATION 12231 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
12232 requiresDedicatedAllocation =
false;
12233 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR + VkMemoryDedicatedRequirementsKHR when the
// KHR_dedicated_allocation path is enabled, else the core query with both
// dedicated-allocation flags reported as false.
12237 void VmaAllocator_T::GetImageMemoryRequirements(
12239 VkMemoryRequirements& memReq,
12240 bool& requiresDedicatedAllocation,
12241 bool& prefersDedicatedAllocation)
const 12243 #if VMA_DEDICATED_ALLOCATION 12244 if(m_UseKhrDedicatedAllocation)
12246 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
12247 memReqInfo.image = hImage;
12249 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12251 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12252 memReq2.pNext = &memDedicatedReq;
12254 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12256 memReq = memReq2.memoryRequirements;
12257 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12258 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: core vkGetImageMemoryRequirements, no dedicated-alloc hints.
12261 #endif // #if VMA_DEDICATED_ALLOCATION 12263 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
12264 requiresDedicatedAllocation =
false;
12265 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates mutually exclusive create flags, routes
// pool allocations to the pool's block vector, and otherwise iterates candidate
// memory types (clearing each failed type's bit from memoryTypeBits) calling
// AllocateMemoryOfType until one succeeds.
// NOTE(review): extraction gaps — several statements (flag checks, the
// FindMemoryTypeIndex calls) are not visible in this capture.
12269 VkResult VmaAllocator_T::AllocateMemory(
12270 const VkMemoryRequirements& vkMemReq,
12271 bool requiresDedicatedAllocation,
12272 bool prefersDedicatedAllocation,
12273 VkBuffer dedicatedBuffer,
12274 VkImage dedicatedImage,
12276 VmaSuballocationType suballocType,
12279 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
// Invalid flag combinations are rejected up front.
12284 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
12285 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12290 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
12291 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12293 if(requiresDedicatedAllocation)
12297 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
12298 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12300 if(createInfo.
pool != VK_NULL_HANDLE)
12302 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
12303 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12306 if((createInfo.
pool != VK_NULL_HANDLE) &&
12309 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
12310 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: delegate directly to the pool's block vector.
12313 if(createInfo.
pool != VK_NULL_HANDLE)
12315 const VkDeviceSize alignmentForPool = VMA_MAX(
12316 vkMemReq.alignment,
12317 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
12318 return createInfo.
pool->m_BlockVector.Allocate(
12320 m_CurrentFrameIndex.load(),
// Default path: try each acceptable memory type in turn.
12330 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
12331 uint32_t memTypeIndex = UINT32_MAX;
12333 if(res == VK_SUCCESS)
12335 VkDeviceSize alignmentForMemType = VMA_MAX(
12336 vkMemReq.alignment,
12337 GetMemoryTypeMinAlignment(memTypeIndex));
12339 res = AllocateMemoryOfType(
12341 alignmentForMemType,
12342 requiresDedicatedAllocation || prefersDedicatedAllocation,
12350 if(res == VK_SUCCESS)
// Allocation from this type failed: exclude it and try the next candidate.
12360 memoryTypeBits &= ~(1u << memTypeIndex);
12363 if(res == VK_SUCCESS)
12365 alignmentForMemType = VMA_MAX(
12366 vkMemReq.alignment,
12367 GetMemoryTypeMinAlignment(memTypeIndex));
12369 res = AllocateMemoryOfType(
12371 alignmentForMemType,
12372 requiresDedicatedAllocation || prefersDedicatedAllocation,
12380 if(res == VK_SUCCESS)
12390 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: if it is still valid (TouchAllocation), optionally fills
// it with the "destroyed" debug pattern and returns it to its owning block
// vector (custom pool or default) or frees the dedicated memory. Finally clears
// user data and destroys the VmaAllocation_T object itself.
12401 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
12403 VMA_ASSERT(allocation);
12405 if(TouchAllocation(allocation))
12407 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12409 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
12412 switch(allocation->GetType())
12414 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12416 VmaBlockVector* pBlockVector = VMA_NULL;
12417 VmaPool hPool = allocation->GetPool();
12418 if(hPool != VK_NULL_HANDLE)
12420 pBlockVector = &hPool->m_BlockVector;
12424 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12425 pBlockVector = m_pBlockVectors[memTypeIndex];
12427 pBlockVector->Free(allocation);
12430 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12431 FreeDedicatedMemory(allocation);
// Destroy the allocation object regardless of whether it was lost.
12438 allocation->SetUserData(
this, VMA_NULL);
12439 vma_delete(
this, allocation);
// Aggregates allocator-wide statistics into *pStats: initializes all stat-info
// slots, accumulates from default block vectors, custom pools, and dedicated
// allocations, then post-processes totals and per-type/per-heap entries.
12442 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
12445 InitStatInfo(pStats->
total);
12446 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
12448 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default (per-memory-type) block vectors.
12452 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12454 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12455 VMA_ASSERT(pBlockVector);
12456 pBlockVector->AddStats(pStats);
// Custom pools, guarded by the pools mutex.
12461 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12462 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12464 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, guarded per memory type.
12469 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12471 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12472 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12473 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12474 VMA_ASSERT(pDedicatedAllocVector);
12475 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
12478 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
12479 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12480 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12481 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/mins/maxes after all raw sums are in.
12486 VmaPostprocessCalcStatInfo(pStats->
total);
12487 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
12488 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
12489 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
12490 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002 — the PCI vendor ID for AMD (per the constant's name).
12493 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Defragments the given allocations: registers each eligible allocation (block
// type, zero algorithm, HOST_VISIBLE|HOST_COHERENT memory, not lost) with a
// defragmentator on its owning block vector, runs Defragment over all
// host-visible default block vectors and all custom pools, then destroys the
// defragmentators.
// NOTE(review): this capture has extraction gaps — the signature's pAllocations
// parameter and the hAlloc definition, among others, are not visible here.
12495 VkResult VmaAllocator_T::Defragment(
12497 size_t allocationCount,
12498 VkBool32* pAllocationsChanged,
12502 if(pAllocationsChanged != VMA_NULL)
// FIX: zero the whole caller-provided array (one VkBool32 per allocation).
// Previously only sizeof(*pAllocationsChanged) bytes — a single element — were
// cleared, leaving elements [1..allocationCount) uninitialized.
12504 memset(pAllocationsChanged, 0,
allocationCount * sizeof(*pAllocationsChanged));
12506 if(pDefragmentationStats != VMA_NULL)
12508 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
12511 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
12513 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
12515 const size_t poolCount = m_Pools.size();
// Phase 1: register each eligible allocation with its block vector's defragmentator.
12518 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12521 VMA_ASSERT(hAlloc);
12522 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
12524 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12525 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
12527 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
12529 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
12531 VmaBlockVector* pAllocBlockVector = VMA_NULL;
12533 const VmaPool hAllocPool = hAlloc->GetPool();
12535 if(hAllocPool != VK_NULL_HANDLE)
// Only pools with the default (0) algorithm can be defragmented.
12538 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
12540 pAllocBlockVector = &hAllocPool->m_BlockVector;
12546 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
12549 if(pAllocBlockVector != VMA_NULL)
12551 VmaDefragmentator*
const pDefragmentator =
12552 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
12553 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
12554 &pAllocationsChanged[allocIndex] : VMA_NULL;
12555 pDefragmentator->AddAllocation(hAlloc, pChanged);
12560 VkResult result = VK_SUCCESS;
// Phase 2: run defragmentation within caller-provided limits (no limit by default).
// FIX: the "unlimited" sentinel for a 64-bit VkDeviceSize is VK_WHOLE_SIZE
// (UINT64_MAX); SIZE_MAX is size_t's maximum and is too small on 32-bit builds.
12564 VkDeviceSize maxBytesToMove = VK_WHOLE_SIZE;
12565 uint32_t maxAllocationsToMove = UINT32_MAX;
12566 if(pDefragmentationInfo != VMA_NULL)
12573 for(uint32_t memTypeIndex = 0;
12574 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
12578 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12580 result = m_pBlockVectors[memTypeIndex]->Defragment(
12581 pDefragmentationStats,
12583 maxAllocationsToMove);
12588 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
12590 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
12591 pDefragmentationStats,
12593 maxAllocationsToMove);
// Phase 3: tear down defragmentators in reverse order.
12599 for(
size_t poolIndex = poolCount; poolIndex--; )
12601 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
12605 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
12607 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12609 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills *pAllocationInfo from hAllocation. For lost-capable allocations it uses
// a compare-exchange loop on the last-use frame index to atomically "touch" the
// allocation; a lost allocation reports size/userData but zero offset.
// NOTE(review): extraction gaps — the function signature line is missing from
// this capture (body writes suggest GetAllocationInfo; confirm against upstream).
12618 if(hAllocation->CanBecomeLost())
12624 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12625 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost allocation: report what is still meaningful, offset forced to 0.
12628 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12632 pAllocationInfo->
offset = 0;
12633 pAllocationInfo->
size = hAllocation->GetSize();
12635 pAllocationInfo->
pUserData = hAllocation->GetUserData();
12638 else if(localLastUseFrameIndex == localCurrFrameIndex)
12640 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12641 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12642 pAllocationInfo->
offset = hAllocation->GetOffset();
12643 pAllocationInfo->
size = hAllocation->GetSize();
12645 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// CAS retry: bump last-use frame to the current frame, then loop.
12650 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12652 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable path; stats builds still advance the last-use frame index.
12659 #if VMA_STATS_STRING_ENABLED 12660 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12661 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12664 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12665 if(localLastUseFrameIndex == localCurrFrameIndex)
12671 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12673 localLastUseFrameIndex = localCurrFrameIndex;
12679 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12680 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12681 pAllocationInfo->
offset = hAllocation->GetOffset();
12682 pAllocationInfo->
size = hAllocation->GetSize();
12683 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
12684 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. Returns false if a
// lost-capable allocation has already been lost; otherwise updates the last-use
// frame index via a compare-exchange loop. Same CAS structure as the routine
// above it.
12688 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
12691 if(hAllocation->CanBecomeLost())
12693 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12694 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12697 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12701 else if(localLastUseFrameIndex == localCurrFrameIndex)
12707 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12709 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable path; stats builds still advance the last-use frame index.
12716 #if VMA_STATS_STRING_ENABLED 12717 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12718 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12721 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12722 if(localLastUseFrameIndex == localCurrFrameIndex)
12728 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12730 localLastUseFrameIndex = localCurrFrameIndex;
// Creates a custom pool: constructs a VmaPool_T with the preferred block size
// for its memory type, pre-creates minimum blocks, assigns an ID, and inserts
// it into the sorted m_Pools list under the pools mutex.
// NOTE(review): extraction gaps — the signature and the newCreateInfo setup are
// missing from this capture.
12742 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
12752 return VK_ERROR_INITIALIZATION_FAILED;
12755 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
12757 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
12759 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
12760 if(res != VK_SUCCESS)
// Roll back the partially constructed pool on failure.
12762 vma_delete(
this, *pPool);
12769 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12770 (*pPool)->SetId(m_NextPoolId++);
12771 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Destroys a custom pool: removes it from the sorted m_Pools list (under the
// pools mutex) and deletes the pool object.
12777 void VmaAllocator_T::DestroyPool(
VmaPool pool)
12781 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12782 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
12783 VMA_ASSERT(success &&
"Pool not found in Allocator.");
12786 vma_delete(
this, pool);
// Forwards the pool-statistics query to the pool's block vector.
// NOTE(review): the enclosing function's signature is missing from this capture.
12791 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation bookkeeping elsewhere in this file.
12794 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
12796 m_CurrentFrameIndex.store(frameIndex);
// Marks eligible allocations in a pool as lost, delegating to the pool's block
// vector with the current frame index; *pLostAllocationCount receives the count.
// NOTE(review): extraction gap — the hPool parameter line is missing here.
12799 void VmaAllocator_T::MakePoolAllocationsLost(
12801 size_t* pLostAllocationCount)
12803 hPool->m_BlockVector.MakePoolAllocationsLost(
12804 m_CurrentFrameIndex.load(),
12805 pLostAllocationCount);
// Runs the corruption check on a single custom pool's block vector.
12808 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
12810 return hPool->m_BlockVector.CheckCorruption();
// Checks corruption across default block vectors and custom pools restricted to
// memoryTypeBits. Starts from VK_ERROR_FEATURE_NOT_PRESENT and upgrades to
// VK_SUCCESS once any vector actually supports the check.
// NOTE(review): extraction gaps — parts of both switch statements are missing.
12813 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
12815 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default (per-memory-type) block vectors.
12818 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12820 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
12822 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12823 VMA_ASSERT(pBlockVector);
12824 VkResult localRes = pBlockVector->CheckCorruption();
12827 case VK_ERROR_FEATURE_NOT_PRESENT:
12830 finalRes = VK_SUCCESS;
// Custom pools whose memory type is included in memoryTypeBits.
12840 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12841 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12843 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
12845 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
12848 case VK_ERROR_FEATURE_NOT_PRESENT:
12851 finalRes = VK_SUCCESS;
// Creates a placeholder allocation that is permanently in the "lost" state
// (frame index VMA_FRAME_INDEX_LOST, no user-data string).
12863 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
12865 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
12866 (*pAllocation)->InitLost();
// Calls vkAllocateMemory, honoring the optional per-heap size limit
// (m_HeapSizeLimit): under the limit mutex the budget is checked and decreased
// on success; otherwise VK_ERROR_OUT_OF_DEVICE_MEMORY is returned without
// touching the driver. Fires the user's pfnAllocate callback on success.
12869 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
12871 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
12874 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12876 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12877 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
12879 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
12880 if(res == VK_SUCCESS)
12882 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Request would exceed the configured heap budget.
12887 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// No limit configured for this heap: allocate directly.
12892 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
12895 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
12897 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Frees device memory: fires the user's pfnFree callback first (while the
// memory is still valid), calls vkFreeMemory, then returns the bytes to the
// per-heap budget if a heap size limit is configured.
12903 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
12905 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
12907 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
12910 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
12912 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
12913 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12915 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12916 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Lost-capable allocations cannot be mapped.
// Block allocations map their whole VkDeviceMemory block (ref-counted via
// VmaDeviceMemoryBlock::Map) and offset the returned pointer; dedicated
// allocations delegate to DedicatedAllocMap.
12920 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
12922 if(hAllocation->CanBecomeLost())
12924 return VK_ERROR_MEMORY_MAP_FAILED;
12927 switch(hAllocation->GetType())
12929 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12931 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12932 char *pBytes = VMA_NULL;
12933 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
12934 if(res == VK_SUCCESS)
// Return a pointer offset into the block's mapping; track via BlockAllocMap.
12936 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
12937 hAllocation->BlockAllocMap();
12941 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12942 return hAllocation->DedicatedAllocMap(
this, ppData);
12945 return VK_ERROR_MEMORY_MAP_FAILED;
// Unmap counterpart of Map: decrements the block's map ref-count for block
// allocations, or delegates to DedicatedAllocUnmap for dedicated ones.
// NOTE(review): the function signature line is missing from this capture.
12951 switch(hAllocation->GetType())
12953 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12955 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12956 hAllocation->BlockAllocUnmap();
12957 pBlock->Unmap(
this, 1);
12960 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12961 hAllocation->DedicatedAllocUnmap(
this);
// Binds a buffer to the allocation's memory: dedicated allocations call
// vkBindBufferMemory directly; block allocations go through the block so the
// bind is serialized against other binds on the same VkDeviceMemory.
12968 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
12970 VkResult res = VK_SUCCESS;
12971 switch(hAllocation->GetType())
12973 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12974 res = GetVulkanFunctions().vkBindBufferMemory(
12977 hAllocation->GetMemory(),
12980 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12982 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12983 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
12984 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: vkBindImageMemory for dedicated
// allocations, or the block's BindImageMemory for block suballocations.
12993 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
12995 VkResult res = VK_SUCCESS;
12996 switch(hAllocation->GetType())
12998 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12999 res = GetVulkanFunctions().vkBindImageMemory(
13002 hAllocation->GetMemory(),
13005 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13007 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13008 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
13009 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a sub-range of an allocation on non-coherent memory:
// builds a VkMappedMemoryRange aligned to nonCoherentAtomSize (offset rounded
// down, size rounded up and clamped to the allocation/block end) and calls
// vkFlushMappedMemoryRanges or vkInvalidateMappedMemoryRanges per `op`.
// No-op for size 0 or coherent memory types.
13018 void VmaAllocator_T::FlushOrInvalidateAllocation(
13020 VkDeviceSize offset, VkDeviceSize size,
13021 VMA_CACHE_OPERATION op)
13023 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
13024 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
13026 const VkDeviceSize allocationSize = hAllocation->GetSize();
13027 VMA_ASSERT(offset <= allocationSize);
13029 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13031 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13032 memRange.memory = hAllocation->GetMemory();
13034 switch(hAllocation->GetType())
// Dedicated: range is relative to the start of the VkDeviceMemory itself.
13036 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13037 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13038 if(size == VK_WHOLE_SIZE)
13040 memRange.size = allocationSize - memRange.offset;
13044 VMA_ASSERT(offset + size <= allocationSize);
13045 memRange.size = VMA_MIN(
13046 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
13047 allocationSize - memRange.offset);
// Block suballocation: shift by the allocation's offset within the block and
// clamp against the block end.
13051 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13054 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13055 if(size == VK_WHOLE_SIZE)
13057 size = allocationSize - offset;
13061 VMA_ASSERT(offset + size <= allocationSize);
13063 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
13066 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
13067 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
13068 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
13069 memRange.offset += allocationOffset;
13070 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
13081 case VMA_CACHE_FLUSH:
13082 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
13084 case VMA_CACHE_INVALIDATE:
13085 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated allocation: removes it from the per-type sorted list (under
// its mutex), unmaps the memory if it was persistently mapped, and releases the
// VkDeviceMemory via FreeVulkanMemory (which also restores heap budget).
13094 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
13096 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
13098 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
13100 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13101 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
13102 VMA_ASSERT(pDedicatedAllocations);
13103 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
13104 VMA_ASSERT(success);
13107 VkDeviceMemory hMemory = allocation->GetMemory();
// Unmap before freeing if a persistent mapping exists.
13109 if(allocation->GetMappedData() != VMA_NULL)
13111 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
13114 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
13116 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: when VMA_DEBUG_INITIALIZE_ALLOCATIONS is on and the allocation
// is host-visible and not lost-capable, maps it, memsets the whole allocation
// with `pattern`, flushes (for non-coherent memory), and unmaps. Asserts if the
// memory cannot be mapped while the debug feature is enabled.
13119 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
13121 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
13122 !hAllocation->CanBecomeLost() &&
13123 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13125 void* pData = VMA_NULL;
13126 VkResult res = Map(hAllocation, &pData);
13127 if(res == VK_SUCCESS)
13129 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
13130 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
13131 Unmap(hAllocation);
13135 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Stats-string helper: writes a detailed JSON map with three sections —
// "DedicatedAllocations" (per memory type), "DefaultPools" (per-type block
// vectors), and "Pools" (custom pools, keyed by pool ID). Each object section
// is opened lazily on first non-empty entry.
13140 #if VMA_STATS_STRING_ENABLED 13142 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
13144 bool dedicatedAllocationsStarted =
false;
13145 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13147 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13148 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
13149 VMA_ASSERT(pDedicatedAllocVector);
13150 if(pDedicatedAllocVector->empty() ==
false)
13152 if(dedicatedAllocationsStarted ==
false)
13154 dedicatedAllocationsStarted =
true;
13155 json.WriteString(
"DedicatedAllocations");
13156 json.BeginObject();
13159 json.BeginString(
"Type ");
13160 json.ContinueString(memTypeIndex);
13165 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
13167 json.BeginObject(
true);
13169 hAlloc->PrintParameters(json);
13176 if(dedicatedAllocationsStarted)
// Default (per-memory-type) block vectors.
13182 bool allocationsStarted =
false;
13183 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13185 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
13187 if(allocationsStarted ==
false)
13189 allocationsStarted =
true;
13190 json.WriteString(
"DefaultPools");
13191 json.BeginObject();
13194 json.BeginString(
"Type ");
13195 json.ContinueString(memTypeIndex);
13198 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
13201 if(allocationsStarted)
// Custom pools, keyed by pool ID, guarded by the pools mutex.
13209 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13210 const size_t poolCount = m_Pools.size();
13213 json.WriteString(
"Pools");
13214 json.BeginObject();
13215 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13217 json.BeginString();
13218 json.ContinueString(m_Pools[poolIndex]->GetId());
13221 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Public C API: creates the allocator object and runs its Init.
// NOTE(review): extraction gaps — the function signature and the vma_new call
// that assigns *pAllocator are missing from this capture.
13228 #endif // #if VMA_STATS_STRING_ENABLED 13237 VMA_ASSERT(pCreateInfo && pAllocator);
13238 VMA_DEBUG_LOG(
"vmaCreateAllocator");
13240 return (*pAllocator)->Init(pCreateInfo);
// Public C API: destroys the allocator. Copies the allocation callbacks to a
// local first, because vma_delete destroys the object that owns them.
// NOTE(review): the function signature line is missing from this capture.
13246 if(allocator != VK_NULL_HANDLE)
13248 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
13249 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
13250 vma_delete(&allocationCallbacks, allocator);
// Public C API: exposes the cached VkPhysicalDeviceProperties by pointer.
13256 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
13258 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
13259 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// Public C API: exposes the cached VkPhysicalDeviceMemoryProperties by pointer.
13264 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
13266 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
13267 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// Public C API: returns the property flags of one memory type; asserts the
// index is in range.
13272 uint32_t memoryTypeIndex,
13273 VkMemoryPropertyFlags* pFlags)
13275 VMA_ASSERT(allocator && pFlags);
13276 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
13277 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// Public C API: publishes the current frame index (must not be the reserved
// VMA_FRAME_INDEX_LOST sentinel) under the global debug mutex if enabled.
13282 uint32_t frameIndex)
13284 VMA_ASSERT(allocator);
13285 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
13287 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13289 allocator->SetCurrentFrameIndex(frameIndex);
// Public C API: thin wrapper over VmaAllocator_T::CalculateStats.
13296 VMA_ASSERT(allocator && pStats);
13297 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13298 allocator->CalculateStats(pStats);
// Public C API: builds a JSON statistics string ("Total", then per-heap and
// per-type sections with size/flags/stats, plus the detailed map when
// detailedMap is VK_TRUE) and returns it in *ppStatsString as a
// NUL-terminated buffer allocated with the allocator's callbacks. The caller
// releases it with vmaFreeStatsString.
13301 #if VMA_STATS_STRING_ENABLED 13305 char** ppStatsString,
13306 VkBool32 detailedMap)
13308 VMA_ASSERT(allocator && ppStatsString);
13309 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13311 VmaStringBuilder sb(allocator);
13313 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
13314 json.BeginObject();
13317 allocator->CalculateStats(&stats);
13319 json.WriteString(
"Total");
13320 VmaPrintStatInfo(json, stats.
total);
// Per-heap section: size, heap flags, aggregated stats, and nested types.
13322 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
13324 json.BeginString(
"Heap ");
13325 json.ContinueString(heapIndex);
13327 json.BeginObject();
13329 json.WriteString(
"Size");
13330 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
13332 json.WriteString(
"Flags");
13333 json.BeginArray(
true);
13334 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
13336 json.WriteString(
"DEVICE_LOCAL");
13342 json.WriteString(
"Stats");
13343 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Memory types belonging to this heap, with their property flags spelled out.
13346 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
13348 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
13350 json.BeginString(
"Type ");
13351 json.ContinueString(typeIndex);
13354 json.BeginObject();
13356 json.WriteString(
"Flags");
13357 json.BeginArray(
true);
13358 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
13359 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
13361 json.WriteString(
"DEVICE_LOCAL");
13363 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13365 json.WriteString(
"HOST_VISIBLE");
13367 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
13369 json.WriteString(
"HOST_COHERENT");
13371 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
13373 json.WriteString(
"HOST_CACHED");
13375 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
13377 json.WriteString(
"LAZILY_ALLOCATED");
13383 json.WriteString(
"Stats");
13384 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
13393 if(detailedMap == VK_TRUE)
13395 allocator->PrintDetailedMap(json);
// Copy the built JSON into a caller-owned, NUL-terminated char buffer.
13401 const size_t len = sb.GetLength();
13402 char*
const pChars = vma_new_array(allocator,
char, len + 1);
13405 memcpy(pChars, sb.GetData(), len);
13407 pChars[len] =
'\0';
13408 *ppStatsString = pChars;
13413 char* pStatsString)
13415 if(pStatsString != VMA_NULL)
13417 VMA_ASSERT(allocator);
13418 size_t len = strlen(pStatsString);
13419 vma_delete_array(allocator, pStatsString, len + 1);
13423 #endif // #if VMA_STATS_STRING_ENABLED 13430 uint32_t memoryTypeBits,
13432 uint32_t* pMemoryTypeIndex)
13434 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13435 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13436 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13443 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
13444 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
13449 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13453 switch(pAllocationCreateInfo->
usage)
13458 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13460 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13464 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13467 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13468 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13470 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13474 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13475 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
13481 *pMemoryTypeIndex = UINT32_MAX;
13482 uint32_t minCost = UINT32_MAX;
13483 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
13484 memTypeIndex < allocator->GetMemoryTypeCount();
13485 ++memTypeIndex, memTypeBit <<= 1)
13488 if((memTypeBit & memoryTypeBits) != 0)
13490 const VkMemoryPropertyFlags currFlags =
13491 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
13493 if((requiredFlags & ~currFlags) == 0)
13496 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
13498 if(currCost < minCost)
13500 *pMemoryTypeIndex = memTypeIndex;
13505 minCost = currCost;
13510 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
13515 const VkBufferCreateInfo* pBufferCreateInfo,
13517 uint32_t* pMemoryTypeIndex)
13519 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13520 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
13521 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13522 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13524 const VkDevice hDev = allocator->m_hDevice;
13525 VkBuffer hBuffer = VK_NULL_HANDLE;
13526 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
13527 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
13528 if(res == VK_SUCCESS)
13530 VkMemoryRequirements memReq = {};
13531 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
13532 hDev, hBuffer, &memReq);
13536 memReq.memoryTypeBits,
13537 pAllocationCreateInfo,
13540 allocator->GetVulkanFunctions().vkDestroyBuffer(
13541 hDev, hBuffer, allocator->GetAllocationCallbacks());
13548 const VkImageCreateInfo* pImageCreateInfo,
13550 uint32_t* pMemoryTypeIndex)
13552 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13553 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
13554 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13555 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13557 const VkDevice hDev = allocator->m_hDevice;
13558 VkImage hImage = VK_NULL_HANDLE;
13559 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
13560 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
13561 if(res == VK_SUCCESS)
13563 VkMemoryRequirements memReq = {};
13564 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
13565 hDev, hImage, &memReq);
13569 memReq.memoryTypeBits,
13570 pAllocationCreateInfo,
13573 allocator->GetVulkanFunctions().vkDestroyImage(
13574 hDev, hImage, allocator->GetAllocationCallbacks());
13584 VMA_ASSERT(allocator && pCreateInfo && pPool);
13586 VMA_DEBUG_LOG(
"vmaCreatePool");
13588 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13590 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
13592 #if VMA_RECORDING_ENABLED 13593 if(allocator->GetRecorder() != VMA_NULL)
13595 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
13606 VMA_ASSERT(allocator);
13608 if(pool == VK_NULL_HANDLE)
13613 VMA_DEBUG_LOG(
"vmaDestroyPool");
13615 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13617 #if VMA_RECORDING_ENABLED 13618 if(allocator->GetRecorder() != VMA_NULL)
13620 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
13624 allocator->DestroyPool(pool);
13632 VMA_ASSERT(allocator && pool && pPoolStats);
13634 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13636 allocator->GetPoolStats(pool, pPoolStats);
13642 size_t* pLostAllocationCount)
13644 VMA_ASSERT(allocator && pool);
13646 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13648 #if VMA_RECORDING_ENABLED 13649 if(allocator->GetRecorder() != VMA_NULL)
13651 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
13655 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
13660 VMA_ASSERT(allocator && pool);
13662 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13664 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
13666 return allocator->CheckPoolCorruption(pool);
13671 const VkMemoryRequirements* pVkMemoryRequirements,
13676 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
13678 VMA_DEBUG_LOG(
"vmaAllocateMemory");
13680 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13682 VkResult result = allocator->AllocateMemory(
13683 *pVkMemoryRequirements,
13689 VMA_SUBALLOCATION_TYPE_UNKNOWN,
13692 #if VMA_RECORDING_ENABLED 13693 if(allocator->GetRecorder() != VMA_NULL)
13695 allocator->GetRecorder()->RecordAllocateMemory(
13696 allocator->GetCurrentFrameIndex(),
13697 *pVkMemoryRequirements,
13703 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
13705 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13718 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13720 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
13722 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13724 VkMemoryRequirements vkMemReq = {};
13725 bool requiresDedicatedAllocation =
false;
13726 bool prefersDedicatedAllocation =
false;
13727 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
13728 requiresDedicatedAllocation,
13729 prefersDedicatedAllocation);
13731 VkResult result = allocator->AllocateMemory(
13733 requiresDedicatedAllocation,
13734 prefersDedicatedAllocation,
13738 VMA_SUBALLOCATION_TYPE_BUFFER,
13741 #if VMA_RECORDING_ENABLED 13742 if(allocator->GetRecorder() != VMA_NULL)
13744 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
13745 allocator->GetCurrentFrameIndex(),
13747 requiresDedicatedAllocation,
13748 prefersDedicatedAllocation,
13754 if(pAllocationInfo && result == VK_SUCCESS)
13756 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13769 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13771 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
13773 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13775 VkMemoryRequirements vkMemReq = {};
13776 bool requiresDedicatedAllocation =
false;
13777 bool prefersDedicatedAllocation =
false;
13778 allocator->GetImageMemoryRequirements(image, vkMemReq,
13779 requiresDedicatedAllocation, prefersDedicatedAllocation);
13781 VkResult result = allocator->AllocateMemory(
13783 requiresDedicatedAllocation,
13784 prefersDedicatedAllocation,
13788 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
13791 #if VMA_RECORDING_ENABLED 13792 if(allocator->GetRecorder() != VMA_NULL)
13794 allocator->GetRecorder()->RecordAllocateMemoryForImage(
13795 allocator->GetCurrentFrameIndex(),
13797 requiresDedicatedAllocation,
13798 prefersDedicatedAllocation,
13804 if(pAllocationInfo && result == VK_SUCCESS)
13806 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13816 VMA_ASSERT(allocator);
13818 if(allocation == VK_NULL_HANDLE)
13823 VMA_DEBUG_LOG(
"vmaFreeMemory");
13825 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13827 #if VMA_RECORDING_ENABLED 13828 if(allocator->GetRecorder() != VMA_NULL)
13830 allocator->GetRecorder()->RecordFreeMemory(
13831 allocator->GetCurrentFrameIndex(),
13836 allocator->FreeMemory(allocation);
13844 VMA_ASSERT(allocator && allocation && pAllocationInfo);
13846 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13848 #if VMA_RECORDING_ENABLED 13849 if(allocator->GetRecorder() != VMA_NULL)
13851 allocator->GetRecorder()->RecordGetAllocationInfo(
13852 allocator->GetCurrentFrameIndex(),
13857 allocator->GetAllocationInfo(allocation, pAllocationInfo);
13864 VMA_ASSERT(allocator && allocation);
13866 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13868 #if VMA_RECORDING_ENABLED 13869 if(allocator->GetRecorder() != VMA_NULL)
13871 allocator->GetRecorder()->RecordTouchAllocation(
13872 allocator->GetCurrentFrameIndex(),
13877 return allocator->TouchAllocation(allocation);
13885 VMA_ASSERT(allocator && allocation);
13887 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13889 allocation->SetUserData(allocator, pUserData);
13891 #if VMA_RECORDING_ENABLED 13892 if(allocator->GetRecorder() != VMA_NULL)
13894 allocator->GetRecorder()->RecordSetAllocationUserData(
13895 allocator->GetCurrentFrameIndex(),
13906 VMA_ASSERT(allocator && pAllocation);
13908 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
13910 allocator->CreateLostAllocation(pAllocation);
13912 #if VMA_RECORDING_ENABLED 13913 if(allocator->GetRecorder() != VMA_NULL)
13915 allocator->GetRecorder()->RecordCreateLostAllocation(
13916 allocator->GetCurrentFrameIndex(),
13927 VMA_ASSERT(allocator && allocation && ppData);
13929 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13931 VkResult res = allocator->Map(allocation, ppData);
13933 #if VMA_RECORDING_ENABLED 13934 if(allocator->GetRecorder() != VMA_NULL)
13936 allocator->GetRecorder()->RecordMapMemory(
13937 allocator->GetCurrentFrameIndex(),
13949 VMA_ASSERT(allocator && allocation);
13951 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13953 #if VMA_RECORDING_ENABLED 13954 if(allocator->GetRecorder() != VMA_NULL)
13956 allocator->GetRecorder()->RecordUnmapMemory(
13957 allocator->GetCurrentFrameIndex(),
13962 allocator->Unmap(allocation);
13967 VMA_ASSERT(allocator && allocation);
13969 VMA_DEBUG_LOG(
"vmaFlushAllocation");
13971 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13973 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
13975 #if VMA_RECORDING_ENABLED 13976 if(allocator->GetRecorder() != VMA_NULL)
13978 allocator->GetRecorder()->RecordFlushAllocation(
13979 allocator->GetCurrentFrameIndex(),
13980 allocation, offset, size);
13987 VMA_ASSERT(allocator && allocation);
13989 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
13991 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13993 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
13995 #if VMA_RECORDING_ENABLED 13996 if(allocator->GetRecorder() != VMA_NULL)
13998 allocator->GetRecorder()->RecordInvalidateAllocation(
13999 allocator->GetCurrentFrameIndex(),
14000 allocation, offset, size);
14007 VMA_ASSERT(allocator);
14009 VMA_DEBUG_LOG(
"vmaCheckCorruption");
14011 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14013 return allocator->CheckCorruption(memoryTypeBits);
14019 size_t allocationCount,
14020 VkBool32* pAllocationsChanged,
14024 VMA_ASSERT(allocator && pAllocations);
14026 VMA_DEBUG_LOG(
"vmaDefragment");
14028 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14030 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
14038 VMA_ASSERT(allocator && allocation && buffer);
14040 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
14042 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14044 return allocator->BindBufferMemory(allocation, buffer);
14052 VMA_ASSERT(allocator && allocation && image);
14054 VMA_DEBUG_LOG(
"vmaBindImageMemory");
14056 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14058 return allocator->BindImageMemory(allocation, image);
14063 const VkBufferCreateInfo* pBufferCreateInfo,
14069 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
14071 VMA_DEBUG_LOG(
"vmaCreateBuffer");
14073 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14075 *pBuffer = VK_NULL_HANDLE;
14076 *pAllocation = VK_NULL_HANDLE;
14079 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
14080 allocator->m_hDevice,
14082 allocator->GetAllocationCallbacks(),
14087 VkMemoryRequirements vkMemReq = {};
14088 bool requiresDedicatedAllocation =
false;
14089 bool prefersDedicatedAllocation =
false;
14090 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
14091 requiresDedicatedAllocation, prefersDedicatedAllocation);
14095 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
14097 VMA_ASSERT(vkMemReq.alignment %
14098 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
14100 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
14102 VMA_ASSERT(vkMemReq.alignment %
14103 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
14105 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
14107 VMA_ASSERT(vkMemReq.alignment %
14108 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
14112 res = allocator->AllocateMemory(
14114 requiresDedicatedAllocation,
14115 prefersDedicatedAllocation,
14118 *pAllocationCreateInfo,
14119 VMA_SUBALLOCATION_TYPE_BUFFER,
14122 #if VMA_RECORDING_ENABLED 14123 if(allocator->GetRecorder() != VMA_NULL)
14125 allocator->GetRecorder()->RecordCreateBuffer(
14126 allocator->GetCurrentFrameIndex(),
14127 *pBufferCreateInfo,
14128 *pAllocationCreateInfo,
14136 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
14140 #if VMA_STATS_STRING_ENABLED 14141 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
14143 if(pAllocationInfo != VMA_NULL)
14145 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14150 allocator->FreeMemory(*pAllocation);
14151 *pAllocation = VK_NULL_HANDLE;
14152 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14153 *pBuffer = VK_NULL_HANDLE;
14156 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14157 *pBuffer = VK_NULL_HANDLE;
14168 VMA_ASSERT(allocator);
14170 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14175 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
14177 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14179 #if VMA_RECORDING_ENABLED 14180 if(allocator->GetRecorder() != VMA_NULL)
14182 allocator->GetRecorder()->RecordDestroyBuffer(
14183 allocator->GetCurrentFrameIndex(),
14188 if(buffer != VK_NULL_HANDLE)
14190 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
14193 if(allocation != VK_NULL_HANDLE)
14195 allocator->FreeMemory(allocation);
14201 const VkImageCreateInfo* pImageCreateInfo,
14207 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
14209 VMA_DEBUG_LOG(
"vmaCreateImage");
14211 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14213 *pImage = VK_NULL_HANDLE;
14214 *pAllocation = VK_NULL_HANDLE;
14217 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
14218 allocator->m_hDevice,
14220 allocator->GetAllocationCallbacks(),
14224 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
14225 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
14226 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
14229 VkMemoryRequirements vkMemReq = {};
14230 bool requiresDedicatedAllocation =
false;
14231 bool prefersDedicatedAllocation =
false;
14232 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
14233 requiresDedicatedAllocation, prefersDedicatedAllocation);
14235 res = allocator->AllocateMemory(
14237 requiresDedicatedAllocation,
14238 prefersDedicatedAllocation,
14241 *pAllocationCreateInfo,
14245 #if VMA_RECORDING_ENABLED 14246 if(allocator->GetRecorder() != VMA_NULL)
14248 allocator->GetRecorder()->RecordCreateImage(
14249 allocator->GetCurrentFrameIndex(),
14251 *pAllocationCreateInfo,
14259 res = allocator->BindImageMemory(*pAllocation, *pImage);
14263 #if VMA_STATS_STRING_ENABLED 14264 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
14266 if(pAllocationInfo != VMA_NULL)
14268 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14273 allocator->FreeMemory(*pAllocation);
14274 *pAllocation = VK_NULL_HANDLE;
14275 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14276 *pImage = VK_NULL_HANDLE;
14279 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14280 *pImage = VK_NULL_HANDLE;
14291 VMA_ASSERT(allocator);
14293 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14298 VMA_DEBUG_LOG(
"vmaDestroyImage");
14300 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14302 #if VMA_RECORDING_ENABLED 14303 if(allocator->GetRecorder() != VMA_NULL)
14305 allocator->GetRecorder()->RecordDestroyImage(
14306 allocator->GetCurrentFrameIndex(),
14311 if(image != VK_NULL_HANDLE)
14313 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
14315 if(allocation != VK_NULL_HANDLE)
14317 allocator->FreeMemory(allocation);
14321 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1484
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1797
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1522
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1553
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
Definition: vk_mem_alloc.h:1496
-
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2039
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1477
+
Definition: vk_mem_alloc.h:1515
+
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2119
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1496
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:1723
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1469
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2139
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1519
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2384
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1947
-
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1368
+
Definition: vk_mem_alloc.h:1754
+
Definition: vk_mem_alloc.h:1857
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1488
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2219
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1550
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2464
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2008
+
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1527
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2020
-
Definition: vk_mem_alloc.h:1803
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1458
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1846
-
Definition: vk_mem_alloc.h:1750
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1531
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2100
+
Definition: vk_mem_alloc.h:1834
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1477
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1907
+
Definition: vk_mem_alloc.h:1781
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1562
+
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2036
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1584
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1516
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1615
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1547
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1754
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1785
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1656
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1474
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1655
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2388
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1687
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1493
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1686
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2468
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1548
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1665
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2396
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1830
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2379
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1475
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1400
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1579
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1696
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2476
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1891
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2459
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1494
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1419
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1525
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1556
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1970
-
Definition: vk_mem_alloc.h:1964
-
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1591
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2149
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2050
+
Definition: vk_mem_alloc.h:2044
+
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1622
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2229
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1470
-
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1494
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1867
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1990
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2026
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1489
+
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1513
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1928
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2070
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2106
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1456
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1973
+
Definition: vk_mem_alloc.h:1475
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2053
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:1701
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:1732
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2374
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2454
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2392
-
Definition: vk_mem_alloc.h:1740
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1854
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1473
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2472
+
Definition: vk_mem_alloc.h:1771
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1915
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1492
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1661
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1406
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1692
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1425
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
+
Definition: vk_mem_alloc.h:1875
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1427
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1446
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1498
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1432
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2394
+
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1517
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1451
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2474
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1841
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2036
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1902
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2116
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1466
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1644
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:1985
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1419
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1485
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1675
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2065
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1438
+
Definition: vk_mem_alloc.h:2040
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:1810
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1657
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1423
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1976
-
Definition: vk_mem_alloc.h:1749
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1472
+
Definition: vk_mem_alloc.h:1841
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1688
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1442
+
Definition: vk_mem_alloc.h:1865
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2056
+
Definition: vk_mem_alloc.h:1780
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1491
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1836
-
Definition: vk_mem_alloc.h:1827
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1897
+
Definition: vk_mem_alloc.h:1888
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1647
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1468
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1998
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1534
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2029
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1825
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1860
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1678
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1487
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2078
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1565
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2109
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1886
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1921
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1572
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1663
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1790
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1656
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1603
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1694
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1821
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1687
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1479
-
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1504
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1421
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1478
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1498
+
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1535
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1440
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1497
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2012
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1471
-
Definition: vk_mem_alloc.h:1821
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2092
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1490
+
Definition: vk_mem_alloc.h:1852
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1512
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2163
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1528
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1656
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1653
+
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1543
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2243
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1559
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1687
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1684
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2017
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2097
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2144
-
Definition: vk_mem_alloc.h:1823
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2390
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1464
+
Definition: vk_mem_alloc.h:1861
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2224
+
Definition: vk_mem_alloc.h:1872
+
Definition: vk_mem_alloc.h:1884
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2470
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1483
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1651
-
Definition: vk_mem_alloc.h:1706
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1966
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1682
+
Definition: vk_mem_alloc.h:1737
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2046
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1501
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1649
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1476
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1480
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1777
-
Definition: vk_mem_alloc.h:1733
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2158
+
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1532
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1680
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1495
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1499
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1808
+
Definition: vk_mem_alloc.h:1879
+
Definition: vk_mem_alloc.h:1764
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2238
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1454
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1473
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1467
-
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:1962
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2125
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1486
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2025
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2205
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1929
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1657
+
Definition: vk_mem_alloc.h:1869
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1990
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1688
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
-
Definition: vk_mem_alloc.h:1816
-
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1488
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1664
+
Definition: vk_mem_alloc.h:1847
+
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1507
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1695
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2023
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1657
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2103
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1688
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2130
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2210