23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1258 #include <vulkan/vulkan.h> 1260 #if !defined(VMA_DEDICATED_ALLOCATION) 1261 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1262 #define VMA_DEDICATED_ALLOCATION 1 1264 #define VMA_DEDICATED_ALLOCATION 0 1282 uint32_t memoryType,
1283 VkDeviceMemory memory,
1288 uint32_t memoryType,
1289 VkDeviceMemory memory,
1361 #if VMA_DEDICATED_ALLOCATION 1362 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1363 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1384 #ifndef VMA_RECORDING_ENABLED 1386 #define VMA_RECORDING_ENABLED 1 1388 #define VMA_RECORDING_ENABLED 0 1501 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1509 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1519 uint32_t memoryTypeIndex,
1520 VkMemoryPropertyFlags* pFlags);
1532 uint32_t frameIndex);
1565 #define VMA_STATS_STRING_ENABLED 1 1567 #if VMA_STATS_STRING_ENABLED 1574 char** ppStatsString,
1575 VkBool32 detailedMap);
1579 char* pStatsString);
1581 #endif // #if VMA_STATS_STRING_ENABLED 1775 uint32_t memoryTypeBits,
1777 uint32_t* pMemoryTypeIndex);
1793 const VkBufferCreateInfo* pBufferCreateInfo,
1795 uint32_t* pMemoryTypeIndex);
1811 const VkImageCreateInfo* pImageCreateInfo,
1813 uint32_t* pMemoryTypeIndex);
1944 size_t* pLostAllocationCount);
2043 const VkMemoryRequirements* pVkMemoryRequirements,
2349 size_t allocationCount,
2350 VkBool32* pAllocationsChanged,
2416 const VkBufferCreateInfo* pBufferCreateInfo,
2441 const VkImageCreateInfo* pImageCreateInfo,
2467 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2470 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2471 #define VMA_IMPLEMENTATION 2474 #ifdef VMA_IMPLEMENTATION 2475 #undef VMA_IMPLEMENTATION 2497 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2498 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2510 #if VMA_USE_STL_CONTAINERS 2511 #define VMA_USE_STL_VECTOR 1 2512 #define VMA_USE_STL_UNORDERED_MAP 1 2513 #define VMA_USE_STL_LIST 1 2516 #if VMA_USE_STL_VECTOR 2520 #if VMA_USE_STL_UNORDERED_MAP 2521 #include <unordered_map> 2524 #if VMA_USE_STL_LIST 2533 #include <algorithm> 2539 #define VMA_NULL nullptr 2542 #if defined(__APPLE__) || defined(__ANDROID__) 2544 void *aligned_alloc(
size_t alignment,
size_t size)
2547 if(alignment <
sizeof(
void*))
2549 alignment =
sizeof(
void*);
2553 if(posix_memalign(&pointer, alignment, size) == 0)
2567 #define VMA_ASSERT(expr) assert(expr) 2569 #define VMA_ASSERT(expr) 2575 #ifndef VMA_HEAVY_ASSERT 2577 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2579 #define VMA_HEAVY_ASSERT(expr) 2583 #ifndef VMA_ALIGN_OF 2584 #define VMA_ALIGN_OF(type) (__alignof(type)) 2587 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2589 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2591 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2595 #ifndef VMA_SYSTEM_FREE 2597 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2599 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2604 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2608 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2612 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2616 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2619 #ifndef VMA_DEBUG_LOG 2620 #define VMA_DEBUG_LOG(format, ...) 2630 #if VMA_STATS_STRING_ENABLED 2631 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2633 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats a 64-bit unsigned integer into outStr (NUL-terminated, truncated to strLen).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long value = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", value);
}
// Formats a pointer value into outStr (NUL-terminated, truncated to strLen).
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
2651 void Lock() { m_Mutex.lock(); }
2652 void Unlock() { m_Mutex.unlock(); }
2656 #define VMA_MUTEX VmaMutex 2667 #ifndef VMA_ATOMIC_UINT32 2668 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2671 #ifndef VMA_BEST_FIT 2684 #define VMA_BEST_FIT (1) 2687 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2692 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2695 #ifndef VMA_DEBUG_ALIGNMENT 2700 #define VMA_DEBUG_ALIGNMENT (1) 2703 #ifndef VMA_DEBUG_MARGIN 2708 #define VMA_DEBUG_MARGIN (0) 2711 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2716 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2719 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2725 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2728 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2733 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2736 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2741 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2744 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2745 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2749 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2750 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2754 #ifndef VMA_CLASS_NO_COPY 2755 #define VMA_CLASS_NO_COPY(className) \ 2757 className(const className&) = delete; \ 2758 className& operator=(const className&) = delete; 2761 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// Magic number written into debug margins; scanned back to detect heap corruption.
2764 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
// Byte pattern written over newly created allocations (when fill-on-create is enabled).
2766 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
// Byte pattern written over allocations just before they are freed.
2767 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
// All-null VkAllocationCallbacks: used where a callbacks struct is required but none was supplied.
2773 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2774 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Population count: returns the number of set bits in v.
// Classic SWAR (parallel bit-summing) algorithm — no loops, no lookup table.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t bits = v - ((v >> 1) & 0x55555555);          // 2-bit sums
    bits = ((bits >> 2) & 0x33333333) + (bits & 0x33333333); // 4-bit sums
    bits = ((bits >> 4) + bits) & 0x0F0F0F0F;             // 8-bit sums
    bits = ((bits >> 8) + bits) & 0x00FF00FF;             // 16-bit sums
    bits = ((bits >> 16) + bits) & 0x0000FFFF;            // final total
    return bits;
}
// Rounds val up to the nearest multiple of align.
// Works for any positive align (power of two not required).
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return bumped / align * align;
}
// Rounds val down to the nearest multiple of align.
// Works for any positive align (power of two not required).
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return (val / align) * align;
}
// Integer division of x by y, rounding to nearest (half rounds up).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
2809 static inline bool VmaStrIsEmpty(
const char* pStr)
2811 return pStr == VMA_NULL || *pStr ==
'\0';
// Lomuto-style partition over [beg, end): uses the last element as pivot,
// moves everything ordered before it to the front, then places the pivot.
// Returns an iterator to the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end;
    --centerValue; // pivot = last element
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    // Drop the pivot into its slot.
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
2839 template<
typename Iterator,
typename Compare>
2840 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2844 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2845 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2846 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2850 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2852 #endif // #ifndef VMA_SORT 2861 static inline bool VmaBlocksOnSamePage(
2862 VkDeviceSize resourceAOffset,
2863 VkDeviceSize resourceASize,
2864 VkDeviceSize resourceBOffset,
2865 VkDeviceSize pageSize)
2867 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2868 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2869 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2870 VkDeviceSize resourceBStart = resourceBOffset;
2871 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2872 return resourceAEndPage == resourceBStartPage;
// Classifies what occupies a suballocation inside a memory block. The ordering
// of the values matters: VmaIsBufferImageGranularityConflict relies on it.
2875 enum VmaSuballocationType
// Unoccupied range.
2877 VMA_SUBALLOCATION_TYPE_FREE = 0,
// Occupied, but resource kind not known.
2878 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
// Holds a buffer.
2879 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
// Holds an image whose tiling is not known.
2880 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
// Holds an image with VK_IMAGE_TILING_LINEAR.
2881 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
// Holds an image with VK_IMAGE_TILING_OPTIMAL.
2882 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
// Forces the enum's underlying type to 32 bits.
2883 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
2892 static inline bool VmaIsBufferImageGranularityConflict(
2893 VmaSuballocationType suballocType1,
2894 VmaSuballocationType suballocType2)
2896 if(suballocType1 > suballocType2)
2898 VMA_SWAP(suballocType1, suballocType2);
2901 switch(suballocType1)
2903 case VMA_SUBALLOCATION_TYPE_FREE:
2905 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2907 case VMA_SUBALLOCATION_TYPE_BUFFER:
2909 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2910 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2911 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2913 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2914 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2915 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2916 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2918 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2919 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2927 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
2929 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
2930 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
2931 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
2933 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
2937 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
2939 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
2940 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
2941 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
2943 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
2954 VMA_CLASS_NO_COPY(VmaMutexLock)
2956 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2957 m_pMutex(useMutex ? &mutex : VMA_NULL)
2974 VMA_MUTEX* m_pMutex;
2977 #if VMA_DEBUG_GLOBAL_MUTEX 2978 static VMA_MUTEX gDebugGlobalMutex;
2979 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2981 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2985 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over the sorted range [beg, end): returns an iterator to the
first element that is NOT ordered before key (i.e. std::lower_bound), or end
if every element is less than key. cmp(a, b) must return a < b.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg + mid), key))
        {
            down = mid + 1; // mid is still less than key: search upper half
        }
        else
        {
            up = mid;       // mid is a candidate: search lower half inclusive
        }
    }
    return beg + down;
}
3018 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3020 if((pAllocationCallbacks != VMA_NULL) &&
3021 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3023 return (*pAllocationCallbacks->pfnAllocation)(
3024 pAllocationCallbacks->pUserData,
3027 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3031 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3035 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3037 if((pAllocationCallbacks != VMA_NULL) &&
3038 (pAllocationCallbacks->pfnFree != VMA_NULL))
3040 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3044 VMA_SYSTEM_FREE(ptr);
3048 template<
typename T>
3049 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3051 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3054 template<
typename T>
3055 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3057 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3060 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3062 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3064 template<
typename T>
3065 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3068 VmaFree(pAllocationCallbacks, ptr);
3071 template<
typename T>
3072 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3076 for(
size_t i = count; i--; )
3080 VmaFree(pAllocationCallbacks, ptr);
3085 template<
typename T>
3086 class VmaStlAllocator
3089 const VkAllocationCallbacks*
const m_pCallbacks;
3090 typedef T value_type;
3092 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3093 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3095 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3096 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3098 template<
typename U>
3099 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3101 return m_pCallbacks == rhs.m_pCallbacks;
3103 template<
typename U>
3104 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3106 return m_pCallbacks != rhs.m_pCallbacks;
3109 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3112 #if VMA_USE_STL_VECTOR 3114 #define VmaVector std::vector 3116 template<
typename T,
typename allocatorT>
3117 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3119 vec.insert(vec.begin() + index, item);
3122 template<
typename T,
typename allocatorT>
3123 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3125 vec.erase(vec.begin() + index);
3128 #else // #if VMA_USE_STL_VECTOR 3133 template<
typename T,
typename AllocatorT>
3137 typedef T value_type;
3139 VmaVector(
const AllocatorT& allocator) :
3140 m_Allocator(allocator),
3147 VmaVector(
size_t count,
const AllocatorT& allocator) :
3148 m_Allocator(allocator),
3149 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3155 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3156 m_Allocator(src.m_Allocator),
3157 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3158 m_Count(src.m_Count),
3159 m_Capacity(src.m_Count)
3163 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3169 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3172 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3176 resize(rhs.m_Count);
3179 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3185 bool empty()
const {
return m_Count == 0; }
3186 size_t size()
const {
return m_Count; }
3187 T* data() {
return m_pArray; }
3188 const T* data()
const {
return m_pArray; }
3190 T& operator[](
size_t index)
3192 VMA_HEAVY_ASSERT(index < m_Count);
3193 return m_pArray[index];
3195 const T& operator[](
size_t index)
const 3197 VMA_HEAVY_ASSERT(index < m_Count);
3198 return m_pArray[index];
3203 VMA_HEAVY_ASSERT(m_Count > 0);
3206 const T& front()
const 3208 VMA_HEAVY_ASSERT(m_Count > 0);
3213 VMA_HEAVY_ASSERT(m_Count > 0);
3214 return m_pArray[m_Count - 1];
3216 const T& back()
const 3218 VMA_HEAVY_ASSERT(m_Count > 0);
3219 return m_pArray[m_Count - 1];
3222 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3224 newCapacity = VMA_MAX(newCapacity, m_Count);
3226 if((newCapacity < m_Capacity) && !freeMemory)
3228 newCapacity = m_Capacity;
3231 if(newCapacity != m_Capacity)
3233 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3236 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3238 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3239 m_Capacity = newCapacity;
3240 m_pArray = newArray;
3244 void resize(
size_t newCount,
bool freeMemory =
false)
3246 size_t newCapacity = m_Capacity;
3247 if(newCount > m_Capacity)
3249 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3253 newCapacity = newCount;
3256 if(newCapacity != m_Capacity)
3258 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3259 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3260 if(elementsToCopy != 0)
3262 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3264 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3265 m_Capacity = newCapacity;
3266 m_pArray = newArray;
3272 void clear(
bool freeMemory =
false)
3274 resize(0, freeMemory);
3277 void insert(
size_t index,
const T& src)
3279 VMA_HEAVY_ASSERT(index <= m_Count);
3280 const size_t oldCount = size();
3281 resize(oldCount + 1);
3282 if(index < oldCount)
3284 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3286 m_pArray[index] = src;
3289 void remove(
size_t index)
3291 VMA_HEAVY_ASSERT(index < m_Count);
3292 const size_t oldCount = size();
3293 if(index < oldCount - 1)
3295 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3297 resize(oldCount - 1);
3300 void push_back(
const T& src)
3302 const size_t newIndex = size();
3303 resize(newIndex + 1);
3304 m_pArray[newIndex] = src;
3309 VMA_HEAVY_ASSERT(m_Count > 0);
3313 void push_front(
const T& src)
3320 VMA_HEAVY_ASSERT(m_Count > 0);
3324 typedef T* iterator;
3326 iterator begin() {
return m_pArray; }
3327 iterator end() {
return m_pArray + m_Count; }
3330 AllocatorT m_Allocator;
3336 template<
typename T,
typename allocatorT>
3337 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3339 vec.insert(index, item);
3342 template<
typename T,
typename allocatorT>
3343 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3348 #endif // #if VMA_USE_STL_VECTOR 3350 template<
typename CmpLess,
typename VectorT>
3351 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3353 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3355 vector.data() + vector.size(),
3357 CmpLess()) - vector.data();
3358 VmaVectorInsert(vector, indexToInsert, value);
3359 return indexToInsert;
// Removes one element equal to value (per CmpLess equivalence) from a sorted
// vector. Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalent iff neither orders before the other.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        const size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
// Finds the index of an element equal to value (per CmpLess equivalence) in a
// sorted vector, or vector.size() when absent.
// Fix: the lower-bound result (a pointer into data()) was compared against
// vector.size() — a pointer-vs-integer comparison. The code now works with an
// index throughout, which also avoids mixing data() pointers with begin().
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const size_t indexToFind = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        comparator) - vector.data();
    if(indexToFind < vector.size() &&
        !comparator(vector[indexToFind], value) &&
        !comparator(value, vector[indexToFind]))
    {
        return indexToFind;
    }
    return vector.size();
}
// Fixed-block pool allocator for objects of type T. Items are recycled through
// a per-block singly-linked free list threaded via NextFreeIndex.
// NOTE(review): the nested Item/ItemBlock struct bodies are partially elided in
// this extraction; Item presumably unions NextFreeIndex with storage for T —
// confirm against the member definitions below.
3407 template<
typename T>
3408 class VmaPoolAllocator
3410 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3412 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3413 ~VmaPoolAllocator();
// Index of the next free item within the same block; UINT32_MAX terminates the list.
3421 uint32_t NextFreeIndex;
// Head of this block's free list; UINT32_MAX when the block is full.
3428 uint32_t FirstFreeIndex;
3431 const VkAllocationCallbacks* m_pAllocationCallbacks;
3432 size_t m_ItemsPerBlock;
3433 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3435 ItemBlock& CreateNewBlock();
3438 template<
typename T>
3439 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3440 m_pAllocationCallbacks(pAllocationCallbacks),
3441 m_ItemsPerBlock(itemsPerBlock),
3442 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3444 VMA_ASSERT(itemsPerBlock > 0);
3447 template<
typename T>
3448 VmaPoolAllocator<T>::~VmaPoolAllocator()
3453 template<
typename T>
3454 void VmaPoolAllocator<T>::Clear()
3456 for(
size_t i = m_ItemBlocks.size(); i--; )
3457 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3458 m_ItemBlocks.clear();
3461 template<
typename T>
3462 T* VmaPoolAllocator<T>::Alloc()
3464 for(
size_t i = m_ItemBlocks.size(); i--; )
3466 ItemBlock& block = m_ItemBlocks[i];
3468 if(block.FirstFreeIndex != UINT32_MAX)
3470 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3471 block.FirstFreeIndex = pItem->NextFreeIndex;
3472 return &pItem->Value;
3477 ItemBlock& newBlock = CreateNewBlock();
3478 Item*
const pItem = &newBlock.pItems[0];
3479 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3480 return &pItem->Value;
3483 template<
typename T>
3484 void VmaPoolAllocator<T>::Free(T* ptr)
3487 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3489 ItemBlock& block = m_ItemBlocks[i];
3493 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3496 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3498 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3499 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3500 block.FirstFreeIndex = index;
3504 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3507 template<
typename T>
3508 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3510 ItemBlock newBlock = {
3511 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3513 m_ItemBlocks.push_back(newBlock);
3516 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3517 newBlock.pItems[i].NextFreeIndex = i + 1;
3518 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3519 return m_ItemBlocks.back();
3525 #if VMA_USE_STL_LIST 3527 #define VmaList std::list 3529 #else // #if VMA_USE_STL_LIST 3531 template<
typename T>
3540 template<
typename T>
3543 VMA_CLASS_NO_COPY(VmaRawList)
3545 typedef VmaListItem<T> ItemType;
3547 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3551 size_t GetCount()
const {
return m_Count; }
3552 bool IsEmpty()
const {
return m_Count == 0; }
3554 ItemType* Front() {
return m_pFront; }
3555 const ItemType* Front()
const {
return m_pFront; }
3556 ItemType* Back() {
return m_pBack; }
3557 const ItemType* Back()
const {
return m_pBack; }
3559 ItemType* PushBack();
3560 ItemType* PushFront();
3561 ItemType* PushBack(
const T& value);
3562 ItemType* PushFront(
const T& value);
3567 ItemType* InsertBefore(ItemType* pItem);
3569 ItemType* InsertAfter(ItemType* pItem);
3571 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3572 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3574 void Remove(ItemType* pItem);
3577 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3578 VmaPoolAllocator<ItemType> m_ItemAllocator;
3584 template<
typename T>
3585 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3586 m_pAllocationCallbacks(pAllocationCallbacks),
3587 m_ItemAllocator(pAllocationCallbacks, 128),
3594 template<
typename T>
3595 VmaRawList<T>::~VmaRawList()
3601 template<
typename T>
3602 void VmaRawList<T>::Clear()
3604 if(IsEmpty() ==
false)
3606 ItemType* pItem = m_pBack;
3607 while(pItem != VMA_NULL)
3609 ItemType*
const pPrevItem = pItem->pPrev;
3610 m_ItemAllocator.Free(pItem);
3613 m_pFront = VMA_NULL;
3619 template<
typename T>
3620 VmaListItem<T>* VmaRawList<T>::PushBack()
3622 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3623 pNewItem->pNext = VMA_NULL;
3626 pNewItem->pPrev = VMA_NULL;
3627 m_pFront = pNewItem;
3633 pNewItem->pPrev = m_pBack;
3634 m_pBack->pNext = pNewItem;
3641 template<
typename T>
3642 VmaListItem<T>* VmaRawList<T>::PushFront()
3644 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3645 pNewItem->pPrev = VMA_NULL;
3648 pNewItem->pNext = VMA_NULL;
3649 m_pFront = pNewItem;
3655 pNewItem->pNext = m_pFront;
3656 m_pFront->pPrev = pNewItem;
3657 m_pFront = pNewItem;
3663 template<
typename T>
3664 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3666 ItemType*
const pNewItem = PushBack();
3667 pNewItem->Value = value;
3671 template<
typename T>
3672 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3674 ItemType*
const pNewItem = PushFront();
3675 pNewItem->Value = value;
3679 template<
typename T>
3680 void VmaRawList<T>::PopBack()
3682 VMA_HEAVY_ASSERT(m_Count > 0);
3683 ItemType*
const pBackItem = m_pBack;
3684 ItemType*
const pPrevItem = pBackItem->pPrev;
3685 if(pPrevItem != VMA_NULL)
3687 pPrevItem->pNext = VMA_NULL;
3689 m_pBack = pPrevItem;
3690 m_ItemAllocator.Free(pBackItem);
3694 template<
typename T>
3695 void VmaRawList<T>::PopFront()
3697 VMA_HEAVY_ASSERT(m_Count > 0);
3698 ItemType*
const pFrontItem = m_pFront;
3699 ItemType*
const pNextItem = pFrontItem->pNext;
3700 if(pNextItem != VMA_NULL)
3702 pNextItem->pPrev = VMA_NULL;
3704 m_pFront = pNextItem;
3705 m_ItemAllocator.Free(pFrontItem);
3709 template<
typename T>
3710 void VmaRawList<T>::Remove(ItemType* pItem)
3712 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3713 VMA_HEAVY_ASSERT(m_Count > 0);
3715 if(pItem->pPrev != VMA_NULL)
3717 pItem->pPrev->pNext = pItem->pNext;
3721 VMA_HEAVY_ASSERT(m_pFront == pItem);
3722 m_pFront = pItem->pNext;
3725 if(pItem->pNext != VMA_NULL)
3727 pItem->pNext->pPrev = pItem->pPrev;
3731 VMA_HEAVY_ASSERT(m_pBack == pItem);
3732 m_pBack = pItem->pPrev;
3735 m_ItemAllocator.Free(pItem);
3739 template<
typename T>
3740 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3742 if(pItem != VMA_NULL)
3744 ItemType*
const prevItem = pItem->pPrev;
3745 ItemType*
const newItem = m_ItemAllocator.Alloc();
3746 newItem->pPrev = prevItem;
3747 newItem->pNext = pItem;
3748 pItem->pPrev = newItem;
3749 if(prevItem != VMA_NULL)
3751 prevItem->pNext = newItem;
3755 VMA_HEAVY_ASSERT(m_pFront == pItem);
3765 template<
typename T>
3766 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3768 if(pItem != VMA_NULL)
3770 ItemType*
const nextItem = pItem->pNext;
3771 ItemType*
const newItem = m_ItemAllocator.Alloc();
3772 newItem->pNext = nextItem;
3773 newItem->pPrev = pItem;
3774 pItem->pNext = newItem;
3775 if(nextItem != VMA_NULL)
3777 nextItem->pPrev = newItem;
3781 VMA_HEAVY_ASSERT(m_pBack == pItem);
3791 template<
typename T>
3792 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3794 ItemType*
const newItem = InsertBefore(pItem);
3795 newItem->Value = value;
3799 template<
typename T>
3800 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3802 ItemType*
const newItem = InsertAfter(pItem);
3803 newItem->Value = value;
3807 template<
typename T,
typename AllocatorT>
3810 VMA_CLASS_NO_COPY(VmaList)
3821 T& operator*()
const 3823 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3824 return m_pItem->Value;
3826 T* operator->()
const 3828 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3829 return &m_pItem->Value;
3832 iterator& operator++()
3834 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3835 m_pItem = m_pItem->pNext;
3838 iterator& operator--()
3840 if(m_pItem != VMA_NULL)
3842 m_pItem = m_pItem->pPrev;
3846 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3847 m_pItem = m_pList->Back();
3852 iterator operator++(
int)
3854 iterator result = *
this;
3858 iterator operator--(
int)
3860 iterator result = *
this;
3865 bool operator==(
const iterator& rhs)
const 3867 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3868 return m_pItem == rhs.m_pItem;
3870 bool operator!=(
const iterator& rhs)
const 3872 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3873 return m_pItem != rhs.m_pItem;
3877 VmaRawList<T>* m_pList;
3878 VmaListItem<T>* m_pItem;
3880 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3886 friend class VmaList<T, AllocatorT>;
3889 class const_iterator
3898 const_iterator(
const iterator& src) :
3899 m_pList(src.m_pList),
3900 m_pItem(src.m_pItem)
3904 const T& operator*()
const 3906 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3907 return m_pItem->Value;
3909 const T* operator->()
const 3911 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3912 return &m_pItem->Value;
3915 const_iterator& operator++()
3917 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3918 m_pItem = m_pItem->pNext;
3921 const_iterator& operator--()
3923 if(m_pItem != VMA_NULL)
3925 m_pItem = m_pItem->pPrev;
3929 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3930 m_pItem = m_pList->Back();
3935 const_iterator operator++(
int)
3937 const_iterator result = *
this;
3941 const_iterator operator--(
int)
3943 const_iterator result = *
this;
3948 bool operator==(
const const_iterator& rhs)
const 3950 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3951 return m_pItem == rhs.m_pItem;
3953 bool operator!=(
const const_iterator& rhs)
const 3955 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3956 return m_pItem != rhs.m_pItem;
3960 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3966 const VmaRawList<T>* m_pList;
3967 const VmaListItem<T>* m_pItem;
3969 friend class VmaList<T, AllocatorT>;
3972 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3974 bool empty()
const {
return m_RawList.IsEmpty(); }
3975 size_t size()
const {
return m_RawList.GetCount(); }
3977 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3978 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3980 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3981 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3983 void clear() { m_RawList.Clear(); }
3984 void push_back(
const T& value) { m_RawList.PushBack(value); }
3985 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3986 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3989 VmaRawList<T> m_RawList;
3992 #endif // #if VMA_USE_STL_LIST 4000 #if VMA_USE_STL_UNORDERED_MAP 4002 #define VmaPair std::pair 4004 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4005 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4007 #else // #if VMA_USE_STL_UNORDERED_MAP 4009 template<
typename T1,
typename T2>
4015 VmaPair() : first(), second() { }
4016 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4022 template<
typename KeyT,
typename ValueT>
4026 typedef VmaPair<KeyT, ValueT> PairType;
4027 typedef PairType* iterator;
4029 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4031 iterator begin() {
return m_Vector.begin(); }
4032 iterator end() {
return m_Vector.end(); }
4034 void insert(
const PairType& pair);
4035 iterator find(
const KeyT& key);
4036 void erase(iterator it);
4039 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4042 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4044 template<
typename FirstT,
typename SecondT>
4045 struct VmaPairFirstLess
4047 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4049 return lhs.first < rhs.first;
4051 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4053 return lhs.first < rhsFirst;
4057 template<
typename KeyT,
typename ValueT>
4058 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4060 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4062 m_Vector.data() + m_Vector.size(),
4064 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4065 VmaVectorInsert(m_Vector, indexToInsert, pair);
4068 template<
typename KeyT,
typename ValueT>
4069 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4071 PairType* it = VmaBinaryFindFirstNotLess(
4073 m_Vector.data() + m_Vector.size(),
4075 VmaPairFirstLess<KeyT, ValueT>());
4076 if((it != m_Vector.end()) && (it->first == key))
4082 return m_Vector.end();
4086 template<
typename KeyT,
typename ValueT>
4087 void VmaMap<KeyT, ValueT>::erase(iterator it)
4089 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4092 #endif // #if VMA_USE_STL_UNORDERED_MAP 4098 class VmaDeviceMemoryBlock;
4100 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4102 struct VmaAllocation_T
4104 VMA_CLASS_NO_COPY(VmaAllocation_T)
4106 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4110 FLAG_USER_DATA_STRING = 0x01,
4114 enum ALLOCATION_TYPE
4116 ALLOCATION_TYPE_NONE,
4117 ALLOCATION_TYPE_BLOCK,
4118 ALLOCATION_TYPE_DEDICATED,
4121 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4124 m_pUserData(VMA_NULL),
4125 m_LastUseFrameIndex(currentFrameIndex),
4126 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4127 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4129 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4131 #if VMA_STATS_STRING_ENABLED 4132 m_CreationFrameIndex = currentFrameIndex;
4133 m_BufferImageUsage = 0;
4139 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4142 VMA_ASSERT(m_pUserData == VMA_NULL);
4145 void InitBlockAllocation(
4147 VmaDeviceMemoryBlock* block,
4148 VkDeviceSize offset,
4149 VkDeviceSize alignment,
4151 VmaSuballocationType suballocationType,
4155 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4156 VMA_ASSERT(block != VMA_NULL);
4157 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4158 m_Alignment = alignment;
4160 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4161 m_SuballocationType = (uint8_t)suballocationType;
4162 m_BlockAllocation.m_hPool = hPool;
4163 m_BlockAllocation.m_Block = block;
4164 m_BlockAllocation.m_Offset = offset;
4165 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4170 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4171 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4172 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4173 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4174 m_BlockAllocation.m_Block = VMA_NULL;
4175 m_BlockAllocation.m_Offset = 0;
4176 m_BlockAllocation.m_CanBecomeLost =
true;
4179 void ChangeBlockAllocation(
4181 VmaDeviceMemoryBlock* block,
4182 VkDeviceSize offset);
4185 void InitDedicatedAllocation(
4186 uint32_t memoryTypeIndex,
4187 VkDeviceMemory hMemory,
4188 VmaSuballocationType suballocationType,
4192 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4193 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4194 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4197 m_SuballocationType = (uint8_t)suballocationType;
4198 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4199 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4200 m_DedicatedAllocation.m_hMemory = hMemory;
4201 m_DedicatedAllocation.m_pMappedData = pMappedData;
4204 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4205 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4206 VkDeviceSize GetSize()
const {
return m_Size; }
4207 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4208 void* GetUserData()
const {
return m_pUserData; }
4209 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4210 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4212 VmaDeviceMemoryBlock* GetBlock()
const 4214 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4215 return m_BlockAllocation.m_Block;
4217 VkDeviceSize GetOffset()
const;
4218 VkDeviceMemory GetMemory()
const;
4219 uint32_t GetMemoryTypeIndex()
const;
4220 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4221 void* GetMappedData()
const;
4222 bool CanBecomeLost()
const;
4225 uint32_t GetLastUseFrameIndex()
const 4227 return m_LastUseFrameIndex.load();
4229 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4231 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4241 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4243 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4245 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4256 void BlockAllocMap();
4257 void BlockAllocUnmap();
4258 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4261 #if VMA_STATS_STRING_ENABLED 4262 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4263 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4265 void InitBufferImageUsage(uint32_t bufferImageUsage)
4267 VMA_ASSERT(m_BufferImageUsage == 0);
4268 m_BufferImageUsage = bufferImageUsage;
4271 void PrintParameters(
class VmaJsonWriter& json)
const;
4275 VkDeviceSize m_Alignment;
4276 VkDeviceSize m_Size;
4278 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4280 uint8_t m_SuballocationType;
4287 struct BlockAllocation
4290 VmaDeviceMemoryBlock* m_Block;
4291 VkDeviceSize m_Offset;
4292 bool m_CanBecomeLost;
4296 struct DedicatedAllocation
4298 uint32_t m_MemoryTypeIndex;
4299 VkDeviceMemory m_hMemory;
4300 void* m_pMappedData;
4306 BlockAllocation m_BlockAllocation;
4308 DedicatedAllocation m_DedicatedAllocation;
4311 #if VMA_STATS_STRING_ENABLED 4312 uint32_t m_CreationFrameIndex;
4313 uint32_t m_BufferImageUsage;
// One contiguous region inside a device memory block: either a live
// allocation or a free gap.
// NOTE(review): extraction dropped some member lines here (a `size` member
// and an allocation handle are referenced elsewhere, e.g. in
// VmaSuballocationItemSizeLess and VmaBlockMetadata::Init) - confirm
// against the full header.
4323 struct VmaSuballocation
4325 VkDeviceSize offset;
4328 VmaSuballocationType type;
// Ordered list of suballocations covering a whole block, sorted by offset.
4331 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Fixed penalty added per allocation that must be made lost (see CalcCost).
4334 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Parameters of a planned allocation inside a block: filled by
// CreateAllocationRequest(), consumed when the allocation is committed.
4349 struct VmaAllocationRequest
4351 VkDeviceSize offset;
4352 VkDeviceSize sumFreeSize;
4353 VkDeviceSize sumItemSize;
4354 VmaSuballocationList::iterator item;
4355 size_t itemsToMakeLostCount;
// Heuristic cost of this request: total bytes of allocations that would be
// lost plus a fixed per-allocation penalty. Lower is better.
4357 VkDeviceSize CalcCost()
const 4359 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4367 class VmaBlockMetadata
4369 VMA_CLASS_NO_COPY(VmaBlockMetadata)
4372 ~VmaBlockMetadata();
4373 void Init(VkDeviceSize size);
4376 bool Validate()
const;
4377 VkDeviceSize GetSize()
const {
return m_Size; }
4378 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4379 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4380 VkDeviceSize GetUnusedRangeSizeMax()
const;
4382 bool IsEmpty()
const;
4384 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4387 #if VMA_STATS_STRING_ENABLED 4388 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4394 bool CreateAllocationRequest(
4395 uint32_t currentFrameIndex,
4396 uint32_t frameInUseCount,
4397 VkDeviceSize bufferImageGranularity,
4398 VkDeviceSize allocSize,
4399 VkDeviceSize allocAlignment,
4400 VmaSuballocationType allocType,
4401 bool canMakeOtherLost,
4402 VmaAllocationRequest* pAllocationRequest);
4404 bool MakeRequestedAllocationsLost(
4405 uint32_t currentFrameIndex,
4406 uint32_t frameInUseCount,
4407 VmaAllocationRequest* pAllocationRequest);
4409 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4411 VkResult CheckCorruption(
const void* pBlockData);
4415 const VmaAllocationRequest& request,
4416 VmaSuballocationType type,
4417 VkDeviceSize allocSize,
4422 void FreeAtOffset(VkDeviceSize offset);
4425 VkDeviceSize m_Size;
4426 uint32_t m_FreeCount;
4427 VkDeviceSize m_SumFreeSize;
4428 VmaSuballocationList m_Suballocations;
4431 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4433 bool ValidateFreeSuballocationList()
const;
4437 bool CheckAllocation(
4438 uint32_t currentFrameIndex,
4439 uint32_t frameInUseCount,
4440 VkDeviceSize bufferImageGranularity,
4441 VkDeviceSize allocSize,
4442 VkDeviceSize allocAlignment,
4443 VmaSuballocationType allocType,
4444 VmaSuballocationList::const_iterator suballocItem,
4445 bool canMakeOtherLost,
4446 VkDeviceSize* pOffset,
4447 size_t* itemsToMakeLostCount,
4448 VkDeviceSize* pSumFreeSize,
4449 VkDeviceSize* pSumItemSize)
const;
4451 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4455 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4458 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4461 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4470 class VmaDeviceMemoryBlock
4472 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
4474 VmaBlockMetadata m_Metadata;
4478 ~VmaDeviceMemoryBlock()
4480 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
4481 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4486 uint32_t newMemoryTypeIndex,
4487 VkDeviceMemory newMemory,
4488 VkDeviceSize newSize,
4493 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
4494 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4495 uint32_t GetId()
const {
return m_Id; }
4496 void* GetMappedData()
const {
return m_pMappedData; }
4499 bool Validate()
const;
4504 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
4507 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
4508 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
4510 VkResult BindBufferMemory(
4514 VkResult BindImageMemory(
4520 uint32_t m_MemoryTypeIndex;
4522 VkDeviceMemory m_hMemory;
4527 uint32_t m_MapCount;
4528 void* m_pMappedData;
4531 struct VmaPointerLess
4533 bool operator()(
const void* lhs,
const void* rhs)
const 4539 class VmaDefragmentator;
4547 struct VmaBlockVector
4549 VMA_CLASS_NO_COPY(VmaBlockVector)
4553 uint32_t memoryTypeIndex,
4554 VkDeviceSize preferredBlockSize,
4555 size_t minBlockCount,
4556 size_t maxBlockCount,
4557 VkDeviceSize bufferImageGranularity,
4558 uint32_t frameInUseCount,
4562 VkResult CreateMinBlocks();
4564 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4565 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
4566 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
4567 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
4571 bool IsEmpty()
const {
return m_Blocks.empty(); }
4572 bool IsCorruptionDetectionEnabled()
const;
4576 uint32_t currentFrameIndex,
4578 VkDeviceSize alignment,
4580 VmaSuballocationType suballocType,
4589 #if VMA_STATS_STRING_ENABLED 4590 void PrintDetailedMap(
class VmaJsonWriter& json);
4593 void MakePoolAllocationsLost(
4594 uint32_t currentFrameIndex,
4595 size_t* pLostAllocationCount);
4596 VkResult CheckCorruption();
4598 VmaDefragmentator* EnsureDefragmentator(
4600 uint32_t currentFrameIndex);
4602 VkResult Defragment(
4604 VkDeviceSize& maxBytesToMove,
4605 uint32_t& maxAllocationsToMove);
4607 void DestroyDefragmentator();
4610 friend class VmaDefragmentator;
4613 const uint32_t m_MemoryTypeIndex;
4614 const VkDeviceSize m_PreferredBlockSize;
4615 const size_t m_MinBlockCount;
4616 const size_t m_MaxBlockCount;
4617 const VkDeviceSize m_BufferImageGranularity;
4618 const uint32_t m_FrameInUseCount;
4619 const bool m_IsCustomPool;
4622 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
4626 bool m_HasEmptyBlock;
4627 VmaDefragmentator* m_pDefragmentator;
4628 uint32_t m_NextBlockId;
4630 VkDeviceSize CalcMaxBlockSize()
const;
4633 void Remove(VmaDeviceMemoryBlock* pBlock);
4637 void IncrementallySortBlocks();
4639 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
4644 VMA_CLASS_NO_COPY(VmaPool_T)
4646 VmaBlockVector m_BlockVector;
4653 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
4654 uint32_t GetId()
const {
return m_Id; }
4655 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
4657 #if VMA_STATS_STRING_ENABLED 4665 class VmaDefragmentator
4667 VMA_CLASS_NO_COPY(VmaDefragmentator)
4670 VmaBlockVector*
const m_pBlockVector;
4671 uint32_t m_CurrentFrameIndex;
4672 VkDeviceSize m_BytesMoved;
4673 uint32_t m_AllocationsMoved;
4675 struct AllocationInfo
4678 VkBool32* m_pChanged;
4681 m_hAllocation(VK_NULL_HANDLE),
4682 m_pChanged(VMA_NULL)
4687 struct AllocationInfoSizeGreater
4689 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 4691 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
4696 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4700 VmaDeviceMemoryBlock* m_pBlock;
4701 bool m_HasNonMovableAllocations;
4702 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4704 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
4706 m_HasNonMovableAllocations(true),
4707 m_Allocations(pAllocationCallbacks),
4708 m_pMappedDataForDefragmentation(VMA_NULL)
4712 void CalcHasNonMovableAllocations()
4714 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
4715 const size_t defragmentAllocCount = m_Allocations.size();
4716 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
4719 void SortAllocationsBySizeDescecnding()
4721 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4724 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
4729 void* m_pMappedDataForDefragmentation;
4732 struct BlockPointerLess
4734 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4736 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4738 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4740 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
4746 struct BlockInfoCompareMoveDestination
4748 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4750 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4754 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4758 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4766 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4767 BlockInfoVector m_Blocks;
4769 VkResult DefragmentRound(
4770 VkDeviceSize maxBytesToMove,
4771 uint32_t maxAllocationsToMove);
4773 static bool MoveMakesSense(
4774 size_t dstBlockIndex, VkDeviceSize dstOffset,
4775 size_t srcBlockIndex, VkDeviceSize srcOffset);
4780 VmaBlockVector* pBlockVector,
4781 uint32_t currentFrameIndex);
4783 ~VmaDefragmentator();
4785 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4786 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4788 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
4790 VkResult Defragment(
4791 VkDeviceSize maxBytesToMove,
4792 uint32_t maxAllocationsToMove);
4795 #if VMA_RECORDING_ENABLED 4802 void WriteConfiguration(
4803 const VkPhysicalDeviceProperties& devProps,
4804 const VkPhysicalDeviceMemoryProperties& memProps,
4805 bool dedicatedAllocationExtensionEnabled);
4808 void RecordCreateAllocator(uint32_t frameIndex);
4809 void RecordDestroyAllocator(uint32_t frameIndex);
4810 void RecordCreatePool(uint32_t frameIndex,
4813 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
4814 void RecordAllocateMemory(uint32_t frameIndex,
4815 const VkMemoryRequirements& vkMemReq,
4818 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
4819 const VkMemoryRequirements& vkMemReq,
4820 bool requiresDedicatedAllocation,
4821 bool prefersDedicatedAllocation,
4824 void RecordAllocateMemoryForImage(uint32_t frameIndex,
4825 const VkMemoryRequirements& vkMemReq,
4826 bool requiresDedicatedAllocation,
4827 bool prefersDedicatedAllocation,
4830 void RecordFreeMemory(uint32_t frameIndex,
4832 void RecordSetAllocationUserData(uint32_t frameIndex,
4834 const void* pUserData);
4835 void RecordCreateLostAllocation(uint32_t frameIndex,
4837 void RecordMapMemory(uint32_t frameIndex,
4839 void RecordUnmapMemory(uint32_t frameIndex,
4841 void RecordFlushAllocation(uint32_t frameIndex,
4842 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
4843 void RecordInvalidateAllocation(uint32_t frameIndex,
4844 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
4845 void RecordCreateBuffer(uint32_t frameIndex,
4846 const VkBufferCreateInfo& bufCreateInfo,
4849 void RecordCreateImage(uint32_t frameIndex,
4850 const VkImageCreateInfo& imageCreateInfo,
4853 void RecordDestroyBuffer(uint32_t frameIndex,
4855 void RecordDestroyImage(uint32_t frameIndex,
4857 void RecordTouchAllocation(uint32_t frameIndex,
4859 void RecordGetAllocationInfo(uint32_t frameIndex,
4861 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
4871 class UserDataString
4875 const char* GetString()
const {
return m_Str; }
4885 VMA_MUTEX m_FileMutex;
4887 int64_t m_StartCounter;
4889 void GetBasicParams(CallParams& outParams);
4893 #endif // #if VMA_RECORDING_ENABLED 4896 struct VmaAllocator_T
4898 VMA_CLASS_NO_COPY(VmaAllocator_T)
4901 bool m_UseKhrDedicatedAllocation;
4903 bool m_AllocationCallbacksSpecified;
4904 VkAllocationCallbacks m_AllocationCallbacks;
4908 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4909 VMA_MUTEX m_HeapSizeLimitMutex;
4911 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4912 VkPhysicalDeviceMemoryProperties m_MemProps;
4915 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
4918 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4919 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4920 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
4926 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4928 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4932 return m_VulkanFunctions;
4935 VkDeviceSize GetBufferImageGranularity()
const 4938 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4939 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4942 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4943 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4945 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4947 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4948 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
4951 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 4953 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
4954 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
4957 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 4959 return IsMemoryTypeNonCoherent(memTypeIndex) ?
4960 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
4961 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
4964 bool IsIntegratedGpu()
const 4966 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
4969 #if VMA_RECORDING_ENABLED 4970 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
4973 void GetBufferMemoryRequirements(
4975 VkMemoryRequirements& memReq,
4976 bool& requiresDedicatedAllocation,
4977 bool& prefersDedicatedAllocation)
const;
4978 void GetImageMemoryRequirements(
4980 VkMemoryRequirements& memReq,
4981 bool& requiresDedicatedAllocation,
4982 bool& prefersDedicatedAllocation)
const;
4985 VkResult AllocateMemory(
4986 const VkMemoryRequirements& vkMemReq,
4987 bool requiresDedicatedAllocation,
4988 bool prefersDedicatedAllocation,
4989 VkBuffer dedicatedBuffer,
4990 VkImage dedicatedImage,
4992 VmaSuballocationType suballocType,
4998 void CalculateStats(
VmaStats* pStats);
5000 #if VMA_STATS_STRING_ENABLED 5001 void PrintDetailedMap(
class VmaJsonWriter& json);
5004 VkResult Defragment(
5006 size_t allocationCount,
5007 VkBool32* pAllocationsChanged,
5015 void DestroyPool(
VmaPool pool);
5018 void SetCurrentFrameIndex(uint32_t frameIndex);
5019 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5021 void MakePoolAllocationsLost(
5023 size_t* pLostAllocationCount);
5024 VkResult CheckPoolCorruption(
VmaPool hPool);
5025 VkResult CheckCorruption(uint32_t memoryTypeBits);
5029 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5030 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5035 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5036 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5038 void FlushOrInvalidateAllocation(
5040 VkDeviceSize offset, VkDeviceSize size,
5041 VMA_CACHE_OPERATION op);
5043 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5046 VkDeviceSize m_PreferredLargeHeapBlockSize;
5048 VkPhysicalDevice m_PhysicalDevice;
5049 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
5051 VMA_MUTEX m_PoolsMutex;
5053 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5054 uint32_t m_NextPoolId;
5058 #if VMA_RECORDING_ENABLED 5059 VmaRecorder* m_pRecorder;
5064 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5066 VkResult AllocateMemoryOfType(
5068 VkDeviceSize alignment,
5069 bool dedicatedAllocation,
5070 VkBuffer dedicatedBuffer,
5071 VkImage dedicatedImage,
5073 uint32_t memTypeIndex,
5074 VmaSuballocationType suballocType,
5078 VkResult AllocateDedicatedMemory(
5080 VmaSuballocationType suballocType,
5081 uint32_t memTypeIndex,
5083 bool isUserDataString,
5085 VkBuffer dedicatedBuffer,
5086 VkImage dedicatedImage,
5096 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5098 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5101 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5103 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
5106 template<
typename T>
5109 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
5112 template<
typename T>
5113 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5115 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
5118 template<
typename T>
5119 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5124 VmaFree(hAllocator, ptr);
5128 template<
typename T>
5129 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5133 for(
size_t i = count; i--; )
5135 VmaFree(hAllocator, ptr);
5142 #if VMA_STATS_STRING_ENABLED 5144 class VmaStringBuilder
5147 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5148 size_t GetLength()
const {
return m_Data.size(); }
5149 const char* GetData()
const {
return m_Data.data(); }
5151 void Add(
char ch) { m_Data.push_back(ch); }
5152 void Add(
const char* pStr);
5153 void AddNewLine() { Add(
'\n'); }
5154 void AddNumber(uint32_t num);
5155 void AddNumber(uint64_t num);
5156 void AddPointer(
const void* ptr);
5159 VmaVector< char, VmaStlAllocator<char> > m_Data;
5162 void VmaStringBuilder::Add(
const char* pStr)
5164 const size_t strLen = strlen(pStr);
5167 const size_t oldCount = m_Data.size();
5168 m_Data.resize(oldCount + strLen);
5169 memcpy(m_Data.data() + oldCount, pStr, strLen);
5173 void VmaStringBuilder::AddNumber(uint32_t num)
5176 VmaUint32ToStr(buf,
sizeof(buf), num);
5180 void VmaStringBuilder::AddNumber(uint64_t num)
5183 VmaUint64ToStr(buf,
sizeof(buf), num);
5187 void VmaStringBuilder::AddPointer(
const void* ptr)
5190 VmaPtrToStr(buf,
sizeof(buf), ptr);
5194 #endif // #if VMA_STATS_STRING_ENABLED 5199 #if VMA_STATS_STRING_ENABLED 5203 VMA_CLASS_NO_COPY(VmaJsonWriter)
5205 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
5208 void BeginObject(
bool singleLine =
false);
5211 void BeginArray(
bool singleLine =
false);
5214 void WriteString(
const char* pStr);
5215 void BeginString(
const char* pStr = VMA_NULL);
5216 void ContinueString(
const char* pStr);
5217 void ContinueString(uint32_t n);
5218 void ContinueString(uint64_t n);
5219 void ContinueString_Pointer(
const void* ptr);
5220 void EndString(
const char* pStr = VMA_NULL);
5222 void WriteNumber(uint32_t n);
5223 void WriteNumber(uint64_t n);
5224 void WriteBool(
bool b);
5228 static const char*
const INDENT;
5230 enum COLLECTION_TYPE
5232 COLLECTION_TYPE_OBJECT,
5233 COLLECTION_TYPE_ARRAY,
5237 COLLECTION_TYPE type;
5238 uint32_t valueCount;
5239 bool singleLineMode;
5242 VmaStringBuilder& m_SB;
5243 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
5244 bool m_InsideString;
5246 void BeginValue(
bool isString);
5247 void WriteIndent(
bool oneLess =
false);
5250 const char*
const VmaJsonWriter::INDENT =
" ";
5252 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
5254 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
5255 m_InsideString(false)
5259 VmaJsonWriter::~VmaJsonWriter()
5261 VMA_ASSERT(!m_InsideString);
5262 VMA_ASSERT(m_Stack.empty());
5265 void VmaJsonWriter::BeginObject(
bool singleLine)
5267 VMA_ASSERT(!m_InsideString);
5273 item.type = COLLECTION_TYPE_OBJECT;
5274 item.valueCount = 0;
5275 item.singleLineMode = singleLine;
5276 m_Stack.push_back(item);
5279 void VmaJsonWriter::EndObject()
5281 VMA_ASSERT(!m_InsideString);
5286 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
5290 void VmaJsonWriter::BeginArray(
bool singleLine)
5292 VMA_ASSERT(!m_InsideString);
5298 item.type = COLLECTION_TYPE_ARRAY;
5299 item.valueCount = 0;
5300 item.singleLineMode = singleLine;
5301 m_Stack.push_back(item);
5304 void VmaJsonWriter::EndArray()
5306 VMA_ASSERT(!m_InsideString);
5311 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
5315 void VmaJsonWriter::WriteString(
const char* pStr)
5321 void VmaJsonWriter::BeginString(
const char* pStr)
5323 VMA_ASSERT(!m_InsideString);
5327 m_InsideString =
true;
5328 if(pStr != VMA_NULL && pStr[0] !=
'\0')
5330 ContinueString(pStr);
5334 void VmaJsonWriter::ContinueString(
const char* pStr)
5336 VMA_ASSERT(m_InsideString);
5338 const size_t strLen = strlen(pStr);
5339 for(
size_t i = 0; i < strLen; ++i)
5372 VMA_ASSERT(0 &&
"Character not currently supported.");
5378 void VmaJsonWriter::ContinueString(uint32_t n)
5380 VMA_ASSERT(m_InsideString);
5384 void VmaJsonWriter::ContinueString(uint64_t n)
5386 VMA_ASSERT(m_InsideString);
5390 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
5392 VMA_ASSERT(m_InsideString);
5393 m_SB.AddPointer(ptr);
5396 void VmaJsonWriter::EndString(
const char* pStr)
5398 VMA_ASSERT(m_InsideString);
5399 if(pStr != VMA_NULL && pStr[0] !=
'\0')
5401 ContinueString(pStr);
5404 m_InsideString =
false;
5407 void VmaJsonWriter::WriteNumber(uint32_t n)
5409 VMA_ASSERT(!m_InsideString);
5414 void VmaJsonWriter::WriteNumber(uint64_t n)
5416 VMA_ASSERT(!m_InsideString);
5421 void VmaJsonWriter::WriteBool(
bool b)
5423 VMA_ASSERT(!m_InsideString);
5425 m_SB.Add(b ?
"true" :
"false");
5428 void VmaJsonWriter::WriteNull()
5430 VMA_ASSERT(!m_InsideString);
5435 void VmaJsonWriter::BeginValue(
bool isString)
5437 if(!m_Stack.empty())
5439 StackItem& currItem = m_Stack.back();
5440 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5441 currItem.valueCount % 2 == 0)
5443 VMA_ASSERT(isString);
5446 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5447 currItem.valueCount % 2 != 0)
5451 else if(currItem.valueCount > 0)
5460 ++currItem.valueCount;
5464 void VmaJsonWriter::WriteIndent(
bool oneLess)
5466 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
5470 size_t count = m_Stack.size();
5471 if(count > 0 && oneLess)
5475 for(
size_t i = 0; i < count; ++i)
5482 #endif // #if VMA_STATS_STRING_ENABLED 5486 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
5488 if(IsUserDataString())
5490 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
5492 FreeUserDataString(hAllocator);
5494 if(pUserData != VMA_NULL)
5496 const char*
const newStrSrc = (
char*)pUserData;
5497 const size_t newStrLen = strlen(newStrSrc);
5498 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
5499 memcpy(newStrDst, newStrSrc, newStrLen + 1);
5500 m_pUserData = newStrDst;
5505 m_pUserData = pUserData;
5509 void VmaAllocation_T::ChangeBlockAllocation(
5511 VmaDeviceMemoryBlock* block,
5512 VkDeviceSize offset)
5514 VMA_ASSERT(block != VMA_NULL);
5515 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5518 if(block != m_BlockAllocation.m_Block)
5520 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
5521 if(IsPersistentMap())
5523 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
5524 block->Map(hAllocator, mapRefCount, VMA_NULL);
5527 m_BlockAllocation.m_Block = block;
5528 m_BlockAllocation.m_Offset = offset;
5531 VkDeviceSize VmaAllocation_T::GetOffset()
const 5535 case ALLOCATION_TYPE_BLOCK:
5536 return m_BlockAllocation.m_Offset;
5537 case ALLOCATION_TYPE_DEDICATED:
5545 VkDeviceMemory VmaAllocation_T::GetMemory()
const 5549 case ALLOCATION_TYPE_BLOCK:
5550 return m_BlockAllocation.m_Block->GetDeviceMemory();
5551 case ALLOCATION_TYPE_DEDICATED:
5552 return m_DedicatedAllocation.m_hMemory;
5555 return VK_NULL_HANDLE;
5559 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 5563 case ALLOCATION_TYPE_BLOCK:
5564 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
5565 case ALLOCATION_TYPE_DEDICATED:
5566 return m_DedicatedAllocation.m_MemoryTypeIndex;
5573 void* VmaAllocation_T::GetMappedData()
const 5577 case ALLOCATION_TYPE_BLOCK:
5580 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
5581 VMA_ASSERT(pBlockData != VMA_NULL);
5582 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
5589 case ALLOCATION_TYPE_DEDICATED:
5590 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
5591 return m_DedicatedAllocation.m_pMappedData;
5598 bool VmaAllocation_T::CanBecomeLost()
const 5602 case ALLOCATION_TYPE_BLOCK:
5603 return m_BlockAllocation.m_CanBecomeLost;
5604 case ALLOCATION_TYPE_DEDICATED:
5612 VmaPool VmaAllocation_T::GetPool()
const 5614 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5615 return m_BlockAllocation.m_hPool;
5618 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5620 VMA_ASSERT(CanBecomeLost());
5626 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
5629 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
5634 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
5640 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
5650 #if VMA_STATS_STRING_ENABLED 5653 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
5662 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 5664 json.WriteString(
"Type");
5665 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
5667 json.WriteString(
"Size");
5668 json.WriteNumber(m_Size);
5670 if(m_pUserData != VMA_NULL)
5672 json.WriteString(
"UserData");
5673 if(IsUserDataString())
5675 json.WriteString((
const char*)m_pUserData);
5680 json.ContinueString_Pointer(m_pUserData);
5685 json.WriteString(
"CreationFrameIndex");
5686 json.WriteNumber(m_CreationFrameIndex);
5688 json.WriteString(
"LastUseFrameIndex");
5689 json.WriteNumber(GetLastUseFrameIndex());
5691 if(m_BufferImageUsage != 0)
5693 json.WriteString(
"Usage");
5694 json.WriteNumber(m_BufferImageUsage);
5700 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
5702 VMA_ASSERT(IsUserDataString());
5703 if(m_pUserData != VMA_NULL)
5705 char*
const oldStr = (
char*)m_pUserData;
5706 const size_t oldStrLen = strlen(oldStr);
5707 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
5708 m_pUserData = VMA_NULL;
5712 void VmaAllocation_T::BlockAllocMap()
5714 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5716 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5722 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
5726 void VmaAllocation_T::BlockAllocUnmap()
5728 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5730 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5736 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
5740 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
5742 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5746 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5748 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
5749 *ppData = m_DedicatedAllocation.m_pMappedData;
5755 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
5756 return VK_ERROR_MEMORY_MAP_FAILED;
5761 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5762 hAllocator->m_hDevice,
5763 m_DedicatedAllocation.m_hMemory,
5768 if(result == VK_SUCCESS)
5770 m_DedicatedAllocation.m_pMappedData = *ppData;
5777 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
5779 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5781 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5786 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
5787 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
5788 hAllocator->m_hDevice,
5789 m_DedicatedAllocation.m_hMemory);
5794 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo as a JSON object: block/allocation/unused-range
// counts, used/unused byte totals, and nested min/avg/max objects for
// allocation and unused-range sizes. Only compiled when stats strings are
// enabled. (The WriteNumber calls between the labels are elided in this
// excerpt.)
5798 #if VMA_STATS_STRING_ENABLED 5800 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
5804 json.WriteString(
"Blocks");
5807 json.WriteString(
"Allocations");
5810 json.WriteString(
"UnusedRanges");
5813 json.WriteString(
"UsedBytes");
5816 json.WriteString(
"UnusedBytes");
// Nested object with allocation-size statistics.
5821 json.WriteString(
"AllocationSize");
5822 json.BeginObject(
true);
5823 json.WriteString(
"Min");
5825 json.WriteString(
"Avg");
5827 json.WriteString(
"Max");
// Nested object with unused-range-size statistics.
5834 json.WriteString(
"UnusedRangeSize");
5835 json.BeginObject(
true);
5836 json.WriteString(
"Min");
5838 json.WriteString(
"Avg");
5840 json.WriteString(
"Max");
// Comparator ordering free-suballocation list iterators by ascending size.
// The second overload compares an iterator against a raw size, which lets
// binary searches (VmaBinaryFindFirstNotLess) look up by size directly.
5848 #endif // #if VMA_STATS_STRING_ENABLED 5850 struct VmaSuballocationItemSizeLess
5853 const VmaSuballocationList::iterator lhs,
5854 const VmaSuballocationList::iterator rhs)
const 5856 return lhs->size < rhs->size;
5859 const VmaSuballocationList::iterator lhs,
5860 VkDeviceSize rhsSize)
const 5862 return lhs->size < rhsSize;
// Constructor: both containers use the allocator's VkAllocationCallbacks so
// all host allocations flow through user-provided callbacks.
5869 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
5873 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
5874 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
5878 VmaBlockMetadata::~VmaBlockMetadata()
// Initializes metadata for a block of `size` bytes: the whole block becomes
// a single FREE suballocation, registered in the by-size vector.
5882 void VmaBlockMetadata::Init(VkDeviceSize size)
5886 m_SumFreeSize = size;
5888 VmaSuballocation suballoc = {};
5889 suballoc.offset = 0;
5890 suballoc.size = size;
5891 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5892 suballoc.hAllocation = VK_NULL_HANDLE;
5894 m_Suballocations.push_back(suballoc);
// NOTE(review): end() is decremented to the just-pushed element on a line
// elided from this excerpt before being registered — confirm upstream.
5895 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5897 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check of the suballocation list and the sorted
// free-by-size vector. Returns false on the first inconsistency found
// (early returns elided in this excerpt). Invariants verified:
//  - suballocations are contiguous (each offset equals the running offset);
//  - no two adjacent FREE suballocations (they should have been merged);
//  - hAllocation is null exactly for FREE entries;
//  - free count / free bytes / registered-free count match the cached
//    m_FreeCount, m_SumFreeSize and m_FreeSuballocationsBySize.size();
//  - the by-size vector is sorted ascending and contains only FREE items.
5900 bool VmaBlockMetadata::Validate()
const 5902 if(m_Suballocations.empty())
// Running totals recomputed from scratch and compared to cached members.
5908 VkDeviceSize calculatedOffset = 0;
5910 uint32_t calculatedFreeCount = 0;
5912 VkDeviceSize calculatedSumFreeSize = 0;
5915 size_t freeSuballocationsToRegister = 0;
// True when the previous suballocation was FREE (two in a row is invalid).
5917 bool prevFree =
false;
5919 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5920 suballocItem != m_Suballocations.cend();
5923 const VmaSuballocation& subAlloc = *suballocItem;
5926 if(subAlloc.offset != calculatedOffset)
5931 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
5933 if(prevFree && currFree)
5938 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5945 calculatedSumFreeSize += subAlloc.size;
5946 ++calculatedFreeCount;
// Only free ranges at or above this threshold are kept in the sorted vector.
5947 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5949 ++freeSuballocationsToRegister;
5953 if(subAlloc.size < VMA_DEBUG_MARGIN)
// Used suballocation: cached handle must agree with the list entry.
5960 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5964 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5970 if(VMA_DEBUG_MARGIN > 0 && !prevFree)
5976 calculatedOffset += subAlloc.size;
5977 prevFree = currFree;
5982 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// Verify the by-size vector is sorted and references only FREE entries.
5987 VkDeviceSize lastSize = 0;
5988 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5990 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5993 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5998 if(suballocItem->size < lastSize)
6003 lastSize = suballocItem->size;
// Final cross-check of recomputed totals against cached members.
6007 if(!ValidateFreeSuballocationList() ||
6008 (calculatedOffset != m_Size) ||
6009 (calculatedSumFreeSize != m_SumFreeSize) ||
6010 (calculatedFreeCount != m_FreeCount))
// Largest registered free range; the by-size vector is sorted ascending so
// the back element is the maximum. Returns 0 when empty (elided branch).
6018 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 6020 if(!m_FreeSuballocationsBySize.empty())
6022 return m_FreeSuballocationsBySize.back()->size;
// The block is empty when it holds exactly one suballocation and that
// suballocation is free (the single whole-block FREE range).
6030 bool VmaBlockMetadata::IsEmpty()
const 6032 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo by walking all suballocations and classifying each as
// used or unused (per-branch accumulation elided in this excerpt).
6035 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6039 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6051 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6052 suballocItem != m_Suballocations.cend();
6055 const VmaSuballocation& suballoc = *suballocItem;
6056 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into pool-wide statistics (other field
// updates elided in this excerpt).
6069 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 6071 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6073 inoutStats.
size += m_Size;
// Emits a detailed JSON map of the block: size totals, then one object per
// suballocation with its offset, type and size. Free ranges get the FREE
// type name; used ranges delegate to the allocation's PrintParameters.
6080 #if VMA_STATS_STRING_ENABLED 6082 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 6086 json.WriteString(
"TotalBytes");
6087 json.WriteNumber(m_Size);
6089 json.WriteString(
"UnusedBytes");
6090 json.WriteNumber(m_SumFreeSize);
// Used allocations = all suballocations minus the free ones.
6092 json.WriteString(
"Allocations");
6093 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
6095 json.WriteString(
"UnusedRanges");
6096 json.WriteNumber(m_FreeCount);
6098 json.WriteString(
"Suballocations");
6101 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6102 suballocItem != m_Suballocations.cend();
6103 ++suballocItem, ++i)
6105 json.BeginObject(
true);
6107 json.WriteString(
"Offset");
6108 json.WriteNumber(suballocItem->offset);
6110 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6112 json.WriteString(
"Type");
6113 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6115 json.WriteString(
"Size");
6116 json.WriteNumber(suballocItem->size);
// Non-free entry: the allocation prints its own parameters.
6120 suballocItem->hAllocation->PrintParameters(json);
// Searches this block for space to place a new allocation of `allocSize`
// bytes with `allocAlignment`. On success fills *pAllocationRequest (offset,
// target free item, cost bookkeeping) and returns true.
// Strategy: 1) quick reject if total free space is insufficient;
// 2) binary-search the sorted free-by-size vector for the first candidate
// big enough (best-fit-forward, or worst-fit scanning backwards — the
// branch selecting between them is elided in this excerpt);
// 3) if canMakeOtherLost, additionally consider evicting "lost-able"
// allocations, keeping the request with the lowest CalcCost().
6130 #endif // #if VMA_STATS_STRING_ENABLED 6142 bool VmaBlockMetadata::CreateAllocationRequest(
6143 uint32_t currentFrameIndex,
6144 uint32_t frameInUseCount,
6145 VkDeviceSize bufferImageGranularity,
6146 VkDeviceSize allocSize,
6147 VkDeviceSize allocAlignment,
6148 VmaSuballocationType allocType,
6149 bool canMakeOtherLost,
6150 VmaAllocationRequest* pAllocationRequest)
6152 VMA_ASSERT(allocSize > 0);
6153 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6154 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6155 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, the request plus debug margins on both sides
// must fit in the total free space.
6158 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6164 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6165 if(freeSuballocCount > 0)
// Binary search for the first free range >= allocSize + margins.
6170 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6171 m_FreeSuballocationsBySize.data(),
6172 m_FreeSuballocationsBySize.data() + freeSuballocCount,
6173 allocSize + 2 * VMA_DEBUG_MARGIN,
6174 VmaSuballocationItemSizeLess());
6175 size_t index = it - m_FreeSuballocationsBySize.data();
6176 for(; index < freeSuballocCount; ++index)
6181 bufferImageGranularity,
6185 m_FreeSuballocationsBySize[index],
6187 &pAllocationRequest->offset,
6188 &pAllocationRequest->itemsToMakeLostCount,
6189 &pAllocationRequest->sumFreeSize,
6190 &pAllocationRequest->sumItemSize))
6192 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternative path: scan free ranges from largest to smallest.
6200 for(
size_t index = freeSuballocCount; index--; )
6205 bufferImageGranularity,
6209 m_FreeSuballocationsBySize[index],
6211 &pAllocationRequest->offset,
6212 &pAllocationRequest->itemsToMakeLostCount,
6213 &pAllocationRequest->sumFreeSize,
6214 &pAllocationRequest->sumItemSize))
6216 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: try every suballocation that is free or can become lost,
// and keep the cheapest viable request.
6223 if(canMakeOtherLost)
// Start with worst possible cost so any real request beats it.
6227 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
6228 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
6230 VmaAllocationRequest tmpAllocRequest = {};
6231 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
6232 suballocIt != m_Suballocations.end();
6235 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
6236 suballocIt->hAllocation->CanBecomeLost())
6241 bufferImageGranularity,
6247 &tmpAllocRequest.offset,
6248 &tmpAllocRequest.itemsToMakeLostCount,
6249 &tmpAllocRequest.sumFreeSize,
6250 &tmpAllocRequest.sumItemSize))
6252 tmpAllocRequest.item = suballocIt;
6254 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
6256 *pAllocationRequest = tmpAllocRequest;
// A finite sumItemSize means at least one viable request was found.
6262 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Evicts the allocations that a previously computed request marked as
// "to make lost", walking forward from the request's item. Each successful
// MakeLost frees the suballocation (merging it with neighbors), so the
// request's item is updated to the merged free range.
6271 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
6272 uint32_t currentFrameIndex,
6273 uint32_t frameInUseCount,
6274 VmaAllocationRequest* pAllocationRequest)
6276 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over free ranges; only occupied items can be made lost.
6278 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
6280 ++pAllocationRequest->item;
6282 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6283 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
6284 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
6285 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
6287 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
6288 --pAllocationRequest->itemsToMakeLostCount;
// Postconditions: the request now points at a free suballocation.
6296 VMA_HEAVY_ASSERT(Validate());
6297 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6298 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that is eligible (can become
// lost and has not been used within frameInUseCount frames). Returns how
// many allocations were evicted.
6303 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6305 uint32_t lostAllocationCount = 0;
6306 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6307 it != m_Suballocations.end();
6310 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
6311 it->hAllocation->CanBecomeLost() &&
6312 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; continue from the returned iterator.
6314 it = FreeSuballocation(it);
6315 ++lostAllocationCount;
6318 return lostAllocationCount;
// Scans every used suballocation and validates the magic values written in
// the VMA_DEBUG_MARGIN guard regions directly before and after it. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard.
6321 VkResult VmaBlockMetadata::CheckCorruption(
const void* pBlockData)
6323 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6324 it != m_Suballocations.end();
6327 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard immediately before the allocation's first byte.
6329 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
6331 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
6332 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard immediately after the allocation's last byte.
6334 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
6336 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
6337 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: converts the target
// free suballocation into a used one of exactly allocSize bytes, inserting
// new FREE suballocations for any leftover padding before and after the
// placed range, and updates m_FreeCount / m_SumFreeSize.
6345 void VmaBlockMetadata::Alloc(
6346 const VmaAllocationRequest& request,
6347 VmaSuballocationType type,
6348 VkDeviceSize allocSize,
6351 VMA_ASSERT(request.item != m_Suballocations.end());
6352 VmaSuballocation& suballoc = *request.item;
6354 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// paddingBegin/paddingEnd are the unused slices of the free range around
// the aligned placement offset.
6356 VMA_ASSERT(request.offset >= suballoc.offset);
6357 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
6358 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
6359 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item leaves the free-by-size vector before it becomes a used range.
6363 UnregisterFreeSuballocation(request.item);
6365 suballoc.offset = request.offset;
6366 suballoc.size = allocSize;
6367 suballoc.type = type;
6368 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new FREE suballocation after the placed range.
6373 VmaSuballocation paddingSuballoc = {};
6374 paddingSuballoc.offset = request.offset + allocSize;
6375 paddingSuballoc.size = paddingEnd;
6376 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6377 VmaSuballocationList::iterator next = request.item;
6379 const VmaSuballocationList::iterator paddingEndItem =
6380 m_Suballocations.insert(next, paddingSuballoc);
6381 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new FREE suballocation before the placed range.
6387 VmaSuballocation paddingSuballoc = {};
6388 paddingSuballoc.offset = request.offset - paddingBegin;
6389 paddingSuballoc.size = paddingBegin;
6390 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6391 const VmaSuballocationList::iterator paddingBeginItem =
6392 m_Suballocations.insert(request.item, paddingSuballoc);
6393 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; each padding inserted re-adds one (increments
// for paddingBegin/paddingEnd are elided in this excerpt).
6397 m_FreeCount = m_FreeCount - 1;
6398 if(paddingBegin > 0)
6406 m_SumFreeSize -= allocSize;
// Body of VmaBlockMetadata::Free (the signature line is missing from this
// excerpt): linear search for the suballocation owning `allocation`, free
// it, and assert if the handle is not found in this block.
6411 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6412 suballocItem != m_Suballocations.end();
6415 VmaSuballocation& suballoc = *suballocItem;
6416 if(suballoc.hAllocation == allocation)
6418 FreeSuballocation(suballocItem);
6419 VMA_HEAVY_ASSERT(Validate());
6423 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts exactly at `offset`; asserts if no
// suballocation has that offset.
6426 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
6428 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6429 suballocItem != m_Suballocations.end();
6432 VmaSuballocation& suballoc = *suballocItem;
6433 if(suballoc.offset == offset)
6435 FreeSuballocation(suballocItem);
6439 VMA_ASSERT(0 &&
"Not found!");
// Checks the free-by-size vector alone: every entry must reference a FREE
// suballocation, be at least the registration threshold, and sizes must be
// non-decreasing (sorted). Failure returns elided in this excerpt.
6442 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 6444 VkDeviceSize lastSize = 0;
6445 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
6447 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
6449 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
6454 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6459 if(it->size < lastSize)
6465 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment can be placed
// starting in the free (or lost-able) range at suballocItem. Outputs the
// aligned placement offset, the number of allocations that would have to be
// made lost, and free/used byte sums used for cost comparison. Two largely
// parallel code paths follow: the first handles canMakeOtherLost == true
// (may span multiple successor suballocations), the second (from original
// line 6651) handles the plain free-range case. Respects VMA_DEBUG_MARGIN
// guard bytes and Vulkan's bufferImageGranularity: a linear and a
// non-linear resource must not share a "page" of that granularity.
6470 bool VmaBlockMetadata::CheckAllocation(
6471 uint32_t currentFrameIndex,
6472 uint32_t frameInUseCount,
6473 VkDeviceSize bufferImageGranularity,
6474 VkDeviceSize allocSize,
6475 VkDeviceSize allocAlignment,
6476 VmaSuballocationType allocType,
6477 VmaSuballocationList::const_iterator suballocItem,
6478 bool canMakeOtherLost,
6479 VkDeviceSize* pOffset,
6480 size_t* itemsToMakeLostCount,
6481 VkDeviceSize* pSumFreeSize,
6482 VkDeviceSize* pSumItemSize)
const 6484 VMA_ASSERT(allocSize > 0);
6485 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6486 VMA_ASSERT(suballocItem != m_Suballocations.cend());
6487 VMA_ASSERT(pOffset != VMA_NULL);
6489 *itemsToMakeLostCount = 0;
// ---- Path 1: eviction allowed ----
6493 if(canMakeOtherLost)
6495 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6497 *pSumFreeSize = suballocItem->size;
// Occupied start item: only usable if its allocation can be made lost and
// is stale (unused for more than frameInUseCount frames).
6501 if(suballocItem->hAllocation->CanBecomeLost() &&
6502 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6504 ++*itemsToMakeLostCount;
6505 *pSumItemSize = suballocItem->size;
// Quick reject: not enough space left in the block from this offset on.
6514 if(m_Size - suballocItem->offset < allocSize)
6520 *pOffset = suballocItem->offset;
// Leave room for the front guard bytes.
6523 if(VMA_DEBUG_MARGIN > 0)
6525 *pOffset += VMA_DEBUG_MARGIN;
6529 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// If a previous suballocation on the same granularity page holds a
// conflicting resource type, bump the offset to the next page.
6533 if(bufferImageGranularity > 1)
6535 bool bufferImageGranularityConflict =
false;
6536 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6537 while(prevSuballocItem != m_Suballocations.cbegin())
6540 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6541 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6543 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6545 bufferImageGranularityConflict =
true;
6553 if(bufferImageGranularityConflict)
6555 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment may have pushed the start past the whole first item.
6561 if(*pOffset >= suballocItem->offset + suballocItem->size)
6567 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
6570 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
6572 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
6574 if(suballocItem->offset + totalSize > m_Size)
// The request may span several successor items; each must be free or
// evictable, accumulating sums and the lost count along the way.
6581 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
6582 if(totalSize > suballocItem->size)
6584 VkDeviceSize remainingSize = totalSize - suballocItem->size;
6585 while(remainingSize > 0)
6588 if(lastSuballocItem == m_Suballocations.cend())
6592 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6594 *pSumFreeSize += lastSuballocItem->size;
6598 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
6599 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
6600 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6602 ++*itemsToMakeLostCount;
6603 *pSumItemSize += lastSuballocItem->size;
6610 remainingSize = (lastSuballocItem->size < remainingSize) ?
6611 remainingSize - lastSuballocItem->size : 0;
// Check granularity conflicts with following suballocations too; stale
// conflicting allocations are added to the lost count.
6617 if(bufferImageGranularity > 1)
6619 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
6621 while(nextSuballocItem != m_Suballocations.cend())
6623 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6624 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6626 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
6628 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
6629 if(nextSuballoc.hAllocation->CanBecomeLost() &&
6630 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6632 ++*itemsToMakeLostCount;
// ---- Path 2: no eviction; the item itself must be a free range ----
6651 const VmaSuballocation& suballoc = *suballocItem;
6652 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6654 *pSumFreeSize = suballoc.size;
6657 if(suballoc.size < allocSize)
6663 *pOffset = suballoc.offset;
6666 if(VMA_DEBUG_MARGIN > 0)
6668 *pOffset += VMA_DEBUG_MARGIN;
6672 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backward granularity-conflict scan as in path 1.
6676 if(bufferImageGranularity > 1)
6678 bool bufferImageGranularityConflict =
false;
6679 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6680 while(prevSuballocItem != m_Suballocations.cbegin())
6683 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6684 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6686 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6688 bufferImageGranularityConflict =
true;
6696 if(bufferImageGranularityConflict)
6698 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
6703 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
6706 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Everything (front padding + allocation + rear guard) must fit in the
// single free range on this path.
6709 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: on this path any conflict simply fails the
// placement (no eviction available).
6716 if(bufferImageGranularity > 1)
6718 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
6720 while(nextSuballocItem != m_Suballocations.cend())
6722 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6723 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6725 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a FREE suballocation with its (also FREE) successor: the item
// absorbs the successor's size and the successor is erased from the list.
6744 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
6746 VMA_ASSERT(item != m_Suballocations.end());
6747 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
// NOTE(review): the increment of nextItem is on a line elided from this
// excerpt — confirm upstream.
6749 VmaSuballocationList::iterator nextItem = item;
6751 VMA_ASSERT(nextItem != m_Suballocations.end());
6752 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6754 item->size += nextItem->size;
6756 m_Suballocations.erase(nextItem);
6759 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
6762 VmaSuballocation& suballoc = *suballocItem;
6763 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6764 suballoc.hAllocation = VK_NULL_HANDLE;
6768 m_SumFreeSize += suballoc.size;
6771 bool mergeWithNext =
false;
6772 bool mergeWithPrev =
false;
6774 VmaSuballocationList::iterator nextItem = suballocItem;
6776 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
6778 mergeWithNext =
true;
6781 VmaSuballocationList::iterator prevItem = suballocItem;
6782 if(suballocItem != m_Suballocations.begin())
6785 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6787 mergeWithPrev =
true;
6793 UnregisterFreeSuballocation(nextItem);
6794 MergeFreeWithNext(suballocItem);
6799 UnregisterFreeSuballocation(prevItem);
6800 MergeFreeWithNext(prevItem);
6801 RegisterFreeSuballocation(prevItem);
6806 RegisterFreeSuballocation(suballocItem);
6807 return suballocItem;
// Inserts a free suballocation into the sorted by-size vector, but only if
// it meets the minimum registration size (smaller fragments are tracked in
// the list only).
6811 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
6813 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6814 VMA_ASSERT(item->size > 0);
6818 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6820 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6822 if(m_FreeSuballocationsBySize.empty())
6824 m_FreeSuballocationsBySize.push_back(item);
// Keep the vector sorted by size via sorted insert.
6828 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from the sorted by-size vector. Binary-search
// to the first entry of equal size, then scan forward until the exact
// iterator is found (multiple free ranges can share a size).
6836 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
6838 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6839 VMA_ASSERT(item->size > 0);
6843 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Below-threshold items were never registered, so nothing to remove.
6845 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6847 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6848 m_FreeSuballocationsBySize.data(),
6849 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
6851 VmaSuballocationItemSizeLess());
6852 for(
size_t index = it - m_FreeSuballocationsBySize.data();
6853 index < m_FreeSuballocationsBySize.size();
6856 if(m_FreeSuballocationsBySize[index] == item)
6858 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// If the size no longer matches, we ran past the run of equal-size
// entries without finding the item — internal inconsistency.
6861 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
6863 VMA_ASSERT(0 &&
"Not found.");
// Constructor: creates an uninitialized block (no VkDeviceMemory yet);
// Init() must be called before use.
6872 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
6873 m_Metadata(hAllocator),
6874 m_MemoryTypeIndex(UINT32_MAX),
6876 m_hMemory(VK_NULL_HANDLE),
6878 m_pMappedData(VMA_NULL)
// Takes ownership of an already-allocated VkDeviceMemory of newSize bytes
// and initializes the metadata as one big free range. Must only be called
// once per block (asserted).
6882 void VmaDeviceMemoryBlock::Init(
6883 uint32_t newMemoryTypeIndex,
6884 VkDeviceMemory newMemory,
6885 VkDeviceSize newSize,
6888 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6890 m_MemoryTypeIndex = newMemoryTypeIndex;
6892 m_hMemory = newMemory;
6894 m_Metadata.Init(newSize);
// Releases the block's VkDeviceMemory back through the allocator. The block
// must be empty — destroying it with live allocations is a client bug.
6897 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
6901 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6903 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6904 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6905 m_hMemory = VK_NULL_HANDLE;
// A valid block has live device memory and nonzero size; the rest of the
// validation is delegated to the metadata.
6908 bool VmaDeviceMemoryBlock::Validate()
const 6910 if((m_hMemory == VK_NULL_HANDLE) ||
6911 (m_Metadata.GetSize() == 0))
6916 return m_Metadata.Validate();
// Temporarily maps the block's memory, runs the metadata-level magic-value
// corruption scan over it, then unmaps. Returns the mapping error or the
// scan result.
6919 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
6921 void* pData =
nullptr;
6922 VkResult res = Map(hAllocator, 1, &pData);
6923 if(res != VK_SUCCESS)
6928 res = m_Metadata.CheckCorruption(pData);
// Balance the temporary map reference taken above.
6930 Unmap(hAllocator, 1);
// Reference-counted map of the whole block under the block mutex. If the
// block is already mapped, just bumps m_MapCount and returns the cached
// pointer; otherwise calls vkMapMemory. ppData may be null when the caller
// only wants to hold a map reference.
6935 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
6942 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped path.
6945 m_MapCount += count;
6946 VMA_ASSERT(m_pMappedData != VMA_NULL);
6947 if(ppData != VMA_NULL)
6949 *ppData = m_pMappedData;
// First map: call into Vulkan.
6955 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6956 hAllocator->m_hDevice,
6962 if(result == VK_SUCCESS)
6964 if(ppData != VMA_NULL)
6966 *ppData = m_pMappedData;
// Reference-counted unmap: decrements m_MapCount by `count` and calls
// vkUnmapMemory when it reaches zero (branch elided in this excerpt).
// Asserts on unbalanced unmap.
6974 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
6981 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6982 if(m_MapCount >= count)
6984 m_MapCount -= count;
6987 m_pMappedData = VMA_NULL;
6988 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
6993 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Writes corruption-detection magic values into the VMA_DEBUG_MARGIN guard
// regions immediately before and after a newly placed allocation. Requires
// corruption detection to be compiled in (asserted).
6997 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
6999 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
7000 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
7003 VkResult res = Map(hAllocator, 1, &pData);
7004 if(res != VK_SUCCESS)
7009 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
7010 VmaWriteMagicValue(pData, allocOffset + allocSize);
// Balance the temporary map reference taken above.
7012 Unmap(hAllocator, 1);
// Verifies the guard magic values around an allocation being freed;
// asserts loudly if either guard was overwritten (buffer under/overrun).
7017 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
7019 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
7020 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
7023 VkResult res = Map(hAllocator, 1, &pData);
7024 if(res != VK_SUCCESS)
7029 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
7031 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
7033 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
7035 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
7038 Unmap(hAllocator, 1);
// Binds a VkBuffer to this block's memory at the allocation's offset.
// Serialized under the block mutex because vkBindBufferMemory on the same
// VkDeviceMemory must not race with map/unmap on it.
7043 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
7048 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
7049 hAllocation->GetBlock() ==
this);
7051 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
7052 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
7053 hAllocator->m_hDevice,
7056 hAllocation->GetOffset());
// Binds a VkImage to this block's memory at the allocation's offset.
// Mirrors BindBufferMemory; same mutex rationale.
7059 VkResult VmaDeviceMemoryBlock::BindImageMemory(
7064 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
7065 hAllocation->GetBlock() ==
this);
7067 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
7068 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
7069 hAllocator->m_hDevice,
7072 hAllocation->GetOffset());
// Fragments of the stat-info helpers: zero-initialization of a VmaStatInfo
// (belongs to a helper whose signature is missing from this excerpt) and
// the signature of the post-processing step that derives averages; its body
// is elided here.
7077 memset(&outInfo, 0,
sizeof(outInfo));
7096 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the create-info parameters to the pool's
// internal VmaBlockVector (member-initializer target elided in this
// excerpt), and the matching destructor.
7104 VmaPool_T::VmaPool_T(
7109 createInfo.memoryTypeIndex,
7110 createInfo.blockSize,
7111 createInfo.minBlockCount,
7112 createInfo.maxBlockCount,
7114 createInfo.frameInUseCount,
7120 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores sizing policy (preferred block size,
// min/max block count), granularity and frame-in-use settings, and creates
// an empty block list using the allocator's host-allocation callbacks.
// isCustomPool distinguishes user-created pools from the default per-type
// vectors (affects block-size shrinking heuristics in Allocate()).
7124 #if VMA_STATS_STRING_ENABLED 7126 #endif // #if VMA_STATS_STRING_ENABLED 7128 VmaBlockVector::VmaBlockVector(
7130 uint32_t memoryTypeIndex,
7131 VkDeviceSize preferredBlockSize,
7132 size_t minBlockCount,
7133 size_t maxBlockCount,
7134 VkDeviceSize bufferImageGranularity,
7135 uint32_t frameInUseCount,
7136 bool isCustomPool) :
7137 m_hAllocator(hAllocator),
7138 m_MemoryTypeIndex(memoryTypeIndex),
7139 m_PreferredBlockSize(preferredBlockSize),
7140 m_MinBlockCount(minBlockCount),
7141 m_MaxBlockCount(maxBlockCount),
7142 m_BufferImageGranularity(bufferImageGranularity),
7143 m_FrameInUseCount(frameInUseCount),
7144 m_IsCustomPool(isCustomPool),
7145 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
7146 m_HasEmptyBlock(false),
7147 m_pDefragmentator(VMA_NULL),
// Destructor: a defragmentator must not be outstanding; destroys every
// remaining block (releasing its device memory) and deletes it.
7152 VmaBlockVector::~VmaBlockVector()
7154 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
7156 for(
size_t i = m_Blocks.size(); i--; )
7158 m_Blocks[i]->Destroy(m_hAllocator);
7159 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, stopping at the
// first failure (error propagation elided in this excerpt).
7163 VkResult VmaBlockVector::CreateMinBlocks()
7165 for(
size_t i = 0; i < m_MinBlockCount; ++i)
7167 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
7168 if(res != VK_SUCCESS)
// Accumulates pool statistics over all blocks while holding the vector's
// mutex (output initialization elided in this excerpt).
7176 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
7184 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7186 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7188 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7190 VMA_HEAVY_ASSERT(pBlock->Validate());
7191 pBlock->m_Metadata.AddPoolStats(*pStats);
// Guard-byte corruption detection applies only when compiled in with a
// nonzero margin AND the memory type is HOST_VISIBLE | HOST_COHERENT (the
// guards are written/read via a plain host mapping without flushes).
7195 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 7197 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7198 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
7199 (VMA_DEBUG_MARGIN > 0) &&
7200 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retries of the "make allocations lost" eviction loop below.
7203 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector in three stages, all under the vector
// mutex:
//  1) try every existing block without evicting anything;
//  2) if allowed, create a new block (for default pools, preferring a block
//     up to 8x smaller than m_PreferredBlockSize when that still fits) and
//     allocate from it;
//  3) if canMakeOtherLost, repeatedly pick the cheapest eviction plan
//     across blocks, evict, and retry up to VMA_ALLOCATION_TRY_COUNT times.
// On success the new VmaAllocation_T is constructed, metadata updated, user
// data attached, optional fill pattern written, and (when corruption
// detection is on) guard magic values placed around the range.
7205 VkResult VmaBlockVector::Allocate(
7207 uint32_t currentFrameIndex,
7209 VkDeviceSize alignment,
7211 VmaSuballocationType suballocType,
// A request larger than a whole preferred block can never fit here.
7215 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
7217 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7223 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// ---- Stage 1: search existing blocks (no eviction). ----
7227 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
7229 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
7230 VMA_ASSERT(pCurrBlock);
7231 VmaAllocationRequest currRequest = {};
7232 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
7235 m_BufferImageGranularity,
7243 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations keep a map reference on the block.
7247 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
7248 if(res != VK_SUCCESS)
// Block stops being empty once we place something into it.
7255 if(pCurrBlock->m_Metadata.IsEmpty())
7257 m_HasEmptyBlock =
false;
7260 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
7261 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, size, *pAllocation);
7262 (*pAllocation)->InitBlockAllocation(
7271 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
7272 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
7273 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
7274 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
7276 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
7278 if(IsCorruptionDetectionEnabled())
7280 VkResult res = pCurrBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
7281 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// ---- Stage 2: create a new block if the limit allows. ----
7287 const bool canCreateNewBlock =
7289 (m_Blocks.size() < m_MaxBlockCount);
7292 if(canCreateNewBlock)
7295 VkDeviceSize newBlockSize = m_PreferredBlockSize;
7296 uint32_t newBlockSizeShift = 0;
// Allow up to 3 halvings: blocks may start at 1/8 of the preferred size.
7297 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default (non-custom) pools ramp block sizes up gradually: start small if
// no existing block is large yet and the request still fits comfortably.
7301 if(m_IsCustomPool ==
false)
7304 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
7305 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
7307 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
7308 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
7310 newBlockSize = smallerNewBlockSize;
7311 ++newBlockSizeShift;
7320 size_t newBlockIndex = 0;
7321 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device-memory failure, retry with progressively smaller blocks.
7323 if(m_IsCustomPool ==
false)
7325 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
7327 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
7328 if(smallerNewBlockSize >= size)
7330 newBlockSize = smallerNewBlockSize;
7331 ++newBlockSizeShift;
7332 res = CreateBlock(newBlockSize, &newBlockIndex);
7341 if(res == VK_SUCCESS)
7343 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
7344 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= size);
7348 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
7349 if(res != VK_SUCCESS)
// A brand-new block must satisfy the request (it was sized to fit).
7356 VmaAllocationRequest allocRequest;
7357 if(pBlock->m_Metadata.CreateAllocationRequest(
7360 m_BufferImageGranularity,
7367 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
7368 pBlock->m_Metadata.Alloc(allocRequest, suballocType, size, *pAllocation);
7369 (*pAllocation)->InitBlockAllocation(
7372 allocRequest.offset,
7378 VMA_HEAVY_ASSERT(pBlock->Validate());
7379 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
7380 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
7381 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
7383 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
7385 if(IsCorruptionDetectionEnabled())
7387 res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, allocRequest.offset, size);
7388 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
7395 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// ---- Stage 3: eviction ("make lost") retry loop. ----
7403 if(canMakeOtherLost)
7405 uint32_t tryIndex = 0;
7406 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
7408 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
7409 VmaAllocationRequest bestRequest = {};
7410 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Find, across all blocks, the eviction plan with the lowest cost.
7414 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
7416 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
7417 VMA_ASSERT(pCurrBlock);
7418 VmaAllocationRequest currRequest = {};
7419 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
7422 m_BufferImageGranularity,
7429 const VkDeviceSize currRequestCost = currRequest.CalcCost();
7430 if(pBestRequestBlock == VMA_NULL ||
7431 currRequestCost < bestRequestCost)
7433 pBestRequestBlock = pCurrBlock;
7434 bestRequest = currRequest;
7435 bestRequestCost = currRequestCost;
// Cost 0 means nothing has to be evicted — can't do better; stop searching.
7437 if(bestRequestCost == 0)
7445 if(pBestRequestBlock != VMA_NULL)
7449 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
7450 if(res != VK_SUCCESS)
// Evicting may fail if another thread touched the allocations; in that
// case the loop retries with a fresh plan.
7456 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
7462 if(pBestRequestBlock->m_Metadata.IsEmpty())
7464 m_HasEmptyBlock =
false;
7467 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
7468 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, size, *pAllocation);
7469 (*pAllocation)->InitBlockAllocation(
7478 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
7479 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
7480 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
7481 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
7483 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
7485 if(IsCorruptionDetectionEnabled())
7487 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
7488 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Exhausted all retries: other threads keep winning the eviction race.
7503 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
7505 return VK_ERROR_TOO_MANY_OBJECTS;
7509 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7512 void VmaBlockVector::Free(
7515 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
7519 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7521 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
7523 if(IsCorruptionDetectionEnabled())
7525 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
7526 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
7529 if(hAllocation->IsPersistentMap())
7531 pBlock->Unmap(m_hAllocator, 1);
7534 pBlock->m_Metadata.Free(hAllocation);
7535 VMA_HEAVY_ASSERT(pBlock->Validate());
7537 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
7540 if(pBlock->m_Metadata.IsEmpty())
7543 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
7545 pBlockToDelete = pBlock;
7551 m_HasEmptyBlock =
true;
7556 else if(m_HasEmptyBlock)
7558 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
7559 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
7561 pBlockToDelete = pLastBlock;
7562 m_Blocks.pop_back();
7563 m_HasEmptyBlock =
false;
7567 IncrementallySortBlocks();
7572 if(pBlockToDelete != VMA_NULL)
7574 VMA_DEBUG_LOG(
" Deleted empty allocation");
7575 pBlockToDelete->Destroy(m_hAllocator);
7576 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, iterating from the back
// (largest blocks tend to be created last) and stopping early once the
// preferred block size is reached.
// NOTE(review): extraction dropped lines here (numeric gaps) — braces and an
// early-exit statement after the size check are missing.
7580 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 7582 VkDeviceSize result = 0;
7583 for(
size_t i = m_Blocks.size(); i--; )
7585 result = VMA_MAX(result, m_Blocks[i]->m_Metadata.GetSize());
7586 if(result >= m_PreferredBlockSize)
// Removes the given block from m_Blocks by linear search.
// NOTE(review): extraction dropped the trailing lines of this function
// (presumably a return after removal) — visible logic only is commented.
7594 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
7596 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7598 if(m_Blocks[blockIndex] == pBlock)
7600 VmaVectorRemove(m_Blocks, blockIndex);
// One incremental sorting step: swaps the first adjacent pair that is out of
// order by ascending sum of free size (a single bubble-sort pass step, so the
// vector converges toward sorted order across repeated calls).
// NOTE(review): extraction dropped lines here — braces and any early return
// after the swap are missing.
7607 void VmaBlockVector::IncrementallySortBlocks()
7610 for(
size_t i = 1; i < m_Blocks.size(); ++i)
7612 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
7614 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory block of blockSize for this vector's memory
// type, wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and
// optionally reports its index via pNewBlockIndex.
// NOTE(review): extraction dropped lines here — the error check after
// AllocateVulkanMemory and most arguments of the block Init call are missing.
7620 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
7622 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7623 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
7624 allocInfo.allocationSize = blockSize;
7625 VkDeviceMemory mem = VK_NULL_HANDLE;
// Goes through the allocator so device-memory callbacks / heap limits apply.
7626 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
7635 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
7639 allocInfo.allocationSize,
7642 m_Blocks.push_back(pBlock);
// Out-parameter is optional.
7643 if(pNewBlockIndex != VMA_NULL)
7645 *pNewBlockIndex = m_Blocks.size() - 1;
// Writes this block vector's state as JSON (used by vmaBuildStatsString).
// Custom-pool fields (MemoryTypeIndex/BlockSize/BlockCount/FrameInUseCount)
// vs. default-pool fields (PreferredBlockSize) appear to be in separate
// branches whose if/else lines were dropped by this extraction.
// NOTE(review): numeric gaps — braces and several json.Begin/End calls missing.
7651 #if VMA_STATS_STRING_ENABLED 7653 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
// Snapshot under the same mutex that guards m_Blocks mutation.
7655 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7661 json.WriteString(
"MemoryTypeIndex");
7662 json.WriteNumber(m_MemoryTypeIndex);
7664 json.WriteString(
"BlockSize");
7665 json.WriteNumber(m_PreferredBlockSize);
7667 json.WriteString(
"BlockCount");
7668 json.BeginObject(
true);
// Min/Max emitted only when they constrain anything; Cur always emitted.
7669 if(m_MinBlockCount > 0)
7671 json.WriteString(
"Min");
7672 json.WriteNumber((uint64_t)m_MinBlockCount);
7674 if(m_MaxBlockCount < SIZE_MAX)
7676 json.WriteString(
"Max");
7677 json.WriteNumber((uint64_t)m_MaxBlockCount);
7679 json.WriteString(
"Cur");
7680 json.WriteNumber((uint64_t)m_Blocks.size());
7683 if(m_FrameInUseCount > 0)
7685 json.WriteString(
"FrameInUseCount");
7686 json.WriteNumber(m_FrameInUseCount);
7691 json.WriteString(
"PreferredBlockSize");
7692 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps keyed by block id.
7695 json.WriteString(
"Blocks");
7697 for(
size_t i = 0; i < m_Blocks.size(); ++i)
7700 json.ContinueString(m_Blocks[i]->GetId());
7703 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates (on first use) and returns the defragmentator for this
// block vector.
// NOTE(review): extraction dropped the constructor-argument lines of the
// vma_new call and an allocator parameter of this function.
7710 #endif // #if VMA_STATS_STRING_ENABLED 7712 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
7714 uint32_t currentFrameIndex)
7716 if(m_pDefragmentator == VMA_NULL)
7718 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
7724 return m_pDefragmentator;
// Runs the (previously ensured) defragmentator under the block-vector mutex,
// accumulates moved-bytes/moved-allocation stats, decrements the caller's
// remaining budgets, then destroys any blocks that became fully empty
// (keeping at least m_MinBlockCount, and caching one empty block via
// m_HasEmptyBlock).
// NOTE(review): numeric gaps — early return when m_pDefragmentator is null,
// stats accumulation into pDefragmentationStats, and budget decrements are
// among the dropped lines.
7727 VkResult VmaBlockVector::Defragment(
7729 VkDeviceSize& maxBytesToMove,
7730 uint32_t& maxAllocationsToMove)
7732 if(m_pDefragmentator == VMA_NULL)
7737 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7740 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Stats out-parameter is optional.
7743 if(pDefragmentationStats != VMA_NULL)
7745 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
7746 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must respect the budgets it was given.
7749 VMA_ASSERT(bytesMoved <= maxBytesToMove);
7750 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Re-derive m_HasEmptyBlock while sweeping blocks back-to-front so
// VmaVectorRemove does not invalidate the remaining indices.
7756 m_HasEmptyBlock =
false;
7757 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
7759 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
7760 if(pBlock->m_Metadata.IsEmpty())
7762 if(m_Blocks.size() > m_MinBlockCount)
7764 if(pDefragmentationStats != VMA_NULL)
7767 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
7770 VmaVectorRemove(m_Blocks, blockIndex);
7771 pBlock->Destroy(m_hAllocator);
7772 vma_delete(m_hAllocator, pBlock);
// Empty block retained because of m_MinBlockCount — remember it exists.
7776 m_HasEmptyBlock =
true;
// Destroys the lazily-created defragmentator, if any, and resets the pointer
// so EnsureDefragmentator can recreate it later.
7784 void VmaBlockVector::DestroyDefragmentator()
7786 if(m_pDefragmentator != VMA_NULL)
7788 vma_delete(m_hAllocator, m_pDefragmentator);
7789 m_pDefragmentator = VMA_NULL;
// Marks as "lost" every allocation in this pool that is eligible given the
// current frame index and m_FrameInUseCount; optionally reports how many
// allocations were lost via pLostAllocationCount.
7793 void VmaBlockVector::MakePoolAllocationsLost(
7794 uint32_t currentFrameIndex,
7795 size_t* pLostAllocationCount)
7797 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7798 size_t lostAllocationCount = 0;
7799 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7801 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
// Per-block metadata decides which allocations are old enough to lose.
7803 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Out-parameter is optional.
7805 if(pLostAllocationCount != VMA_NULL)
7807 *pLostAllocationCount = lostAllocationCount;
// Validates the debug magic values in every block of this vector.
// Returns VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not
// enabled (requires VMA_DEBUG_DETECT_CORRUPTION + host-visible memory).
// NOTE(review): extraction dropped lines — the error propagation after the
// res check and the final VK_SUCCESS return are missing.
7811 VkResult VmaBlockVector::CheckCorruption()
7813 if(!IsCorruptionDetectionEnabled())
7815 return VK_ERROR_FEATURE_NOT_PRESENT;
7818 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7819 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7821 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7823 VkResult res = pBlock->CheckCorruption(m_hAllocator);
7824 if(res != VK_SUCCESS)
// Accumulates per-block statistics into pStats: each block's stat info is
// folded into the grand total, this memory type's bucket, and this heap's
// bucket.
// NOTE(review): extraction dropped the declaration of allocationStatInfo
// (a VmaStatInfo local, presumably) — it is used below but not visible here.
7832 void VmaBlockVector::AddStats(
VmaStats* pStats)
7834 const uint32_t memTypeIndex = m_MemoryTypeIndex;
7835 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
7837 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7839 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7841 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7843 VMA_HEAVY_ASSERT(pBlock->Validate());
7845 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
// Same per-block info contributes to all three aggregation levels.
7846 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7847 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7848 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one block vector and records the
// frame index at which defragmentation runs. The m_Allocations / m_Blocks
// vectors use the allocator's custom allocation callbacks.
// NOTE(review): extraction dropped the allocator parameter line and the
// m_BytesMoved initializer (numeric gap between 7861 and 7863).
7855 VmaDefragmentator::VmaDefragmentator(
7857 VmaBlockVector* pBlockVector,
7858 uint32_t currentFrameIndex) :
7859 m_hAllocator(hAllocator),
7860 m_pBlockVector(pBlockVector),
7861 m_CurrentFrameIndex(currentFrameIndex),
7863 m_AllocationsMoved(0),
7864 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
7865 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: frees the per-block BlockInfo objects owned by m_Blocks.
// (Iterates backwards; order does not matter here since entries are
// independent heap objects.)
7869 VmaDefragmentator::~VmaDefragmentator()
7871 for(
size_t i = m_Blocks.size(); i--; )
7873 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers an allocation as a candidate for defragmentation. pChanged, if
// non-null, will be set by DefragmentRound when the allocation is moved.
7877 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
7879 AllocationInfo allocInfo;
7880 allocInfo.m_hAllocation = hAlloc;
7881 allocInfo.m_pChanged = pChanged;
7882 m_Allocations.push_back(allocInfo);
// Ensures the block's memory is host-mapped and returns the mapped pointer
// via ppMappedData. Reuses, in order: a mapping made earlier by this
// defragmentator, then an existing persistent mapping of the block, and only
// maps fresh as a last resort (tracked in m_pMappedDataForDefragmentation so
// Unmap() below knows to release it).
// NOTE(review): extraction dropped the VK_SUCCESS returns of the first two
// branches and the final return of res.
7885 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
7888 if(m_pMappedDataForDefragmentation)
7890 *ppMappedData = m_pMappedDataForDefragmentation;
7895 if(m_pBlock->GetMappedData())
7897 *ppMappedData = m_pBlock->GetMappedData();
7902 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
7903 *ppMappedData = m_pMappedDataForDefragmentation;
// Releases only a mapping that EnsureMapping created itself
// (m_pMappedDataForDefragmentation); pre-existing persistent mappings of the
// block are left untouched.
7907 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
7909 if(m_pMappedDataForDefragmentation != VMA_NULL)
7911 m_pBlock->Unmap(hAllocator, 1);
// One round of defragmentation: walks candidate allocations from the last
// block backwards and tries to re-place each one into an earlier block (or
// earlier offset), copying the bytes through host mappings and updating the
// metadata of both blocks. Stops (VK_INCOMPLETE) when either budget —
// maxBytesToMove or maxAllocationsToMove — would be exceeded.
// NOTE(review): this extraction dropped many lines (loop structure, memcpy
// call header, src index bookkeeping, returns) — comments describe only the
// visible statements; do not infer complete control flow from this fragment.
7915 VkResult VmaDefragmentator::DefragmentRound(
7916 VkDeviceSize maxBytesToMove,
7917 uint32_t maxAllocationsToMove)
7919 if(m_Blocks.empty())
// Start from the last block / last allocation and work backwards.
7924 size_t srcBlockIndex = m_Blocks.size() - 1;
7925 size_t srcAllocIndex = SIZE_MAX;
// Advance to the previous block whenever the current one has no more
// candidate allocations (SIZE_MAX sentinel forces re-derivation below).
7931 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
7933 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
7936 if(srcBlockIndex == 0)
7943 srcAllocIndex = SIZE_MAX;
7948 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
7952 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
7953 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
// Placement parameters of the allocation being moved.
7955 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
7956 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
7957 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
7958 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front, up to and including the source
// block itself (a move within the same block to a lower offset still helps).
7961 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
7963 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
7964 VmaAllocationRequest dstAllocRequest;
7965 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
7966 m_CurrentFrameIndex,
7967 m_pBlockVector->GetFrameInUseCount(),
7968 m_pBlockVector->GetBufferImageGranularity(),
7973 &dstAllocRequest) &&
// Reject moves that wouldn't actually reduce fragmentation.
7975 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
// Defragmentation never sacrifices other allocations to make room.
7977 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Enforce both per-call budgets before committing the move.
7980 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
7981 (m_BytesMoved + size > maxBytesToMove))
7983 return VK_INCOMPLETE;
// Map both blocks for the host-side copy.
7986 void* pDstMappedData = VMA_NULL;
7987 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
7988 if(res != VK_SUCCESS)
7993 void* pSrcMappedData = VMA_NULL;
7994 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
7995 if(res != VK_SUCCESS)
// Byte copy of the allocation contents (memcpy header line was dropped
// by this extraction).
8002 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
8003 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
8004 static_cast<size_t>(size));
// Re-stamp debug margins around the new location when margins are enabled.
8006 if(VMA_DEBUG_MARGIN > 0)
8008 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
8009 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Commit: register at destination, free at source, repoint the handle.
8012 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
8013 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
8015 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
// Notify the caller that this allocation changed, if they asked.
8017 if(allocInfo.m_pChanged != VMA_NULL)
8019 *allocInfo.m_pChanged = VK_TRUE;
8022 ++m_AllocationsMoved;
8023 m_BytesMoved += size;
8025 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the next candidate (previous allocation / previous block).
8033 if(srcAllocIndex > 0)
8039 if(srcBlockIndex > 0)
8042 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block BlockInfo entries,
// distributes the registered (non-lost) allocations to their owning blocks,
// sorts blocks for destination preference, runs DefragmentRound (visible loop
// bound is 2 rounds), then unmaps anything the rounds mapped.
// NOTE(review): extraction dropped lines (early return for empty input, the
// "allocation's block not found" handling, sort comments, final return).
8052 VkResult VmaDefragmentator::Defragment(
8053 VkDeviceSize maxBytesToMove,
8054 uint32_t maxAllocationsToMove)
8056 if(m_Allocations.empty())
// One BlockInfo per block currently in the block vector.
8062 const size_t blockCount = m_pBlockVector->m_Blocks.size();
8063 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
8065 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
8066 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
8067 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so each allocation's block can be binary-searched.
8071 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move every still-valid (not lost) registered allocation into the
// m_Allocations list of its owning BlockInfo.
8074 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
8076 AllocationInfo& allocInfo = m_Allocations[blockIndex];
8078 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
8080 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
8081 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
8082 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
8084 (*it)->m_Allocations.push_back(allocInfo);
// The flat candidate list is no longer needed once distributed.
8092 m_Allocations.clear();
8094 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
8096 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
8097 pBlockInfo->CalcHasNonMovableAllocations();
// Largest allocations first within each block.
8098 pBlockInfo->SortAllocationsBySizeDescecnding();
// Reorder blocks so preferred move destinations come first.
8102 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
8105 VkResult result = VK_SUCCESS;
8106 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
8108 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any mappings EnsureMapping created during the rounds.
8112 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
8114 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Decides whether moving an allocation from (srcBlockIndex, srcOffset) to
// (dstBlockIndex, dstOffset) reduces fragmentation: visible checks compare
// block indices first, then offsets within the same block.
// NOTE(review): extraction dropped the return statements of each branch —
// only the comparison structure is visible here.
8120 bool VmaDefragmentator::MoveMakesSense(
8121 size_t dstBlockIndex, VkDeviceSize dstOffset,
8122 size_t srcBlockIndex, VkDeviceSize srcOffset)
8124 if(dstBlockIndex < srcBlockIndex)
8128 if(dstBlockIndex > srcBlockIndex)
8132 if(dstOffset < srcOffset)
// VmaRecorder: CSV call-recording facility, compiled only when
// VMA_RECORDING_ENABLED. Uses Windows-specific APIs (QueryPerformanceCounter,
// fopen_s) — presumably Windows-only in this version; confirm against the
// surrounding #if guards, which were dropped by this extraction.
// NOTE(review): numeric gaps — the constructor's other member initializers,
// the Init() signature (settings/useMutex parameters), the fopen_s error
// check, and several returns are missing from this fragment.
8142 #if VMA_RECORDING_ENABLED 8144 VmaRecorder::VmaRecorder() :
8149 m_StartCounter(INT64_MAX)
// --- Init (signature dropped): opens the recording file and writes the
// CSV header, capturing the QPC frequency/start so timestamps are relative.
8155 m_UseMutex = useMutex;
8156 m_Flags = settings.
flags;
8158 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
8159 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
8162 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
8165 return VK_ERROR_INITIALIZATION_FAILED;
// File format header: name line and version line ("1,3").
8169 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
8170 fprintf(m_File,
"%s\n",
"1,3");
// Destructor: closes the file if Init succeeded (fclose call dropped here).
8175 VmaRecorder::~VmaRecorder()
8177 if(m_File != VMA_NULL)
// Recording entry points, group 1: allocator/pool lifetime and the three
// vmaAllocateMemory* variants. Each follows the same pattern: capture thread
// id + timestamp (GetBasicParams), lock the file mutex, append one CSV line,
// then Flush (the Flush calls were dropped by this extraction, along with
// several parameter lines and fprintf arguments — numeric gaps throughout).
8183 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
8185 CallParams callParams;
8186 GetBasicParams(callParams);
8188 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8189 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
8193 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
8195 CallParams callParams;
8196 GetBasicParams(callParams);
8198 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8199 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// RecordCreatePool — signature and VmaPoolCreateInfo field arguments dropped.
8205 CallParams callParams;
8206 GetBasicParams(callParams);
8208 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8209 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
8220 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
8222 CallParams callParams;
8223 GetBasicParams(callParams);
8225 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8226 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// vmaAllocateMemory: logs memory requirements plus the create-info fields;
// pUserData is stringified via UserDataString.
8231 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
8232 const VkMemoryRequirements& vkMemReq,
8236 CallParams callParams;
8237 GetBasicParams(callParams);
8239 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8240 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
8241 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
8244 vkMemReq.memoryTypeBits,
8252 userDataStr.GetString());
// vmaAllocateMemoryForBuffer: same as above plus the dedicated-allocation
// preference flags (logged as 0/1).
8256 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
8257 const VkMemoryRequirements& vkMemReq,
8258 bool requiresDedicatedAllocation,
8259 bool prefersDedicatedAllocation,
8263 CallParams callParams;
8264 GetBasicParams(callParams);
8266 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8267 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
8268 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
8271 vkMemReq.memoryTypeBits,
8272 requiresDedicatedAllocation ? 1 : 0,
8273 prefersDedicatedAllocation ? 1 : 0,
8281 userDataStr.GetString());
// vmaAllocateMemoryForImage: mirror of the buffer variant.
8285 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
8286 const VkMemoryRequirements& vkMemReq,
8287 bool requiresDedicatedAllocation,
8288 bool prefersDedicatedAllocation,
8292 CallParams callParams;
8293 GetBasicParams(callParams);
8295 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8296 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
8297 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
8300 vkMemReq.memoryTypeBits,
8301 requiresDedicatedAllocation ? 1 : 0,
8302 prefersDedicatedAllocation ? 1 : 0,
8310 userDataStr.GetString());
// Recording entry points, group 2: free / user-data / lost-allocation /
// map-unmap / flush-invalidate. Same per-call pattern as group 1 (basic
// params, file mutex, one CSV line, Flush). Numeric gaps: parameter lines
// (allocation handles), trailing fprintf arguments, and Flush calls were
// dropped by this extraction.
8314 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
8317 CallParams callParams;
8318 GetBasicParams(callParams);
8320 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8321 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
8326 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
8328 const void* pUserData)
8330 CallParams callParams;
8331 GetBasicParams(callParams);
8333 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// UserDataString argument lines dropped; presumably flags + pUserData.
8334 UserDataString userDataStr(
8337 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
8339 userDataStr.GetString());
8343 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
8346 CallParams callParams;
8347 GetBasicParams(callParams);
8349 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8350 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
8355 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
8358 CallParams callParams;
8359 GetBasicParams(callParams);
8361 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8362 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
8367 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
8370 CallParams callParams;
8371 GetBasicParams(callParams);
8373 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8374 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
8379 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
8380 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
8382 CallParams callParams;
8383 GetBasicParams(callParams);
8385 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8386 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
8393 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
8394 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
8396 CallParams callParams;
8397 GetBasicParams(callParams);
8399 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8400 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Recording entry points for vmaCreateBuffer / vmaCreateImage: logs the full
// Vk*CreateInfo contents plus the VmaAllocationCreateInfo fields as one CSV
// line. Numeric gaps: the allocCreateInfo parameter lines, some fprintf
// arguments (size, requiredFlags/preferredFlags/memoryTypeBits, allocation
// handle), and the Flush calls were dropped by this extraction.
8407 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
8408 const VkBufferCreateInfo& bufCreateInfo,
8412 CallParams callParams;
8413 GetBasicParams(callParams);
8415 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8416 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
8417 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
8418 bufCreateInfo.flags,
8420 bufCreateInfo.usage,
8421 bufCreateInfo.sharingMode,
8422 allocCreateInfo.
flags,
8423 allocCreateInfo.
usage,
8427 allocCreateInfo.
pool,
8429 userDataStr.GetString());
// vmaCreateImage: same pattern with the (much larger) VkImageCreateInfo.
8433 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
8434 const VkImageCreateInfo& imageCreateInfo,
8438 CallParams callParams;
8439 GetBasicParams(callParams);
8441 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8442 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
8443 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
8444 imageCreateInfo.flags,
8445 imageCreateInfo.imageType,
8446 imageCreateInfo.format,
8447 imageCreateInfo.extent.width,
8448 imageCreateInfo.extent.height,
8449 imageCreateInfo.extent.depth,
8450 imageCreateInfo.mipLevels,
8451 imageCreateInfo.arrayLayers,
8452 imageCreateInfo.samples,
8453 imageCreateInfo.tiling,
8454 imageCreateInfo.usage,
8455 imageCreateInfo.sharingMode,
8456 imageCreateInfo.initialLayout,
8457 allocCreateInfo.
flags,
8458 allocCreateInfo.
usage,
8462 allocCreateInfo.
pool,
8464 userDataStr.GetString());
// Recording entry points, group 3: single-handle events (destroy buffer/
// image, touch, get-info, make-pool-allocations-lost). Same per-call pattern;
// the allocation/pool parameter lines, the %p arguments, and the Flush calls
// were dropped by this extraction (numeric gaps).
8468 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
8471 CallParams callParams;
8472 GetBasicParams(callParams);
8474 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8475 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
8480 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
8483 CallParams callParams;
8484 GetBasicParams(callParams);
8486 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8487 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
8492 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
8495 CallParams callParams;
8496 GetBasicParams(callParams);
8498 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8499 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
8504 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
8507 CallParams callParams;
8508 GetBasicParams(callParams);
8510 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8511 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
8516 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
8519 CallParams callParams;
8520 GetBasicParams(callParams);
8522 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8523 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of VmaRecorder::UserDataString's constructor (signature dropped
// by this extraction): when pUserData is non-null, it is either used
// directly as a C string or formatted as a pointer into m_PtrStr —
// presumably selected by the allocation's USER_DATA_COPY_STRING flag;
// confirm against the dropped branch lines.
8530 if(pUserData != VMA_NULL)
8534 m_Str = (
const char*)pUserData;
8538 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes a "Config,Begin" ... "Config,End" section into the recording file
// describing the environment: physical-device properties and limits, memory
// heaps/types, whether VK_KHR_dedicated_allocation was enabled, and the
// values of the VMA_DEBUG_* / size macros the library was compiled with.
// Replaying a recording needs this to reproduce allocator behavior.
8548 void VmaRecorder::WriteConfiguration(
8549 const VkPhysicalDeviceProperties& devProps,
8550 const VkPhysicalDeviceMemoryProperties& memProps,
8551 bool dedicatedAllocationExtensionEnabled)
8553 fprintf(m_File,
"Config,Begin\n");
// Device identity.
8555 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
8556 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
8557 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
8558 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
8559 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
8560 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits that affect allocation decisions.
8562 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
8563 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
8564 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps then memory types, indexed.
8566 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
8567 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
8569 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
8570 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
8572 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
8573 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
8575 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
8576 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
8579 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time configuration macros, normalized to 0/1 or VkDeviceSize.
8581 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
8582 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
8583 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
8584 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
8585 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
8586 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
8587 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
8588 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
8589 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
8591 fprintf(m_File,
"Config,End\n");
// Fills the common CSV fields for every recorded call: the calling thread id
// and a timestamp in seconds relative to Init's start counter (computed from
// QueryPerformanceCounter against the cached frequency m_Freq). Windows API.
8594 void VmaRecorder::GetBasicParams(CallParams& outParams)
8596 outParams.threadId = GetCurrentThreadId();
8598 LARGE_INTEGER counter;
8599 QueryPerformanceCounter(&counter);
8600 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Flush: body dropped entirely by this extraction — presumably flushes
// m_File (conditionally on a FLUSH_AFTER_CALL flag); confirm in the
// original source.
8603 void VmaRecorder::Flush()
// VmaAllocator_T constructor (signature line dropped by this extraction;
// takes const VmaAllocatorCreateInfo* pCreateInfo). Visible work: member
// init list, corruption-detection sanity assert, zeroing of member arrays,
// heap size limits, per-memory-type block vectors and dedicated-allocation
// lists, and optional recorder setup. Many lines are missing (numeric gaps):
// other initializers, ImportVulkanFunctions call, several vma_new arguments.
8611 #endif // #if VMA_RECORDING_ENABLED 8619 m_hDevice(pCreateInfo->device),
8620 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
8621 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
8622 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
8623 m_PreferredLargeHeapBlockSize(0),
8624 m_PhysicalDevice(pCreateInfo->physicalDevice),
8625 m_CurrentFrameIndex(0),
8626 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
8629 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32_t magic values into the debug margin,
// so the margin must be a multiple of sizeof(uint32_t).
8632 if(VMA_DEBUG_DETECT_CORRUPTION)
8635 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Dedicated-allocation flag requested but the extension support was
// compiled out — hard assert.
8640 #if !(VMA_DEDICATED_ALLOCATION) 8643 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
// Zero all member aggregates before selective initialization below.
8647 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
8648 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
8649 memset(&m_MemProps, 0,
sizeof(m_MemProps));
8651 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
8652 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no per-heap size limit.
8654 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
8656 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device properties through the (imported) function pointers.
8667 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
8668 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply caller-provided heap size limits, clamping the reported heap sizes
// so block-size heuristics respect the limit. (The pHeapSizeLimit null
// check around this loop was dropped by the extraction.)
8675 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
8677 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
8678 if(limit != VK_WHOLE_SIZE)
8680 m_HeapSizeLimit[heapIndex] = limit;
8681 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
8683 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + one dedicated-allocation list per memory type.
8689 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8691 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
8693 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
8699 GetBufferImageGranularity(),
8704 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
8711 VkResult res = VK_SUCCESS;
// Optional call recording (pRecordSettings): only available when compiled
// with VMA_RECORDING_ENABLED; otherwise asserts and reports the feature as
// not present.
8716 #if VMA_RECORDING_ENABLED 8717 m_pRecorder = vma_new(
this, VmaRecorder)();
8719 if(res != VK_SUCCESS)
8723 m_pRecorder->WriteConfiguration(
8724 m_PhysicalDeviceProperties,
8726 m_UseKhrDedicatedAllocation);
8727 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
8729 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
8730 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records the destroy event and deletes the recorder (when
// recording is compiled in), asserts that all user-created pools were
// destroyed first, then frees the per-memory-type dedicated-allocation lists
// and block vectors in reverse index order.
8737 VmaAllocator_T::~VmaAllocator_T()
8739 #if VMA_RECORDING_ENABLED 8740 if(m_pRecorder != VMA_NULL)
8742 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
8743 vma_delete(
this, m_pRecorder);
// Destroying the allocator with live pools is a usage error.
8747 VMA_ASSERT(m_Pools.empty());
8749 for(
size_t i = GetMemoryTypeCount(); i--; )
8751 vma_delete(
this, m_pDedicatedAllocations[i]);
8752 vma_delete(
this, m_pBlockVectors[i]);
8756 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
8758 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 8759 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
8760 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
8761 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
8762 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
8763 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
8764 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
8765 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
8766 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
8767 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
8768 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
8769 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
8770 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
8771 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
8772 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
8773 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
8774 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
8775 #if VMA_DEDICATED_ALLOCATION 8776 if(m_UseKhrDedicatedAllocation)
8778 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
8779 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
8780 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
8781 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
8783 #endif // #if VMA_DEDICATED_ALLOCATION 8784 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 8786 #define VMA_COPY_IF_NOT_NULL(funcName) \ 8787 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 8789 if(pVulkanFunctions != VMA_NULL)
8791 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
8792 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
8793 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
8794 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
8795 VMA_COPY_IF_NOT_NULL(vkMapMemory);
8796 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
8797 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
8798 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
8799 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
8800 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
8801 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
8802 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
8803 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
8804 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
8805 VMA_COPY_IF_NOT_NULL(vkCreateImage);
8806 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
8807 #if VMA_DEDICATED_ALLOCATION 8808 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
8809 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
8813 #undef VMA_COPY_IF_NOT_NULL 8817 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
8818 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
8819 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
8820 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
8821 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
8822 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
8823 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
8824 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
8825 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
8826 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
8827 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
8828 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
8829 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
8830 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
8831 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
8832 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
8833 #if VMA_DEDICATED_ALLOCATION 8834 if(m_UseKhrDedicatedAllocation)
8836 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
8837 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
8842 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
8844 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
8845 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
8846 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
8847 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type, either from a default block
// vector or as a dedicated allocation when preferred/required.
// NOTE(review): this block is a mangled extraction -- original line numbers are
// fused into the text and several lines (braces, parameters, arguments) were
// dropped; do not treat it as compilable as-is.
8850 VkResult VmaAllocator_T::AllocateMemoryOfType(
8852 VkDeviceSize alignment,
8853 bool dedicatedAllocation,
8854 VkBuffer dedicatedBuffer,
8855 VkImage dedicatedImage,
8857 uint32_t memTypeIndex,
8858 VmaSuballocationType suballocType,
8861 VMA_ASSERT(pAllocation != VMA_NULL);
8862 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Condition fragment: mapping is only meaningful on HOST_VISIBLE memory types.
8868 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
8873 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
8874 VMA_ASSERT(blockVector);
8876 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Dedicated memory is preferred when forced by debug macro, requested by the
// caller, or when the request exceeds half of the preferred block size.
8877 bool preferDedicatedMemory =
8878 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
8879 dedicatedAllocation ||
8881 size > preferredBlockSize / 2;
8883 if(preferDedicatedMemory &&
8885 finalCreateInfo.
pool == VK_NULL_HANDLE)
8894 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
8898 return AllocateDedicatedMemory(
// First attempt: sub-allocate from the memory type's block vector.
8912 VkResult res = blockVector->Allocate(
8914 m_CurrentFrameIndex.load(),
8920 if(res == VK_SUCCESS)
8928 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: try a dedicated allocation if block allocation failed.
8932 res = AllocateDedicatedMemory(
8938 finalCreateInfo.pUserData,
8942 if(res == VK_SUCCESS)
8945 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
8951 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates a dedicated VkDeviceMemory for a single resource: fills
// VkMemoryAllocateInfo (optionally chaining VkMemoryDedicatedAllocateInfoKHR),
// allocates, optionally maps, then registers the allocation in the sorted
// per-memory-type dedicated-allocations vector.
// NOTE(review): mangled extraction -- original line numbers are fused into the
// text and some lines (braces, parameters) were dropped.
8958 VkResult VmaAllocator_T::AllocateDedicatedMemory(
8960 VmaSuballocationType suballocType,
8961 uint32_t memTypeIndex,
8963 bool isUserDataString,
8965 VkBuffer dedicatedBuffer,
8966 VkImage dedicatedImage,
8969 VMA_ASSERT(pAllocation);
8971 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
8972 allocInfo.memoryTypeIndex = memTypeIndex;
8973 allocInfo.allocationSize = size;
// Chain the dedicated-allocation info only when the extension is in use and
// exactly one of dedicatedBuffer/dedicatedImage is provided.
8975 #if VMA_DEDICATED_ALLOCATION 8976 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
8977 if(m_UseKhrDedicatedAllocation)
8979 if(dedicatedBuffer != VK_NULL_HANDLE)
8981 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
8982 dedicatedAllocInfo.buffer = dedicatedBuffer;
8983 allocInfo.pNext = &dedicatedAllocInfo;
8985 else if(dedicatedImage != VK_NULL_HANDLE)
8987 dedicatedAllocInfo.image = dedicatedImage;
8988 allocInfo.pNext = &dedicatedAllocInfo;
8991 #endif // #if VMA_DEDICATED_ALLOCATION 8994 VkDeviceMemory hMemory = VK_NULL_HANDLE;
8995 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
8998 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
9002 void* pMappedData = VMA_NULL;
9005 res = (*m_VulkanFunctions.vkMapMemory)(
// On map failure the freshly allocated memory is released again.
9014 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
9015 FreeVulkanMemory(memTypeIndex, size, hMemory);
9020 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
9021 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
9022 (*pAllocation)->SetUserData(
this, pUserData);
9023 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
9025 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Registration happens under the per-memory-type dedicated-allocations mutex.
9030 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
9031 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
9032 VMA_ASSERT(pDedicatedAllocations);
9033 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
9036 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
9041 void VmaAllocator_T::GetBufferMemoryRequirements(
9043 VkMemoryRequirements& memReq,
9044 bool& requiresDedicatedAllocation,
9045 bool& prefersDedicatedAllocation)
const 9047 #if VMA_DEDICATED_ALLOCATION 9048 if(m_UseKhrDedicatedAllocation)
9050 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
9051 memReqInfo.buffer = hBuffer;
9053 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
9055 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
9056 memReq2.pNext = &memDedicatedReq;
9058 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
9060 memReq = memReq2.memoryRequirements;
9061 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
9062 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
9065 #endif // #if VMA_DEDICATED_ALLOCATION 9067 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
9068 requiresDedicatedAllocation =
false;
9069 prefersDedicatedAllocation =
false;
9073 void VmaAllocator_T::GetImageMemoryRequirements(
9075 VkMemoryRequirements& memReq,
9076 bool& requiresDedicatedAllocation,
9077 bool& prefersDedicatedAllocation)
const 9079 #if VMA_DEDICATED_ALLOCATION 9080 if(m_UseKhrDedicatedAllocation)
9082 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
9083 memReqInfo.image = hImage;
9085 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
9087 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
9088 memReq2.pNext = &memDedicatedReq;
9090 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
9092 memReq = memReq2.memoryRequirements;
9093 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
9094 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
9097 #endif // #if VMA_DEDICATED_ALLOCATION 9099 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
9100 requiresDedicatedAllocation =
false;
9101 prefersDedicatedAllocation =
false;
// Top-level allocation entry point: validates mutually exclusive create flags,
// routes pool allocations to the pool's block vector, otherwise searches
// suitable memory types (retrying with remaining bits on failure).
// NOTE(review): mangled extraction -- original line numbers are fused into the
// text and some lines (braces, flag checks, arguments) were dropped.
9105 VkResult VmaAllocator_T::AllocateMemory(
9106 const VkMemoryRequirements& vkMemReq,
9107 bool requiresDedicatedAllocation,
9108 bool prefersDedicatedAllocation,
9109 VkBuffer dedicatedBuffer,
9110 VkImage dedicatedImage,
9112 VmaSuballocationType suballocType,
// Flag-combination sanity checks: each invalid combination asserts in debug
// builds and fails with VK_ERROR_OUT_OF_DEVICE_MEMORY in release builds.
9118 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
9119 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
9124 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
9125 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
9127 if(requiresDedicatedAllocation)
9131 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
9132 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
9134 if(createInfo.
pool != VK_NULL_HANDLE)
9136 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
9137 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
9140 if((createInfo.
pool != VK_NULL_HANDLE) &&
9143 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
9144 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: delegate directly to the pool's block vector.
9147 if(createInfo.
pool != VK_NULL_HANDLE)
9149 const VkDeviceSize alignmentForPool = VMA_MAX(
9151 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
9152 return createInfo.
pool->m_BlockVector.Allocate(
9154 m_CurrentFrameIndex.load(),
// Default path: iterate candidate memory types from vkMemReq.memoryTypeBits.
9164 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
9165 uint32_t memTypeIndex = UINT32_MAX;
9167 if(res == VK_SUCCESS)
9169 VkDeviceSize alignmentForMemType = VMA_MAX(
9171 GetMemoryTypeMinAlignment(memTypeIndex));
9173 res = AllocateMemoryOfType(
9175 alignmentForMemType,
9176 requiresDedicatedAllocation || prefersDedicatedAllocation,
9184 if(res == VK_SUCCESS)
// On failure, remove the tried memory type and retry with the next candidate.
9194 memoryTypeBits &= ~(1u << memTypeIndex);
9197 if(res == VK_SUCCESS)
9199 alignmentForMemType = VMA_MAX(
9201 GetMemoryTypeMinAlignment(memTypeIndex));
9203 res = AllocateMemoryOfType(
9205 alignmentForMemType,
9206 requiresDedicatedAllocation || prefersDedicatedAllocation,
9214 if(res == VK_SUCCESS)
9224 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: returns block allocations to their (pool or default)
// block vector, dedicated allocations via FreeDedicatedMemory, then clears
// user data and destroys the allocation object itself.
// NOTE(review): mangled extraction -- original line numbers are fused into the
// text and some lines (braces, switch tail) were dropped.
9235 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
9237 VMA_ASSERT(allocation);
// Allocations already lost own no memory, so the body is skipped for them.
9239 if(allocation->CanBecomeLost() ==
false ||
9240 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
9242 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
9244 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
9247 switch(allocation->GetType())
9249 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
9251 VmaBlockVector* pBlockVector = VMA_NULL;
9252 VmaPool hPool = allocation->GetPool();
9253 if(hPool != VK_NULL_HANDLE)
9255 pBlockVector = &hPool->m_BlockVector;
9259 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
9260 pBlockVector = m_pBlockVectors[memTypeIndex];
9262 pBlockVector->Free(allocation);
9265 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
9266 FreeDedicatedMemory(allocation);
9273 allocation->SetUserData(
this, VMA_NULL);
9274 vma_delete(
this, allocation);
// Aggregates statistics from default block vectors, custom pools, and
// dedicated allocations into pStats, then postprocesses the totals and the
// per-memory-type/per-heap entries.
// NOTE(review): mangled extraction -- original line numbers are fused into the
// text and some lines (initialization loop bodies, braces) were dropped.
9277 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
9280 InitStatInfo(pStats->
total);
9281 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
9283 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Stats from the default per-memory-type block vectors.
9287 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
9289 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
9290 VMA_ASSERT(pBlockVector);
9291 pBlockVector->AddStats(pStats);
// Stats from custom pools, gathered under the pools mutex.
9296 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
9297 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
9299 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Stats from dedicated allocations, per memory type, under that type's mutex.
9304 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
9306 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
9307 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
9308 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
9309 VMA_ASSERT(pDedicatedAllocVector);
9310 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
9313 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
9314 VmaAddStatInfo(pStats->
total, allocationStatInfo);
9315 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
9316 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Postprocess (e.g. derive averages) on totals and each sub-entry.
9321 VmaPostprocessCalcStatInfo(pStats->
total);
9322 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
9323 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
9324 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
9325 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, AMD's PCI vendor ID (matches VkPhysicalDeviceProperties::vendorID).
9328 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Defragments the given HOST_VISIBLE block allocations: registers each
// eligible allocation with its block vector's defragmentator, runs
// defragmentation on default vectors and pools, then destroys defragmentators.
// NOTE(review): mangled extraction -- original line numbers are fused into the
// text and some lines (braces, pDefragmentationInfo reads, hAlloc decl) were dropped.
9330 VkResult VmaAllocator_T::Defragment(
9332 size_t allocationCount,
9333 VkBool32* pAllocationsChanged,
9337 if(pAllocationsChanged != VMA_NULL)
9339 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
9341 if(pDefragmentationStats != VMA_NULL)
9343 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
9346 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
9348 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
9350 const size_t poolCount = m_Pools.size();
// Phase 1: register eligible allocations. Only block allocations in
// HOST_VISIBLE memory that are not lost participate.
9353 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
9357 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
9359 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
9361 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
9363 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
9365 VmaBlockVector* pAllocBlockVector = VMA_NULL;
9367 const VmaPool hAllocPool = hAlloc->GetPool();
9369 if(hAllocPool != VK_NULL_HANDLE)
9371 pAllocBlockVector = &hAllocPool->GetBlockVector();
9376 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
9379 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
9381 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
9382 &pAllocationsChanged[allocIndex] : VMA_NULL;
9383 pDefragmentator->AddAllocation(hAlloc, pChanged);
9387 VkResult result = VK_SUCCESS;
// Phase 2: run defragmentation, bounded by optional limits from
// pDefragmentationInfo (defaults: unbounded).
9391 VkDeviceSize maxBytesToMove = SIZE_MAX;
9392 uint32_t maxAllocationsToMove = UINT32_MAX;
9393 if(pDefragmentationInfo != VMA_NULL)
9400 for(uint32_t memTypeIndex = 0;
9401 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
9405 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
9407 result = m_pBlockVectors[memTypeIndex]->Defragment(
9408 pDefragmentationStats,
9410 maxAllocationsToMove);
9415 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
9417 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
9418 pDefragmentationStats,
9420 maxAllocationsToMove);
// Phase 3: tear down defragmentators in reverse order.
9426 for(
size_t poolIndex = poolCount; poolIndex--; )
9428 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
9432 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
9434 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
9436 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Body fragment of VmaAllocator_T::GetAllocationInfo (the signature line was
// lost in extraction). Fills pAllocationInfo; for lost-capable allocations it
// uses a compare-exchange loop to "touch" the allocation's last-use frame index.
// NOTE(review): mangled extraction -- original line numbers are fused into the
// text and some lines (braces, loop headers, a few field writes) were dropped.
9445 if(hAllocation->CanBecomeLost())
9451 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
9452 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost allocation: report size/userdata but no live memory binding.
9455 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
9459 pAllocationInfo->
offset = 0;
9460 pAllocationInfo->
size = hAllocation->GetSize();
9462 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report full info without another CAS.
9465 else if(localLastUseFrameIndex == localCurrFrameIndex)
9467 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
9468 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
9469 pAllocationInfo->
offset = hAllocation->GetOffset();
9470 pAllocationInfo->
size = hAllocation->GetSize();
9472 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise: advance last-use frame index via compare-exchange and retry.
9477 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
9479 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable path: the frame-index update exists only for the stats
// string when VMA_STATS_STRING_ENABLED.
9486 #if VMA_STATS_STRING_ENABLED 9487 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
9488 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
9491 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
9492 if(localLastUseFrameIndex == localCurrFrameIndex)
9498 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
9500 localLastUseFrameIndex = localCurrFrameIndex;
9506 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
9507 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
9508 pAllocationInfo->
offset = hAllocation->GetOffset();
9509 pAllocationInfo->
size = hAllocation->GetSize();
9510 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
9511 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. Mirrors the
// GetAllocationInfo frame-index logic but returns a bool instead of filling an
// info struct (the return statements were dropped in extraction).
// NOTE(review): mangled extraction -- original line numbers are fused into the
// text and some lines (braces, returns) were dropped.
9515 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
9518 if(hAllocation->CanBecomeLost())
9520 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
9521 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
9524 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
9528 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Compare-exchange loop: advance last-use frame index to the current frame.
9534 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
9536 localLastUseFrameIndex = localCurrFrameIndex;
9543 #if VMA_STATS_STRING_ENABLED 9544 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
9545 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
9548 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
9549 if(localLastUseFrameIndex == localCurrFrameIndex)
9555 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
9557 localLastUseFrameIndex = localCurrFrameIndex;
// Body fragment of VmaAllocator_T::CreatePool (signature and newCreateInfo
// setup were lost in extraction): constructs the pool, creates its minimum
// blocks, and registers it (with a fresh id) in the sorted pool list.
// NOTE(review): mangled extraction -- original line numbers are fused into the
// text and some lines (braces, create-info validation) were dropped.
9569 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
9582 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Pre-create minBlockCount blocks; on failure the half-built pool is destroyed.
9584 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
9585 if(res != VK_SUCCESS)
9587 vma_delete(
this, *pPool);
// Registration happens under the pools mutex.
9594 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
9595 (*pPool)->SetId(m_NextPoolId++);
9596 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
9602 void VmaAllocator_T::DestroyPool(
VmaPool pool)
9606 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
9607 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
9608 VMA_ASSERT(success &&
"Pool not found in Allocator.");
9611 vma_delete(
this, pool);
// Body fragment of VmaAllocator_T::GetPoolStats (signature lost in extraction):
// delegates statistics gathering to the pool's block vector.
9616 pool->m_BlockVector.GetPoolStats(pPoolStats);
9619 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
9621 m_CurrentFrameIndex.store(frameIndex);
9624 void VmaAllocator_T::MakePoolAllocationsLost(
9626 size_t* pLostAllocationCount)
9628 hPool->m_BlockVector.MakePoolAllocationsLost(
9629 m_CurrentFrameIndex.load(),
9630 pLostAllocationCount);
9633 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
9635 return hPool->m_BlockVector.CheckCorruption();
// Checks corruption in all default block vectors and custom pools whose memory
// type is selected by memoryTypeBits. Starts from VK_ERROR_FEATURE_NOT_PRESENT
// and upgrades to VK_SUCCESS when at least one vector supports the check.
// NOTE(review): mangled extraction -- original line numbers are fused into the
// text and some lines (switch bodies, braces, final return) were dropped.
9638 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
9640 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default block vectors for the selected memory types.
9643 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
9645 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
9647 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
9648 VMA_ASSERT(pBlockVector);
9649 VkResult localRes = pBlockVector->CheckCorruption();
9652 case VK_ERROR_FEATURE_NOT_PRESENT:
9655 finalRes = VK_SUCCESS;
// Custom pools, under the pools mutex.
9665 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
9666 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
9668 if(((1u << m_Pools[poolIndex]->GetBlockVector().GetMemoryTypeIndex()) & memoryTypeBits) != 0)
9670 VkResult localRes = m_Pools[poolIndex]->GetBlockVector().CheckCorruption();
9673 case VK_ERROR_FEATURE_NOT_PRESENT:
9676 finalRes = VK_SUCCESS;
9688 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
9690 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
9691 (*pAllocation)->InitLost();
9694 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
9696 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
9699 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
9701 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
9702 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
9704 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
9705 if(res == VK_SUCCESS)
9707 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
9712 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
9717 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
9720 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
9722 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
9728 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
9730 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
9732 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
9735 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
9737 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
9738 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
9740 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
9741 m_HeapSizeLimit[heapIndex] += size;
9745 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
9747 if(hAllocation->CanBecomeLost())
9749 return VK_ERROR_MEMORY_MAP_FAILED;
9752 switch(hAllocation->GetType())
9754 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
9756 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
9757 char *pBytes = VMA_NULL;
9758 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
9759 if(res == VK_SUCCESS)
9761 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
9762 hAllocation->BlockAllocMap();
9766 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
9767 return hAllocation->DedicatedAllocMap(
this, ppData);
9770 return VK_ERROR_MEMORY_MAP_FAILED;
9776 switch(hAllocation->GetType())
9778 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
9780 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
9781 hAllocation->BlockAllocUnmap();
9782 pBlock->Unmap(
this, 1);
9785 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
9786 hAllocation->DedicatedAllocUnmap(
this);
9793 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
9795 VkResult res = VK_SUCCESS;
9796 switch(hAllocation->GetType())
9798 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
9799 res = GetVulkanFunctions().vkBindBufferMemory(
9802 hAllocation->GetMemory(),
9805 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
9807 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
9808 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
9809 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
9818 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
9820 VkResult res = VK_SUCCESS;
9821 switch(hAllocation->GetType())
9823 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
9824 res = GetVulkanFunctions().vkBindImageMemory(
9827 hAllocation->GetMemory(),
9830 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
9832 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
9833 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
9834 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a sub-range of an allocation on non-coherent memory
// types, expanding the range to nonCoherentAtomSize boundaries and clamping it
// to the allocation/block size as required by the Vulkan spec.
// NOTE(review): mangled extraction -- original line numbers are fused into the
// text and some lines (else branches, the switch(op) header, braces) were dropped.
9843 void VmaAllocator_T::FlushOrInvalidateAllocation(
9845 VkDeviceSize offset, VkDeviceSize size,
9846 VMA_CACHE_OPERATION op)
// No-op for empty ranges and for HOST_COHERENT memory types.
9848 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
9849 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
9851 const VkDeviceSize allocationSize = hAllocation->GetSize();
9852 VMA_ASSERT(offset <= allocationSize);
9854 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
9856 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
9857 memRange.memory = hAllocation->GetMemory();
9859 switch(hAllocation->GetType())
// Dedicated allocation: range is relative to the start of its own memory.
9861 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
9862 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
9863 if(size == VK_WHOLE_SIZE)
9865 memRange.size = allocationSize - memRange.offset;
9869 VMA_ASSERT(offset + size <= allocationSize);
9870 memRange.size = VMA_MIN(
9871 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
9872 allocationSize - memRange.offset);
// Block allocation: align within the allocation, then translate by the
// allocation's offset inside the block and clamp to the block size.
9876 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
9879 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
9880 if(size == VK_WHOLE_SIZE)
9882 size = allocationSize - offset;
9886 VMA_ASSERT(offset + size <= allocationSize);
9888 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
9891 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
9892 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
9893 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_Metadata.GetSize();
9894 memRange.offset += allocationOffset;
9895 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch on op: flush vs invalidate on the computed range.
9906 case VMA_CACHE_FLUSH:
9907 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
9909 case VMA_CACHE_INVALIDATE:
9910 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
9919 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
9921 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
9923 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
9925 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
9926 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
9927 VMA_ASSERT(pDedicatedAllocations);
9928 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
9929 VMA_ASSERT(success);
9932 VkDeviceMemory hMemory = allocation->GetMemory();
9934 if(allocation->GetMappedData() != VMA_NULL)
9936 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
9939 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
9941 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
9944 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
9946 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
9947 !hAllocation->CanBecomeLost() &&
9948 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
9950 void* pData = VMA_NULL;
9951 VkResult res = Map(hAllocation, &pData);
9952 if(res == VK_SUCCESS)
9954 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
9955 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
9960 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Writes the detailed JSON map: "DedicatedAllocations" grouped by memory type,
// "DefaultPools" (non-empty default block vectors), and custom "Pools" keyed by
// pool id. Only compiled when VMA_STATS_STRING_ENABLED.
// NOTE(review): mangled extraction -- original line numbers are fused into the
// text and some lines (json Begin/End calls, braces, hAlloc decl) were dropped.
9965 #if VMA_STATS_STRING_ENABLED 9967 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
9969 bool dedicatedAllocationsStarted =
false;
9970 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
9972 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
9973 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
9974 VMA_ASSERT(pDedicatedAllocVector);
9975 if(pDedicatedAllocVector->empty() ==
false)
// The "DedicatedAllocations" section header is emitted lazily, on first
// non-empty memory type.
9977 if(dedicatedAllocationsStarted ==
false)
9979 dedicatedAllocationsStarted =
true;
9980 json.WriteString(
"DedicatedAllocations");
9984 json.BeginString(
"Type ");
9985 json.ContinueString(memTypeIndex);
9990 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
9992 json.BeginObject(
true);
9994 hAlloc->PrintParameters(json);
10001 if(dedicatedAllocationsStarted)
// "DefaultPools": per-memory-type block vectors that hold any allocations,
// with the section header again emitted lazily.
10007 bool allocationsStarted =
false;
10008 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
10010 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
10012 if(allocationsStarted ==
false)
10014 allocationsStarted =
true;
10015 json.WriteString(
"DefaultPools");
10016 json.BeginObject();
10019 json.BeginString(
"Type ");
10020 json.ContinueString(memTypeIndex);
10023 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
10026 if(allocationsStarted)
// Custom pools, under the pools mutex, keyed by their numeric id.
10033 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
10034 const size_t poolCount = m_Pools.size();
10037 json.WriteString(
"Pools");
10038 json.BeginObject();
10039 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
10041 json.BeginString();
10042 json.ContinueString(m_Pools[poolIndex]->GetId());
10045 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// vmaCreateAllocator: validates arguments and initializes the allocator.
// NOTE(review): the construction of *pAllocator (vma_new) was lost in
// extraction; only the Init call remains visible.
10052 #endif // #if VMA_STATS_STRING_ENABLED 10061 VMA_ASSERT(pCreateInfo && pAllocator);
10062 VMA_DEBUG_LOG(
"vmaCreateAllocator");
10064 return (*pAllocator)->Init(pCreateInfo);
// vmaDestroyAllocator: no-op for VK_NULL_HANDLE; otherwise copies the
// allocation callbacks out first (the allocator object is about to be
// destroyed) and deletes the allocator through them.
10070 if(allocator != VK_NULL_HANDLE)
10072 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
10073 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
10074 vma_delete(&allocationCallbacks, allocator);
// vmaGetPhysicalDeviceProperties: returns a pointer to the allocator's cached
// VkPhysicalDeviceProperties (no Vulkan call; cached at Init).
10080 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
10082 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
10083 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties: returns a pointer to the allocator's cached
// VkPhysicalDeviceMemoryProperties.
10088 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
10090 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
10091 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties: copies the property flags of one memory type
// out of the cached memory properties. Index is assert-checked, not
// runtime-validated.
10096 uint32_t memoryTypeIndex,
10097 VkMemoryPropertyFlags* pFlags)
10099 VMA_ASSERT(allocator && pFlags);
10100 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
10101 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex: records the application's frame index, used by
// the lost-allocation mechanism. VMA_FRAME_INDEX_LOST is reserved.
10106 uint32_t frameIndex)
10108 VMA_ASSERT(allocator);
10109 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
10111 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10113 allocator->SetCurrentFrameIndex(frameIndex);
// vmaCalculateStats: thin wrapper delegating to VmaAllocator_T::CalculateStats.
10120 VMA_ASSERT(allocator && pStats);
10121 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10122 allocator->CalculateStats(pStats);
// vmaBuildStatsString: serializes allocator statistics to a heap-allocated
// JSON string owned by the caller (freed with vmaFreeStatsString). Layout:
// "Total" stats, then per-heap sections (size, flags, stats, member types
// with their property flags and stats), then the optional detailed map.
// NOTE(review): some json.End*/closing lines were lost in extraction.
10125 #if VMA_STATS_STRING_ENABLED 10129 char** ppStatsString,
10130 VkBool32 detailedMap)
10132 VMA_ASSERT(allocator && ppStatsString);
10133 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10135 VmaStringBuilder sb(allocator);
10137 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
10138 json.BeginObject();
10141 allocator->CalculateStats(&stats);
10143 json.WriteString(
"Total");
10144 VmaPrintStatInfo(json, stats.
total);
// One section per memory heap; member memory types are nested inside it.
10146 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
10148 json.BeginString(
"Heap ");
10149 json.ContinueString(heapIndex);
10151 json.BeginObject();
10153 json.WriteString(
"Size");
10154 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
10156 json.WriteString(
"Flags");
10157 json.BeginArray(
true);
10158 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
10160 json.WriteString(
"DEVICE_LOCAL");
10166 json.WriteString(
"Stats");
10167 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
10170 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
10172 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
10174 json.BeginString(
"Type ");
10175 json.ContinueString(typeIndex);
10178 json.BeginObject();
// Property flags rendered as an array of human-readable names.
10180 json.WriteString(
"Flags");
10181 json.BeginArray(
true);
10182 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
10183 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
10185 json.WriteString(
"DEVICE_LOCAL");
10187 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
10189 json.WriteString(
"HOST_VISIBLE");
10191 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
10193 json.WriteString(
"HOST_COHERENT");
10195 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
10197 json.WriteString(
"HOST_CACHED");
10199 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
10201 json.WriteString(
"LAZILY_ALLOCATED");
10207 json.WriteString(
"Stats");
10208 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
10217 if(detailedMap == VK_TRUE)
10219 allocator->PrintDetailedMap(json);
// Copy the builder's buffer into a caller-owned, NUL-terminated array.
10225 const size_t len = sb.GetLength();
10226 char*
const pChars = vma_new_array(allocator,
char, len + 1);
10229 memcpy(pChars, sb.GetData(), len);
10231 pChars[len] =
'\0';
10232 *ppStatsString = pChars;
// vmaFreeStatsString: releases a string produced by vmaBuildStatsString.
// NULL is accepted as a no-op. The +1 matches the NUL terminator allocated
// by vmaBuildStatsString.
10237 char* pStatsString)
10239 if(pStatsString != VMA_NULL)
10241 VMA_ASSERT(allocator);
10242 size_t len = strlen(pStatsString);
10243 vma_delete_array(allocator, pStatsString, len + 1);
// vmaFindMemoryTypeIndex: selects the best memory type among those permitted
// by memoryTypeBits. A type is acceptable if it has every required flag; the
// winner is the acceptable type missing the fewest preferred flags (lowest
// "cost"). Returns VK_ERROR_FEATURE_NOT_PRESENT if no type qualifies.
10247 #endif // #if VMA_STATS_STRING_ENABLED 10254 uint32_t memoryTypeBits,
10256 uint32_t* pMemoryTypeIndex)
10258 VMA_ASSERT(allocator != VK_NULL_HANDLE);
10259 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
10260 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
10267 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
10268 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
10273 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Translate the high-level VMA_MEMORY_USAGE_* value into flag requirements.
// NOTE(review): the case labels themselves were lost in extraction; the
// bodies below correspond to GPU_ONLY / CPU_ONLY / CPU_TO_GPU / GPU_TO_CPU.
10277 switch(pAllocationCreateInfo->
usage)
10282 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
10284 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
10288 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
10291 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
10292 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
10294 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
10298 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
10299 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
// Scan all memory types; cost = number of preferred flags the type lacks.
10305 *pMemoryTypeIndex = UINT32_MAX;
10306 uint32_t minCost = UINT32_MAX;
10307 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
10308 memTypeIndex < allocator->GetMemoryTypeCount();
10309 ++memTypeIndex, memTypeBit <<= 1)
10312 if((memTypeBit & memoryTypeBits) != 0)
10314 const VkMemoryPropertyFlags currFlags =
10315 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
10317 if((requiredFlags & ~currFlags) == 0)
10320 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
10322 if(currCost < minCost)
10324 *pMemoryTypeIndex = memTypeIndex;
10329 minCost = currCost;
10334 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
// vmaFindMemoryTypeIndexForBufferInfo: creates a temporary buffer solely to
// query its memory requirements, forwards memoryTypeBits to
// vmaFindMemoryTypeIndex, then destroys the temporary buffer.
// NOTE(review): the line storing the inner call's VkResult into res was lost
// in extraction.
10339 const VkBufferCreateInfo* pBufferCreateInfo,
10341 uint32_t* pMemoryTypeIndex)
10343 VMA_ASSERT(allocator != VK_NULL_HANDLE);
10344 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
10345 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
10346 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
10348 const VkDevice hDev = allocator->m_hDevice;
10349 VkBuffer hBuffer = VK_NULL_HANDLE;
10350 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
10351 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
10352 if(res == VK_SUCCESS)
10354 VkMemoryRequirements memReq = {};
10355 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
10356 hDev, hBuffer, &memReq);
10360 memReq.memoryTypeBits,
10361 pAllocationCreateInfo,
// Temporary buffer is always destroyed, regardless of the lookup result.
10364 allocator->GetVulkanFunctions().vkDestroyBuffer(
10365 hDev, hBuffer, allocator->GetAllocationCallbacks());
// vmaFindMemoryTypeIndexForImageInfo: same pattern as the buffer variant —
// create a temporary image, query its memory requirements, delegate to
// vmaFindMemoryTypeIndex, destroy the temporary image.
10372 const VkImageCreateInfo* pImageCreateInfo,
10374 uint32_t* pMemoryTypeIndex)
10376 VMA_ASSERT(allocator != VK_NULL_HANDLE);
10377 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
10378 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
10379 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
10381 const VkDevice hDev = allocator->m_hDevice;
10382 VkImage hImage = VK_NULL_HANDLE;
10383 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
10384 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
10385 if(res == VK_SUCCESS)
10387 VkMemoryRequirements memReq = {};
10388 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
10389 hDev, hImage, &memReq);
10393 memReq.memoryTypeBits,
10394 pAllocationCreateInfo,
// Temporary image is always destroyed, regardless of the lookup result.
10397 allocator->GetVulkanFunctions().vkDestroyImage(
10398 hDev, hImage, allocator->GetAllocationCallbacks());
// vmaCreatePool: creates a custom pool and, if recording is enabled and a
// recorder is attached, logs the call with the current frame index.
10408 VMA_ASSERT(allocator && pCreateInfo && pPool);
10410 VMA_DEBUG_LOG(
"vmaCreatePool");
10412 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10414 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
10416 #if VMA_RECORDING_ENABLED 10417 if(allocator->GetRecorder() != VMA_NULL)
10419 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
// vmaDestroyPool: VK_NULL_HANDLE pool is a no-op; otherwise records the
// destruction (if enabled) and delegates to the allocator.
10430 VMA_ASSERT(allocator);
10432 if(pool == VK_NULL_HANDLE)
10437 VMA_DEBUG_LOG(
"vmaDestroyPool");
10439 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10441 #if VMA_RECORDING_ENABLED 10442 if(allocator->GetRecorder() != VMA_NULL)
10444 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
10448 allocator->DestroyPool(pool);
// vmaGetPoolStats: thin wrapper delegating to VmaAllocator_T::GetPoolStats.
10456 VMA_ASSERT(allocator && pool && pPoolStats);
10458 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10460 allocator->GetPoolStats(pool, pPoolStats);
// vmaMakePoolAllocationsLost: marks eligible allocations in the pool as lost;
// pLostAllocationCount is an optional out-count. Recorded if enabled.
10466 size_t* pLostAllocationCount)
10468 VMA_ASSERT(allocator && pool);
10470 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10472 #if VMA_RECORDING_ENABLED 10473 if(allocator->GetRecorder() != VMA_NULL)
10475 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
10479 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
// vmaCheckPoolCorruption: validates margin bytes in one pool; returns the
// result of VmaAllocator_T::CheckPoolCorruption unchanged.
10484 VMA_ASSERT(allocator && pool);
10486 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10488 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
10490 return allocator->CheckPoolCorruption(pool);
// vmaAllocateMemory: allocates memory from caller-supplied
// VkMemoryRequirements with suballocation type UNKNOWN, records the call if
// enabled, and optionally fills pAllocationInfo on success.
// NOTE(review): some AllocateMemory argument lines (dedicated-allocation
// flags, buffer/image handles) were lost in extraction.
10495 const VkMemoryRequirements* pVkMemoryRequirements,
10500 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
10502 VMA_DEBUG_LOG(
"vmaAllocateMemory");
10504 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10506 VkResult result = allocator->AllocateMemory(
10507 *pVkMemoryRequirements,
10513 VMA_SUBALLOCATION_TYPE_UNKNOWN,
10516 #if VMA_RECORDING_ENABLED 10517 if(allocator->GetRecorder() != VMA_NULL)
10519 allocator->GetRecorder()->RecordAllocateMemory(
10520 allocator->GetCurrentFrameIndex(),
10521 *pVkMemoryRequirements,
// Info is only filled when requested and the allocation succeeded.
10527 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
10529 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaAllocateMemoryForBuffer: queries the buffer's memory requirements
// (including dedicated-allocation hints), allocates with suballocation type
// BUFFER, records if enabled, and optionally returns allocation info.
// Note: this allocates only — it does not bind (see vmaBindBufferMemory).
10542 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
10544 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
10546 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10548 VkMemoryRequirements vkMemReq = {};
10549 bool requiresDedicatedAllocation =
false;
10550 bool prefersDedicatedAllocation =
false;
10551 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
10552 requiresDedicatedAllocation,
10553 prefersDedicatedAllocation);
10555 VkResult result = allocator->AllocateMemory(
10557 requiresDedicatedAllocation,
10558 prefersDedicatedAllocation,
10562 VMA_SUBALLOCATION_TYPE_BUFFER,
10565 #if VMA_RECORDING_ENABLED 10566 if(allocator->GetRecorder() != VMA_NULL)
10568 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
10569 allocator->GetCurrentFrameIndex(),
10571 requiresDedicatedAllocation,
10572 prefersDedicatedAllocation,
10578 if(pAllocationInfo && result == VK_SUCCESS)
10580 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaAllocateMemoryForImage: image counterpart of vmaAllocateMemoryForBuffer.
// Uses suballocation type IMAGE_UNKNOWN because tiling is not known here.
10593 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
10595 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
10597 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10599 VkMemoryRequirements vkMemReq = {};
10600 bool requiresDedicatedAllocation =
false;
10601 bool prefersDedicatedAllocation =
false;
10602 allocator->GetImageMemoryRequirements(image, vkMemReq,
10603 requiresDedicatedAllocation, prefersDedicatedAllocation);
10605 VkResult result = allocator->AllocateMemory(
10607 requiresDedicatedAllocation,
10608 prefersDedicatedAllocation,
10612 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
10615 #if VMA_RECORDING_ENABLED 10616 if(allocator->GetRecorder() != VMA_NULL)
10618 allocator->GetRecorder()->RecordAllocateMemoryForImage(
10619 allocator->GetCurrentFrameIndex(),
10621 requiresDedicatedAllocation,
10622 prefersDedicatedAllocation,
10628 if(pAllocationInfo && result == VK_SUCCESS)
10630 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaFreeMemory: VK_NULL_HANDLE allocation is a no-op; otherwise records the
// free (if enabled) and delegates to VmaAllocator_T::FreeMemory.
10640 VMA_ASSERT(allocator);
10642 if(allocation == VK_NULL_HANDLE)
10647 VMA_DEBUG_LOG(
"vmaFreeMemory");
10649 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10651 #if VMA_RECORDING_ENABLED 10652 if(allocator->GetRecorder() != VMA_NULL)
10654 allocator->GetRecorder()->RecordFreeMemory(
10655 allocator->GetCurrentFrameIndex(),
10660 allocator->FreeMemory(allocation);
// vmaGetAllocationInfo: records the query (if enabled) and fills
// *pAllocationInfo via VmaAllocator_T::GetAllocationInfo.
10668 VMA_ASSERT(allocator && allocation && pAllocationInfo);
10670 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10672 #if VMA_RECORDING_ENABLED 10673 if(allocator->GetRecorder() != VMA_NULL)
10675 allocator->GetRecorder()->RecordGetAllocationInfo(
10676 allocator->GetCurrentFrameIndex(),
10681 allocator->GetAllocationInfo(allocation, pAllocationInfo);
// vmaTouchAllocation: records the touch (if enabled) and returns whether the
// allocation is still valid (not lost), per TouchAllocation's result.
10688 VMA_ASSERT(allocator && allocation);
10690 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10692 #if VMA_RECORDING_ENABLED 10693 if(allocator->GetRecorder() != VMA_NULL)
10695 allocator->GetRecorder()->RecordTouchAllocation(
10696 allocator->GetCurrentFrameIndex(),
10701 return allocator->TouchAllocation(allocation);
// vmaSetAllocationUserData: stores pUserData on the allocation, then records
// the update (if enabled). Note: set happens before the recording call.
10709 VMA_ASSERT(allocator && allocation);
10711 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10713 allocation->SetUserData(allocator, pUserData);
10715 #if VMA_RECORDING_ENABLED 10716 if(allocator->GetRecorder() != VMA_NULL)
10718 allocator->GetRecorder()->RecordSetAllocationUserData(
10719 allocator->GetCurrentFrameIndex(),
// vmaCreateLostAllocation: creates a placeholder allocation that is already
// in the "lost" state; records the creation if enabled.
10730 VMA_ASSERT(allocator && pAllocation);
10732 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
10734 allocator->CreateLostAllocation(pAllocation);
10736 #if VMA_RECORDING_ENABLED 10737 if(allocator->GetRecorder() != VMA_NULL)
10739 allocator->GetRecorder()->RecordCreateLostAllocation(
10740 allocator->GetCurrentFrameIndex(),
// vmaMapMemory: maps the allocation via VmaAllocator_T::Map (reference
// counted internally), records the map if enabled, and returns Map's result.
10751 VMA_ASSERT(allocator && allocation && ppData);
10753 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10755 VkResult res = allocator->Map(allocation, ppData);
10757 #if VMA_RECORDING_ENABLED 10758 if(allocator->GetRecorder() != VMA_NULL)
10760 allocator->GetRecorder()->RecordMapMemory(
10761 allocator->GetCurrentFrameIndex(),
// vmaUnmapMemory: records the unmap (if enabled), then delegates to
// VmaAllocator_T::Unmap; must balance a prior vmaMapMemory.
10773 VMA_ASSERT(allocator && allocation);
10775 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10777 #if VMA_RECORDING_ENABLED 10778 if(allocator->GetRecorder() != VMA_NULL)
10780 allocator->GetRecorder()->RecordUnmapMemory(
10781 allocator->GetCurrentFrameIndex(),
10786 allocator->Unmap(allocation);
// vmaFlushAllocation: flushes a byte range of the allocation (CPU writes ->
// GPU visibility) via FlushOrInvalidateAllocation with VMA_CACHE_FLUSH;
// records the call afterwards if enabled.
10791 VMA_ASSERT(allocator && allocation);
10793 VMA_DEBUG_LOG(
"vmaFlushAllocation");
10795 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10797 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
10799 #if VMA_RECORDING_ENABLED 10800 if(allocator->GetRecorder() != VMA_NULL)
10802 allocator->GetRecorder()->RecordFlushAllocation(
10803 allocator->GetCurrentFrameIndex(),
10804 allocation, offset, size);
// vmaInvalidateAllocation: mirror of vmaFlushAllocation using
// VMA_CACHE_INVALIDATE (GPU writes -> CPU visibility).
10811 VMA_ASSERT(allocator && allocation);
10813 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
10815 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10817 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
10819 #if VMA_RECORDING_ENABLED 10820 if(allocator->GetRecorder() != VMA_NULL)
10822 allocator->GetRecorder()->RecordInvalidateAllocation(
10823 allocator->GetCurrentFrameIndex(),
10824 allocation, offset, size);
// vmaCheckCorruption: validates margin bytes across the memory types selected
// by memoryTypeBits; returns CheckCorruption's result unchanged.
10831 VMA_ASSERT(allocator);
10833 VMA_DEBUG_LOG(
"vmaCheckCorruption");
10835 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10837 return allocator->CheckCorruption(memoryTypeBits);
// vmaDefragment: forwards the allocation array, optional change flags,
// optional settings, and optional stats to VmaAllocator_T::Defragment.
10843 size_t allocationCount,
10844 VkBool32* pAllocationsChanged,
10848 VMA_ASSERT(allocator && pAllocations);
10850 VMA_DEBUG_LOG(
"vmaDefragment");
10852 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10854 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
// vmaBindBufferMemory: binds the buffer to the allocation's memory at the
// allocation's offset via VmaAllocator_T::BindBufferMemory.
10862 VMA_ASSERT(allocator && allocation && buffer);
10864 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
10866 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10868 return allocator->BindBufferMemory(allocation, buffer);
// vmaBindImageMemory: image counterpart of vmaBindBufferMemory.
10876 VMA_ASSERT(allocator && allocation && image);
10878 VMA_DEBUG_LOG(
"vmaBindImageMemory");
10880 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10882 return allocator->BindImageMemory(allocation, image);
// vmaCreateBuffer: one-call convenience — creates the VkBuffer, allocates
// matching memory, binds them, optionally fills pAllocationInfo. On any
// failure after a step succeeded, the partial results are rolled back
// (memory freed, buffer destroyed) and the outputs reset to VK_NULL_HANDLE.
// NOTE(review): several control-flow lines (if(res >= 0), else branches,
// braces) were lost in extraction; code kept verbatim.
10887 const VkBufferCreateInfo* pBufferCreateInfo,
10893 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
10895 VMA_DEBUG_LOG(
"vmaCreateBuffer");
10897 VMA_DEBUG_GLOBAL_MUTEX_LOCK
// Outputs are pre-cleared so failure paths leave well-defined values.
10899 *pBuffer = VK_NULL_HANDLE;
10900 *pAllocation = VK_NULL_HANDLE;
10903 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
10904 allocator->m_hDevice,
10906 allocator->GetAllocationCallbacks(),
10911 VkMemoryRequirements vkMemReq = {};
10912 bool requiresDedicatedAllocation =
false;
10913 bool prefersDedicatedAllocation =
false;
10914 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
10915 requiresDedicatedAllocation, prefersDedicatedAllocation);
// Sanity-check that the driver-reported alignment satisfies the device's
// minimum offset alignment for each usage the buffer declares.
10919 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
10921 VMA_ASSERT(vkMemReq.alignment %
10922 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
10924 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
10926 VMA_ASSERT(vkMemReq.alignment %
10927 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
10929 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
10931 VMA_ASSERT(vkMemReq.alignment %
10932 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
10936 res = allocator->AllocateMemory(
10938 requiresDedicatedAllocation,
10939 prefersDedicatedAllocation,
10942 *pAllocationCreateInfo,
10943 VMA_SUBALLOCATION_TYPE_BUFFER,
10946 #if VMA_RECORDING_ENABLED 10947 if(allocator->GetRecorder() != VMA_NULL)
10949 allocator->GetRecorder()->RecordCreateBuffer(
10950 allocator->GetCurrentFrameIndex(),
10951 *pBufferCreateInfo,
10952 *pAllocationCreateInfo,
10960 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
10964 #if VMA_STATS_STRING_ENABLED 10965 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
10967 if(pAllocationInfo != VMA_NULL)
10969 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Rollback path: bind failed — free memory, then destroy the buffer.
10974 allocator->FreeMemory(*pAllocation);
10975 *pAllocation = VK_NULL_HANDLE;
10976 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
10977 *pBuffer = VK_NULL_HANDLE;
// Rollback path: allocation failed — destroy the buffer only.
10980 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
10981 *pBuffer = VK_NULL_HANDLE;
// vmaDestroyBuffer: destroys the buffer and/or frees the allocation; either
// handle may be VK_NULL_HANDLE independently (both null is a full no-op).
// Records the destruction first if recording is enabled.
10992 VMA_ASSERT(allocator);
10994 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
10999 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
11001 VMA_DEBUG_GLOBAL_MUTEX_LOCK
11003 #if VMA_RECORDING_ENABLED 11004 if(allocator->GetRecorder() != VMA_NULL)
11006 allocator->GetRecorder()->RecordDestroyBuffer(
11007 allocator->GetCurrentFrameIndex(),
11012 if(buffer != VK_NULL_HANDLE)
11014 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
11017 if(allocation != VK_NULL_HANDLE)
11019 allocator->FreeMemory(allocation);
// vmaCreateImage: one-call convenience — creates the VkImage, allocates
// matching memory (suballocation type chosen from tiling: OPTIMAL vs LINEAR,
// which matters for VMA's buffer-image-granularity handling), binds them,
// optionally fills pAllocationInfo, and rolls back on failure.
// NOTE(review): control-flow lines (if(res >= 0), else branches, braces)
// were lost in extraction; code kept verbatim.
11025 const VkImageCreateInfo* pImageCreateInfo,
11031 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
11033 VMA_DEBUG_LOG(
"vmaCreateImage");
11035 VMA_DEBUG_GLOBAL_MUTEX_LOCK
11037 *pImage = VK_NULL_HANDLE;
11038 *pAllocation = VK_NULL_HANDLE;
11041 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
11042 allocator->m_hDevice,
11044 allocator->GetAllocationCallbacks(),
11048 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
11049 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
11050 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
11053 VkMemoryRequirements vkMemReq = {};
11054 bool requiresDedicatedAllocation =
false;
11055 bool prefersDedicatedAllocation =
false;
11056 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
11057 requiresDedicatedAllocation, prefersDedicatedAllocation);
11059 res = allocator->AllocateMemory(
11061 requiresDedicatedAllocation,
11062 prefersDedicatedAllocation,
11065 *pAllocationCreateInfo,
11069 #if VMA_RECORDING_ENABLED 11070 if(allocator->GetRecorder() != VMA_NULL)
11072 allocator->GetRecorder()->RecordCreateImage(
11073 allocator->GetCurrentFrameIndex(),
11075 *pAllocationCreateInfo,
11083 res = allocator->BindImageMemory(*pAllocation, *pImage);
11087 #if VMA_STATS_STRING_ENABLED 11088 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
11090 if(pAllocationInfo != VMA_NULL)
11092 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Rollback path: bind failed — free memory, then destroy the image.
11097 allocator->FreeMemory(*pAllocation);
11098 *pAllocation = VK_NULL_HANDLE;
11099 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
11100 *pImage = VK_NULL_HANDLE;
// Rollback path: allocation failed — destroy the image only.
11103 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
11104 *pImage = VK_NULL_HANDLE;
// vmaDestroyImage: destroys the image and/or frees the allocation; either
// handle may be VK_NULL_HANDLE independently (both null is a full no-op).
// Records the destruction first if recording is enabled.
11115 VMA_ASSERT(allocator);
11117 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
11122 VMA_DEBUG_LOG(
"vmaDestroyImage");
11124 VMA_DEBUG_GLOBAL_MUTEX_LOCK
11126 #if VMA_RECORDING_ENABLED 11127 if(allocator->GetRecorder() != VMA_NULL)
11129 allocator->GetRecorder()->RecordDestroyImage(
11130 allocator->GetCurrentFrameIndex(),
11135 if(image != VK_NULL_HANDLE)
11137 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
11139 if(allocation != VK_NULL_HANDLE)
11141 allocator->FreeMemory(allocation);
11145 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1345
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1658
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1414
diff --git a/src/vk_mem_alloc.h b/src/vk_mem_alloc.h
index a908c90..5b8fcc0 100644
--- a/src/vk_mem_alloc.h
+++ b/src/vk_mem_alloc.h
@@ -29,7 +29,7 @@ extern "C" {
/** \mainpage Vulkan Memory Allocator
-Version 2.1.0-alpha.3 (2018-06-14)
+Version 2.1.0-alpha.4 (2018-08-22)
Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. \n
License: MIT