23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1082 #include <vulkan/vulkan.h> 1084 #if !defined(VMA_DEDICATED_ALLOCATION) 1085 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1086 #define VMA_DEDICATED_ALLOCATION 1 1088 #define VMA_DEDICATED_ALLOCATION 0 1106 uint32_t memoryType,
1107 VkDeviceMemory memory,
1112 uint32_t memoryType,
1113 VkDeviceMemory memory,
1183 #if VMA_DEDICATED_ALLOCATION 1184 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1185 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1276 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1284 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1294 uint32_t memoryTypeIndex,
1295 VkMemoryPropertyFlags* pFlags);
1307 uint32_t frameIndex);
1340 #define VMA_STATS_STRING_ENABLED 1 1342 #if VMA_STATS_STRING_ENABLED 1349 char** ppStatsString,
1350 VkBool32 detailedMap);
1354 char* pStatsString);
1356 #endif // #if VMA_STATS_STRING_ENABLED 1550 uint32_t memoryTypeBits,
1552 uint32_t* pMemoryTypeIndex);
1568 const VkBufferCreateInfo* pBufferCreateInfo,
1570 uint32_t* pMemoryTypeIndex);
1586 const VkImageCreateInfo* pImageCreateInfo,
1588 uint32_t* pMemoryTypeIndex);
1719 size_t* pLostAllocationCount);
1802 const VkMemoryRequirements* pVkMemoryRequirements,
2062 size_t allocationCount,
2063 VkBool32* pAllocationsChanged,
2129 const VkBufferCreateInfo* pBufferCreateInfo,
2154 const VkImageCreateInfo* pImageCreateInfo,
2180 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2183 #ifdef __INTELLISENSE__ 2184 #define VMA_IMPLEMENTATION 2187 #ifdef VMA_IMPLEMENTATION 2188 #undef VMA_IMPLEMENTATION 2210 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2211 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2223 #if VMA_USE_STL_CONTAINERS 2224 #define VMA_USE_STL_VECTOR 1 2225 #define VMA_USE_STL_UNORDERED_MAP 1 2226 #define VMA_USE_STL_LIST 1 2229 #if VMA_USE_STL_VECTOR 2233 #if VMA_USE_STL_UNORDERED_MAP 2234 #include <unordered_map> 2237 #if VMA_USE_STL_LIST 2246 #include <algorithm> 2250 #if !defined(_WIN32) && !defined(__APPLE__) 2256 #define VMA_NULL nullptr 2259 #if defined(__APPLE__) || defined(__ANDROID__) 2261 void *aligned_alloc(
size_t alignment,
size_t size)
2264 if(alignment <
sizeof(
void*))
2266 alignment =
sizeof(
void*);
2270 if(posix_memalign(&pointer, alignment, size) == 0)
2279 #define VMA_ASSERT(expr) assert(expr) 2281 #define VMA_ASSERT(expr) 2287 #ifndef VMA_HEAVY_ASSERT 2289 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2291 #define VMA_HEAVY_ASSERT(expr) 2295 #ifndef VMA_ALIGN_OF 2296 #define VMA_ALIGN_OF(type) (__alignof(type)) 2299 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2301 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2303 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2307 #ifndef VMA_SYSTEM_FREE 2309 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2311 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2316 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2320 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2324 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2328 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2331 #ifndef VMA_DEBUG_LOG 2332 #define VMA_DEBUG_LOG(format, ...) 2342 #if VMA_STATS_STRING_ENABLED 2343 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2345 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Writes the decimal representation of `num` into outStr,
// truncating to at most strLen-1 characters plus the terminator.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long printable = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", printable);
}
// Writes the implementation-defined "%p" textual form of `ptr`
// into outStr (buffer capacity strLen, always NUL-terminated).
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    const void* const shown = ptr;
    snprintf(outStr, strLen, "%p", shown);
}
2363 void Lock() { m_Mutex.lock(); }
2364 void Unlock() { m_Mutex.unlock(); }
2368 #define VMA_MUTEX VmaMutex 2379 #ifndef VMA_ATOMIC_UINT32 2380 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2383 #ifndef VMA_BEST_FIT 2396 #define VMA_BEST_FIT (1) 2399 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2404 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2407 #ifndef VMA_DEBUG_ALIGNMENT 2412 #define VMA_DEBUG_ALIGNMENT (1) 2415 #ifndef VMA_DEBUG_MARGIN 2420 #define VMA_DEBUG_MARGIN (0) 2423 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2428 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2431 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2436 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2439 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2440 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2444 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2445 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2449 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2455 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2456 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count),
// computed with the classic branch-free SWAR reduction: pairwise 2-bit
// sums, then 4-, 8-, 16- and 32-bit sums.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);        // per-2-bit counts
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);  // per-4-bit counts
    c = ((c >> 4) + c) & 0x0F0F0F0F;                 // per-8-bit counts
    c = ((c >> 8) + c) & 0x00FF00FF;                 // per-16-bit counts
    c = ((c >> 16) + c) & 0x0000FFFF;                // final 32-bit count
    return c; // restored: the return statement was lost in the mangled source
}
// Rounds `val` up to the nearest multiple of `align`.
// Works for any positive `align`, not only powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Integer division of x by y with rounding to nearest
// (halves round up for non-negative operands).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Lomuto-style partition step for VmaQuickSort: uses *(end-1) as the
// pivot, moves every element for which cmp(elem, pivot) holds in front
// of the final pivot position, and returns an iterator to the pivot.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex; // restored: partition-point advance was lost in the mangled source
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex; // restored: return statement was lost in the mangled source
}
2509 template<
typename Iterator,
typename Compare>
2510 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2514 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2515 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2516 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2520 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2522 #endif // #ifndef VMA_SORT 2531 static inline bool VmaBlocksOnSamePage(
2532 VkDeviceSize resourceAOffset,
2533 VkDeviceSize resourceASize,
2534 VkDeviceSize resourceBOffset,
2535 VkDeviceSize pageSize)
2537 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2538 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2539 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2540 VkDeviceSize resourceBStart = resourceBOffset;
2541 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2542 return resourceAEndPage == resourceBStartPage;
// Category of the resource occupying a suballocation. Consumed by
// VmaIsBufferImageGranularityConflict to decide whether two neighboring
// suballocations may clash under buffer-image granularity rules.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unoccupied region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // contents unknown - treated pessimistically
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image with unknown tiling
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
2562 static inline bool VmaIsBufferImageGranularityConflict(
2563 VmaSuballocationType suballocType1,
2564 VmaSuballocationType suballocType2)
2566 if(suballocType1 > suballocType2)
2568 VMA_SWAP(suballocType1, suballocType2);
2571 switch(suballocType1)
2573 case VMA_SUBALLOCATION_TYPE_FREE:
2575 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2577 case VMA_SUBALLOCATION_TYPE_BUFFER:
2579 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2580 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2581 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2583 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2584 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2585 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2586 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2588 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2589 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2601 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2602 m_pMutex(useMutex ? &mutex : VMA_NULL)
2619 VMA_MUTEX* m_pMutex;
2622 #if VMA_DEBUG_GLOBAL_MUTEX 2623 static VMA_MUTEX gDebugGlobalMutex;
2624 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2626 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2630 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns the first
// iterator `it` for which cmp(*it, key) is false (i.e. *it is not less
// than key), or `end` if every element is less. Equivalent to
// std::lower_bound with comparator cmp.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up) // restored: loop halves were lost in the mangled source
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg + mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
2663 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2665 if((pAllocationCallbacks != VMA_NULL) &&
2666 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2668 return (*pAllocationCallbacks->pfnAllocation)(
2669 pAllocationCallbacks->pUserData,
2672 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2676 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2680 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2682 if((pAllocationCallbacks != VMA_NULL) &&
2683 (pAllocationCallbacks->pfnFree != VMA_NULL))
2685 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2689 VMA_SYSTEM_FREE(ptr);
2693 template<
typename T>
2694 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2696 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2699 template<
typename T>
2700 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2702 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2705 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2707 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2709 template<
typename T>
2710 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2713 VmaFree(pAllocationCallbacks, ptr);
2716 template<
typename T>
2717 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2721 for(
size_t i = count; i--; )
2725 VmaFree(pAllocationCallbacks, ptr);
2730 template<
typename T>
2731 class VmaStlAllocator
2734 const VkAllocationCallbacks*
const m_pCallbacks;
2735 typedef T value_type;
2737 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2738 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2740 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2741 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2743 template<
typename U>
2744 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2746 return m_pCallbacks == rhs.m_pCallbacks;
2748 template<
typename U>
2749 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2751 return m_pCallbacks != rhs.m_pCallbacks;
2754 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2757 #if VMA_USE_STL_VECTOR 2759 #define VmaVector std::vector 2761 template<
typename T,
typename allocatorT>
2762 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2764 vec.insert(vec.begin() + index, item);
2767 template<
typename T,
typename allocatorT>
2768 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2770 vec.erase(vec.begin() + index);
2773 #else // #if VMA_USE_STL_VECTOR 2778 template<
typename T,
typename AllocatorT>
2782 typedef T value_type;
2784 VmaVector(
const AllocatorT& allocator) :
2785 m_Allocator(allocator),
2792 VmaVector(
size_t count,
const AllocatorT& allocator) :
2793 m_Allocator(allocator),
2794 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2800 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2801 m_Allocator(src.m_Allocator),
2802 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2803 m_Count(src.m_Count),
2804 m_Capacity(src.m_Count)
2808 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2814 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2817 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2821 resize(rhs.m_Count);
2824 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2830 bool empty()
const {
return m_Count == 0; }
2831 size_t size()
const {
return m_Count; }
2832 T* data() {
return m_pArray; }
2833 const T* data()
const {
return m_pArray; }
2835 T& operator[](
size_t index)
2837 VMA_HEAVY_ASSERT(index < m_Count);
2838 return m_pArray[index];
2840 const T& operator[](
size_t index)
const 2842 VMA_HEAVY_ASSERT(index < m_Count);
2843 return m_pArray[index];
2848 VMA_HEAVY_ASSERT(m_Count > 0);
2851 const T& front()
const 2853 VMA_HEAVY_ASSERT(m_Count > 0);
2858 VMA_HEAVY_ASSERT(m_Count > 0);
2859 return m_pArray[m_Count - 1];
2861 const T& back()
const 2863 VMA_HEAVY_ASSERT(m_Count > 0);
2864 return m_pArray[m_Count - 1];
2867 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2869 newCapacity = VMA_MAX(newCapacity, m_Count);
2871 if((newCapacity < m_Capacity) && !freeMemory)
2873 newCapacity = m_Capacity;
2876 if(newCapacity != m_Capacity)
2878 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2881 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2883 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2884 m_Capacity = newCapacity;
2885 m_pArray = newArray;
2889 void resize(
size_t newCount,
bool freeMemory =
false)
2891 size_t newCapacity = m_Capacity;
2892 if(newCount > m_Capacity)
2894 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2898 newCapacity = newCount;
2901 if(newCapacity != m_Capacity)
2903 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2904 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2905 if(elementsToCopy != 0)
2907 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2909 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2910 m_Capacity = newCapacity;
2911 m_pArray = newArray;
2917 void clear(
bool freeMemory =
false)
2919 resize(0, freeMemory);
2922 void insert(
size_t index,
const T& src)
2924 VMA_HEAVY_ASSERT(index <= m_Count);
2925 const size_t oldCount = size();
2926 resize(oldCount + 1);
2927 if(index < oldCount)
2929 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2931 m_pArray[index] = src;
2934 void remove(
size_t index)
2936 VMA_HEAVY_ASSERT(index < m_Count);
2937 const size_t oldCount = size();
2938 if(index < oldCount - 1)
2940 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2942 resize(oldCount - 1);
2945 void push_back(
const T& src)
2947 const size_t newIndex = size();
2948 resize(newIndex + 1);
2949 m_pArray[newIndex] = src;
2954 VMA_HEAVY_ASSERT(m_Count > 0);
2958 void push_front(
const T& src)
2965 VMA_HEAVY_ASSERT(m_Count > 0);
2969 typedef T* iterator;
2971 iterator begin() {
return m_pArray; }
2972 iterator end() {
return m_pArray + m_Count; }
2975 AllocatorT m_Allocator;
2981 template<
typename T,
typename allocatorT>
2982 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2984 vec.insert(index, item);
2987 template<
typename T,
typename allocatorT>
2988 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2993 #endif // #if VMA_USE_STL_VECTOR 2995 template<
typename CmpLess,
typename VectorT>
2996 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2998 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3000 vector.data() + vector.size(),
3002 CmpLess()) - vector.data();
3003 VmaVectorInsert(vector, indexToInsert, value);
3004 return indexToInsert;
3007 template<
typename CmpLess,
typename VectorT>
3008 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3011 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3016 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3018 size_t indexToRemove = it - vector.begin();
3019 VmaVectorRemove(vector, indexToRemove);
3025 template<
typename CmpLess,
typename VectorT>
3026 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
3029 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3031 vector.data() + vector.size(),
3034 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
3036 return it - vector.begin();
3040 return vector.size();
3052 template<
typename T>
3053 class VmaPoolAllocator
3056 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3057 ~VmaPoolAllocator();
3065 uint32_t NextFreeIndex;
3072 uint32_t FirstFreeIndex;
3075 const VkAllocationCallbacks* m_pAllocationCallbacks;
3076 size_t m_ItemsPerBlock;
3077 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3079 ItemBlock& CreateNewBlock();
3082 template<
typename T>
3083 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3084 m_pAllocationCallbacks(pAllocationCallbacks),
3085 m_ItemsPerBlock(itemsPerBlock),
3086 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3088 VMA_ASSERT(itemsPerBlock > 0);
3091 template<
typename T>
3092 VmaPoolAllocator<T>::~VmaPoolAllocator()
3097 template<
typename T>
3098 void VmaPoolAllocator<T>::Clear()
3100 for(
size_t i = m_ItemBlocks.size(); i--; )
3101 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3102 m_ItemBlocks.clear();
3105 template<
typename T>
3106 T* VmaPoolAllocator<T>::Alloc()
3108 for(
size_t i = m_ItemBlocks.size(); i--; )
3110 ItemBlock& block = m_ItemBlocks[i];
3112 if(block.FirstFreeIndex != UINT32_MAX)
3114 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3115 block.FirstFreeIndex = pItem->NextFreeIndex;
3116 return &pItem->Value;
3121 ItemBlock& newBlock = CreateNewBlock();
3122 Item*
const pItem = &newBlock.pItems[0];
3123 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3124 return &pItem->Value;
3127 template<
typename T>
3128 void VmaPoolAllocator<T>::Free(T* ptr)
3131 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3133 ItemBlock& block = m_ItemBlocks[i];
3137 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3140 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3142 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3143 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3144 block.FirstFreeIndex = index;
3148 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3151 template<
typename T>
3152 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3154 ItemBlock newBlock = {
3155 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3157 m_ItemBlocks.push_back(newBlock);
3160 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3161 newBlock.pItems[i].NextFreeIndex = i + 1;
3162 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3163 return m_ItemBlocks.back();
3169 #if VMA_USE_STL_LIST 3171 #define VmaList std::list 3173 #else // #if VMA_USE_STL_LIST 3175 template<
typename T>
3184 template<
typename T>
3188 typedef VmaListItem<T> ItemType;
3190 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3194 size_t GetCount()
const {
return m_Count; }
3195 bool IsEmpty()
const {
return m_Count == 0; }
3197 ItemType* Front() {
return m_pFront; }
3198 const ItemType* Front()
const {
return m_pFront; }
3199 ItemType* Back() {
return m_pBack; }
3200 const ItemType* Back()
const {
return m_pBack; }
3202 ItemType* PushBack();
3203 ItemType* PushFront();
3204 ItemType* PushBack(
const T& value);
3205 ItemType* PushFront(
const T& value);
3210 ItemType* InsertBefore(ItemType* pItem);
3212 ItemType* InsertAfter(ItemType* pItem);
3214 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3215 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3217 void Remove(ItemType* pItem);
3220 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3221 VmaPoolAllocator<ItemType> m_ItemAllocator;
3227 VmaRawList(
const VmaRawList<T>& src);
3228 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
3231 template<
typename T>
3232 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3233 m_pAllocationCallbacks(pAllocationCallbacks),
3234 m_ItemAllocator(pAllocationCallbacks, 128),
3241 template<
typename T>
3242 VmaRawList<T>::~VmaRawList()
3248 template<
typename T>
3249 void VmaRawList<T>::Clear()
3251 if(IsEmpty() ==
false)
3253 ItemType* pItem = m_pBack;
3254 while(pItem != VMA_NULL)
3256 ItemType*
const pPrevItem = pItem->pPrev;
3257 m_ItemAllocator.Free(pItem);
3260 m_pFront = VMA_NULL;
3266 template<
typename T>
3267 VmaListItem<T>* VmaRawList<T>::PushBack()
3269 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3270 pNewItem->pNext = VMA_NULL;
3273 pNewItem->pPrev = VMA_NULL;
3274 m_pFront = pNewItem;
3280 pNewItem->pPrev = m_pBack;
3281 m_pBack->pNext = pNewItem;
3288 template<
typename T>
3289 VmaListItem<T>* VmaRawList<T>::PushFront()
3291 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3292 pNewItem->pPrev = VMA_NULL;
3295 pNewItem->pNext = VMA_NULL;
3296 m_pFront = pNewItem;
3302 pNewItem->pNext = m_pFront;
3303 m_pFront->pPrev = pNewItem;
3304 m_pFront = pNewItem;
3310 template<
typename T>
3311 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3313 ItemType*
const pNewItem = PushBack();
3314 pNewItem->Value = value;
3318 template<
typename T>
3319 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3321 ItemType*
const pNewItem = PushFront();
3322 pNewItem->Value = value;
3326 template<
typename T>
3327 void VmaRawList<T>::PopBack()
3329 VMA_HEAVY_ASSERT(m_Count > 0);
3330 ItemType*
const pBackItem = m_pBack;
3331 ItemType*
const pPrevItem = pBackItem->pPrev;
3332 if(pPrevItem != VMA_NULL)
3334 pPrevItem->pNext = VMA_NULL;
3336 m_pBack = pPrevItem;
3337 m_ItemAllocator.Free(pBackItem);
3341 template<
typename T>
3342 void VmaRawList<T>::PopFront()
3344 VMA_HEAVY_ASSERT(m_Count > 0);
3345 ItemType*
const pFrontItem = m_pFront;
3346 ItemType*
const pNextItem = pFrontItem->pNext;
3347 if(pNextItem != VMA_NULL)
3349 pNextItem->pPrev = VMA_NULL;
3351 m_pFront = pNextItem;
3352 m_ItemAllocator.Free(pFrontItem);
3356 template<
typename T>
3357 void VmaRawList<T>::Remove(ItemType* pItem)
3359 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3360 VMA_HEAVY_ASSERT(m_Count > 0);
3362 if(pItem->pPrev != VMA_NULL)
3364 pItem->pPrev->pNext = pItem->pNext;
3368 VMA_HEAVY_ASSERT(m_pFront == pItem);
3369 m_pFront = pItem->pNext;
3372 if(pItem->pNext != VMA_NULL)
3374 pItem->pNext->pPrev = pItem->pPrev;
3378 VMA_HEAVY_ASSERT(m_pBack == pItem);
3379 m_pBack = pItem->pPrev;
3382 m_ItemAllocator.Free(pItem);
3386 template<
typename T>
3387 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3389 if(pItem != VMA_NULL)
3391 ItemType*
const prevItem = pItem->pPrev;
3392 ItemType*
const newItem = m_ItemAllocator.Alloc();
3393 newItem->pPrev = prevItem;
3394 newItem->pNext = pItem;
3395 pItem->pPrev = newItem;
3396 if(prevItem != VMA_NULL)
3398 prevItem->pNext = newItem;
3402 VMA_HEAVY_ASSERT(m_pFront == pItem);
3412 template<
typename T>
3413 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3415 if(pItem != VMA_NULL)
3417 ItemType*
const nextItem = pItem->pNext;
3418 ItemType*
const newItem = m_ItemAllocator.Alloc();
3419 newItem->pNext = nextItem;
3420 newItem->pPrev = pItem;
3421 pItem->pNext = newItem;
3422 if(nextItem != VMA_NULL)
3424 nextItem->pPrev = newItem;
3428 VMA_HEAVY_ASSERT(m_pBack == pItem);
3438 template<
typename T>
3439 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3441 ItemType*
const newItem = InsertBefore(pItem);
3442 newItem->Value = value;
3446 template<
typename T>
3447 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3449 ItemType*
const newItem = InsertAfter(pItem);
3450 newItem->Value = value;
3454 template<
typename T,
typename AllocatorT>
3467 T& operator*()
const 3469 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3470 return m_pItem->Value;
3472 T* operator->()
const 3474 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3475 return &m_pItem->Value;
3478 iterator& operator++()
3480 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3481 m_pItem = m_pItem->pNext;
3484 iterator& operator--()
3486 if(m_pItem != VMA_NULL)
3488 m_pItem = m_pItem->pPrev;
3492 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3493 m_pItem = m_pList->Back();
3498 iterator operator++(
int)
3500 iterator result = *
this;
3504 iterator operator--(
int)
3506 iterator result = *
this;
3511 bool operator==(
const iterator& rhs)
const 3513 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3514 return m_pItem == rhs.m_pItem;
3516 bool operator!=(
const iterator& rhs)
const 3518 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3519 return m_pItem != rhs.m_pItem;
3523 VmaRawList<T>* m_pList;
3524 VmaListItem<T>* m_pItem;
3526 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3532 friend class VmaList<T, AllocatorT>;
3535 class const_iterator
3544 const_iterator(
const iterator& src) :
3545 m_pList(src.m_pList),
3546 m_pItem(src.m_pItem)
3550 const T& operator*()
const 3552 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3553 return m_pItem->Value;
3555 const T* operator->()
const 3557 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3558 return &m_pItem->Value;
3561 const_iterator& operator++()
3563 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3564 m_pItem = m_pItem->pNext;
3567 const_iterator& operator--()
3569 if(m_pItem != VMA_NULL)
3571 m_pItem = m_pItem->pPrev;
3575 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3576 m_pItem = m_pList->Back();
3581 const_iterator operator++(
int)
3583 const_iterator result = *
this;
3587 const_iterator operator--(
int)
3589 const_iterator result = *
this;
3594 bool operator==(
const const_iterator& rhs)
const 3596 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3597 return m_pItem == rhs.m_pItem;
3599 bool operator!=(
const const_iterator& rhs)
const 3601 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3602 return m_pItem != rhs.m_pItem;
3606 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3612 const VmaRawList<T>* m_pList;
3613 const VmaListItem<T>* m_pItem;
3615 friend class VmaList<T, AllocatorT>;
3618 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3620 bool empty()
const {
return m_RawList.IsEmpty(); }
3621 size_t size()
const {
return m_RawList.GetCount(); }
3623 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3624 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3626 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3627 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3629 void clear() { m_RawList.Clear(); }
3630 void push_back(
const T& value) { m_RawList.PushBack(value); }
3631 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3632 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3635 VmaRawList<T> m_RawList;
3638 #endif // #if VMA_USE_STL_LIST 3646 #if VMA_USE_STL_UNORDERED_MAP 3648 #define VmaPair std::pair 3650 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3651 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3653 #else // #if VMA_USE_STL_UNORDERED_MAP 3655 template<
typename T1,
typename T2>
3661 VmaPair() : first(), second() { }
3662 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3668 template<
typename KeyT,
typename ValueT>
3672 typedef VmaPair<KeyT, ValueT> PairType;
3673 typedef PairType* iterator;
3675 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3677 iterator begin() {
return m_Vector.begin(); }
3678 iterator end() {
return m_Vector.end(); }
3680 void insert(
const PairType& pair);
3681 iterator find(
const KeyT& key);
3682 void erase(iterator it);
3685 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3688 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3690 template<
typename FirstT,
typename SecondT>
3691 struct VmaPairFirstLess
3693 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3695 return lhs.first < rhs.first;
3697 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3699 return lhs.first < rhsFirst;
3703 template<
typename KeyT,
typename ValueT>
3704 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3706 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3708 m_Vector.data() + m_Vector.size(),
3710 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3711 VmaVectorInsert(m_Vector, indexToInsert, pair);
3714 template<
typename KeyT,
typename ValueT>
3715 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3717 PairType* it = VmaBinaryFindFirstNotLess(
3719 m_Vector.data() + m_Vector.size(),
3721 VmaPairFirstLess<KeyT, ValueT>());
3722 if((it != m_Vector.end()) && (it->first == key))
3728 return m_Vector.end();
3732 template<
typename KeyT,
typename ValueT>
3733 void VmaMap<KeyT, ValueT>::erase(iterator it)
3735 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3738 #endif // #if VMA_USE_STL_UNORDERED_MAP 3744 class VmaDeviceMemoryBlock;
3746 struct VmaAllocation_T
3749 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3753 FLAG_USER_DATA_STRING = 0x01,
3757 enum ALLOCATION_TYPE
3759 ALLOCATION_TYPE_NONE,
3760 ALLOCATION_TYPE_BLOCK,
3761 ALLOCATION_TYPE_DEDICATED,
3764 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3767 m_pUserData(VMA_NULL),
3768 m_LastUseFrameIndex(currentFrameIndex),
3769 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3770 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3772 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3778 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3781 VMA_ASSERT(m_pUserData == VMA_NULL);
// NOTE(review): this chunk is a line-sampled extraction of vk_mem_alloc.h. The
// numeric prefixes are the original file's line numbers; gaps in them mean that
// braces and some statements are missing here. Code left byte-identical.
// --- VmaAllocation_T interior: initializers, accessors, and member data ---
// Binds this allocation to a region carved out of an existing device-memory block.
3784 void InitBlockAllocation(
3786 VmaDeviceMemoryBlock* block,
3787 VkDeviceSize offset,
3788 VkDeviceSize alignment,
3790 VmaSuballocationType suballocationType,
3794 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3795 VMA_ASSERT(block != VMA_NULL);
3796 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3797 m_Alignment = alignment;
// Persistent mapping is tracked as a flag bit inside m_MapCount.
3799 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3800 m_SuballocationType = (uint8_t)suballocationType;
3801 m_BlockAllocation.m_hPool = hPool;
3802 m_BlockAllocation.m_Block = block;
3803 m_BlockAllocation.m_Offset = offset;
3804 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// InitLost (its signature line is missing from this extraction): presumably sets
// up a block allocation that is already lost — no pool, no block, offset 0,
// CanBecomeLost forced true. TODO confirm against the full source.
3809 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3810 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3811 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3812 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3813 m_BlockAllocation.m_Block = VMA_NULL;
3814 m_BlockAllocation.m_Offset = 0;
3815 m_BlockAllocation.m_CanBecomeLost =
true;
// Moves this allocation to a different block/offset (defined later in the file).
3818 void ChangeBlockAllocation(
3820 VmaDeviceMemoryBlock* block,
3821 VkDeviceSize offset);
// Binds this allocation to its own dedicated VkDeviceMemory object.
3824 void InitDedicatedAllocation(
3825 uint32_t memoryTypeIndex,
3826 VkDeviceMemory hMemory,
3827 VmaSuballocationType suballocationType,
3831 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3832 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3833 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3836 m_SuballocationType = (uint8_t)suballocationType;
// A non-null mapped pointer implies the allocation stays persistently mapped.
3837 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3838 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3839 m_DedicatedAllocation.m_hMemory = hMemory;
3840 m_DedicatedAllocation.m_pMappedData = pMappedData;
// --- Trivial inline accessors ---
3843 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3844 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3845 VkDeviceSize GetSize()
const {
return m_Size; }
3846 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3847 void* GetUserData()
const {
return m_pUserData; }
3848 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
3849 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for block allocations (asserted).
3851 VmaDeviceMemoryBlock* GetBlock()
const 3853 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3854 return m_BlockAllocation.m_Block;
3856 VkDeviceSize GetOffset()
const;
3857 VkDeviceMemory GetMemory()
const;
3858 uint32_t GetMemoryTypeIndex()
const;
3859 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3860 void* GetMappedData()
const;
3861 bool CanBecomeLost()
const;
// Lost-allocation bookkeeping uses an atomic frame index (m_LastUseFrameIndex).
3864 uint32_t GetLastUseFrameIndex()
const 3866 return m_LastUseFrameIndex.load();
3868 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3870 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3880 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3882 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3884 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3895 void BlockAllocMap();
3896 void BlockAllocUnmap();
3897 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
// --- Member data ---
3901 VkDeviceSize m_Alignment;
3902 VkDeviceSize m_Size;
3904 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3906 uint8_t m_SuballocationType;
// State for an allocation placed inside a shared block...
3913 struct BlockAllocation
3916 VmaDeviceMemoryBlock* m_Block;
3917 VkDeviceSize m_Offset;
3918 bool m_CanBecomeLost;
// ...versus state for an allocation with its own VkDeviceMemory.
3922 struct DedicatedAllocation
3924 uint32_t m_MemoryTypeIndex;
3925 VkDeviceMemory m_hMemory;
3926 void* m_pMappedData;
3932 BlockAllocation m_BlockAllocation;
3934 DedicatedAllocation m_DedicatedAllocation;
// One contiguous region inside a memory block: either a live allocation or a
// free range (type == VMA_SUBALLOCATION_TYPE_FREE).
3944 struct VmaSuballocation
3946 VkDeviceSize offset;
3949 VmaSuballocationType type;
3952 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost weight (1 MiB) charged per allocation that would have to be made "lost"
// to satisfy a request — see VmaAllocationRequest::CalcCost below.
3955 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Parameters of a planned allocation inside one block, produced by
// VmaBlockMetadata::CreateAllocationRequest.
3970 struct VmaAllocationRequest
3972 VkDeviceSize offset;
3973 VkDeviceSize sumFreeSize;
3974 VkDeviceSize sumItemSize;
3975 VmaSuballocationList::iterator item;
3976 size_t itemsToMakeLostCount;
// Lower cost is better: bytes of live allocations sacrificed plus a fixed
// penalty per allocation made lost.
3978 VkDeviceSize CalcCost()
const 3980 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for suballocations within a single VkDeviceMemory block:
// a list of used/free ranges plus a size-sorted index of the free ones.
3988 class VmaBlockMetadata
3992 ~VmaBlockMetadata();
3993 void Init(VkDeviceSize size);
// Validates all internal invariants; intended for debug/heavy-assert builds.
3996 bool Validate()
const;
3997 VkDeviceSize GetSize()
const {
return m_Size; }
3998 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3999 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4000 VkDeviceSize GetUnusedRangeSizeMax()
const;
4002 bool IsEmpty()
const;
4004 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4007 #if VMA_STATS_STRING_ENABLED 4008 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Trivial request covering the whole (empty) block.
4012 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Tries to find (or plan, via making others lost) room for a new allocation;
// returns true and fills *pAllocationRequest on success.
4017 bool CreateAllocationRequest(
4018 uint32_t currentFrameIndex,
4019 uint32_t frameInUseCount,
4020 VkDeviceSize bufferImageGranularity,
4021 VkDeviceSize allocSize,
4022 VkDeviceSize allocAlignment,
4023 VmaSuballocationType allocType,
4024 bool canMakeOtherLost,
4025 VmaAllocationRequest* pAllocationRequest);
4027 bool MakeRequestedAllocationsLost(
4028 uint32_t currentFrameIndex,
4029 uint32_t frameInUseCount,
4030 VmaAllocationRequest* pAllocationRequest);
4032 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4036 const VmaAllocationRequest& request,
4037 VmaSuballocationType type,
4038 VkDeviceSize allocSize,
4043 void FreeAtOffset(VkDeviceSize offset);
// --- Data members ---
4046 VkDeviceSize m_Size;
4047 uint32_t m_FreeCount;
4048 VkDeviceSize m_SumFreeSize;
4049 VmaSuballocationList m_Suballocations;
// Iterators to free suballocations, kept sorted by size for binary search.
4052 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4054 bool ValidateFreeSuballocationList()
const;
// Checks whether an allocation fits starting around suballocItem, honoring
// alignment and bufferImageGranularity; outputs offset and lost-cost sums.
4058 bool CheckAllocation(
4059 uint32_t currentFrameIndex,
4060 uint32_t frameInUseCount,
4061 VkDeviceSize bufferImageGranularity,
4062 VkDeviceSize allocSize,
4063 VkDeviceSize allocAlignment,
4064 VmaSuballocationType allocType,
4065 VmaSuballocationList::const_iterator suballocItem,
4066 bool canMakeOtherLost,
4067 VkDeviceSize* pOffset,
4068 size_t* itemsToMakeLostCount,
4069 VkDeviceSize* pSumFreeSize,
4070 VkDeviceSize* pSumItemSize)
const;
4072 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4076 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4079 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4082 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Wraps one VkDeviceMemory object plus its suballocation metadata and a
// ref-counted host mapping.
4091 class VmaDeviceMemoryBlock
4094 VmaBlockMetadata m_Metadata;
// Destructor asserts the block was fully unmapped and its memory released
// (Destroy must run first; the handle is expected to be VK_NULL_HANDLE here).
4098 ~VmaDeviceMemoryBlock()
4100 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
4101 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4106 uint32_t newMemoryTypeIndex,
4107 VkDeviceMemory newMemory,
4108 VkDeviceSize newSize);
4112 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
4113 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4114 void* GetMappedData()
const {
return m_pMappedData; }
4117 bool Validate()
const;
// Maps the memory `count` times (reference counted); ppData may be null.
4120 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
4123 VkResult BindBufferMemory(
4127 VkResult BindImageMemory(
4133 uint32_t m_MemoryTypeIndex;
4134 VkDeviceMemory m_hMemory;
// Host-mapping reference count and the cached mapped pointer.
4139 uint32_t m_MapCount;
4140 void* m_pMappedData;
// Orders raw pointers; used as comparator for pointer-keyed collections.
4143 struct VmaPointerLess
4145 bool operator()(
const void* lhs,
const void* rhs)
const 4151 class VmaDefragmentator;
// A growable sequence of VmaDeviceMemoryBlock for one memory type — the heart
// of both the default pools and custom VmaPool objects.
4159 struct VmaBlockVector
4163 uint32_t memoryTypeIndex,
4164 VkDeviceSize preferredBlockSize,
4165 size_t minBlockCount,
4166 size_t maxBlockCount,
4167 VkDeviceSize bufferImageGranularity,
4168 uint32_t frameInUseCount,
4172 VkResult CreateMinBlocks();
4174 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4175 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
4176 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
4177 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
4181 bool IsEmpty()
const {
return m_Blocks.empty(); }
4185 uint32_t currentFrameIndex,
4186 const VkMemoryRequirements& vkMemReq,
4188 VmaSuballocationType suballocType,
4197 #if VMA_STATS_STRING_ENABLED 4198 void PrintDetailedMap(
class VmaJsonWriter& json);
4201 void MakePoolAllocationsLost(
4202 uint32_t currentFrameIndex,
4203 size_t* pLostAllocationCount);
// Lazily creates the defragmentator tied to this vector.
4205 VmaDefragmentator* EnsureDefragmentator(
4207 uint32_t currentFrameIndex);
4209 VkResult Defragment(
4211 VkDeviceSize& maxBytesToMove,
4212 uint32_t& maxAllocationsToMove);
4214 void DestroyDefragmentator();
4217 friend class VmaDefragmentator;
// Immutable configuration, fixed at construction.
4220 const uint32_t m_MemoryTypeIndex;
4221 const VkDeviceSize m_PreferredBlockSize;
4222 const size_t m_MinBlockCount;
4223 const size_t m_MaxBlockCount;
4224 const VkDeviceSize m_BufferImageGranularity;
4225 const uint32_t m_FrameInUseCount;
4226 const bool m_IsCustomPool;
4229 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// Set when an entirely-free block exists, so a second one is never kept.
4233 bool m_HasEmptyBlock;
4234 VmaDefragmentator* m_pDefragmentator;
4236 size_t CalcMaxBlockSize()
const;
4239 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks approximately sorted with one bubble pass per call.
4243 void IncrementallySortBlocks();
4245 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// VmaPool_T fragment (class header missing in this extraction): wraps one
// dedicated VmaBlockVector.
4251 VmaBlockVector m_BlockVector;
4259 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Performs defragmentation over one VmaBlockVector: moves movable allocations
// out of sparsely-used blocks so that empty blocks can be freed.
4261 #if VMA_STATS_STRING_ENABLED 4266 class VmaDefragmentator
4269 VmaBlockVector*
const m_pBlockVector;
4270 uint32_t m_CurrentFrameIndex;
4271 VkDeviceSize m_BytesMoved;
4272 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; m_pChanged is the caller's
// optional "was this allocation moved" flag.
4274 struct AllocationInfo
4277 VkBool32* m_pChanged;
4280 m_hAllocation(VK_NULL_HANDLE),
4281 m_pChanged(VMA_NULL)
// Sorts registered allocations largest-first.
4286 struct AllocationInfoSizeGreater
4288 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 4290 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
4295 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state during a defragmentation pass.
4299 VmaDeviceMemoryBlock* m_pBlock;
4300 bool m_HasNonMovableAllocations;
4301 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4303 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
4305 m_HasNonMovableAllocations(true),
4306 m_Allocations(pAllocationCallbacks),
4307 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when it holds more allocations than
// were registered with the defragmentator.
4311 void CalcHasNonMovableAllocations()
4313 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
4314 const size_t defragmentAllocCount = m_Allocations.size();
4315 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// (sic) "Descecnding" typo is in the upstream source; kept as-is.
4318 void SortAllocationsBySizeDescecnding()
4320 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4323 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
4328 void* m_pMappedDataForDefragmentation;
// Heterogeneous comparator: BlockInfo* vs raw block pointer, for binary search.
4331 struct BlockPointerLess
4333 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4335 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4337 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4339 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Preferred move destinations first: blocks with only movable allocations and
// the least free space (so they fill up and others empty out).
4345 struct BlockInfoCompareMoveDestination
4347 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4349 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4353 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4357 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4365 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4366 BlockInfoVector m_Blocks;
4368 VkResult DefragmentRound(
4369 VkDeviceSize maxBytesToMove,
4370 uint32_t maxAllocationsToMove);
4372 static bool MoveMakesSense(
4373 size_t dstBlockIndex, VkDeviceSize dstOffset,
4374 size_t srcBlockIndex, VkDeviceSize srcOffset);
4379 VmaBlockVector* pBlockVector,
4380 uint32_t currentFrameIndex);
4382 ~VmaDefragmentator();
4384 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4385 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4387 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
4389 VkResult Defragment(
4390 VkDeviceSize maxBytesToMove,
4391 uint32_t maxAllocationsToMove);
// The allocator implementation behind the public VmaAllocator handle:
// per-memory-type block vectors, dedicated-allocation registries, pools.
4395 struct VmaAllocator_T
4398 bool m_UseKhrDedicatedAllocation;
4400 bool m_AllocationCallbacksSpecified;
4401 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap size caps, guarded by their own mutex.
4405 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4406 VMA_MUTEX m_HeapSizeLimitMutex;
4408 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4409 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default (non-pool) block vectors, one per memory type.
4412 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated allocations tracked per memory type, each list with its own lock.
4415 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4416 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4417 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks only if they were actually supplied at creation.
4422 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4424 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4428 return m_VulkanFunctions;
// Effective granularity: device limit, floored by the debug minimum.
4431 VkDeviceSize GetBufferImageGranularity()
const 4434 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4435 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4438 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4439 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4441 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4443 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4444 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Queries requirements, including dedicated-allocation preference when the
// KHR extension path is enabled.
4447 void GetBufferMemoryRequirements(
4449 VkMemoryRequirements& memReq,
4450 bool& requiresDedicatedAllocation,
4451 bool& prefersDedicatedAllocation)
const;
4452 void GetImageMemoryRequirements(
4454 VkMemoryRequirements& memReq,
4455 bool& requiresDedicatedAllocation,
4456 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point used by all public vmaAllocate*/vmaCreate* paths.
4459 VkResult AllocateMemory(
4460 const VkMemoryRequirements& vkMemReq,
4461 bool requiresDedicatedAllocation,
4462 bool prefersDedicatedAllocation,
4463 VkBuffer dedicatedBuffer,
4464 VkImage dedicatedImage,
4466 VmaSuballocationType suballocType,
4472 void CalculateStats(
VmaStats* pStats);
4474 #if VMA_STATS_STRING_ENABLED 4475 void PrintDetailedMap(
class VmaJsonWriter& json);
4478 VkResult Defragment(
4480 size_t allocationCount,
4481 VkBool32* pAllocationsChanged,
4489 void DestroyPool(
VmaPool pool);
4492 void SetCurrentFrameIndex(uint32_t frameIndex);
4494 void MakePoolAllocationsLost(
4496 size_t* pLostAllocationCount);
// Raw VkDeviceMemory allocate/free, honoring heap size limits.
4500 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4501 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4506 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
4507 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
4510 VkDeviceSize m_PreferredLargeHeapBlockSize;
4512 VkPhysicalDevice m_PhysicalDevice;
4513 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools created by the user, guarded by m_PoolsMutex.
4515 VMA_MUTEX m_PoolsMutex;
4517 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4523 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
4525 VkResult AllocateMemoryOfType(
4526 const VkMemoryRequirements& vkMemReq,
4527 bool dedicatedAllocation,
4528 VkBuffer dedicatedBuffer,
4529 VkImage dedicatedImage,
4531 uint32_t memTypeIndex,
4532 VmaSuballocationType suballocType,
4536 VkResult AllocateDedicatedMemory(
4538 VmaSuballocationType suballocType,
4539 uint32_t memTypeIndex,
4541 bool isUserDataString,
4543 VkBuffer dedicatedBuffer,
4544 VkImage dedicatedImage,
// Convenience wrappers that route through the allocator's callbacks.
4554 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
4556 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4559 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
4561 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed allocation helpers built on VmaMalloc/VmaFree, taking the allocator
// handle (the CpuAllocCallbacks overloads exist elsewhere in the file).
4564 template<
typename T>
4567 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
4570 template<
typename T>
4571 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
4573 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroys *ptr then frees it (destructor call line missing in this extraction).
4576 template<
typename T>
4577 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
4582 VmaFree(hAllocator, ptr);
// Destroys elements in reverse order, then frees the array storage.
4586 template<
typename T>
4587 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
4591 for(
size_t i = count; i--; )
4593 VmaFree(hAllocator, ptr);
// Minimal append-only string buffer used to build the JSON stats string;
// backed by a VmaVector<char> so it honors the allocator's CPU callbacks.
4600 #if VMA_STATS_STRING_ENABLED 4602 class VmaStringBuilder
4605 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4606 size_t GetLength()
const {
return m_Data.size(); }
// NOTE(review): m_Data is not NUL-terminated; GetData() is paired with
// GetLength() by callers.
4607 const char* GetData()
const {
return m_Data.data(); }
4609 void Add(
char ch) { m_Data.push_back(ch); }
4610 void Add(
const char* pStr);
4611 void AddNewLine() { Add(
'\n'); }
4612 void AddNumber(uint32_t num);
4613 void AddNumber(uint64_t num);
4614 void AddPointer(
const void* ptr);
4617 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a C string by bulk resize + memcpy (no per-char push_back).
4620 void VmaStringBuilder::Add(
const char* pStr)
4622 const size_t strLen = strlen(pStr);
4625 const size_t oldCount = m_Data.size();
4626 m_Data.resize(oldCount + strLen);
4627 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Numeric/pointer formatting into a stack buffer, then appended as a string.
4631 void VmaStringBuilder::AddNumber(uint32_t num)
4634 VmaUint32ToStr(buf,
sizeof(buf), num);
4638 void VmaStringBuilder::AddNumber(uint64_t num)
4641 VmaUint64ToStr(buf,
sizeof(buf), num);
4645 void VmaStringBuilder::AddPointer(
const void* ptr)
4648 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over a VmaStringBuilder. Maintains a stack of open
// objects/arrays so commas, indentation and key/value alternation come out
// right. (The class header line is missing from this extraction.)
4652 #endif // #if VMA_STATS_STRING_ENABLED 4657 #if VMA_STATS_STRING_ENABLED 4662 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4665 void BeginObject(
bool singleLine =
false);
4668 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString in one call.
4671 void WriteString(
const char* pStr);
4672 void BeginString(
const char* pStr = VMA_NULL);
4673 void ContinueString(
const char* pStr);
4674 void ContinueString(uint32_t n);
4675 void ContinueString(uint64_t n);
4676 void ContinueString_Pointer(
const void* ptr);
4677 void EndString(
const char* pStr = VMA_NULL);
4679 void WriteNumber(uint32_t n);
4680 void WriteNumber(uint64_t n);
4681 void WriteBool(
bool b);
4685 static const char*
const INDENT;
// Per-level stack entry: what collection is open, how many values were
// emitted into it, and whether it is rendered on a single line.
4687 enum COLLECTION_TYPE
4689 COLLECTION_TYPE_OBJECT,
4690 COLLECTION_TYPE_ARRAY,
4694 COLLECTION_TYPE type;
4695 uint32_t valueCount;
4696 bool singleLineMode;
4699 VmaStringBuilder& m_SB;
4700 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4701 bool m_InsideString;
4703 void BeginValue(
bool isString);
4704 void WriteIndent(
bool oneLess =
false);
4707 const char*
const VmaJsonWriter::INDENT =
"  ";
// --- Method definitions ---
4709 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4711 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4712 m_InsideString(false)
// Destructor enforces that every Begin* was matched by an End*.
4716 VmaJsonWriter::~VmaJsonWriter()
4718 VMA_ASSERT(!m_InsideString);
4719 VMA_ASSERT(m_Stack.empty());
4722 void VmaJsonWriter::BeginObject(
bool singleLine)
4724 VMA_ASSERT(!m_InsideString);
4730 item.type = COLLECTION_TYPE_OBJECT;
4731 item.valueCount = 0;
4732 item.singleLineMode = singleLine;
4733 m_Stack.push_back(item);
4736 void VmaJsonWriter::EndObject()
4738 VMA_ASSERT(!m_InsideString);
4743 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4747 void VmaJsonWriter::BeginArray(
bool singleLine)
4749 VMA_ASSERT(!m_InsideString);
4755 item.type = COLLECTION_TYPE_ARRAY;
4756 item.valueCount = 0;
4757 item.singleLineMode = singleLine;
4758 m_Stack.push_back(item);
4761 void VmaJsonWriter::EndArray()
4763 VMA_ASSERT(!m_InsideString);
4768 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4772 void VmaJsonWriter::WriteString(
const char* pStr)
4778 void VmaJsonWriter::BeginString(
const char* pStr)
4780 VMA_ASSERT(!m_InsideString);
4784 m_InsideString =
true;
4785 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4787 ContinueString(pStr);
// Appends string content character by character (the escaping switch for
// quotes/backslash/control chars is among the lines missing here).
4791 void VmaJsonWriter::ContinueString(
const char* pStr)
4793 VMA_ASSERT(m_InsideString);
4795 const size_t strLen = strlen(pStr);
4796 for(
size_t i = 0; i < strLen; ++i)
4829 VMA_ASSERT(0 &&
"Character not currently supported.");
4835 void VmaJsonWriter::ContinueString(uint32_t n)
4837 VMA_ASSERT(m_InsideString);
4841 void VmaJsonWriter::ContinueString(uint64_t n)
4843 VMA_ASSERT(m_InsideString);
4847 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4849 VMA_ASSERT(m_InsideString);
4850 m_SB.AddPointer(ptr);
4853 void VmaJsonWriter::EndString(
const char* pStr)
4855 VMA_ASSERT(m_InsideString);
4856 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4858 ContinueString(pStr);
4861 m_InsideString =
false;
4864 void VmaJsonWriter::WriteNumber(uint32_t n)
4866 VMA_ASSERT(!m_InsideString);
4871 void VmaJsonWriter::WriteNumber(uint64_t n)
4873 VMA_ASSERT(!m_InsideString);
4878 void VmaJsonWriter::WriteBool(
bool b)
4880 VMA_ASSERT(!m_InsideString);
4882 m_SB.Add(b ?
"true" :
"false");
4885 void VmaJsonWriter::WriteNull()
4887 VMA_ASSERT(!m_InsideString);
// Emits separators before a value: inside an object, even-indexed values must
// be string keys; odd positions follow a key and get different punctuation.
4892 void VmaJsonWriter::BeginValue(
bool isString)
4894 if(!m_Stack.empty())
4896 StackItem& currItem = m_Stack.back();
4897 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4898 currItem.valueCount % 2 == 0)
4900 VMA_ASSERT(isString);
4903 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4904 currItem.valueCount % 2 != 0)
4908 else if(currItem.valueCount > 0)
4917 ++currItem.valueCount;
4921 void VmaJsonWriter::WriteIndent(
bool oneLess)
4923 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4927 size_t count = m_Stack.size();
4928 if(count > 0 && oneLess)
4932 for(
size_t i = 0; i < count; ++i)
// --- VmaAllocation_T out-of-line definitions ---
// SetUserData: when the allocation stores a user-data *string*, the old copy
// is freed and the new string is duplicated into allocator-owned memory;
// otherwise the raw pointer is stored as-is.
4939 #endif // #if VMA_STATS_STRING_ENABLED 4943 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
4945 if(IsUserDataString())
4947 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4949 FreeUserDataString(hAllocator);
4951 if(pUserData != VMA_NULL)
4953 const char*
const newStrSrc = (
char*)pUserData;
4954 const size_t newStrLen = strlen(newStrSrc);
4955 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
// +1 copies the terminating NUL as well.
4956 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4957 m_pUserData = newStrDst;
4962 m_pUserData = pUserData;
// Re-points a block allocation at a new block/offset; if persistently mapped,
// the mapping reference count is transferred from the old block to the new.
4966 void VmaAllocation_T::ChangeBlockAllocation(
4968 VmaDeviceMemoryBlock* block,
4969 VkDeviceSize offset)
4971 VMA_ASSERT(block != VMA_NULL);
4972 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4975 if(block != m_BlockAllocation.m_Block)
4977 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
4978 if(IsPersistentMap())
4980 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4981 block->Map(hAllocator, mapRefCount, VMA_NULL);
4984 m_BlockAllocation.m_Block = block;
4985 m_BlockAllocation.m_Offset = offset;
// Accessors dispatching on the allocation kind (block vs dedicated).
4988 VkDeviceSize VmaAllocation_T::GetOffset()
const 4992 case ALLOCATION_TYPE_BLOCK:
4993 return m_BlockAllocation.m_Offset;
4994 case ALLOCATION_TYPE_DEDICATED:
5002 VkDeviceMemory VmaAllocation_T::GetMemory()
const 5006 case ALLOCATION_TYPE_BLOCK:
5007 return m_BlockAllocation.m_Block->GetDeviceMemory();
5008 case ALLOCATION_TYPE_DEDICATED:
5009 return m_DedicatedAllocation.m_hMemory;
5012 return VK_NULL_HANDLE;
5016 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 5020 case ALLOCATION_TYPE_BLOCK:
5021 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
5022 case ALLOCATION_TYPE_DEDICATED:
5023 return m_DedicatedAllocation.m_MemoryTypeIndex;
// For block allocations the mapped pointer is the block's mapping plus this
// allocation's offset.
5030 void* VmaAllocation_T::GetMappedData()
const 5034 case ALLOCATION_TYPE_BLOCK:
5037 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
5038 VMA_ASSERT(pBlockData != VMA_NULL);
5039 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
5046 case ALLOCATION_TYPE_DEDICATED:
5047 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
5048 return m_DedicatedAllocation.m_pMappedData;
5055 bool VmaAllocation_T::CanBecomeLost()
const 5059 case ALLOCATION_TYPE_BLOCK:
5060 return m_BlockAllocation.m_CanBecomeLost;
5061 case ALLOCATION_TYPE_DEDICATED:
5069 VmaPool VmaAllocation_T::GetPool()
const 5071 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5072 return m_BlockAllocation.m_hPool;
// MakeLost: CAS loop on m_LastUseFrameIndex — fails if already lost or still
// within the frame-in-use window; succeeds by swapping in VMA_FRAME_INDEX_LOST.
5075 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5077 VMA_ASSERT(CanBecomeLost());
5083 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
5086 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
5091 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
5097 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Releases the duplicated user-data string made by SetUserData.
5107 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
5109 VMA_ASSERT(IsUserDataString());
5110 if(m_pUserData != VMA_NULL)
5112 char*
const oldStr = (
char*)m_pUserData;
5113 const size_t oldStrLen = strlen(oldStr);
5114 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
5115 m_pUserData = VMA_NULL;
// Map refcount lives in the low 7 bits of m_MapCount; 0x7F is the ceiling.
5119 void VmaAllocation_T::BlockAllocMap()
5121 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5123 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5129 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
5133 void VmaAllocation_T::BlockAllocUnmap()
5135 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5137 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5143 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Dedicated map: reuse the existing mapping if any; otherwise call
// vkMapMemory and cache the pointer.
5147 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
5149 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5153 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5155 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
5156 *ppData = m_DedicatedAllocation.m_pMappedData;
5162 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
5163 return VK_ERROR_MEMORY_MAP_FAILED;
5168 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5169 hAllocator->m_hDevice,
5170 m_DedicatedAllocation.m_hMemory,
5175 if(result == VK_SUCCESS)
5177 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps via vkUnmapMemory once the reference count drops to zero.
5184 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
5186 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5188 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5193 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
5194 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
5195 hAllocator->m_hDevice,
5196 m_DedicatedAllocation.m_hMemory);
5201 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Emits one VmaStatInfo as a JSON object (counts, byte totals, and min/avg/max
// for allocation and unused-range sizes).
5205 #if VMA_STATS_STRING_ENABLED 5208 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
5217 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
5221 json.WriteString(
"Blocks");
5224 json.WriteString(
"Allocations");
5227 json.WriteString(
"UnusedRanges");
5230 json.WriteString(
"UsedBytes");
5233 json.WriteString(
"UnusedBytes");
5238 json.WriteString(
"AllocationSize");
5239 json.BeginObject(
true);
5240 json.WriteString(
"Min");
5242 json.WriteString(
"Avg");
5244 json.WriteString(
"Max");
5251 json.WriteString(
"UnusedRangeSize");
5252 json.BeginObject(
true);
5253 json.WriteString(
"Min");
5255 json.WriteString(
"Avg");
5257 json.WriteString(
"Max");
// Comparator for the size-sorted free list: supports iterator-vs-iterator and
// iterator-vs-size lookups (the latter for binary search by requested size).
5265 #endif // #if VMA_STATS_STRING_ENABLED 5267 struct VmaSuballocationItemSizeLess
5270 const VmaSuballocationList::iterator lhs,
5271 const VmaSuballocationList::iterator rhs)
const 5273 return lhs->size < rhs->size;
5276 const VmaSuballocationList::iterator lhs,
5277 VkDeviceSize rhsSize)
const 5279 return lhs->size < rhsSize;
// --- VmaBlockMetadata out-of-line definitions ---
5286 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
5290 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
5291 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
5295 VmaBlockMetadata::~VmaBlockMetadata()
// Init: the whole block starts as one free suballocation covering [0, size).
5299 void VmaBlockMetadata::Init(VkDeviceSize size)
5303 m_SumFreeSize = size;
5305 VmaSuballocation suballoc = {};
5306 suballoc.offset = 0;
5307 suballoc.size = size;
5308 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5309 suballoc.hAllocation = VK_NULL_HANDLE;
5311 m_Suballocations.push_back(suballoc);
5312 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5314 m_FreeSuballocationsBySize.push_back(suballocItem);
// Validate: walks the suballocation list checking contiguity, no two adjacent
// free ranges, handle/type agreement, and that the size-sorted free index
// matches; cross-checks the cached counters at the end.
5317 bool VmaBlockMetadata::Validate()
const 5319 if(m_Suballocations.empty())
5325 VkDeviceSize calculatedOffset = 0;
5327 uint32_t calculatedFreeCount = 0;
5329 VkDeviceSize calculatedSumFreeSize = 0;
5332 size_t freeSuballocationsToRegister = 0;
5334 bool prevFree =
false;
5336 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5337 suballocItem != m_Suballocations.cend();
5340 const VmaSuballocation& subAlloc = *suballocItem;
// Each suballocation must begin exactly where the previous one ended.
5343 if(subAlloc.offset != calculatedOffset)
5348 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two consecutive free ranges should have been merged.
5350 if(prevFree && currFree)
5355 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5362 calculatedSumFreeSize += subAlloc.size;
5363 ++calculatedFreeCount;
// Only free ranges at or above the registration threshold appear in the
// size-sorted index.
5364 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5366 ++freeSuballocationsToRegister;
5371 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5375 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5381 calculatedOffset += subAlloc.size;
5382 prevFree = currFree;
5387 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
5392 VkDeviceSize lastSize = 0;
5393 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5395 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5398 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
// The index must be sorted ascending by size.
5403 if(suballocItem->size < lastSize)
5408 lastSize = suballocItem->size;
5412 if(!ValidateFreeSuballocationList() ||
5413 (calculatedOffset != m_Size) ||
5414 (calculatedSumFreeSize != m_SumFreeSize) ||
5415 (calculatedFreeCount != m_FreeCount))
// Largest free range = last entry of the ascending size-sorted index.
5423 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5425 if(!m_FreeSuballocationsBySize.empty())
5427 return m_FreeSuballocationsBySize.back()->size;
5435 bool VmaBlockMetadata::IsEmpty()
const 5437 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
5440 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5444 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5456 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5457 suballocItem != m_Suballocations.cend();
5460 const VmaSuballocation& suballoc = *suballocItem;
5461 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
5474 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5476 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5478 inoutStats.
size += m_Size;
// Serializes this block's suballocation map as JSON for vmaBuildStatsString.
5485 #if VMA_STATS_STRING_ENABLED 5487 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5491 json.WriteString(
"TotalBytes");
5492 json.WriteNumber(m_Size);
5494 json.WriteString(
"UnusedBytes");
5495 json.WriteNumber(m_SumFreeSize);
5497 json.WriteString(
"Allocations");
5498 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5500 json.WriteString(
"UnusedRanges");
5501 json.WriteNumber(m_FreeCount);
5503 json.WriteString(
"Suballocations");
5506 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5507 suballocItem != m_Suballocations.cend();
5508 ++suballocItem, ++i)
5510 json.BeginObject(
true);
5512 json.WriteString(
"Type");
5513 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
5515 json.WriteString(
"Size");
5516 json.WriteNumber(suballocItem->size);
5518 json.WriteString(
"Offset");
5519 json.WriteNumber(suballocItem->offset);
// User data is printed as a string when the allocation owns a string copy,
// otherwise as a raw pointer value.
5521 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5523 const void* pUserData = suballocItem->hAllocation->GetUserData();
5524 if(pUserData != VMA_NULL)
5526 json.WriteString(
"UserData");
5527 if(suballocItem->hAllocation->IsUserDataString())
5529 json.WriteString((
const char*)pUserData);
5534 json.ContinueString_Pointer(pUserData);
// CreateFirstAllocationRequest: trivial plan for an empty block — everything
// at offset 0, nothing to make lost.
5547 #endif // #if VMA_STATS_STRING_ENABLED 5559 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5561 VMA_ASSERT(IsEmpty());
5562 pAllocationRequest->offset = 0;
5563 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5564 pAllocationRequest->sumItemSize = 0;
5565 pAllocationRequest->item = m_Suballocations.begin();
5566 pAllocationRequest->itemsToMakeLostCount = 0;
// CreateAllocationRequest: two strategies. First, search only existing free
// ranges (best-fit via binary search, or worst-fit scanning from the largest —
// the strategy branch lines are missing from this extraction). Second, if
// canMakeOtherLost, consider every position and pick the plan with the lowest
// CalcCost() in allocations that must be sacrificed.
5569 bool VmaBlockMetadata::CreateAllocationRequest(
5570 uint32_t currentFrameIndex,
5571 uint32_t frameInUseCount,
5572 VkDeviceSize bufferImageGranularity,
5573 VkDeviceSize allocSize,
5574 VkDeviceSize allocAlignment,
5575 VmaSuballocationType allocType,
5576 bool canMakeOtherLost,
5577 VmaAllocationRequest* pAllocationRequest)
5579 VMA_ASSERT(allocSize > 0);
5580 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5581 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5582 VMA_HEAVY_ASSERT(Validate());
// Early out: without making others lost, total free space must at least fit.
5585 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5591 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5592 if(freeSuballocCount > 0)
// Best-fit: binary-search the ascending size index for the first candidate
// large enough, then walk upward until one actually fits (alignment etc.).
5597 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5598 m_FreeSuballocationsBySize.data(),
5599 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5601 VmaSuballocationItemSizeLess());
5602 size_t index = it - m_FreeSuballocationsBySize.data();
5603 for(; index < freeSuballocCount; ++index)
5608 bufferImageGranularity,
5612 m_FreeSuballocationsBySize[index],
5614 &pAllocationRequest->offset,
5615 &pAllocationRequest->itemsToMakeLostCount,
5616 &pAllocationRequest->sumFreeSize,
5617 &pAllocationRequest->sumItemSize))
5619 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternate strategy: scan from the largest free range downward.
5627 for(
size_t index = freeSuballocCount; index--; )
5632 bufferImageGranularity,
5636 m_FreeSuballocationsBySize[index],
5638 &pAllocationRequest->offset,
5639 &pAllocationRequest->itemsToMakeLostCount,
5640 &pAllocationRequest->sumFreeSize,
5641 &pAllocationRequest->sumItemSize))
5643 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Brute-force pass when losing other allocations is permitted: try every
// start position, keep the cheapest plan (VK_WHOLE_SIZE == "none found yet").
5650 if(canMakeOtherLost)
5654 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5655 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5657 VmaAllocationRequest tmpAllocRequest = {};
5658 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5659 suballocIt != m_Suballocations.end();
5662 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5663 suballocIt->hAllocation->CanBecomeLost())
5668 bufferImageGranularity,
5674 &tmpAllocRequest.offset,
5675 &tmpAllocRequest.itemsToMakeLostCount,
5676 &tmpAllocRequest.sumFreeSize,
5677 &tmpAllocRequest.sumItemSize))
5679 tmpAllocRequest.item = suballocIt;
5681 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5683 *pAllocationRequest = tmpAllocRequest;
5689 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Carries out the "make other allocations lost" part of a previously computed
// allocation request: walks forward from pAllocationRequest->item, skipping free
// suballocations, and calls MakeLost() on each live allocation until
// itemsToMakeLostCount reaches zero. Each lost item is merged into the free list
// via FreeSuballocation (which may also merge neighbors — hence the returned
// iterator is re-assigned). Returns (elided) whether all requested items could
// be made lost; on the success path the final item must be FREE (asserted).
5698 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5699 uint32_t currentFrameIndex,
5700 uint32_t frameInUseCount,
5701 VmaAllocationRequest* pAllocationRequest)
5703 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Free suballocations don't need to be made lost — step over them.
5705 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5707 ++pAllocationRequest->item;
5709 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5710 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5711 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5712 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge with neighbors; keep the returned iterator.
5714 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5715 --pAllocationRequest->itemsToMakeLostCount;
5723 VMA_HEAVY_ASSERT(Validate());
5724 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5725 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that is allowed to become lost
// (CanBecomeLost() and MakeLost() succeed for the given frame window).
// Freed suballocations are merged back into the free list via FreeSuballocation.
// Returns the number of allocations that were actually made lost.
5730 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5732 uint32_t lostAllocationCount = 0;
5733 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5734 it != m_Suballocations.end();
5737 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5738 it->hAllocation->CanBecomeLost() &&
5739 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation returns a valid iterator after possible neighbor merges.
5741 it = FreeSuballocation(it);
5742 ++lostAllocationCount;
5745 return lostAllocationCount;
// Commits a previously validated allocation request: converts the free
// suballocation at request.item into an allocated one of exactly allocSize at
// request.offset, and re-inserts any leftover space before (paddingBegin) and
// after (paddingEnd) as new FREE suballocations registered in the by-size list.
// Finally updates m_FreeCount / m_SumFreeSize bookkeeping (some increments for
// the padding cases are elided in this extraction).
5748 void VmaBlockMetadata::Alloc(
5749 const VmaAllocationRequest& request,
5750 VmaSuballocationType type,
5751 VkDeviceSize allocSize,
5754 VMA_ASSERT(request.item != m_Suballocations.end());
5755 VmaSuballocation& suballoc = *request.item;
5757 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5759 VMA_ASSERT(request.offset >= suballoc.offset);
// Alignment may have pushed the offset forward inside the free range.
5760 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5761 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5762 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item stops being free, so remove it from the sorted free list first.
5766 UnregisterFreeSuballocation(request.item);
5768 suballoc.offset = request.offset;
5769 suballoc.size = allocSize;
5770 suballoc.type = type;
5771 suballoc.hAllocation = hAllocation;
// Leftover space after the allocation becomes a new free suballocation.
5776 VmaSuballocation paddingSuballoc = {};
5777 paddingSuballoc.offset = request.offset + allocSize;
5778 paddingSuballoc.size = paddingEnd;
5779 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5780 VmaSuballocationList::iterator next = request.item;
5782 const VmaSuballocationList::iterator paddingEndItem =
5783 m_Suballocations.insert(next, paddingSuballoc);
5784 RegisterFreeSuballocation(paddingEndItem);
// Leftover space before the allocation (from alignment) likewise.
5790 VmaSuballocation paddingSuballoc = {};
5791 paddingSuballoc.offset = request.offset - paddingBegin;
5792 paddingSuballoc.size = paddingBegin;
5793 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5794 const VmaSuballocationList::iterator paddingBeginItem =
5795 m_Suballocations.insert(request.item, paddingSuballoc);
5796 RegisterFreeSuballocation(paddingBeginItem);
// One free item consumed; padding items re-add to the count on elided lines.
5800 m_FreeCount = m_FreeCount - 1;
5801 if(paddingBegin > 0)
5809 m_SumFreeSize -= allocSize;
// NOTE(review): the enclosing function signature was elided by extraction —
// presumably VmaBlockMetadata::Free(allocation); confirm against the full source.
// Linear scan for the suballocation holding `allocation`; on match, releases it
// back to the free list via FreeSuballocation and (on an elided line) returns.
// Falling through the loop means the handle was not in this block — hard assert.
5814 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5815 suballocItem != m_Suballocations.end();
5818 VmaSuballocation& suballoc = *suballocItem;
5819 if(suballoc.hAllocation == allocation)
5821 FreeSuballocation(suballocItem);
5822 VMA_HEAVY_ASSERT(Validate());
5826 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts exactly at `offset` (linear scan by
// offset instead of by allocation handle). Asserts if no suballocation at that
// offset exists in this block.
5829 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5831 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5832 suballocItem != m_Suballocations.end();
5835 VmaSuballocation& suballoc = *suballocItem;
5836 if(suballoc.offset == offset)
5838 FreeSuballocation(suballocItem);
5842 VMA_ASSERT(0 &&
"Not found!");
// Debug check of m_FreeSuballocationsBySize invariants: every entry must be
// FREE, at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the
// vector must be sorted by ascending size (lastSize tracks the previous entry).
// The failure returns between checks are elided in this extraction.
5845 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5847 VkDeviceSize lastSize = 0;
5848 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5850 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5852 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5857 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5862 if(it->size < lastSize)
5868 lastSize = it->size;
// Checks whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem, computing the final *pOffset (after debug
// margin, alignment, and bufferImageGranularity adjustments) plus the cost
// outputs: *itemsToMakeLostCount, *pSumFreeSize, *pSumItemSize.
// Two major branches:
//  - canMakeOtherLost: may span multiple consecutive suballocations, counting
//    live-but-losable allocations into the cost; also checks that a FOLLOWING
//    allocation on the same granularity page would not conflict (it would have
//    to be made lost too).
//  - plain free-fit (from original line 6058 on): suballocItem must itself be a
//    FREE suballocation large enough after padding; a conflicting next
//    allocation on the same page is a hard failure (elided return).
// Returns true if the allocation fits. (review note: many early-return lines
// and loop increments are elided by extraction.)
5873 bool VmaBlockMetadata::CheckAllocation(
5874 uint32_t currentFrameIndex,
5875 uint32_t frameInUseCount,
5876 VkDeviceSize bufferImageGranularity,
5877 VkDeviceSize allocSize,
5878 VkDeviceSize allocAlignment,
5879 VmaSuballocationType allocType,
5880 VmaSuballocationList::const_iterator suballocItem,
5881 bool canMakeOtherLost,
5882 VkDeviceSize* pOffset,
5883 size_t* itemsToMakeLostCount,
5884 VkDeviceSize* pSumFreeSize,
5885 VkDeviceSize* pSumItemSize)
const 5887 VMA_ASSERT(allocSize > 0);
5888 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5889 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5890 VMA_ASSERT(pOffset != VMA_NULL);
5892 *itemsToMakeLostCount = 0;
5896 if(canMakeOtherLost)
5898 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5900 *pSumFreeSize = suballocItem->size;
// A live allocation only qualifies if it is losable and old enough
// (last use outside the frameInUseCount window).
5904 if(suballocItem->hAllocation->CanBecomeLost() &&
5905 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5907 ++*itemsToMakeLostCount;
5908 *pSumItemSize = suballocItem->size;
// Even spanning to the end of the block there isn't enough room.
5917 if(m_Size - suballocItem->offset < allocSize)
5923 *pOffset = suballocItem->offset;
// Debug margin before the allocation (unless at the very start).
5926 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5928 *pOffset += VMA_DEBUG_MARGIN;
5932 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5933 *pOffset = VmaAlignUp(*pOffset, alignment);
// If a previous suballocation of conflicting type shares the same
// granularity page, bump the offset up to the next page boundary.
5937 if(bufferImageGranularity > 1)
5939 bool bufferImageGranularityConflict =
false;
5940 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5941 while(prevSuballocItem != m_Suballocations.cbegin())
5944 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5945 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5947 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5949 bufferImageGranularityConflict =
true;
5957 if(bufferImageGranularityConflict)
5959 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed us past this suballocation entirely.
5965 if(*pOffset >= suballocItem->offset + suballocItem->size)
5971 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5974 VmaSuballocationList::const_iterator next = suballocItem;
5976 const VkDeviceSize requiredEndMargin =
5977 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5979 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5981 if(suballocItem->offset + totalSize > m_Size)
// Spanning multiple suballocations: accumulate free/losable sizes until
// the needed totalSize is covered.
5988 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5989 if(totalSize > suballocItem->size)
5991 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5992 while(remainingSize > 0)
5995 if(lastSuballocItem == m_Suballocations.cend())
5999 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6001 *pSumFreeSize += lastSuballocItem->size;
6005 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
6006 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
6007 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6009 ++*itemsToMakeLostCount;
6010 *pSumItemSize += lastSuballocItem->size;
6017 remainingSize = (lastSuballocItem->size < remainingSize) ?
6018 remainingSize - lastSuballocItem->size : 0;
// A later allocation on the same page with a conflicting type must also
// be losable, or the placement fails (elided return).
6024 if(bufferImageGranularity > 1)
6026 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
6028 while(nextSuballocItem != m_Suballocations.cend())
6030 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6031 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6033 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
6035 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
6036 if(nextSuballoc.hAllocation->CanBecomeLost() &&
6037 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6039 ++*itemsToMakeLostCount;
// ---- non-lost path: suballocItem must be a single FREE range that fits ----
6058 const VmaSuballocation& suballoc = *suballocItem;
6059 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6061 *pSumFreeSize = suballoc.size;
6064 if(suballoc.size < allocSize)
6070 *pOffset = suballoc.offset;
6073 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
6075 *pOffset += VMA_DEBUG_MARGIN;
6079 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
6080 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity-conflict scan as in the canMakeOtherLost path.
6084 if(bufferImageGranularity > 1)
6086 bool bufferImageGranularityConflict =
false;
6087 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6088 while(prevSuballocItem != m_Suballocations.cbegin())
6091 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6092 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6094 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6096 bufferImageGranularityConflict =
true;
6104 if(bufferImageGranularityConflict)
6106 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
6111 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
6114 VmaSuballocationList::const_iterator next = suballocItem;
6116 const VkDeviceSize requiredEndMargin =
6117 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fit check within this single free suballocation.
6120 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity conflict here is fatal (cannot make anything lost).
6127 if(bufferImageGranularity > 1)
6129 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
6131 while(nextSuballocItem != m_Suballocations.cend())
6133 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6134 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6136 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a FREE suballocation with its (also FREE, asserted) immediate
// successor: sizes are summed into `item`, the successor node is erased, and
// m_FreeCount is decremented (on an elided line). Caller must have already
// unregistered the successor from the by-size list.
6155 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
6157 VMA_ASSERT(item != m_Suballocations.end());
6158 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
// The ++nextItem advance is on an elided line between these asserts.
6160 VmaSuballocationList::iterator nextItem = item;
6162 VMA_ASSERT(nextItem != m_Suballocations.end());
6163 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6165 item->size += nextItem->size;
6167 m_Suballocations.erase(nextItem);
// Converts the given suballocation to FREE, updates m_SumFreeSize (and, on
// elided lines, m_FreeCount), then coalesces with FREE neighbors on either
// side. Returns the iterator of the resulting (possibly merged) free
// suballocation, which has been (re)registered in the by-size list.
6170 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
6173 VmaSuballocation& suballoc = *suballocItem;
6174 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6175 suballoc.hAllocation = VK_NULL_HANDLE;
6179 m_SumFreeSize += suballoc.size;
6182 bool mergeWithNext =
false;
6183 bool mergeWithPrev =
false;
// The ++nextItem advance is on an elided line before this check.
6185 VmaSuballocationList::iterator nextItem = suballocItem;
6187 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
6189 mergeWithNext =
true;
6192 VmaSuballocationList::iterator prevItem = suballocItem;
6193 if(suballocItem != m_Suballocations.begin())
6196 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6198 mergeWithPrev =
true;
// Merge forward: absorb the next free item into this one.
6204 UnregisterFreeSuballocation(nextItem);
6205 MergeFreeWithNext(suballocItem);
// Merge backward: absorb this item into the previous free one, then the
// previous item represents the whole range and is re-registered/returned.
6210 UnregisterFreeSuballocation(prevItem);
6211 MergeFreeWithNext(prevItem);
6212 RegisterFreeSuballocation(prevItem);
6217 RegisterFreeSuballocation(suballocItem);
6218 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize, keeping it
// sorted by size. Items smaller than VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER
// are deliberately not tracked (too small to be useful for best-fit search).
6222 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
6224 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6225 VMA_ASSERT(item->size > 0);
6229 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6231 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6233 if(m_FreeSuballocationsBySize.empty())
6235 m_FreeSuballocationsBySize.push_back(item);
// Sorted insert preserves the ascending-by-size invariant.
6239 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize. Binary-searches
// to the first entry of equal size, then scans forward through the run of
// equal-sized entries to find the exact iterator. Items below the registration
// threshold were never inserted, so nothing to do for them. Asserts if the item
// should be present but isn't.
6247 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
6249 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6250 VMA_ASSERT(item->size > 0);
6254 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6256 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6258 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6259 m_FreeSuballocationsBySize.data(),
6260 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
6262 VmaSuballocationItemSizeLess());
6263 for(
size_t index = it - m_FreeSuballocationsBySize.data();
6264 index < m_FreeSuballocationsBySize.size();
// Several equal-size entries can exist; match on the iterator itself.
6267 if(m_FreeSuballocationsBySize[index] == item)
6269 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Walked past the run of equal sizes without a match — corrupt free list.
6272 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
6274 VMA_ASSERT(0 &&
"Not found.");
// Constructor: creates an uninitialized block (no VkDeviceMemory yet, no
// mapping). Real initialization happens in Init() after vkAllocateMemory.
6283 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
6284 m_Metadata(hAllocator),
6285 m_MemoryTypeIndex(UINT32_MAX),
6286 m_hMemory(VK_NULL_HANDLE),
6288 m_pMappedData(VMA_NULL)
// Attaches freshly allocated VkDeviceMemory to this block and initializes the
// suballocation metadata for the whole size. Must be called exactly once
// (asserted: no memory attached yet).
6292 void VmaDeviceMemoryBlock::Init(
6293 uint32_t newMemoryTypeIndex,
6294 VkDeviceMemory newMemory,
6295 VkDeviceSize newSize)
6297 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6299 m_MemoryTypeIndex = newMemoryTypeIndex;
6300 m_hMemory = newMemory;
6302 m_Metadata.Init(newSize);
// Returns the block's VkDeviceMemory to the allocator (vkFreeMemory via
// FreeVulkanMemory). All suballocations must have been freed first (asserted),
// otherwise device memory of live allocations would be pulled out from under them.
6305 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
6309 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6311 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6312 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6313 m_hMemory = VK_NULL_HANDLE;
// Sanity check: the block must hold real device memory of non-zero size
// (failure return elided), then defers to the metadata's own validation.
6316 bool VmaDeviceMemoryBlock::Validate()
const 6318 if((m_hMemory == VK_NULL_HANDLE) ||
6319 (m_Metadata.GetSize() == 0))
6324 return m_Metadata.Validate();
// Reference-counted persistent mapping of the whole block, guarded by m_Mutex
// (when the allocator uses mutexes). If already mapped, just bumps m_MapCount
// and returns the cached pointer; otherwise calls vkMapMemory (remaining
// arguments elided) and caches the pointer on success. ppData may be null when
// the caller only wants to pin the mapping.
6327 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
6334 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: just add references.
6337 m_MapCount += count;
6338 VMA_ASSERT(m_pMappedData != VMA_NULL);
6339 if(ppData != VMA_NULL)
6341 *ppData = m_pMappedData;
// First mapping: go through the (possibly user-overridden) vkMapMemory.
6347 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6348 hAllocator->m_hDevice,
6354 if(result == VK_SUCCESS)
6356 if(ppData != VMA_NULL)
6358 *ppData = m_pMappedData;
// Releases `count` references on the block's mapping; when the count reaches
// zero (check elided), calls vkUnmapMemory and clears the cached pointer.
// Unbalanced Unmap (count exceeds m_MapCount) triggers the assert below.
6366 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
6373 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6374 if(m_MapCount >= count)
6376 m_MapCount -= count;
6379 m_pMappedData = VMA_NULL;
6380 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
6385 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Binds a caller-created VkBuffer to this block's memory at the allocation's
// offset, under the block mutex so the bind is serialized against other
// map/bind operations on the same VkDeviceMemory. The allocation must be a
// block (sub)allocation belonging to this block (asserted).
// (buffer/memory arguments of vkBindBufferMemory are on elided lines)
6389 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
6394 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6395 hAllocation->GetBlock() ==
this);
6397 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6398 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
6399 hAllocator->m_hDevice,
6402 hAllocation->GetOffset());
// Image counterpart of BindBufferMemory: binds a VkImage to this block's
// memory at the allocation's offset via vkBindImageMemory, serialized by the
// block mutex. Same ownership assertion as for buffers.
6405 VkResult VmaDeviceMemoryBlock::BindImageMemory(
6410 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6411 hAllocation->GetBlock() ==
this);
6413 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6414 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
6415 hAllocator->m_hDevice,
6418 hAllocation->GetOffset());
// NOTE(review): this span is a heavily elided extraction of several small
// definitions — the stat-info init helper (the memset on original line 6423,
// its signature lost), VmaPostprocessCalcStatInfo (body lost), and the
// VmaPool_T constructor/destructor (only the member-init arguments forwarded
// to the pool's block vector survive). Confirm details against the full source.
6423 memset(&outInfo, 0,
sizeof(outInfo));
6442 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T ctor: forwards the createInfo fields into its VmaBlockVector.
6450 VmaPool_T::VmaPool_T(
6455 createInfo.memoryTypeIndex,
6456 createInfo.blockSize,
6457 createInfo.minBlockCount,
6458 createInfo.maxBlockCount,
6460 createInfo.frameInUseCount,
6465 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores the allocation policy for a sequence of
// VmaDeviceMemoryBlock of one memory type — preferred block size, min/max
// block counts, buffer-image granularity, frame-in-use window, and whether
// this vector backs a custom pool. The block list starts empty; the
// defragmentator is created lazily (see EnsureDefragmentator).
6469 #if VMA_STATS_STRING_ENABLED 6471 #endif // #if VMA_STATS_STRING_ENABLED 6473 VmaBlockVector::VmaBlockVector(
6475 uint32_t memoryTypeIndex,
6476 VkDeviceSize preferredBlockSize,
6477 size_t minBlockCount,
6478 size_t maxBlockCount,
6479 VkDeviceSize bufferImageGranularity,
6480 uint32_t frameInUseCount,
6481 bool isCustomPool) :
6482 m_hAllocator(hAllocator),
6483 m_MemoryTypeIndex(memoryTypeIndex),
6484 m_PreferredBlockSize(preferredBlockSize),
6485 m_MinBlockCount(minBlockCount),
6486 m_MaxBlockCount(maxBlockCount),
6487 m_BufferImageGranularity(bufferImageGranularity),
6488 m_FrameInUseCount(frameInUseCount),
6489 m_IsCustomPool(isCustomPool),
6490 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6491 m_HasEmptyBlock(false),
6492 m_pDefragmentator(VMA_NULL)
// Destructor: the defragmentator must already be destroyed (asserted); each
// remaining block releases its VkDeviceMemory (Destroy) and is then deleted
// through the allocator's callbacks. Reverse iteration avoids index shifting.
6496 VmaBlockVector::~VmaBlockVector()
6498 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6500 for(
size_t i = m_Blocks.size(); i--; )
6502 m_Blocks[i]->Destroy(m_hAllocator);
6503 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-allocates m_MinBlockCount blocks of the preferred size, stopping (and,
// on an elided line, returning the error) on the first failure.
6507 VkResult VmaBlockVector::CreateMinBlocks()
6509 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6511 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6512 if(res != VK_SUCCESS)
// Aggregates pool statistics over all blocks under the vector mutex: each
// block's metadata adds its numbers into *pStats (initialization of the
// struct happens on elided lines before the loop).
6520 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6528 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6530 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6532 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6534 VMA_HEAVY_ASSERT(pBlock->Validate());
6535 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on retries of the "make other allocations lost" strategy below.
6539 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocation entry point for this block vector. Strategy, in order:
//  1. Try every existing block with CreateAllocationRequest (no lost
//     allocations permitted on this pass).
//  2. If allowed (canCreateNewBlock, computed from elided createInfo flags),
//     create a new block — for the default (non-custom-pool) vector, first try
//     progressively halved block sizes (up to 1/8) so small workloads don't
//     commit a full preferred-size block, and retry halving on OOM.
//  3. If the caller allows making other allocations lost, repeatedly
//     (VMA_ALLOCATION_TRY_COUNT times) pick the block+request with the lowest
//     CalcCost(), make the victims lost, and allocate there.
// On every success path: persistent-map the block if requested, construct the
// VmaAllocation_T, commit via m_Metadata.Alloc, InitBlockAllocation, and
// attach user data. Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when nothing worked.
// (review note: many condition/argument/return lines are elided by extraction)
6541 VkResult VmaBlockVector::Allocate(
6543 uint32_t currentFrameIndex,
6544 const VkMemoryRequirements& vkMemReq,
6546 VmaSuballocationType suballocType,
6552 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Pass 1: fit into an existing block without disturbing anything.
6556 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6558 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6559 VMA_ASSERT(pCurrBlock);
6560 VmaAllocationRequest currRequest = {};
6561 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6564 m_BufferImageGranularity,
6572 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistent mapping requested (condition elided): pin the block mapping.
6576 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6577 if(res != VK_SUCCESS)
// The block is about to hold an allocation, so it is no longer "empty".
6584 if(pCurrBlock->m_Metadata.IsEmpty())
6586 m_HasEmptyBlock =
false;
6589 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6590 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6591 (*pAllocation)->InitBlockAllocation(
6600 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6601 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6602 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Pass 2: create a new block if the max block count allows it.
6607 const bool canCreateNewBlock =
6609 (m_Blocks.size() < m_MaxBlockCount);
6612 if(canCreateNewBlock)
6615 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6616 uint32_t newBlockSizeShift = 0;
6617 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default pools start small: halve the preferred size while it still
// exceeds every existing block and is at least 2x the request.
6621 if(m_IsCustomPool ==
false)
6624 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6625 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6627 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6628 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6630 newBlockSize = smallerNewBlockSize;
6631 ++newBlockSizeShift;
6640 size_t newBlockIndex = 0;
6641 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device-memory failure, keep halving (down to the request size)
// and retry — again only for non-custom pools.
6643 if(m_IsCustomPool ==
false)
6645 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6647 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6648 if(smallerNewBlockSize >= vkMemReq.size)
6650 newBlockSize = smallerNewBlockSize;
6651 ++newBlockSizeShift;
6652 res = CreateBlock(newBlockSize, &newBlockIndex);
6661 if(res == VK_SUCCESS)
6663 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6664 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6668 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6669 if(res != VK_SUCCESS)
// Fresh block: the whole block becomes the first allocation request.
6676 VmaAllocationRequest allocRequest;
6677 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6678 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6679 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6680 (*pAllocation)->InitBlockAllocation(
6683 allocRequest.offset,
6689 VMA_HEAVY_ASSERT(pBlock->Validate());
6690 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6691 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Pass 3: evict losable allocations, bounded by VMA_ALLOCATION_TRY_COUNT.
6699 if(canMakeOtherLost)
6701 uint32_t tryIndex = 0;
6702 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6704 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6705 VmaAllocationRequest bestRequest = {};
6706 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Find the cheapest request across all blocks (cost 0 = nothing lost,
// stop searching immediately via the elided break).
6710 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6712 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6713 VMA_ASSERT(pCurrBlock);
6714 VmaAllocationRequest currRequest = {};
6715 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6718 m_BufferImageGranularity,
6725 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6726 if(pBestRequestBlock == VMA_NULL ||
6727 currRequestCost < bestRequestCost)
6729 pBestRequestBlock = pCurrBlock;
6730 bestRequest = currRequest;
6731 bestRequestCost = currRequestCost;
6733 if(bestRequestCost == 0)
6741 if(pBestRequestBlock != VMA_NULL)
6745 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6746 if(res != VK_SUCCESS)
// Victims may have been touched since the request was computed, so
// MakeRequestedAllocationsLost can fail and we retry the whole pass.
6752 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6758 if(pBestRequestBlock->m_Metadata.IsEmpty())
6760 m_HasEmptyBlock =
false;
6763 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6764 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6765 (*pAllocation)->InitBlockAllocation(
6774 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6775 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6776 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Exhausting all retries means allocations kept racing back in.
6790 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6792 return VK_ERROR_TOO_MANY_OBJECTS;
6796 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a block suballocation: under the mutex, drops the persistent-map
// reference if the allocation held one, returns the range to the block's
// metadata, and manages the "keep at most one empty block" policy — if the
// freed block became empty and an empty block already exists (and we're above
// m_MinBlockCount), this block is removed for destruction; otherwise it is
// retained as the single cached empty block. The actual VkDeviceMemory release
// happens outside the mutex to keep the critical section short.
6799 void VmaBlockVector::Free(
6802 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock (braces elided): everything until the final Destroy.
6806 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6808 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
6810 if(hAllocation->IsPersistentMap())
6812 pBlock->Unmap(m_hAllocator, 1);
6815 pBlock->m_Metadata.Free(hAllocation);
6816 VMA_HEAVY_ASSERT(pBlock->Validate());
6818 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
6821 if(pBlock->m_Metadata.IsEmpty())
// Already have an empty block cached — this one can go.
6824 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6826 pBlockToDelete = pBlock;
6832 m_HasEmptyBlock =
true;
// Freed block not empty, but an empty one may now be redundant at the back.
6837 else if(m_HasEmptyBlock)
6839 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6840 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6842 pBlockToDelete = pLastBlock;
6843 m_Blocks.pop_back();
6844 m_HasEmptyBlock =
false;
6848 IncrementallySortBlocks();
// Deferred destruction, outside the mutex.
6853 if(pBlockToDelete != VMA_NULL)
6855 VMA_DEBUG_LOG(
" Deleted empty allocation");
6856 pBlockToDelete->Destroy(m_hAllocator);
6857 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block (result initialization and
// final return elided). Scans from the back and stops early once a block at
// least as large as the preferred size is found — nothing bigger is expected.
6861 size_t VmaBlockVector::CalcMaxBlockSize()
const 6864 for(
size_t i = m_Blocks.size(); i--; )
6866 result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
6867 if(result >= m_PreferredBlockSize)
// Removes (but does not destroy) the given block from m_Blocks; linear search
// by pointer, return after removal is on an elided line.
6875 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6877 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6879 if(m_Blocks[blockIndex] == pBlock)
6881 VmaVectorRemove(m_Blocks, blockIndex)
// One bubble-sort pass moving blocks with more free space toward the end, so
// allocation scans hit fuller blocks first. A single adjacent swap per call is
// enough because the order only drifts gradually between calls.
6888 void VmaBlockVector::IncrementallySortBlocks()
6891 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6893 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6895 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i])
// Allocates VkDeviceMemory of blockSize for this vector's memory type, wraps
// it in a new VmaDeviceMemoryBlock (Init receives the memory handle on elided
// argument lines), appends it to m_Blocks, and optionally reports its index.
// The early return on AllocateVulkanMemory failure is elided.
6901 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6903 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6904 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6905 allocInfo.allocationSize = blockSize;
6906 VkDeviceMemory mem = VK_NULL_HANDLE;
6907 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6916 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6920 allocInfo.allocationSize);
6922 m_Blocks.push_back(pBlock);
6923 if(pNewBlockIndex != VMA_NULL)
6925 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as JSON for vmaBuildStatsString. Custom pools
// write MemoryTypeIndex/BlockSize/BlockCount{Min,Max,Cur}/FrameInUseCount;
// the default vector writes PreferredBlockSize (the branch selecting between
// the two layouts is elided). Then every block's metadata map is emitted
// under "Blocks". Runs under the vector mutex.
6931 #if VMA_STATS_STRING_ENABLED 6933 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6935 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool layout.
6941 json.WriteString(
"MemoryTypeIndex");
6942 json.WriteNumber(m_MemoryTypeIndex);
6944 json.WriteString(
"BlockSize");
6945 json.WriteNumber(m_PreferredBlockSize);
6947 json.WriteString(
"BlockCount");
6948 json.BeginObject(
true);
6949 if(m_MinBlockCount > 0)
6951 json.WriteString(
"Min");
6952 json.WriteNumber((uint64_t)m_MinBlockCount);
6954 if(m_MaxBlockCount < SIZE_MAX)
6956 json.WriteString(
"Max");
6957 json.WriteNumber((uint64_t)m_MaxBlockCount);
6959 json.WriteString(
"Cur");
6960 json.WriteNumber((uint64_t)m_Blocks.size());
6963 if(m_FrameInUseCount > 0)
6965 json.WriteString(
"FrameInUseCount");
6966 json.WriteNumber(m_FrameInUseCount);
// Default-vector layout.
6971 json.WriteString(
"PreferredBlockSize");
6972 json.WriteNumber(m_PreferredBlockSize);
6975 json.WriteString(
"Blocks");
6977 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6979 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates the defragmentator for this block vector (constructor
// arguments on elided lines) and returns it; subsequent calls reuse the
// existing instance.
6986 #endif // #if VMA_STATS_STRING_ENABLED 6988 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6990 uint32_t currentFrameIndex)
6992 if(m_pDefragmentator == VMA_NULL)
6994 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
7000 return m_pDefragmentator;
// Runs one defragmentation pass on this block vector (requires
// EnsureDefragmentator to have been called; null check's return is elided).
// Under the mutex: delegates to the defragmentator with the byte/allocation
// move budgets, accumulates the moved totals into *pDefragmentationStats and
// deducts them from the in/out budgets (accumulation lines elided), then
// destroys all empty blocks above m_MinBlockCount — keeping m_HasEmptyBlock
// accurate for any empty block that must be retained.
7003 VkResult VmaBlockVector::Defragment(
7005 VkDeviceSize& maxBytesToMove,
7006 uint32_t& maxAllocationsToMove)
7008 if(m_pDefragmentator == VMA_NULL)
7013 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7016 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
7019 if(pDefragmentationStats != VMA_NULL)
7021 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
7022 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must respect the budgets it was given.
7025 VMA_ASSERT(bytesMoved <= maxBytesToMove);
7026 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free all empty blocks the pass produced (beyond the minimum count).
7032 m_HasEmptyBlock =
false;
7033 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
7035 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
7036 if(pBlock->m_Metadata.IsEmpty())
7038 if(m_Blocks.size() > m_MinBlockCount)
7040 if(pDefragmentationStats != VMA_NULL)
7043 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
7046 VmaVectorRemove(m_Blocks, blockIndex);
7047 pBlock->Destroy(m_hAllocator);
7048 vma_delete(m_hAllocator, pBlock);
// Empty block kept because of m_MinBlockCount.
7052 m_HasEmptyBlock =
true;
// Deletes the lazily created defragmentator (if any) and resets the pointer,
// restoring the precondition asserted in ~VmaBlockVector.
7060 void VmaBlockVector::DestroyDefragmentator()
7062 if(m_pDefragmentator != VMA_NULL)
7064 vma_delete(m_hAllocator, m_pDefragmentator);
7065 m_pDefragmentator = VMA_NULL;
// Forces every losable allocation in every block of this vector to become
// lost (used by vmaMakePoolAllocationsLost). Runs under the vector mutex and
// optionally reports the total number of allocations affected.
7069 void VmaBlockVector::MakePoolAllocationsLost(
7070 uint32_t currentFrameIndex,
7071 size_t* pLostAllocationCount)
7073 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7074 size_t lostAllocationCount = 0;
7075 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7077 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7079 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
7081 if(pLostAllocationCount != VMA_NULL)
7083 *pLostAllocationCount = lostAllocationCount;
// Accumulates per-block statistics into the global VmaStats structure, at all
// three levels: grand total, this memory type, and this memory type's heap.
// Runs under the vector mutex; each block is heavy-validated in debug builds.
7087 void VmaBlockVector::AddStats(
VmaStats* pStats)
7089 const uint32_t memTypeIndex = m_MemoryTypeIndex;
7090 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
7092 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7094 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7096 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7098 VMA_HEAVY_ASSERT(pBlock->Validate());
// allocationStatInfo is declared on an elided line above this call.
7100 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
7101 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7102 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7103 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one allocator and one block vector,
// snapshotting the frame index at creation time. Both internal vectors use the
// allocator's allocation callbacks via VmaStlAllocator.
7110 VmaDefragmentator::VmaDefragmentator(
7112 VmaBlockVector* pBlockVector,
7113 uint32_t currentFrameIndex) :
7114 m_hAllocator(hAllocator),
7115 m_pBlockVector(pBlockVector),
7116 m_CurrentFrameIndex(currentFrameIndex),
7118 m_AllocationsMoved(0),
7119 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
7120 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: frees each owned BlockInfo. Iterates backwards (size()..0) so
// the index stays valid as entries are destroyed.
7124 VmaDefragmentator::~VmaDefragmentator()
7126 for(
size_t i = m_Blocks.size(); i--; )
7128 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a candidate for defragmentation. pChanged, if
// not null, will be set to VK_TRUE later if the allocation actually moves.
7132 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
7134 AllocationInfo allocInfo;
7135 allocInfo.m_hAllocation = hAlloc;
7136 allocInfo.m_pChanged = pChanged;
7137 m_Allocations.push_back(allocInfo);
// Returns (via ppMappedData) a CPU pointer to this block's memory, mapping it
// only if necessary. Order of preference: a mapping already made for
// defragmentation, then a pre-existing persistent mapping on the block, and
// only as a last resort a new Map() whose handle is remembered so Unmap() can
// undo it.
7140 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
7143 if(m_pMappedDataForDefragmentation)
7145 *ppMappedData = m_pMappedDataForDefragmentation;
// Block already persistently mapped by the user: reuse that pointer, nothing to undo.
7150 if(m_pBlock->GetMappedData())
7152 *ppMappedData = m_pBlock->GetMappedData();
// Map with refcount 1; the result is cached in m_pMappedDataForDefragmentation.
7157 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
7158 *ppMappedData = m_pMappedDataForDefragmentation;
// Undoes the mapping created by EnsureMapping(), if any. A block that was
// already persistently mapped (m_pMappedDataForDefragmentation == null) is
// deliberately left mapped.
7162 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
7164 if(m_pMappedDataForDefragmentation != VMA_NULL)
7166 m_pBlock->Unmap(hAllocator, 1);
// One round of defragmentation: repeatedly takes the "last" allocation of the
// "last" block (blocks are pre-sorted so good move destinations come first)
// and tries to re-place it into an earlier block at a lower offset, copying
// the bytes through mapped pointers. Stops with VK_INCOMPLETE when either
// budget (maxBytesToMove / maxAllocationsToMove) would be exceeded.
7170 VkResult VmaDefragmentator::DefragmentRound(
7171 VkDeviceSize maxBytesToMove,
7172 uint32_t maxAllocationsToMove)
7174 if(m_Blocks.empty())
// Cursor over the source: start at the last block, srcAllocIndex == SIZE_MAX
// means "not yet positioned / wrapped", fixed up by the while loop below.
7179 size_t srcBlockIndex = m_Blocks.size() - 1;
7180 size_t srcAllocIndex = SIZE_MAX;
7186 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
7188 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
7191 if(srcBlockIndex == 0)
7198 srcAllocIndex = SIZE_MAX;
7203 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
7207 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
7208 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
// Snapshot the source allocation's placement parameters before moving it.
7210 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
7211 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
7212 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
7213 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source block.
7216 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
7218 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
7219 VmaAllocationRequest dstAllocRequest;
7220 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
7221 m_CurrentFrameIndex,
7222 m_pBlockVector->GetFrameInUseCount(),
7223 m_pBlockVector->GetBufferImageGranularity(),
7228 &dstAllocRequest) &&
// MoveMakesSense() rejects moves that would not reduce fragmentation.
7230 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
7232 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check happens before any side effect of this move.
7235 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
7236 (m_BytesMoved + size > maxBytesToMove))
7238 return VK_INCOMPLETE;
7241 void* pDstMappedData = VMA_NULL;
7242 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
7243 if(res != VK_SUCCESS)
7248 void* pSrcMappedData = VMA_NULL;
7249 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
7250 if(res != VK_SUCCESS)
// Raw byte copy between the two mapped regions (memcpy call start appears to
// have been dropped from this extract — these are its arguments).
7257 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
7258 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
7259 static_cast<size_t>(size));
// Commit: register in destination metadata, free from source, and repoint the
// allocation handle at its new block/offset.
7261 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
7262 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
7264 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
7266 if(allocInfo.m_pChanged != VMA_NULL)
7268 *allocInfo.m_pChanged = VK_TRUE;
7271 ++m_AllocationsMoved;
7272 m_BytesMoved += size;
7274 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the source cursor to the next candidate (earlier alloc, then earlier block).
7282 if(srcAllocIndex > 0)
7288 if(srcBlockIndex > 0)
7291 srcAllocIndex = SIZE_MAX;
// Top-level driver: builds per-block bookkeeping, buckets the registered
// allocations into their owning blocks, sorts blocks by desirability as a
// move destination, then runs up to 2 rounds of DefragmentRound() within the
// given byte/count budgets. Finally unmaps anything mapped along the way.
7301 VkResult VmaDefragmentator::Defragment(
7302 VkDeviceSize maxBytesToMove,
7303 uint32_t maxAllocationsToMove)
7305 if(m_Allocations.empty())
// One BlockInfo per device memory block of the target block vector.
7311 const size_t blockCount = m_pBlockVector->m_Blocks.size();
7312 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7314 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7315 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7316 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be bucketed via binary search.
7320 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
7323 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7325 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Allocations already lost are simply dropped from consideration.
7327 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7329 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7330 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7331 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7333 (*it)->m_Allocations.push_back(allocInfo);
7341 m_Allocations.clear();
7343 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7345 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7346 pBlockInfo->CalcHasNonMovableAllocations();
// (sic) "Descecnding" is the upstream spelling of this helper's name.
7347 pBlockInfo->SortAllocationsBySizeDescecnding();
// Re-sort: best move destinations first (criteria live in the comparator).
7351 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
7354 VkResult result = VK_SUCCESS;
7355 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7357 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Unmap everything EnsureMapping() mapped during the rounds.
7361 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7363 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic: a move is worthwhile only if it lands in an earlier block, or in
// the same block at a strictly lower offset. (The trailing return statements
// are not visible in this extract.)
7369 bool VmaDefragmentator::MoveMakesSense(
7370 size_t dstBlockIndex, VkDeviceSize dstOffset,
7371 size_t srcBlockIndex, VkDeviceSize srcOffset)
7373 if(dstBlockIndex < srcBlockIndex)
7377 if(dstBlockIndex > srcBlockIndex)
7381 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (the declaration line and first initializers are
// not visible in this extract). Copies creation parameters, zero-initializes
// all cached Vulkan property structs and per-memory-type tables, queries
// device/memory properties, applies optional per-heap size limits, and
// creates one VmaBlockVector plus one dedicated-allocation list per memory type.
7394 m_hDevice(pCreateInfo->device),
7395 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
7396 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7397 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7398 m_PreferredLargeHeapBlockSize(0),
7399 m_PhysicalDevice(pCreateInfo->physicalDevice),
7400 m_CurrentFrameIndex(0),
7401 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks()))
// Fail loudly if the user requested KHR dedicated allocation but the
// extension support was compiled out via VMA_DEDICATED_ALLOCATION == 0.
7405 #if !(VMA_DEDICATED_ALLOCATION) 7408 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
7412 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7413 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7414 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7416 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7417 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE acts as the sentinel for "no limit" on a heap.
7419 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7421 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query and cache physical-device and memory properties once, up front.
7432 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7433 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Optional user-provided per-heap limits also clamp the reported heap sizes,
// so later "preferred block size" math respects the limit.
7440 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7442 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7443 if(limit != VK_WHOLE_SIZE)
7445 m_HeapSizeLimit[heapIndex] = limit;
7446 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7448 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Per-memory-type infrastructure: a default block vector and a vector that
// tracks dedicated (non-block) allocations.
7454 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7456 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7458 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7464 GetBufferImageGranularity(),
7469 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Destructor: all user pools must already be destroyed. Tears down the
// per-memory-type dedicated-allocation lists and block vectors in reverse order.
7473 VmaAllocator_T::~VmaAllocator_T()
7475 VMA_ASSERT(m_Pools.empty());
7477 for(
size_t i = GetMemoryTypeCount(); i--; )
7479 vma_delete(
this, m_pDedicatedAllocations[i]);
7480 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions. With static linking enabled, defaults come from
// the statically linked Vulkan entry points (plus vkGetDeviceProcAddr lookups
// for the KHR dedicated-allocation pair). User-supplied pointers in
// pVulkanFunctions then override any defaults, one by one.
7484 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
7486 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7487 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7488 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7489 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7490 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7491 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7492 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7493 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7494 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7495 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7496 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7497 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7498 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7499 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7500 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// Extension entry points are not exported statically; fetch them per-device.
7501 #if VMA_DEDICATED_ALLOCATION 7502 if(m_UseKhrDedicatedAllocation)
7504 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7505 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7506 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7507 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User overrides: any non-null pointer in pVulkanFunctions wins.
7509 #endif // #if VMA_DEDICATED_ALLOCATION 7510 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7512 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7513 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7515 if(pVulkanFunctions != VMA_NULL)
7517 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7518 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7519 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7520 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7521 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7522 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7523 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7524 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7525 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7526 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7527 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7528 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7529 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7530 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7531 #if VMA_DEDICATED_ALLOCATION 7532 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7533 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Tail of ImportVulkanFunctions (#undef) followed by validation asserts that
// every required function pointer ended up non-null — the 2KHR pair only when
// KHR dedicated allocation is actually in use. (The enclosing function
// signature for these asserts is not visible in this extract.)
7537 #undef VMA_COPY_IF_NOT_NULL 7541 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7542 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7543 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7544 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7545 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7546 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7547 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7548 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7549 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7550 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7551 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7552 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7553 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7554 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7555 #if VMA_DEDICATED_ALLOCATION 7556 if(m_UseKhrDedicatedAllocation)
7558 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7559 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// Chooses the default VkDeviceMemory block size for a memory type: 1/8 of the
// heap for "small" heaps (<= VMA_SMALL_HEAP_MAX_SIZE), otherwise the
// configured preferred large-heap block size.
7564 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7566 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7567 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7568 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7569 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory from one specific memory type. Tries the shared block
// vector first unless a dedicated allocation is preferred (debug flag, caller
// request, or size > half the preferred block size), in which case dedicated
// memory is tried directly; dedicated memory is also the fallback when block
// allocation fails. Several lines of this function (finalCreateInfo setup,
// some arguments) are not visible in this extract.
7572 VkResult VmaAllocator_T::AllocateMemoryOfType(
7573 const VkMemoryRequirements& vkMemReq,
7574 bool dedicatedAllocation,
7575 VkBuffer dedicatedBuffer,
7576 VkImage dedicatedImage,
7578 uint32_t memTypeIndex,
7579 VmaSuballocationType suballocType,
7582 VMA_ASSERT(pAllocation != VMA_NULL);
7583 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapped-bit handling depends on the memory type being HOST_VISIBLE.
7589 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7594 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7595 VMA_ASSERT(blockVector);
// Heuristic for going straight to a dedicated VkDeviceMemory allocation.
7597 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7598 bool preferDedicatedMemory =
7599 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7600 dedicatedAllocation ||
7602 vkMemReq.size > preferredBlockSize / 2;
// Dedicated path is only valid for the default pool (pool == VK_NULL_HANDLE).
7604 if(preferDedicatedMemory &&
7606 finalCreateInfo.
pool == VK_NULL_HANDLE)
7615 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7619 return AllocateDedicatedMemory(
// Otherwise: normal path through the block vector for this memory type.
7633 VkResult res = blockVector->Allocate(
7635 m_CurrentFrameIndex.load(),
7640 if(res == VK_SUCCESS)
7648 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block allocation failed: fall back to a dedicated allocation.
7652 res = AllocateDedicatedMemory(
7658 finalCreateInfo.pUserData,
7662 if(res == VK_SUCCESS)
7665 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7671 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates one dedicated VkDeviceMemory for a single allocation. Optionally
// chains VkMemoryDedicatedAllocateInfoKHR for a specific buffer or image,
// maps the memory if requested, constructs the VmaAllocation_T, and registers
// it in the sorted per-memory-type dedicated-allocations list.
7678 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7680 VmaSuballocationType suballocType,
7681 uint32_t memTypeIndex,
7683 bool isUserDataString,
7685 VkBuffer dedicatedBuffer,
7686 VkImage dedicatedImage,
7689 VMA_ASSERT(pAllocation);
7691 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7692 allocInfo.memoryTypeIndex = memTypeIndex;
7693 allocInfo.allocationSize = size;
// At most one of dedicatedBuffer / dedicatedImage may be set; the pNext chain
// is only populated when KHR dedicated allocation is in use.
7695 #if VMA_DEDICATED_ALLOCATION 7696 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7697 if(m_UseKhrDedicatedAllocation)
7699 if(dedicatedBuffer != VK_NULL_HANDLE)
7701 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7702 dedicatedAllocInfo.buffer = dedicatedBuffer;
7703 allocInfo.pNext = &dedicatedAllocInfo;
7705 else if(dedicatedImage != VK_NULL_HANDLE)
7707 dedicatedAllocInfo.image = dedicatedImage;
7708 allocInfo.pNext = &dedicatedAllocInfo;
7711 #endif // #if VMA_DEDICATED_ALLOCATION 7714 VkDeviceMemory hMemory = VK_NULL_HANDLE;
// AllocateVulkanMemory also enforces the per-heap size limit.
7715 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7718 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
7722 void* pMappedData = VMA_NULL;
7725 res = (*m_VulkanFunctions.vkMapMemory)(
// On map failure the freshly allocated memory is released before returning.
7734 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7735 FreeVulkanMemory(memTypeIndex, size, hMemory);
7740 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7741 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7742 (*pAllocation)->SetUserData(
this, pUserData);
// Registration so CalculateStats / PrintDetailedMap / FreeDedicatedMemory can find it.
7746 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7747 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7748 VMA_ASSERT(pDedicatedAllocations);
7749 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7752 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. When KHR dedicated allocation is
// enabled, uses vkGetBufferMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR to also learn whether a dedicated
// allocation is required/preferred; otherwise falls back to the core query
// and reports both flags as false.
7757 void VmaAllocator_T::GetBufferMemoryRequirements(
7759 VkMemoryRequirements& memReq,
7760 bool& requiresDedicatedAllocation,
7761 bool& prefersDedicatedAllocation)
const 7763 #if VMA_DEDICATED_ALLOCATION 7764 if(m_UseKhrDedicatedAllocation)
7766 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7767 memReqInfo.buffer = hBuffer;
7769 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7771 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7772 memReq2.pNext = &memDedicatedReq;
7774 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7776 memReq = memReq2.memoryRequirements;
// Normalize VkBool32 to bool explicitly.
7777 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7778 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7781 #endif // #if VMA_DEDICATED_ALLOCATION 7783 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7784 requiresDedicatedAllocation =
false;
7785 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: same KHR-vs-core split,
// same dedicated-allocation flag reporting.
7789 void VmaAllocator_T::GetImageMemoryRequirements(
7791 VkMemoryRequirements& memReq,
7792 bool& requiresDedicatedAllocation,
7793 bool& prefersDedicatedAllocation)
const 7795 #if VMA_DEDICATED_ALLOCATION 7796 if(m_UseKhrDedicatedAllocation)
7798 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7799 memReqInfo.image = hImage;
7801 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7803 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7804 memReq2.pNext = &memDedicatedReq;
7806 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7808 memReq = memReq2.memoryRequirements;
7809 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7810 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Core fallback path: no way to learn dedicated-allocation preference.
7813 #endif // #if VMA_DEDICATED_ALLOCATION 7815 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7816 requiresDedicatedAllocation =
false;
7817 prefersDedicatedAllocation =
false;
// Main allocation entry point. Validates mutually exclusive create flags,
// routes pool allocations to the pool's block vector, and otherwise walks the
// memory types permitted by vkMemReq.memoryTypeBits: find the best index,
// try AllocateMemoryOfType, and on failure mask that type out and retry with
// the next-best index. Several condition/argument lines are not visible in
// this extract.
7821 VkResult VmaAllocator_T::AllocateMemory(
7822 const VkMemoryRequirements& vkMemReq,
7823 bool requiresDedicatedAllocation,
7824 bool prefersDedicatedAllocation,
7825 VkBuffer dedicatedBuffer,
7826 VkImage dedicatedImage,
7828 VmaSuballocationType suballocType,
// Invalid flag combinations are programmer errors: assert, then fail.
7834 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7835 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7840 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7841 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A required dedicated allocation is incompatible with NEVER_ALLOCATE and
// with custom pools.
7843 if(requiresDedicatedAllocation)
7847 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7848 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7850 if(createInfo.
pool != VK_NULL_HANDLE)
7852 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7853 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7856 if((createInfo.
pool != VK_NULL_HANDLE) &&
7859 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7860 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool: delegate straight to the pool's own block vector.
7863 if(createInfo.
pool != VK_NULL_HANDLE)
7865 return createInfo.
pool->m_BlockVector.Allocate(
7867 m_CurrentFrameIndex.load(),
// Default pools: iterate candidate memory types, best first.
7876 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7877 uint32_t memTypeIndex = UINT32_MAX;
7879 if(res == VK_SUCCESS)
7881 res = AllocateMemoryOfType(
7883 requiresDedicatedAllocation || prefersDedicatedAllocation,
7891 if(res == VK_SUCCESS)
// Failed for this type: remove it from the candidate mask and try the next.
7901 memoryTypeBits &= ~(1u << memTypeIndex);
7904 if(res == VK_SUCCESS)
7906 res = AllocateMemoryOfType(
7908 requiresDedicatedAllocation || prefersDedicatedAllocation,
7916 if(res == VK_SUCCESS)
7926 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation. Lost allocations have no backing memory, so the actual
// free is skipped for them; block allocations go back to their owning block
// vector (custom pool's or the default per-type one), dedicated allocations
// through FreeDedicatedMemory. The handle object itself is always destroyed.
7937 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7939 VMA_ASSERT(allocation);
7941 if(allocation->CanBecomeLost() ==
false ||
7942 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7944 switch(allocation->GetType())
7946 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7948 VmaBlockVector* pBlockVector = VMA_NULL;
7949 VmaPool hPool = allocation->GetPool();
7950 if(hPool != VK_NULL_HANDLE)
7952 pBlockVector = &hPool->m_BlockVector;
7956 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7957 pBlockVector = m_pBlockVectors[memTypeIndex];
7959 pBlockVector->Free(allocation);
7962 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7963 FreeDedicatedMemory(allocation);
// Clear user data first (may free an owned string), then destroy the handle.
7970 allocation->SetUserData(
this, VMA_NULL);
7971 vma_delete(
this, allocation);
// Computes global statistics: initializes all stat buckets, then accumulates
// (1) default block vectors per memory type, (2) every custom pool, and
// (3) every dedicated allocation, finally postprocessing totals and the
// per-type / per-heap buckets.
7974 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
7977 InitStatInfo(pStats->
total);
7978 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7980 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// (1) Default block vectors.
7984 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7986 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7987 VMA_ASSERT(pBlockVector);
7988 pBlockVector->AddStats(pStats);
// (2) Custom pools, under the pools mutex.
7993 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7994 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7996 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// (3) Dedicated allocations, per memory type, under that type's mutex.
8001 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8003 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
8004 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8005 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8006 VMA_ASSERT(pDedicatedAllocVector);
8007 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
8010 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
8011 VmaAddStatInfo(pStats->
total, allocationStatInfo);
8012 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
8013 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Postprocess (derive averages etc.) for each populated bucket.
8018 VmaPostprocessCalcStatInfo(pStats->
total);
8019 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
8020 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
8021 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
8022 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (4098 == 0x1002), used to detect AMD GPUs.
8025 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Public defragmentation driver at the allocator level. Zeroes the output
// arrays, registers each eligible allocation (block-type, HOST_VISIBLE, not
// lost) with a defragmentator on its owning block vector, runs Defragment on
// every default HOST_VISIBLE block vector and every custom pool within the
// optional budgets, and finally destroys all defragmentators. Some condition
// lines are not visible in this extract.
8027 VkResult VmaAllocator_T::Defragment(
8029 size_t allocationCount,
8030 VkBool32* pAllocationsChanged,
8034 if(pAllocationsChanged != VMA_NULL)
// NOTE(review): this memset only clears sizeof(*pAllocationsChanged) bytes —
// one VkBool32 — not allocationCount elements; verify against upstream intent.
8036 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
8038 if(pDefragmentationStats != VMA_NULL)
8040 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
8043 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
8045 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
8047 const size_t poolCount = m_Pools.size();
// Phase 1: hand each eligible allocation to its block vector's defragmentator.
8050 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
8054 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
8056 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
8058 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
8060 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
8062 VmaBlockVector* pAllocBlockVector = VMA_NULL;
8064 const VmaPool hAllocPool = hAlloc->GetPool();
8066 if(hAllocPool != VK_NULL_HANDLE)
8068 pAllocBlockVector = &hAllocPool->GetBlockVector();
8073 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
8076 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
8078 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
8079 &pAllocationsChanged[allocIndex] : VMA_NULL;
8080 pDefragmentator->AddAllocation(hAlloc, pChanged);
8084 VkResult result = VK_SUCCESS;
// Budgets default to "unlimited" when no VmaDefragmentationInfo is supplied.
8088 VkDeviceSize maxBytesToMove = SIZE_MAX;
8089 uint32_t maxAllocationsToMove = UINT32_MAX;
8090 if(pDefragmentationInfo != VMA_NULL)
// Phase 2: run defragmentation on default vectors (HOST_VISIBLE only), then pools.
8097 for(uint32_t memTypeIndex = 0;
8098 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
8102 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8104 result = m_pBlockVectors[memTypeIndex]->Defragment(
8105 pDefragmentationStats,
8107 maxAllocationsToMove);
8112 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
8114 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
8115 pDefragmentationStats,
8117 maxAllocationsToMove);
// Phase 3: tear down all defragmentators, pools first, in reverse order.
8123 for(
size_t poolIndex = poolCount; poolIndex--; )
8125 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
8129 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
8131 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8133 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Body of VmaAllocator_T::GetAllocationInfo (the signature lines are not
// visible in this extract). For allocations that can become lost it spins on
// a compare-exchange of the last-use frame index, reporting zeroed
// memory/offset fields once the allocation is confirmed lost; for ordinary
// allocations it simply copies the current properties into pAllocationInfo.
8142 if(hAllocation->CanBecomeLost())
8148 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8149 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: report size/userdata but no live memory binding.
8152 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8156 pAllocationInfo->
offset = 0;
8157 pAllocationInfo->
size = hAllocation->GetSize();
8159 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: safe to report the live binding as-is.
8162 else if(localLastUseFrameIndex == localCurrFrameIndex)
8164 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8165 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8166 pAllocationInfo->
offset = hAllocation->GetOffset();
8167 pAllocationInfo->
size = hAllocation->GetSize();
8169 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise: try to claim the current frame; on CAS failure the loop re-reads.
8174 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8176 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocation: straightforward copy of all fields.
8183 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8184 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8185 pAllocationInfo->
offset = hAllocation->GetOffset();
8186 pAllocationInfo->
size = hAllocation->GetSize();
8187 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
8188 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Lightweight version of GetAllocationInfo: same lost/current-frame/CAS logic
// but only returns whether the allocation is still usable, without filling an
// info struct. (Return statements are not visible in this extract.)
8192 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
8195 if(hAllocation->CanBecomeLost())
8197 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8198 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8201 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8205 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS loop: claim the current frame as the last-use frame.
8211 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8213 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature not visible in this extract):
// constructs the VmaPool_T from the (normalized) create info, pre-creates its
// minimum block count, and on success registers it in the sorted m_Pools list.
8226 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
8239 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Pre-allocate minBlockCount blocks; a failure destroys the half-built pool.
8241 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
8242 if(res != VK_SUCCESS)
8244 vma_delete(
this, *pPool);
8251 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8252 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Unregisters a pool from m_Pools (under the pools mutex) and destroys it.
// The assert catches double-destroy or a pool from a different allocator.
8258 void VmaAllocator_T::DestroyPool(
VmaPool pool)
8262 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8263 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
8264 VMA_ASSERT(success &&
"Pool not found in Allocator.");
8267 vma_delete(
this, pool);
// Body line of VmaAllocator_T::GetPoolStats (signature not visible in this
// extract): delegates to the pool's block vector.
8272 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, which drives
// the lost-allocation bookkeeping elsewhere in this file.
8275 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
8277 m_CurrentFrameIndex.store(frameIndex);
// Allocator-level wrapper: forwards to the pool's block vector with the
// current frame index. (The hPool parameter line is not visible in this extract.)
8280 void VmaAllocator_T::MakePoolAllocationsLost(
8282 size_t* pLostAllocationCount)
8284 hPool->m_BlockVector.MakePoolAllocationsLost(
8285 m_CurrentFrameIndex.load(),
8286 pLostAllocationCount);
// Creates an allocation object that is permanently in the "lost" state —
// useful as a placeholder handle that owns no memory.
8289 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
8291 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
8292 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory with per-heap size-limit accounting: when the heap
// has a limit, the remaining budget is checked and decremented under a mutex;
// an allocation exceeding the budget fails with OUT_OF_DEVICE_MEMORY without
// calling the driver. Also fires the user's pfnAllocate callback on success.
8295 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
8297 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
8300 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8302 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8303 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
8305 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8306 if(res == VK_SUCCESS)
// Budget is only consumed when the driver call actually succeeded.
8308 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
8313 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited heap: plain pass-through to the driver.
8318 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8321 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
8323 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Counterpart of AllocateVulkanMemory: fires the user's pfnFree callback,
// frees the device memory, and returns the bytes to the heap's budget when a
// per-heap limit is active.
8329 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
8331 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
8333 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
8336 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
8338 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
8339 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8341 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8342 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Lost-capable allocations cannot be
// mapped. Block allocations map the whole owning block (ref-counted) and
// return a pointer advanced by the allocation's offset; dedicated allocations
// delegate to their own map routine.
8346 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
8348 if(hAllocation->CanBecomeLost())
8350 return VK_ERROR_MEMORY_MAP_FAILED;
8353 switch(hAllocation->GetType())
8355 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8357 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8358 char *pBytes = VMA_NULL;
8359 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
8360 if(res == VK_SUCCESS)
// Offset into the shared block mapping; also bump the allocation's map count.
8362 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
8363 hAllocation->BlockAllocMap();
8367 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8368 return hAllocation->DedicatedAllocMap(
this, ppData);
8371 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (signature not visible in this extract):
// inverse of Map — decrements the allocation's map count and releases one
// reference on the block mapping, or delegates for dedicated allocations.
8377 switch(hAllocation->GetType())
8379 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8381 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8382 hAllocation->BlockAllocUnmap();
8383 pBlock->Unmap(
this, 1);
8386 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8387 hAllocation->DedicatedAllocUnmap(
this);
// Binds a VkBuffer to this allocation's memory. Dedicated allocations bind
// directly via vkBindBufferMemory; block allocations go through the block so
// the binding is serialized against other binds on the same VkDeviceMemory.
8394 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
8396 VkResult res = VK_SUCCESS;
8397 switch(hAllocation->GetType())
8399 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8400 res = GetVulkanFunctions().vkBindBufferMemory(
8403 hAllocation->GetMemory(),
8406 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8408 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
8409 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
8410 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: dedicated allocations bind directly,
// block allocations bind through the owning block.
8419 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
8421 VkResult res = VK_SUCCESS;
8422 switch(hAllocation->GetType())
8424 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8425 res = GetVulkanFunctions().vkBindImageMemory(
8428 hAllocation->GetMemory(),
8431 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8433 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
8434 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
8435 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Frees a dedicated allocation: removes it from the per-memory-type registry
// (under that type's mutex), unmaps the memory if it was persistently mapped,
// and releases the VkDeviceMemory through FreeVulkanMemory (which also
// restores any heap budget). The VmaAllocation_T object itself is destroyed
// by the caller (FreeMemory).
8444 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
8446 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
8448 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
8450 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8451 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8452 VMA_ASSERT(pDedicatedAllocations);
8453 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
8454 VMA_ASSERT(success);
8457 VkDeviceMemory hMemory = allocation->GetMemory();
// A non-null mapped pointer means this dedicated memory is currently mapped.
8459 if(allocation->GetMappedData() != VMA_NULL)
8461 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
8464 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
8466 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
8469 #if VMA_STATS_STRING_ENABLED 8471 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
8473 bool dedicatedAllocationsStarted =
false;
8474 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8476 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8477 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8478 VMA_ASSERT(pDedicatedAllocVector);
8479 if(pDedicatedAllocVector->empty() ==
false)
8481 if(dedicatedAllocationsStarted ==
false)
8483 dedicatedAllocationsStarted =
true;
8484 json.WriteString(
"DedicatedAllocations");
8488 json.BeginString(
"Type ");
8489 json.ContinueString(memTypeIndex);
8494 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8497 json.BeginObject(
true);
8499 json.WriteString(
"Type");
8500 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
8502 json.WriteString(
"Size");
8503 json.WriteNumber(hAlloc->GetSize());
8505 const void* pUserData = hAlloc->GetUserData();
8506 if(pUserData != VMA_NULL)
8508 json.WriteString(
"UserData");
8509 if(hAlloc->IsUserDataString())
8511 json.WriteString((
const char*)pUserData);
8516 json.ContinueString_Pointer(pUserData);
8527 if(dedicatedAllocationsStarted)
8533 bool allocationsStarted =
false;
8534 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8536 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8538 if(allocationsStarted ==
false)
8540 allocationsStarted =
true;
8541 json.WriteString(
"DefaultPools");
8545 json.BeginString(
"Type ");
8546 json.ContinueString(memTypeIndex);
8549 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8552 if(allocationsStarted)
8559 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8560 const size_t poolCount = m_Pools.size();
8563 json.WriteString(
"Pools");
8565 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8567 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
8574 #endif // #if VMA_STATS_STRING_ENABLED 8576 static VkResult AllocateMemoryForImage(
8580 VmaSuballocationType suballocType,
8583 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8585 VkMemoryRequirements vkMemReq = {};
8586 bool requiresDedicatedAllocation =
false;
8587 bool prefersDedicatedAllocation =
false;
8588 allocator->GetImageMemoryRequirements(image, vkMemReq,
8589 requiresDedicatedAllocation, prefersDedicatedAllocation);
8591 return allocator->AllocateMemory(
8593 requiresDedicatedAllocation,
8594 prefersDedicatedAllocation,
8597 *pAllocationCreateInfo,
8609 VMA_ASSERT(pCreateInfo && pAllocator);
8610 VMA_DEBUG_LOG(
"vmaCreateAllocator");
8618 if(allocator != VK_NULL_HANDLE)
8620 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8621 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8622 vma_delete(&allocationCallbacks, allocator);
8628 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8630 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8631 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8636 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8638 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8639 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8644 uint32_t memoryTypeIndex,
8645 VkMemoryPropertyFlags* pFlags)
8647 VMA_ASSERT(allocator && pFlags);
8648 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8649 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8654 uint32_t frameIndex)
8656 VMA_ASSERT(allocator);
8657 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8659 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8661 allocator->SetCurrentFrameIndex(frameIndex);
8668 VMA_ASSERT(allocator && pStats);
8669 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8670 allocator->CalculateStats(pStats);
8673 #if VMA_STATS_STRING_ENABLED 8677 char** ppStatsString,
8678 VkBool32 detailedMap)
8680 VMA_ASSERT(allocator && ppStatsString);
8681 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8683 VmaStringBuilder sb(allocator);
8685 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8689 allocator->CalculateStats(&stats);
8691 json.WriteString(
"Total");
8692 VmaPrintStatInfo(json, stats.
total);
8694 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8696 json.BeginString(
"Heap ");
8697 json.ContinueString(heapIndex);
8701 json.WriteString(
"Size");
8702 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8704 json.WriteString(
"Flags");
8705 json.BeginArray(
true);
8706 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8708 json.WriteString(
"DEVICE_LOCAL");
8714 json.WriteString(
"Stats");
8715 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8718 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8720 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8722 json.BeginString(
"Type ");
8723 json.ContinueString(typeIndex);
8728 json.WriteString(
"Flags");
8729 json.BeginArray(
true);
8730 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8731 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8733 json.WriteString(
"DEVICE_LOCAL");
8735 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8737 json.WriteString(
"HOST_VISIBLE");
8739 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8741 json.WriteString(
"HOST_COHERENT");
8743 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8745 json.WriteString(
"HOST_CACHED");
8747 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8749 json.WriteString(
"LAZILY_ALLOCATED");
8755 json.WriteString(
"Stats");
8756 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8765 if(detailedMap == VK_TRUE)
8767 allocator->PrintDetailedMap(json);
8773 const size_t len = sb.GetLength();
8774 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8777 memcpy(pChars, sb.GetData(), len);
8780 *ppStatsString = pChars;
8787 if(pStatsString != VMA_NULL)
8789 VMA_ASSERT(allocator);
8790 size_t len = strlen(pStatsString);
8791 vma_delete_array(allocator, pStatsString, len + 1);
8795 #endif // #if VMA_STATS_STRING_ENABLED 8802 uint32_t memoryTypeBits,
8804 uint32_t* pMemoryTypeIndex)
8806 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8807 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8808 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8815 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8819 switch(pAllocationCreateInfo->
usage)
8824 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8827 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8830 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8831 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8834 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8835 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8841 *pMemoryTypeIndex = UINT32_MAX;
8842 uint32_t minCost = UINT32_MAX;
8843 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8844 memTypeIndex < allocator->GetMemoryTypeCount();
8845 ++memTypeIndex, memTypeBit <<= 1)
8848 if((memTypeBit & memoryTypeBits) != 0)
8850 const VkMemoryPropertyFlags currFlags =
8851 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8853 if((requiredFlags & ~currFlags) == 0)
8856 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8858 if(currCost < minCost)
8860 *pMemoryTypeIndex = memTypeIndex;
8870 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8875 const VkBufferCreateInfo* pBufferCreateInfo,
8877 uint32_t* pMemoryTypeIndex)
8879 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8880 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
8881 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8882 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8884 const VkDevice hDev = allocator->m_hDevice;
8885 VkBuffer hBuffer = VK_NULL_HANDLE;
8886 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
8887 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
8888 if(res == VK_SUCCESS)
8890 VkMemoryRequirements memReq = {};
8891 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
8892 hDev, hBuffer, &memReq);
8896 memReq.memoryTypeBits,
8897 pAllocationCreateInfo,
8900 allocator->GetVulkanFunctions().vkDestroyBuffer(
8901 hDev, hBuffer, allocator->GetAllocationCallbacks());
8908 const VkImageCreateInfo* pImageCreateInfo,
8910 uint32_t* pMemoryTypeIndex)
8912 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8913 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
8914 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8915 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8917 const VkDevice hDev = allocator->m_hDevice;
8918 VkImage hImage = VK_NULL_HANDLE;
8919 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
8920 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
8921 if(res == VK_SUCCESS)
8923 VkMemoryRequirements memReq = {};
8924 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
8925 hDev, hImage, &memReq);
8929 memReq.memoryTypeBits,
8930 pAllocationCreateInfo,
8933 allocator->GetVulkanFunctions().vkDestroyImage(
8934 hDev, hImage, allocator->GetAllocationCallbacks());
8944 VMA_ASSERT(allocator && pCreateInfo && pPool);
8946 VMA_DEBUG_LOG(
"vmaCreatePool");
8948 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8950 return allocator->CreatePool(pCreateInfo, pPool);
8957 VMA_ASSERT(allocator);
8959 if(pool == VK_NULL_HANDLE)
8964 VMA_DEBUG_LOG(
"vmaDestroyPool");
8966 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8968 allocator->DestroyPool(pool);
8976 VMA_ASSERT(allocator && pool && pPoolStats);
8978 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8980 allocator->GetPoolStats(pool, pPoolStats);
8986 size_t* pLostAllocationCount)
8988 VMA_ASSERT(allocator && pool);
8990 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8992 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8997 const VkMemoryRequirements* pVkMemoryRequirements,
9002 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
9004 VMA_DEBUG_LOG(
"vmaAllocateMemory");
9006 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9008 VkResult result = allocator->AllocateMemory(
9009 *pVkMemoryRequirements,
9015 VMA_SUBALLOCATION_TYPE_UNKNOWN,
9018 if(pAllocationInfo && result == VK_SUCCESS)
9020 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9033 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9035 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
9037 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9039 VkMemoryRequirements vkMemReq = {};
9040 bool requiresDedicatedAllocation =
false;
9041 bool prefersDedicatedAllocation =
false;
9042 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
9043 requiresDedicatedAllocation,
9044 prefersDedicatedAllocation);
9046 VkResult result = allocator->AllocateMemory(
9048 requiresDedicatedAllocation,
9049 prefersDedicatedAllocation,
9053 VMA_SUBALLOCATION_TYPE_BUFFER,
9056 if(pAllocationInfo && result == VK_SUCCESS)
9058 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9071 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9073 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
9075 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9077 VkResult result = AllocateMemoryForImage(
9081 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
9084 if(pAllocationInfo && result == VK_SUCCESS)
9086 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9096 VMA_ASSERT(allocator);
9097 VMA_DEBUG_LOG(
"vmaFreeMemory");
9098 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9099 if(allocation != VK_NULL_HANDLE)
9101 allocator->FreeMemory(allocation);
9110 VMA_ASSERT(allocator && allocation && pAllocationInfo);
9112 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9114 allocator->GetAllocationInfo(allocation, pAllocationInfo);
9121 VMA_ASSERT(allocator && allocation);
9123 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9125 return allocator->TouchAllocation(allocation);
9133 VMA_ASSERT(allocator && allocation);
9135 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9137 allocation->SetUserData(allocator, pUserData);
9144 VMA_ASSERT(allocator && pAllocation);
9146 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
9148 allocator->CreateLostAllocation(pAllocation);
9156 VMA_ASSERT(allocator && allocation && ppData);
9158 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9160 return allocator->Map(allocation, ppData);
9167 VMA_ASSERT(allocator && allocation);
9169 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9171 allocator->Unmap(allocation);
9177 size_t allocationCount,
9178 VkBool32* pAllocationsChanged,
9182 VMA_ASSERT(allocator && pAllocations);
9184 VMA_DEBUG_LOG(
"vmaDefragment");
9186 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9188 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
9196 VMA_ASSERT(allocator && allocation && buffer);
9198 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
9200 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9202 return allocator->BindBufferMemory(allocation, buffer);
9210 VMA_ASSERT(allocator && allocation && image);
9212 VMA_DEBUG_LOG(
"vmaBindImageMemory");
9214 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9216 return allocator->BindImageMemory(allocation, image);
9221 const VkBufferCreateInfo* pBufferCreateInfo,
9227 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
9229 VMA_DEBUG_LOG(
"vmaCreateBuffer");
9231 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9233 *pBuffer = VK_NULL_HANDLE;
9234 *pAllocation = VK_NULL_HANDLE;
9237 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
9238 allocator->m_hDevice,
9240 allocator->GetAllocationCallbacks(),
9245 VkMemoryRequirements vkMemReq = {};
9246 bool requiresDedicatedAllocation =
false;
9247 bool prefersDedicatedAllocation =
false;
9248 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
9249 requiresDedicatedAllocation, prefersDedicatedAllocation);
9253 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
9255 VMA_ASSERT(vkMemReq.alignment %
9256 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
9258 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
9260 VMA_ASSERT(vkMemReq.alignment %
9261 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
9263 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
9265 VMA_ASSERT(vkMemReq.alignment %
9266 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
9270 res = allocator->AllocateMemory(
9272 requiresDedicatedAllocation,
9273 prefersDedicatedAllocation,
9276 *pAllocationCreateInfo,
9277 VMA_SUBALLOCATION_TYPE_BUFFER,
9282 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
9286 if(pAllocationInfo != VMA_NULL)
9288 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9292 allocator->FreeMemory(*pAllocation);
9293 *pAllocation = VK_NULL_HANDLE;
9294 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
9295 *pBuffer = VK_NULL_HANDLE;
9298 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
9299 *pBuffer = VK_NULL_HANDLE;
9310 VMA_ASSERT(allocator);
9311 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
9312 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9313 if(buffer != VK_NULL_HANDLE)
9315 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
9317 if(allocation != VK_NULL_HANDLE)
9319 allocator->FreeMemory(allocation);
9325 const VkImageCreateInfo* pImageCreateInfo,
9331 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
9333 VMA_DEBUG_LOG(
"vmaCreateImage");
9335 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9337 *pImage = VK_NULL_HANDLE;
9338 *pAllocation = VK_NULL_HANDLE;
9341 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
9342 allocator->m_hDevice,
9344 allocator->GetAllocationCallbacks(),
9348 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
9349 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
9350 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
9353 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
9357 res = allocator->BindImageMemory(*pAllocation, *pImage);
9361 if(pAllocationInfo != VMA_NULL)
9363 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9367 allocator->FreeMemory(*pAllocation);
9368 *pAllocation = VK_NULL_HANDLE;
9369 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9370 *pImage = VK_NULL_HANDLE;
9373 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9374 *pImage = VK_NULL_HANDLE;
9385 VMA_ASSERT(allocator);
9386 VMA_DEBUG_LOG(
"vmaDestroyImage");
9387 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9388 if(image != VK_NULL_HANDLE)
9390 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
9392 if(allocation != VK_NULL_HANDLE)
9394 allocator->FreeMemory(allocation);
9398 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1169
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1433
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1182
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1196
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
Represents single memory allocation.
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1167
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1179
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:1376
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1161
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1749
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1179
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1948
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1595
+
Definition: vk_mem_alloc.h:1390
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1173
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1763
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1193
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1962
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1609
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1649
-
Definition: vk_mem_alloc.h:1456
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1150
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1494
-
Definition: vk_mem_alloc.h:1403
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1191
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1663
+
Definition: vk_mem_alloc.h:1470
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1162
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1508
+
Definition: vk_mem_alloc.h:1417
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1205
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1244
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1176
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1258
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1190
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1407
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1421
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1309
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1164
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1308
-
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:1172
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1952
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1323
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1176
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1322
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1966
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1208
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1318
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1960
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1478
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1943
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1165
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1092
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1222
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1332
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1974
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1492
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1957
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1177
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1104
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1185
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1199
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1603
-
Definition: vk_mem_alloc.h:1597
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1759
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1617
+
Definition: vk_mem_alloc.h:1611
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1773
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1162
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1515
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1619
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1655
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1174
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1529
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1633
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1669
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1148
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1606
+
Definition: vk_mem_alloc.h:1160
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1620
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:1354
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:1368
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1938
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1952
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1956
-
Definition: vk_mem_alloc.h:1393
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1502
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1163
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1970
+
Definition: vk_mem_alloc.h:1407
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1516
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1175
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1314
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1098
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1328
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1110
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1119
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1131
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1124
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1958
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1136
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1972
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1489
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1665
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1503
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1679
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1158
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1297
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1614
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1111
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1170
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1311
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1628
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1123
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:1463
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1310
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1115
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1609
-
Definition: vk_mem_alloc.h:1402
+
Definition: vk_mem_alloc.h:1477
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1324
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1127
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1623
+
Definition: vk_mem_alloc.h:1416
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1484
-
Definition: vk_mem_alloc.h:1475
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1498
+
Definition: vk_mem_alloc.h:1489
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1300
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1160
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1627
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1194
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1658
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1473
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1508
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1314
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1172
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1641
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1208
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1672
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1487
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1522
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1232
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1316
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1443
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1309
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1246
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1330
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1457
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1323
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1169
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1113
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1168
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1181
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1125
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1180
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1641
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1655
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1773
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1188
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1309
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1306
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1787
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1202
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1323
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1320
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1646
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1754
-
Definition: vk_mem_alloc.h:1471
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1954
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1156
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1660
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1768
+
Definition: vk_mem_alloc.h:1485
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1968
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1168
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:1171
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1304
-
Definition: vk_mem_alloc.h:1359
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1599
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1318
+
Definition: vk_mem_alloc.h:1373
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1613
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1302
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1166
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1170
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1430
-
Definition: vk_mem_alloc.h:1386
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1768
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1316
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1178
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1182
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1444
+
Definition: vk_mem_alloc.h:1400
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1782
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1146
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1158
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1159
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1735
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1171
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1749
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1577
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1310
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1591
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1324
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
-
Definition: vk_mem_alloc.h:1469
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1317
+
Definition: vk_mem_alloc.h:1483
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1331
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1652
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1310
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1740
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1666
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1324
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1754