23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1157 #include <vulkan/vulkan.h> 1159 #if !defined(VMA_DEDICATED_ALLOCATION) 1160 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1161 #define VMA_DEDICATED_ALLOCATION 1 1163 #define VMA_DEDICATED_ALLOCATION 0 1181 uint32_t memoryType,
1182 VkDeviceMemory memory,
1187 uint32_t memoryType,
1188 VkDeviceMemory memory,
1260 #if VMA_DEDICATED_ALLOCATION 1261 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1262 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1353 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1361 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1371 uint32_t memoryTypeIndex,
1372 VkMemoryPropertyFlags* pFlags);
1384 uint32_t frameIndex);
1417 #define VMA_STATS_STRING_ENABLED 1 1419 #if VMA_STATS_STRING_ENABLED 1426 char** ppStatsString,
1427 VkBool32 detailedMap);
1431 char* pStatsString);
1433 #endif // #if VMA_STATS_STRING_ENABLED 1627 uint32_t memoryTypeBits,
1629 uint32_t* pMemoryTypeIndex);
1645 const VkBufferCreateInfo* pBufferCreateInfo,
1647 uint32_t* pMemoryTypeIndex);
1663 const VkImageCreateInfo* pImageCreateInfo,
1665 uint32_t* pMemoryTypeIndex);
1796 size_t* pLostAllocationCount);
1895 const VkMemoryRequirements* pVkMemoryRequirements,
2201 size_t allocationCount,
2202 VkBool32* pAllocationsChanged,
2268 const VkBufferCreateInfo* pBufferCreateInfo,
2293 const VkImageCreateInfo* pImageCreateInfo,
2319 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2322 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2323 #define VMA_IMPLEMENTATION 2326 #ifdef VMA_IMPLEMENTATION 2327 #undef VMA_IMPLEMENTATION 2349 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2350 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2362 #if VMA_USE_STL_CONTAINERS 2363 #define VMA_USE_STL_VECTOR 1 2364 #define VMA_USE_STL_UNORDERED_MAP 1 2365 #define VMA_USE_STL_LIST 1 2368 #if VMA_USE_STL_VECTOR 2372 #if VMA_USE_STL_UNORDERED_MAP 2373 #include <unordered_map> 2376 #if VMA_USE_STL_LIST 2385 #include <algorithm> 2391 #define VMA_NULL nullptr 2394 #if defined(__APPLE__) || defined(__ANDROID__) 2396 void *aligned_alloc(
size_t alignment,
size_t size)
2399 if(alignment <
sizeof(
void*))
2401 alignment =
sizeof(
void*);
2405 if(posix_memalign(&pointer, alignment, size) == 0)
2419 #define VMA_ASSERT(expr) assert(expr) 2421 #define VMA_ASSERT(expr) 2427 #ifndef VMA_HEAVY_ASSERT 2429 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2431 #define VMA_HEAVY_ASSERT(expr) 2435 #ifndef VMA_ALIGN_OF 2436 #define VMA_ALIGN_OF(type) (__alignof(type)) 2439 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2441 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2443 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2447 #ifndef VMA_SYSTEM_FREE 2449 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2451 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2456 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2460 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2464 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2468 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2471 #ifndef VMA_DEBUG_LOG 2472 #define VMA_DEBUG_LOG(format, ...) 2482 #if VMA_STATS_STRING_ENABLED 2483 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2485 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats a 64-bit unsigned integer as decimal text into outStr (buffer capacity strLen).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    // Widen explicitly to unsigned long long so the "%llu" conversion specifier matches.
    const unsigned long long value = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", value);
}
// Formats a pointer value as text ("%p", implementation-defined form) into outStr.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
    void Lock() { m_Mutex.lock(); }     // Acquire the wrapped mutex (blocks until available).
    void Unlock() { m_Mutex.unlock(); } // Release ownership previously taken by Lock().
2508 #define VMA_MUTEX VmaMutex 2519 #ifndef VMA_ATOMIC_UINT32 2520 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2523 #ifndef VMA_BEST_FIT 2536 #define VMA_BEST_FIT (1) 2539 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2544 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2547 #ifndef VMA_DEBUG_ALIGNMENT 2552 #define VMA_DEBUG_ALIGNMENT (1) 2555 #ifndef VMA_DEBUG_MARGIN 2560 #define VMA_DEBUG_MARGIN (0) 2563 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2569 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2572 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2577 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2580 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2585 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2588 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2589 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2593 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2594 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2598 #ifndef VMA_CLASS_NO_COPY 2599 #define VMA_CLASS_NO_COPY(className) \ 2601 className(const className&) = delete; \ 2602 className& operator=(const className&) = delete; 2605 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2608 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
2614 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2615 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
2618 static inline uint32_t VmaCountBitsSet(uint32_t v)
2620 uint32_t c = v - ((v >> 1) & 0x55555555);
2621 c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
2622 c = ((c >> 4) + c) & 0x0F0F0F0F;
2623 c = ((c >> 8) + c) & 0x00FF00FF;
2624 c = ((c >> 16) + c) & 0x0000FFFF;
// Rounds val up to the nearest multiple of align.
// Uses divide-then-multiply, so it works for any positive align, not just powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;  // push past the previous boundary
    return (bumped / align) * align;   // snap down onto a multiple of align
}
// Rounds val down to the nearest multiple of align (any positive align).
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    const T multiples = val / align;  // truncating integer division
    return multiples * align;
}
// Integer division of x by y with rounding to the nearest whole number
// (ties round up, since half of y is added before truncation).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
2652 template<
typename Iterator,
typename Compare>
2653 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
2655 Iterator centerValue = end; --centerValue;
2656 Iterator insertIndex = beg;
2657 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
2659 if(cmp(*memTypeIndex, *centerValue))
2661 if(insertIndex != memTypeIndex)
2663 VMA_SWAP(*memTypeIndex, *insertIndex);
2668 if(insertIndex != centerValue)
2670 VMA_SWAP(*insertIndex, *centerValue);
2675 template<
typename Iterator,
typename Compare>
2676 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2680 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2681 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2682 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2686 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2688 #endif // #ifndef VMA_SORT 2697 static inline bool VmaBlocksOnSamePage(
2698 VkDeviceSize resourceAOffset,
2699 VkDeviceSize resourceASize,
2700 VkDeviceSize resourceBOffset,
2701 VkDeviceSize pageSize)
2703 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2704 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2705 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2706 VkDeviceSize resourceBStart = resourceBOffset;
2707 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2708 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation. Used by
// VmaIsBufferImageGranularityConflict to decide whether two neighboring
// suballocations may conflict with respect to bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,           // Range is not allocated.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,        // Allocated, but resource kind unknown.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,  // Image with tiling not yet known.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // Sentinel forcing a 32-bit underlying type.
};
2728 static inline bool VmaIsBufferImageGranularityConflict(
2729 VmaSuballocationType suballocType1,
2730 VmaSuballocationType suballocType2)
2732 if(suballocType1 > suballocType2)
2734 VMA_SWAP(suballocType1, suballocType2);
2737 switch(suballocType1)
2739 case VMA_SUBALLOCATION_TYPE_FREE:
2741 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2743 case VMA_SUBALLOCATION_TYPE_BUFFER:
2745 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2746 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2747 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2749 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2750 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2751 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2752 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2754 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2755 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
// Fills the debug-margin region at pData + offset with the corruption-detection
// magic value (one uint32_t per 4 bytes of VMA_DEBUG_MARGIN), to be verified
// later by VmaValidateMagicValue.
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
}
2773 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
2775 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
2776 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
2777 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
2779 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
2790 VMA_CLASS_NO_COPY(VmaMutexLock)
2792 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2793 m_pMutex(useMutex ? &mutex : VMA_NULL)
2810 VMA_MUTEX* m_pMutex;
2813 #if VMA_DEBUG_GLOBAL_MUTEX 2814 static VMA_MUTEX gDebugGlobalMutex;
2815 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2817 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2821 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
2832 template <
typename IterT,
typename KeyT,
typename CmpT>
2833 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key, CmpT cmp)
2835 size_t down = 0, up = (end - beg);
2838 const size_t mid = (down + up) / 2;
2839 if(cmp(*(beg+mid), key))
2854 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2856 if((pAllocationCallbacks != VMA_NULL) &&
2857 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2859 return (*pAllocationCallbacks->pfnAllocation)(
2860 pAllocationCallbacks->pUserData,
2863 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2867 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2871 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2873 if((pAllocationCallbacks != VMA_NULL) &&
2874 (pAllocationCallbacks->pfnFree != VMA_NULL))
2876 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2880 VMA_SYSTEM_FREE(ptr);
2884 template<
typename T>
2885 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2887 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2890 template<
typename T>
2891 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2893 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2896 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2898 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2900 template<
typename T>
2901 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2904 VmaFree(pAllocationCallbacks, ptr);
2907 template<
typename T>
2908 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2912 for(
size_t i = count; i--; )
2916 VmaFree(pAllocationCallbacks, ptr);
2921 template<
typename T>
2922 class VmaStlAllocator
2925 const VkAllocationCallbacks*
const m_pCallbacks;
2926 typedef T value_type;
2928 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2929 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2931 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2932 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2934 template<
typename U>
2935 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2937 return m_pCallbacks == rhs.m_pCallbacks;
2939 template<
typename U>
2940 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2942 return m_pCallbacks != rhs.m_pCallbacks;
2945 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

// STL flavor: inserts item into vec so that it ends up at position index.
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}
// STL flavor: removes the element at position index from vec.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    const typename std::vector<T, allocatorT>::iterator pos = vec.begin() + index;
    vec.erase(pos);
}
2964 #else // #if VMA_USE_STL_VECTOR 2969 template<
typename T,
typename AllocatorT>
2973 typedef T value_type;
2975 VmaVector(
const AllocatorT& allocator) :
2976 m_Allocator(allocator),
2983 VmaVector(
size_t count,
const AllocatorT& allocator) :
2984 m_Allocator(allocator),
2985 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2991 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2992 m_Allocator(src.m_Allocator),
2993 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2994 m_Count(src.m_Count),
2995 m_Capacity(src.m_Count)
2999 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3005 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3008 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3012 resize(rhs.m_Count);
3015 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3021 bool empty()
const {
return m_Count == 0; }
3022 size_t size()
const {
return m_Count; }
3023 T* data() {
return m_pArray; }
3024 const T* data()
const {
return m_pArray; }
3026 T& operator[](
size_t index)
3028 VMA_HEAVY_ASSERT(index < m_Count);
3029 return m_pArray[index];
3031 const T& operator[](
size_t index)
const 3033 VMA_HEAVY_ASSERT(index < m_Count);
3034 return m_pArray[index];
3039 VMA_HEAVY_ASSERT(m_Count > 0);
3042 const T& front()
const 3044 VMA_HEAVY_ASSERT(m_Count > 0);
3049 VMA_HEAVY_ASSERT(m_Count > 0);
3050 return m_pArray[m_Count - 1];
3052 const T& back()
const 3054 VMA_HEAVY_ASSERT(m_Count > 0);
3055 return m_pArray[m_Count - 1];
3058 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3060 newCapacity = VMA_MAX(newCapacity, m_Count);
3062 if((newCapacity < m_Capacity) && !freeMemory)
3064 newCapacity = m_Capacity;
3067 if(newCapacity != m_Capacity)
3069 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3072 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3074 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3075 m_Capacity = newCapacity;
3076 m_pArray = newArray;
3080 void resize(
size_t newCount,
bool freeMemory =
false)
3082 size_t newCapacity = m_Capacity;
3083 if(newCount > m_Capacity)
3085 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3089 newCapacity = newCount;
3092 if(newCapacity != m_Capacity)
3094 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3095 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3096 if(elementsToCopy != 0)
3098 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3100 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3101 m_Capacity = newCapacity;
3102 m_pArray = newArray;
3108 void clear(
bool freeMemory =
false)
3110 resize(0, freeMemory);
3113 void insert(
size_t index,
const T& src)
3115 VMA_HEAVY_ASSERT(index <= m_Count);
3116 const size_t oldCount = size();
3117 resize(oldCount + 1);
3118 if(index < oldCount)
3120 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3122 m_pArray[index] = src;
3125 void remove(
size_t index)
3127 VMA_HEAVY_ASSERT(index < m_Count);
3128 const size_t oldCount = size();
3129 if(index < oldCount - 1)
3131 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3133 resize(oldCount - 1);
3136 void push_back(
const T& src)
3138 const size_t newIndex = size();
3139 resize(newIndex + 1);
3140 m_pArray[newIndex] = src;
3145 VMA_HEAVY_ASSERT(m_Count > 0);
3149 void push_front(
const T& src)
3156 VMA_HEAVY_ASSERT(m_Count > 0);
3160 typedef T* iterator;
3162 iterator begin() {
return m_pArray; }
3163 iterator end() {
return m_pArray + m_Count; }
3166 AllocatorT m_Allocator;
// Custom-vector flavor (used when VMA_USE_STL_VECTOR is disabled):
// forwards to VmaVector::insert, placing item at position index.
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}
3178 template<
typename T,
typename allocatorT>
3179 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3184 #endif // #if VMA_USE_STL_VECTOR 3186 template<
typename CmpLess,
typename VectorT>
3187 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3189 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3191 vector.data() + vector.size(),
3193 CmpLess()) - vector.data();
3194 VmaVectorInsert(vector, indexToInsert, value);
3195 return indexToInsert;
3198 template<
typename CmpLess,
typename VectorT>
3199 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3202 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3207 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3209 size_t indexToRemove = it - vector.begin();
3210 VmaVectorRemove(vector, indexToRemove);
3216 template<
typename CmpLess,
typename VectorT>
3217 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
3220 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3222 vector.data() + vector.size(),
3225 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
3227 return it - vector.begin();
3231 return vector.size();
3243 template<
typename T>
3244 class VmaPoolAllocator
3246 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3248 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3249 ~VmaPoolAllocator();
3257 uint32_t NextFreeIndex;
3264 uint32_t FirstFreeIndex;
3267 const VkAllocationCallbacks* m_pAllocationCallbacks;
3268 size_t m_ItemsPerBlock;
3269 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3271 ItemBlock& CreateNewBlock();
// Constructs an empty pool allocator; item blocks of itemsPerBlock elements
// are created lazily by CreateNewBlock when Alloc finds no free slot.
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    // A block must hold at least one item for the free-list chaining to work.
    VMA_ASSERT(itemsPerBlock > 0);
}
3283 template<
typename T>
3284 VmaPoolAllocator<T>::~VmaPoolAllocator()
// Destroys every item block (running item destructors via vma_delete_array)
// and empties the block list.
template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}
// Returns a pointer to a free item, creating a new block if all existing
// blocks are full. Each block keeps an intrusive singly-linked free list
// threaded through Item::NextFreeIndex, with FirstFreeIndex as its head.
template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    // Search existing blocks, newest first, for a free slot.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // FirstFreeIndex == UINT32_MAX marks a completely full block.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            // Pop the head of this block's free list.
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }
    // No block has a free item: create a new block and use its first item.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}
3319 template<
typename T>
3320 void VmaPoolAllocator<T>::Free(T* ptr)
3323 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3325 ItemBlock& block = m_ItemBlocks[i];
3329 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3332 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3334 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3335 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3336 block.FirstFreeIndex = index;
3340 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
// Allocates a fresh block of m_ItemsPerBlock items, appends it to the block
// list, and chains all of its items into the block's free list.
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
    m_ItemBlocks.push_back(newBlock);
    // Writing through the local copy is fine: pItems points at the same array
    // as the copy just stored in m_ItemBlocks.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    // UINT32_MAX terminates the free list.
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
3361 #if VMA_USE_STL_LIST 3363 #define VmaList std::list 3365 #else // #if VMA_USE_STL_LIST 3367 template<
typename T>
3376 template<
typename T>
3379 VMA_CLASS_NO_COPY(VmaRawList)
3381 typedef VmaListItem<T> ItemType;
3383 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3387 size_t GetCount()
const {
return m_Count; }
3388 bool IsEmpty()
const {
return m_Count == 0; }
3390 ItemType* Front() {
return m_pFront; }
3391 const ItemType* Front()
const {
return m_pFront; }
3392 ItemType* Back() {
return m_pBack; }
3393 const ItemType* Back()
const {
return m_pBack; }
3395 ItemType* PushBack();
3396 ItemType* PushFront();
3397 ItemType* PushBack(
const T& value);
3398 ItemType* PushFront(
const T& value);
3403 ItemType* InsertBefore(ItemType* pItem);
3405 ItemType* InsertAfter(ItemType* pItem);
3407 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3408 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3410 void Remove(ItemType* pItem);
3413 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3414 VmaPoolAllocator<ItemType> m_ItemAllocator;
3420 template<
typename T>
3421 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3422 m_pAllocationCallbacks(pAllocationCallbacks),
3423 m_ItemAllocator(pAllocationCallbacks, 128),
3430 template<
typename T>
3431 VmaRawList<T>::~VmaRawList()
3437 template<
typename T>
3438 void VmaRawList<T>::Clear()
3440 if(IsEmpty() ==
false)
3442 ItemType* pItem = m_pBack;
3443 while(pItem != VMA_NULL)
3445 ItemType*
const pPrevItem = pItem->pPrev;
3446 m_ItemAllocator.Free(pItem);
3449 m_pFront = VMA_NULL;
3455 template<
typename T>
3456 VmaListItem<T>* VmaRawList<T>::PushBack()
3458 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3459 pNewItem->pNext = VMA_NULL;
3462 pNewItem->pPrev = VMA_NULL;
3463 m_pFront = pNewItem;
3469 pNewItem->pPrev = m_pBack;
3470 m_pBack->pNext = pNewItem;
3477 template<
typename T>
3478 VmaListItem<T>* VmaRawList<T>::PushFront()
3480 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3481 pNewItem->pPrev = VMA_NULL;
3484 pNewItem->pNext = VMA_NULL;
3485 m_pFront = pNewItem;
3491 pNewItem->pNext = m_pFront;
3492 m_pFront->pPrev = pNewItem;
3493 m_pFront = pNewItem;
3499 template<
typename T>
3500 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3502 ItemType*
const pNewItem = PushBack();
3503 pNewItem->Value = value;
3507 template<
typename T>
3508 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3510 ItemType*
const pNewItem = PushFront();
3511 pNewItem->Value = value;
3515 template<
typename T>
3516 void VmaRawList<T>::PopBack()
3518 VMA_HEAVY_ASSERT(m_Count > 0);
3519 ItemType*
const pBackItem = m_pBack;
3520 ItemType*
const pPrevItem = pBackItem->pPrev;
3521 if(pPrevItem != VMA_NULL)
3523 pPrevItem->pNext = VMA_NULL;
3525 m_pBack = pPrevItem;
3526 m_ItemAllocator.Free(pBackItem);
3530 template<
typename T>
3531 void VmaRawList<T>::PopFront()
3533 VMA_HEAVY_ASSERT(m_Count > 0);
3534 ItemType*
const pFrontItem = m_pFront;
3535 ItemType*
const pNextItem = pFrontItem->pNext;
3536 if(pNextItem != VMA_NULL)
3538 pNextItem->pPrev = VMA_NULL;
3540 m_pFront = pNextItem;
3541 m_ItemAllocator.Free(pFrontItem);
3545 template<
typename T>
3546 void VmaRawList<T>::Remove(ItemType* pItem)
3548 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3549 VMA_HEAVY_ASSERT(m_Count > 0);
3551 if(pItem->pPrev != VMA_NULL)
3553 pItem->pPrev->pNext = pItem->pNext;
3557 VMA_HEAVY_ASSERT(m_pFront == pItem);
3558 m_pFront = pItem->pNext;
3561 if(pItem->pNext != VMA_NULL)
3563 pItem->pNext->pPrev = pItem->pPrev;
3567 VMA_HEAVY_ASSERT(m_pBack == pItem);
3568 m_pBack = pItem->pPrev;
3571 m_ItemAllocator.Free(pItem);
3575 template<
typename T>
3576 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3578 if(pItem != VMA_NULL)
3580 ItemType*
const prevItem = pItem->pPrev;
3581 ItemType*
const newItem = m_ItemAllocator.Alloc();
3582 newItem->pPrev = prevItem;
3583 newItem->pNext = pItem;
3584 pItem->pPrev = newItem;
3585 if(prevItem != VMA_NULL)
3587 prevItem->pNext = newItem;
3591 VMA_HEAVY_ASSERT(m_pFront == pItem);
3601 template<
typename T>
3602 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3604 if(pItem != VMA_NULL)
3606 ItemType*
const nextItem = pItem->pNext;
3607 ItemType*
const newItem = m_ItemAllocator.Alloc();
3608 newItem->pNext = nextItem;
3609 newItem->pPrev = pItem;
3610 pItem->pNext = newItem;
3611 if(nextItem != VMA_NULL)
3613 nextItem->pPrev = newItem;
3617 VMA_HEAVY_ASSERT(m_pBack == pItem);
3627 template<
typename T>
3628 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3630 ItemType*
const newItem = InsertBefore(pItem);
3631 newItem->Value = value;
3635 template<
typename T>
3636 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3638 ItemType*
const newItem = InsertAfter(pItem);
3639 newItem->Value = value;
3643 template<
typename T,
typename AllocatorT>
3646 VMA_CLASS_NO_COPY(VmaList)
3657 T& operator*()
const 3659 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3660 return m_pItem->Value;
3662 T* operator->()
const 3664 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3665 return &m_pItem->Value;
3668 iterator& operator++()
3670 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3671 m_pItem = m_pItem->pNext;
3674 iterator& operator--()
3676 if(m_pItem != VMA_NULL)
3678 m_pItem = m_pItem->pPrev;
3682 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3683 m_pItem = m_pList->Back();
3688 iterator operator++(
int)
3690 iterator result = *
this;
3694 iterator operator--(
int)
3696 iterator result = *
this;
3701 bool operator==(
const iterator& rhs)
const 3703 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3704 return m_pItem == rhs.m_pItem;
3706 bool operator!=(
const iterator& rhs)
const 3708 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3709 return m_pItem != rhs.m_pItem;
3713 VmaRawList<T>* m_pList;
3714 VmaListItem<T>* m_pItem;
3716 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3722 friend class VmaList<T, AllocatorT>;
3725 class const_iterator
3734 const_iterator(
const iterator& src) :
3735 m_pList(src.m_pList),
3736 m_pItem(src.m_pItem)
3740 const T& operator*()
const 3742 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3743 return m_pItem->Value;
3745 const T* operator->()
const 3747 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3748 return &m_pItem->Value;
3751 const_iterator& operator++()
3753 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3754 m_pItem = m_pItem->pNext;
3757 const_iterator& operator--()
3759 if(m_pItem != VMA_NULL)
3761 m_pItem = m_pItem->pPrev;
3765 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3766 m_pItem = m_pList->Back();
3771 const_iterator operator++(
int)
3773 const_iterator result = *
this;
3777 const_iterator operator--(
int)
3779 const_iterator result = *
this;
3784 bool operator==(
const const_iterator& rhs)
const 3786 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3787 return m_pItem == rhs.m_pItem;
3789 bool operator!=(
const const_iterator& rhs)
const 3791 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3792 return m_pItem != rhs.m_pItem;
3796 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3802 const VmaRawList<T>* m_pList;
3803 const VmaListItem<T>* m_pItem;
3805 friend class VmaList<T, AllocatorT>;
3808 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3810 bool empty()
const {
return m_RawList.IsEmpty(); }
3811 size_t size()
const {
return m_RawList.GetCount(); }
3813 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3814 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3816 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3817 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3819 void clear() { m_RawList.Clear(); }
3820 void push_back(
const T& value) { m_RawList.PushBack(value); }
3821 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3822 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3825 VmaRawList<T> m_RawList;
3828 #endif // #if VMA_USE_STL_LIST 3836 #if VMA_USE_STL_UNORDERED_MAP 3838 #define VmaPair std::pair 3840 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3841 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3843 #else // #if VMA_USE_STL_UNORDERED_MAP 3845 template<
typename T1,
typename T2>
3851 VmaPair() : first(), second() { }
3852 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3858 template<
typename KeyT,
typename ValueT>
3862 typedef VmaPair<KeyT, ValueT> PairType;
3863 typedef PairType* iterator;
3865 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3867 iterator begin() {
return m_Vector.begin(); }
3868 iterator end() {
return m_Vector.end(); }
3870 void insert(
const PairType& pair);
3871 iterator find(
const KeyT& key);
3872 void erase(iterator it);
3875 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3878 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3880 template<
typename FirstT,
typename SecondT>
3881 struct VmaPairFirstLess
3883 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3885 return lhs.first < rhs.first;
3887 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3889 return lhs.first < rhsFirst;
3893 template<
typename KeyT,
typename ValueT>
3894 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3896 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3898 m_Vector.data() + m_Vector.size(),
3900 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3901 VmaVectorInsert(m_Vector, indexToInsert, pair);
3904 template<
typename KeyT,
typename ValueT>
3905 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3907 PairType* it = VmaBinaryFindFirstNotLess(
3909 m_Vector.data() + m_Vector.size(),
3911 VmaPairFirstLess<KeyT, ValueT>());
3912 if((it != m_Vector.end()) && (it->first == key))
3918 return m_Vector.end();
// Removes the element the iterator points to from the underlying sorted vector.
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}
3928 #endif // #if VMA_USE_STL_UNORDERED_MAP 3934 class VmaDeviceMemoryBlock;
3936 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
3938 struct VmaAllocation_T
3940 VMA_CLASS_NO_COPY(VmaAllocation_T)
3942 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3946 FLAG_USER_DATA_STRING = 0x01,
3950 enum ALLOCATION_TYPE
3952 ALLOCATION_TYPE_NONE,
3953 ALLOCATION_TYPE_BLOCK,
3954 ALLOCATION_TYPE_DEDICATED,
3957 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3960 m_pUserData(VMA_NULL),
3961 m_LastUseFrameIndex(currentFrameIndex),
3962 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3963 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3965 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3967 #if VMA_STATS_STRING_ENABLED 3968 m_CreationFrameIndex = currentFrameIndex;
3969 m_BufferImageUsage = 0;
3975 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3978 VMA_ASSERT(m_pUserData == VMA_NULL);
3981 void InitBlockAllocation(
3983 VmaDeviceMemoryBlock* block,
3984 VkDeviceSize offset,
3985 VkDeviceSize alignment,
3987 VmaSuballocationType suballocationType,
3991 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3992 VMA_ASSERT(block != VMA_NULL);
3993 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3994 m_Alignment = alignment;
3996 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3997 m_SuballocationType = (uint8_t)suballocationType;
3998 m_BlockAllocation.m_hPool = hPool;
3999 m_BlockAllocation.m_Block = block;
4000 m_BlockAllocation.m_Offset = offset;
4001 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4006 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4007 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4008 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4009 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4010 m_BlockAllocation.m_Block = VMA_NULL;
4011 m_BlockAllocation.m_Offset = 0;
4012 m_BlockAllocation.m_CanBecomeLost =
true;
4015 void ChangeBlockAllocation(
4017 VmaDeviceMemoryBlock* block,
4018 VkDeviceSize offset);
4021 void InitDedicatedAllocation(
4022 uint32_t memoryTypeIndex,
4023 VkDeviceMemory hMemory,
4024 VmaSuballocationType suballocationType,
4028 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4029 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4030 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4033 m_SuballocationType = (uint8_t)suballocationType;
4034 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4035 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4036 m_DedicatedAllocation.m_hMemory = hMemory;
4037 m_DedicatedAllocation.m_pMappedData = pMappedData;
4040 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4041 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4042 VkDeviceSize GetSize()
const {
return m_Size; }
4043 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4044 void* GetUserData()
const {
return m_pUserData; }
4045 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4046 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4048 VmaDeviceMemoryBlock* GetBlock()
const 4050 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4051 return m_BlockAllocation.m_Block;
4053 VkDeviceSize GetOffset()
const;
4054 VkDeviceMemory GetMemory()
const;
4055 uint32_t GetMemoryTypeIndex()
const;
4056 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4057 void* GetMappedData()
const;
4058 bool CanBecomeLost()
const;
4061 uint32_t GetLastUseFrameIndex()
const 4063 return m_LastUseFrameIndex.load();
4065 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4067 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4077 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4079 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4081 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4092 void BlockAllocMap();
4093 void BlockAllocUnmap();
4094 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4097 #if VMA_STATS_STRING_ENABLED 4098 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4099 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4101 void InitBufferImageUsage(uint32_t bufferImageUsage)
4103 VMA_ASSERT(m_BufferImageUsage == 0);
4104 m_BufferImageUsage = bufferImageUsage;
4107 void PrintParameters(
class VmaJsonWriter& json)
const;
4111 VkDeviceSize m_Alignment;
4112 VkDeviceSize m_Size;
4114 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4116 uint8_t m_SuballocationType;
4123 struct BlockAllocation
4126 VmaDeviceMemoryBlock* m_Block;
4127 VkDeviceSize m_Offset;
4128 bool m_CanBecomeLost;
4132 struct DedicatedAllocation
4134 uint32_t m_MemoryTypeIndex;
4135 VkDeviceMemory m_hMemory;
4136 void* m_pMappedData;
4142 BlockAllocation m_BlockAllocation;
4144 DedicatedAllocation m_DedicatedAllocation;
4147 #if VMA_STATS_STRING_ENABLED 4148 uint32_t m_CreationFrameIndex;
4149 uint32_t m_BufferImageUsage;
// One region (used or free) inside a VkDeviceMemory block.
// NOTE(review): extraction dropped some member lines here (original 4162-4163,
// presumably `size` and `hAllocation`) — confirm against upstream source.
4159 struct VmaSuballocation
4161 VkDeviceSize offset;
4164 VmaSuballocationType type;
4167 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost weight charged per allocation that must be made "lost" to satisfy a request.
4170 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Parameters of a planned allocation inside a block, filled by
// VmaBlockMetadata::CreateAllocationRequest.
4185 struct VmaAllocationRequest
4187 VkDeviceSize offset;
4188 VkDeviceSize sumFreeSize;
4189 VkDeviceSize sumItemSize;
4190 VmaSuballocationList::iterator item;
4191 size_t itemsToMakeLostCount;
// Lower cost == better placement: bytes wasted plus a penalty per lost allocation.
4193 VkDeviceSize CalcCost()
const 4195 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4203 class VmaBlockMetadata
4205 VMA_CLASS_NO_COPY(VmaBlockMetadata)
4208 ~VmaBlockMetadata();
4209 void Init(VkDeviceSize size);
4212 bool Validate()
const;
4213 VkDeviceSize GetSize()
const {
return m_Size; }
4214 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4215 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4216 VkDeviceSize GetUnusedRangeSizeMax()
const;
4218 bool IsEmpty()
const;
4220 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4223 #if VMA_STATS_STRING_ENABLED 4224 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4230 bool CreateAllocationRequest(
4231 uint32_t currentFrameIndex,
4232 uint32_t frameInUseCount,
4233 VkDeviceSize bufferImageGranularity,
4234 VkDeviceSize allocSize,
4235 VkDeviceSize allocAlignment,
4236 VmaSuballocationType allocType,
4237 bool canMakeOtherLost,
4238 VmaAllocationRequest* pAllocationRequest);
4240 bool MakeRequestedAllocationsLost(
4241 uint32_t currentFrameIndex,
4242 uint32_t frameInUseCount,
4243 VmaAllocationRequest* pAllocationRequest);
4245 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4247 VkResult CheckCorruption(
const void* pBlockData);
4251 const VmaAllocationRequest& request,
4252 VmaSuballocationType type,
4253 VkDeviceSize allocSize,
4258 void FreeAtOffset(VkDeviceSize offset);
4261 VkDeviceSize m_Size;
4262 uint32_t m_FreeCount;
4263 VkDeviceSize m_SumFreeSize;
4264 VmaSuballocationList m_Suballocations;
4267 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4269 bool ValidateFreeSuballocationList()
const;
4273 bool CheckAllocation(
4274 uint32_t currentFrameIndex,
4275 uint32_t frameInUseCount,
4276 VkDeviceSize bufferImageGranularity,
4277 VkDeviceSize allocSize,
4278 VkDeviceSize allocAlignment,
4279 VmaSuballocationType allocType,
4280 VmaSuballocationList::const_iterator suballocItem,
4281 bool canMakeOtherLost,
4282 VkDeviceSize* pOffset,
4283 size_t* itemsToMakeLostCount,
4284 VkDeviceSize* pSumFreeSize,
4285 VkDeviceSize* pSumItemSize)
const;
4287 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4291 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4294 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4297 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4306 class VmaDeviceMemoryBlock
4308 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
4310 VmaBlockMetadata m_Metadata;
4314 ~VmaDeviceMemoryBlock()
4316 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
4317 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4322 uint32_t newMemoryTypeIndex,
4323 VkDeviceMemory newMemory,
4324 VkDeviceSize newSize,
4329 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
4330 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4331 uint32_t GetId()
const {
return m_Id; }
4332 void* GetMappedData()
const {
return m_pMappedData; }
4335 bool Validate()
const;
4340 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
4343 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
4344 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
4346 VkResult BindBufferMemory(
4350 VkResult BindImageMemory(
4356 uint32_t m_MemoryTypeIndex;
4358 VkDeviceMemory m_hMemory;
4363 uint32_t m_MapCount;
4364 void* m_pMappedData;
4367 struct VmaPointerLess
4369 bool operator()(
const void* lhs,
const void* rhs)
const 4375 class VmaDefragmentator;
4383 struct VmaBlockVector
4385 VMA_CLASS_NO_COPY(VmaBlockVector)
4389 uint32_t memoryTypeIndex,
4390 VkDeviceSize preferredBlockSize,
4391 size_t minBlockCount,
4392 size_t maxBlockCount,
4393 VkDeviceSize bufferImageGranularity,
4394 uint32_t frameInUseCount,
4398 VkResult CreateMinBlocks();
4400 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4401 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
4402 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
4403 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
4407 bool IsEmpty()
const {
return m_Blocks.empty(); }
4408 bool IsCorruptionDetectionEnabled()
const;
4412 uint32_t currentFrameIndex,
4414 VkDeviceSize alignment,
4416 VmaSuballocationType suballocType,
4425 #if VMA_STATS_STRING_ENABLED 4426 void PrintDetailedMap(
class VmaJsonWriter& json);
4429 void MakePoolAllocationsLost(
4430 uint32_t currentFrameIndex,
4431 size_t* pLostAllocationCount);
4432 VkResult CheckCorruption();
4434 VmaDefragmentator* EnsureDefragmentator(
4436 uint32_t currentFrameIndex);
4438 VkResult Defragment(
4440 VkDeviceSize& maxBytesToMove,
4441 uint32_t& maxAllocationsToMove);
4443 void DestroyDefragmentator();
4446 friend class VmaDefragmentator;
4449 const uint32_t m_MemoryTypeIndex;
4450 const VkDeviceSize m_PreferredBlockSize;
4451 const size_t m_MinBlockCount;
4452 const size_t m_MaxBlockCount;
4453 const VkDeviceSize m_BufferImageGranularity;
4454 const uint32_t m_FrameInUseCount;
4455 const bool m_IsCustomPool;
4458 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
4462 bool m_HasEmptyBlock;
4463 VmaDefragmentator* m_pDefragmentator;
4464 uint32_t m_NextBlockId;
4466 VkDeviceSize CalcMaxBlockSize()
const;
4469 void Remove(VmaDeviceMemoryBlock* pBlock);
4473 void IncrementallySortBlocks();
4475 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
4480 VMA_CLASS_NO_COPY(VmaPool_T)
4482 VmaBlockVector m_BlockVector;
4489 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
4490 uint32_t GetId()
const {
return m_Id; }
4491 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
4493 #if VMA_STATS_STRING_ENABLED 4501 class VmaDefragmentator
4503 VMA_CLASS_NO_COPY(VmaDefragmentator)
4506 VmaBlockVector*
const m_pBlockVector;
4507 uint32_t m_CurrentFrameIndex;
4508 VkDeviceSize m_BytesMoved;
4509 uint32_t m_AllocationsMoved;
4511 struct AllocationInfo
4514 VkBool32* m_pChanged;
4517 m_hAllocation(VK_NULL_HANDLE),
4518 m_pChanged(VMA_NULL)
4523 struct AllocationInfoSizeGreater
4525 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 4527 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
4532 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4536 VmaDeviceMemoryBlock* m_pBlock;
4537 bool m_HasNonMovableAllocations;
4538 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4540 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
4542 m_HasNonMovableAllocations(true),
4543 m_Allocations(pAllocationCallbacks),
4544 m_pMappedDataForDefragmentation(VMA_NULL)
4548 void CalcHasNonMovableAllocations()
4550 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
4551 const size_t defragmentAllocCount = m_Allocations.size();
4552 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
4555 void SortAllocationsBySizeDescecnding()
4557 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4560 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
4565 void* m_pMappedDataForDefragmentation;
4568 struct BlockPointerLess
4570 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4572 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4574 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4576 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
4582 struct BlockInfoCompareMoveDestination
4584 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4586 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4590 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4594 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4602 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4603 BlockInfoVector m_Blocks;
4605 VkResult DefragmentRound(
4606 VkDeviceSize maxBytesToMove,
4607 uint32_t maxAllocationsToMove);
4609 static bool MoveMakesSense(
4610 size_t dstBlockIndex, VkDeviceSize dstOffset,
4611 size_t srcBlockIndex, VkDeviceSize srcOffset);
4616 VmaBlockVector* pBlockVector,
4617 uint32_t currentFrameIndex);
4619 ~VmaDefragmentator();
4621 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4622 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4624 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
4626 VkResult Defragment(
4627 VkDeviceSize maxBytesToMove,
4628 uint32_t maxAllocationsToMove);
4632 struct VmaAllocator_T
4634 VMA_CLASS_NO_COPY(VmaAllocator_T)
4637 bool m_UseKhrDedicatedAllocation;
4639 bool m_AllocationCallbacksSpecified;
4640 VkAllocationCallbacks m_AllocationCallbacks;
4644 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4645 VMA_MUTEX m_HeapSizeLimitMutex;
4647 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4648 VkPhysicalDeviceMemoryProperties m_MemProps;
4651 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
4654 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4655 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4656 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
4661 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4663 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4667 return m_VulkanFunctions;
4670 VkDeviceSize GetBufferImageGranularity()
const 4673 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4674 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4677 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4678 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4680 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4682 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4683 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
4686 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 4688 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
4689 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
4692 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 4694 return IsMemoryTypeNonCoherent(memTypeIndex) ?
4695 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
4696 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
4699 bool IsIntegratedGpu()
const 4701 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
4704 void GetBufferMemoryRequirements(
4706 VkMemoryRequirements& memReq,
4707 bool& requiresDedicatedAllocation,
4708 bool& prefersDedicatedAllocation)
const;
4709 void GetImageMemoryRequirements(
4711 VkMemoryRequirements& memReq,
4712 bool& requiresDedicatedAllocation,
4713 bool& prefersDedicatedAllocation)
const;
4716 VkResult AllocateMemory(
4717 const VkMemoryRequirements& vkMemReq,
4718 bool requiresDedicatedAllocation,
4719 bool prefersDedicatedAllocation,
4720 VkBuffer dedicatedBuffer,
4721 VkImage dedicatedImage,
4723 VmaSuballocationType suballocType,
4729 void CalculateStats(
VmaStats* pStats);
4731 #if VMA_STATS_STRING_ENABLED 4732 void PrintDetailedMap(
class VmaJsonWriter& json);
4735 VkResult Defragment(
4737 size_t allocationCount,
4738 VkBool32* pAllocationsChanged,
4746 void DestroyPool(
VmaPool pool);
4749 void SetCurrentFrameIndex(uint32_t frameIndex);
4751 void MakePoolAllocationsLost(
4753 size_t* pLostAllocationCount);
4754 VkResult CheckPoolCorruption(
VmaPool hPool);
4755 VkResult CheckCorruption(uint32_t memoryTypeBits);
4759 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4760 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4765 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
4766 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
4768 void FlushOrInvalidateAllocation(
4770 VkDeviceSize offset, VkDeviceSize size,
4771 VMA_CACHE_OPERATION op);
4774 VkDeviceSize m_PreferredLargeHeapBlockSize;
4776 VkPhysicalDevice m_PhysicalDevice;
4777 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
4779 VMA_MUTEX m_PoolsMutex;
4781 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4782 uint32_t m_NextPoolId;
4788 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
4790 VkResult AllocateMemoryOfType(
4792 VkDeviceSize alignment,
4793 bool dedicatedAllocation,
4794 VkBuffer dedicatedBuffer,
4795 VkImage dedicatedImage,
4797 uint32_t memTypeIndex,
4798 VmaSuballocationType suballocType,
4802 VkResult AllocateDedicatedMemory(
4804 VmaSuballocationType suballocType,
4805 uint32_t memTypeIndex,
4807 bool isUserDataString,
4809 VkBuffer dedicatedBuffer,
4810 VkImage dedicatedImage,
4820 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
4822 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4825 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
4827 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Allocates raw, correctly-aligned storage for one T via the allocator's callbacks.
// NOTE(review): the function signature line (original 4831-4832) was dropped by
// extraction — confirm the exact name against upstream source.
4830 template<
typename T>
4833 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
4836 template<
typename T>
4837 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
4839 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
4842 template<
typename T>
4843 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
4848 VmaFree(hAllocator, ptr);
4852 template<
typename T>
4853 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
4857 for(
size_t i = count; i--; )
4859 VmaFree(hAllocator, ptr);
4866 #if VMA_STATS_STRING_ENABLED 4868 class VmaStringBuilder
4871 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4872 size_t GetLength()
const {
return m_Data.size(); }
4873 const char* GetData()
const {
return m_Data.data(); }
4875 void Add(
char ch) { m_Data.push_back(ch); }
4876 void Add(
const char* pStr);
4877 void AddNewLine() { Add(
'\n'); }
4878 void AddNumber(uint32_t num);
4879 void AddNumber(uint64_t num);
4880 void AddPointer(
const void* ptr);
4883 VmaVector< char, VmaStlAllocator<char> > m_Data;
4886 void VmaStringBuilder::Add(
const char* pStr)
4888 const size_t strLen = strlen(pStr);
4891 const size_t oldCount = m_Data.size();
4892 m_Data.resize(oldCount + strLen);
4893 memcpy(m_Data.data() + oldCount, pStr, strLen);
4897 void VmaStringBuilder::AddNumber(uint32_t num)
4900 VmaUint32ToStr(buf,
sizeof(buf), num);
4904 void VmaStringBuilder::AddNumber(uint64_t num)
4907 VmaUint64ToStr(buf,
sizeof(buf), num);
4911 void VmaStringBuilder::AddPointer(
const void* ptr)
4914 VmaPtrToStr(buf,
sizeof(buf), ptr);
4918 #endif // #if VMA_STATS_STRING_ENABLED 4923 #if VMA_STATS_STRING_ENABLED 4927 VMA_CLASS_NO_COPY(VmaJsonWriter)
4929 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4932 void BeginObject(
bool singleLine =
false);
4935 void BeginArray(
bool singleLine =
false);
4938 void WriteString(
const char* pStr);
4939 void BeginString(
const char* pStr = VMA_NULL);
4940 void ContinueString(
const char* pStr);
4941 void ContinueString(uint32_t n);
4942 void ContinueString(uint64_t n);
4943 void ContinueString_Pointer(
const void* ptr);
4944 void EndString(
const char* pStr = VMA_NULL);
4946 void WriteNumber(uint32_t n);
4947 void WriteNumber(uint64_t n);
4948 void WriteBool(
bool b);
4952 static const char*
const INDENT;
4954 enum COLLECTION_TYPE
4956 COLLECTION_TYPE_OBJECT,
4957 COLLECTION_TYPE_ARRAY,
4961 COLLECTION_TYPE type;
4962 uint32_t valueCount;
4963 bool singleLineMode;
4966 VmaStringBuilder& m_SB;
4967 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4968 bool m_InsideString;
4970 void BeginValue(
bool isString);
4971 void WriteIndent(
bool oneLess =
false);
4974 const char*
const VmaJsonWriter::INDENT =
" ";
4976 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4978 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4979 m_InsideString(false)
4983 VmaJsonWriter::~VmaJsonWriter()
4985 VMA_ASSERT(!m_InsideString);
4986 VMA_ASSERT(m_Stack.empty());
4989 void VmaJsonWriter::BeginObject(
bool singleLine)
4991 VMA_ASSERT(!m_InsideString);
4997 item.type = COLLECTION_TYPE_OBJECT;
4998 item.valueCount = 0;
4999 item.singleLineMode = singleLine;
5000 m_Stack.push_back(item);
5003 void VmaJsonWriter::EndObject()
5005 VMA_ASSERT(!m_InsideString);
5010 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
5014 void VmaJsonWriter::BeginArray(
bool singleLine)
5016 VMA_ASSERT(!m_InsideString);
5022 item.type = COLLECTION_TYPE_ARRAY;
5023 item.valueCount = 0;
5024 item.singleLineMode = singleLine;
5025 m_Stack.push_back(item);
5028 void VmaJsonWriter::EndArray()
5030 VMA_ASSERT(!m_InsideString);
5035 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
5039 void VmaJsonWriter::WriteString(
const char* pStr)
5045 void VmaJsonWriter::BeginString(
const char* pStr)
5047 VMA_ASSERT(!m_InsideString);
5051 m_InsideString =
true;
5052 if(pStr != VMA_NULL && pStr[0] !=
'\0')
5054 ContinueString(pStr);
// Appends characters to a string value that is currently open.
// NOTE(review): the per-character switch (original lines 5064-5095, presumably
// JSON escaping of quotes, backslashes and control characters) was dropped by
// extraction — confirm against upstream source. Only the loop header and the
// fallback assert for unsupported characters remain visible.
5058 void VmaJsonWriter::ContinueString(
const char* pStr)
5060 VMA_ASSERT(m_InsideString);
5062 const size_t strLen = strlen(pStr);
5063 for(
size_t i = 0; i < strLen; ++i)
5096 VMA_ASSERT(0 &&
"Character not currently supported.");
5102 void VmaJsonWriter::ContinueString(uint32_t n)
5104 VMA_ASSERT(m_InsideString);
5108 void VmaJsonWriter::ContinueString(uint64_t n)
5110 VMA_ASSERT(m_InsideString);
5114 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
5116 VMA_ASSERT(m_InsideString);
5117 m_SB.AddPointer(ptr);
5120 void VmaJsonWriter::EndString(
const char* pStr)
5122 VMA_ASSERT(m_InsideString);
5123 if(pStr != VMA_NULL && pStr[0] !=
'\0')
5125 ContinueString(pStr);
5128 m_InsideString =
false;
5131 void VmaJsonWriter::WriteNumber(uint32_t n)
5133 VMA_ASSERT(!m_InsideString);
5138 void VmaJsonWriter::WriteNumber(uint64_t n)
5140 VMA_ASSERT(!m_InsideString);
5145 void VmaJsonWriter::WriteBool(
bool b)
5147 VMA_ASSERT(!m_InsideString);
5149 m_SB.Add(b ?
"true" :
"false");
5152 void VmaJsonWriter::WriteNull()
5154 VMA_ASSERT(!m_InsideString);
5159 void VmaJsonWriter::BeginValue(
bool isString)
5161 if(!m_Stack.empty())
5163 StackItem& currItem = m_Stack.back();
5164 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5165 currItem.valueCount % 2 == 0)
5167 VMA_ASSERT(isString);
5170 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5171 currItem.valueCount % 2 != 0)
5175 else if(currItem.valueCount > 0)
5184 ++currItem.valueCount;
5188 void VmaJsonWriter::WriteIndent(
bool oneLess)
5190 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
5194 size_t count = m_Stack.size();
5195 if(count > 0 && oneLess)
5199 for(
size_t i = 0; i < count; ++i)
5206 #endif // #if VMA_STATS_STRING_ENABLED 5210 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
5212 if(IsUserDataString())
5214 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
5216 FreeUserDataString(hAllocator);
5218 if(pUserData != VMA_NULL)
5220 const char*
const newStrSrc = (
char*)pUserData;
5221 const size_t newStrLen = strlen(newStrSrc);
5222 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
5223 memcpy(newStrDst, newStrSrc, newStrLen + 1);
5224 m_pUserData = newStrDst;
5229 m_pUserData = pUserData;
5233 void VmaAllocation_T::ChangeBlockAllocation(
5235 VmaDeviceMemoryBlock* block,
5236 VkDeviceSize offset)
5238 VMA_ASSERT(block != VMA_NULL);
5239 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5242 if(block != m_BlockAllocation.m_Block)
5244 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
5245 if(IsPersistentMap())
5247 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
5248 block->Map(hAllocator, mapRefCount, VMA_NULL);
5251 m_BlockAllocation.m_Block = block;
5252 m_BlockAllocation.m_Offset = offset;
5255 VkDeviceSize VmaAllocation_T::GetOffset()
const 5259 case ALLOCATION_TYPE_BLOCK:
5260 return m_BlockAllocation.m_Offset;
5261 case ALLOCATION_TYPE_DEDICATED:
5269 VkDeviceMemory VmaAllocation_T::GetMemory()
const 5273 case ALLOCATION_TYPE_BLOCK:
5274 return m_BlockAllocation.m_Block->GetDeviceMemory();
5275 case ALLOCATION_TYPE_DEDICATED:
5276 return m_DedicatedAllocation.m_hMemory;
5279 return VK_NULL_HANDLE;
5283 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 5287 case ALLOCATION_TYPE_BLOCK:
5288 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
5289 case ALLOCATION_TYPE_DEDICATED:
5290 return m_DedicatedAllocation.m_MemoryTypeIndex;
5297 void* VmaAllocation_T::GetMappedData()
const 5301 case ALLOCATION_TYPE_BLOCK:
5304 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
5305 VMA_ASSERT(pBlockData != VMA_NULL);
5306 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
5313 case ALLOCATION_TYPE_DEDICATED:
5314 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
5315 return m_DedicatedAllocation.m_pMappedData;
5322 bool VmaAllocation_T::CanBecomeLost()
const 5326 case ALLOCATION_TYPE_BLOCK:
5327 return m_BlockAllocation.m_CanBecomeLost;
5328 case ALLOCATION_TYPE_DEDICATED:
5336 VmaPool VmaAllocation_T::GetPool()
const 5338 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5339 return m_BlockAllocation.m_hPool;
5342 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5344 VMA_ASSERT(CanBecomeLost());
5350 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
5353 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
5358 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
5364 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
5374 #if VMA_STATS_STRING_ENABLED 5377 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
5386 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 5388 json.WriteString(
"Type");
5389 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
5391 json.WriteString(
"Size");
5392 json.WriteNumber(m_Size);
5394 if(m_pUserData != VMA_NULL)
5396 json.WriteString(
"UserData");
5397 if(IsUserDataString())
5399 json.WriteString((
const char*)m_pUserData);
5404 json.ContinueString_Pointer(m_pUserData);
5409 json.WriteString(
"CreationFrameIndex");
5410 json.WriteNumber(m_CreationFrameIndex);
5412 json.WriteString(
"LastUseFrameIndex");
5413 json.WriteNumber(GetLastUseFrameIndex());
5415 if(m_BufferImageUsage != 0)
5417 json.WriteString(
"Usage");
5418 json.WriteNumber(m_BufferImageUsage);
5424 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
5426 VMA_ASSERT(IsUserDataString());
5427 if(m_pUserData != VMA_NULL)
5429 char*
const oldStr = (
char*)m_pUserData;
5430 const size_t oldStrLen = strlen(oldStr);
5431 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
5432 m_pUserData = VMA_NULL;
5436 void VmaAllocation_T::BlockAllocMap()
5438 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5440 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5446 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
5450 void VmaAllocation_T::BlockAllocUnmap()
5452 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5454 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5460 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
5464 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
5466 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5470 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5472 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
5473 *ppData = m_DedicatedAllocation.m_pMappedData;
5479 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
5480 return VK_ERROR_MEMORY_MAP_FAILED;
5485 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5486 hAllocator->m_hDevice,
5487 m_DedicatedAllocation.m_hMemory,
5492 if(result == VK_SUCCESS)
5494 m_DedicatedAllocation.m_pMappedData = *ppData;
5501 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
5503 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5505 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5510 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
5511 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
5512 hAllocator->m_hDevice,
5513 m_DedicatedAllocation.m_hMemory);
5518 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
5522 #if VMA_STATS_STRING_ENABLED 5524 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
5528 json.WriteString(
"Blocks");
5531 json.WriteString(
"Allocations");
5534 json.WriteString(
"UnusedRanges");
5537 json.WriteString(
"UsedBytes");
5540 json.WriteString(
"UnusedBytes");
5545 json.WriteString(
"AllocationSize");
5546 json.BeginObject(
true);
5547 json.WriteString(
"Min");
5549 json.WriteString(
"Avg");
5551 json.WriteString(
"Max");
5558 json.WriteString(
"UnusedRangeSize");
5559 json.BeginObject(
true);
5560 json.WriteString(
"Min");
5562 json.WriteString(
"Avg");
5564 json.WriteString(
"Max");
5572 #endif // #if VMA_STATS_STRING_ENABLED 5574 struct VmaSuballocationItemSizeLess
5577 const VmaSuballocationList::iterator lhs,
5578 const VmaSuballocationList::iterator rhs)
const 5580 return lhs->size < rhs->size;
5583 const VmaSuballocationList::iterator lhs,
5584 VkDeviceSize rhsSize)
const 5586 return lhs->size < rhsSize;
5593 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
5597 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
5598 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
5602 VmaBlockMetadata::~VmaBlockMetadata()
5606 void VmaBlockMetadata::Init(VkDeviceSize size)
5610 m_SumFreeSize = size;
5612 VmaSuballocation suballoc = {};
5613 suballoc.offset = 0;
5614 suballoc.size = size;
5615 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5616 suballoc.hAllocation = VK_NULL_HANDLE;
5618 m_Suballocations.push_back(suballoc);
5619 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5621 m_FreeSuballocationsBySize.push_back(suballocItem);
// Debug self-check: walks all suballocations verifying contiguity, alternation
// of free/used regions, and consistency of the cached counters against
// recomputed values. NOTE(review): extraction dropped the early `return false;`
// bodies of most checks here — the visible conditions are the failure tests.
5624 bool VmaBlockMetadata::Validate()
const 5626 if(m_Suballocations.empty())
// Expected values recomputed from scratch during the walk.
5632 VkDeviceSize calculatedOffset = 0;
5634 uint32_t calculatedFreeCount = 0;
5636 VkDeviceSize calculatedSumFreeSize = 0;
5639 size_t freeSuballocationsToRegister = 0;
// Two free regions may never be adjacent (they should have been merged).
5641 bool prevFree =
false;
5643 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5644 suballocItem != m_Suballocations.cend();
5647 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must be contiguous: each starts where the previous ended.
5650 if(subAlloc.offset != calculatedOffset)
5655 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
5657 if(prevFree && currFree)
// A region is free exactly when it has no allocation handle.
5662 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5669 calculatedSumFreeSize += subAlloc.size;
5670 ++calculatedFreeCount;
// Only sufficiently large free regions are kept in the size-sorted list.
5671 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5673 ++freeSuballocationsToRegister;
5677 if(subAlloc.size < VMA_DEBUG_MARGIN)
// Used regions must agree with their allocation object's offset and size.
5684 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5688 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
// With a debug margin enabled, every used region must follow a free one.
5694 if(VMA_DEBUG_MARGIN > 0 && !prevFree)
5700 calculatedOffset += subAlloc.size;
5701 prevFree = currFree;
5706 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The registered free list must contain only free regions, sorted ascending by size.
5711 VkDeviceSize lastSize = 0;
5712 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5714 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5717 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5722 if(suballocItem->size < lastSize)
5727 lastSize = suballocItem->size;
// Final cross-check of recomputed totals against the cached members.
5731 if(!ValidateFreeSuballocationList() ||
5732 (calculatedOffset != m_Size) ||
5733 (calculatedSumFreeSize != m_SumFreeSize) ||
5734 (calculatedFreeCount != m_FreeCount))
5742 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5744 if(!m_FreeSuballocationsBySize.empty())
5746 return m_FreeSuballocationsBySize.back()->size;
5754 bool VmaBlockMetadata::IsEmpty()
const 5756 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo from this block's suballocations: iterates the list
// and accounts used vs. free ranges separately.
// NOTE(review): the accumulation statements inside the loop were dropped
// by extraction; only the loop skeleton survives.
5759 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5763 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5775 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5776 suballocItem != m_Suballocations.cend();
5779 const VmaSuballocation& suballoc = *suballocItem;
5780 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Adds this block's totals into an aggregate VmaPoolStats (block size here;
// further counters presumably added on dropped lines — TODO confirm).
5793 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5795 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5797 inoutStats.
size += m_Size;
// Dumps this block as JSON: totals followed by a per-suballocation array.
5804 #if VMA_STATS_STRING_ENABLED 5806 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5810 json.WriteString(
"TotalBytes");
5811 json.WriteNumber(m_Size);
5813 json.WriteString(
"UnusedBytes");
5814 json.WriteNumber(m_SumFreeSize);
// Used allocations = total suballocations minus free ones.
5816 json.WriteString(
"Allocations");
5817 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5819 json.WriteString(
"UnusedRanges");
5820 json.WriteNumber(m_FreeCount);
5822 json.WriteString(
"Suballocations");
5825 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5826 suballocItem != m_Suballocations.cend();
5827 ++suballocItem, ++i)
5829 json.BeginObject(
true);
5831 json.WriteString(
"Offset");
5832 json.WriteNumber(suballocItem->offset);
// Free ranges print their type name and size explicitly; used ranges
// delegate to the allocation's own PrintParameters.
5834 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5836 json.WriteString(
"Type");
5837 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
5839 json.WriteString(
"Size");
5840 json.WriteNumber(suballocItem->size);
5844 suballocItem->hAllocation->PrintParameters(json);
5854 #endif // #if VMA_STATS_STRING_ENABLED 5866 bool VmaBlockMetadata::CreateAllocationRequest(
5867 uint32_t currentFrameIndex,
5868 uint32_t frameInUseCount,
5869 VkDeviceSize bufferImageGranularity,
5870 VkDeviceSize allocSize,
5871 VkDeviceSize allocAlignment,
5872 VmaSuballocationType allocType,
5873 bool canMakeOtherLost,
5874 VmaAllocationRequest* pAllocationRequest)
5876 VMA_ASSERT(allocSize > 0);
5877 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5878 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5879 VMA_HEAVY_ASSERT(Validate());
5882 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
5888 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5889 if(freeSuballocCount > 0)
5894 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5895 m_FreeSuballocationsBySize.data(),
5896 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5897 allocSize + 2 * VMA_DEBUG_MARGIN,
5898 VmaSuballocationItemSizeLess());
5899 size_t index = it - m_FreeSuballocationsBySize.data();
5900 for(; index < freeSuballocCount; ++index)
5905 bufferImageGranularity,
5909 m_FreeSuballocationsBySize[index],
5911 &pAllocationRequest->offset,
5912 &pAllocationRequest->itemsToMakeLostCount,
5913 &pAllocationRequest->sumFreeSize,
5914 &pAllocationRequest->sumItemSize))
5916 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5924 for(
size_t index = freeSuballocCount; index--; )
5929 bufferImageGranularity,
5933 m_FreeSuballocationsBySize[index],
5935 &pAllocationRequest->offset,
5936 &pAllocationRequest->itemsToMakeLostCount,
5937 &pAllocationRequest->sumFreeSize,
5938 &pAllocationRequest->sumItemSize))
5940 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5947 if(canMakeOtherLost)
5951 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5952 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5954 VmaAllocationRequest tmpAllocRequest = {};
5955 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5956 suballocIt != m_Suballocations.end();
5959 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5960 suballocIt->hAllocation->CanBecomeLost())
5965 bufferImageGranularity,
5971 &tmpAllocRequest.offset,
5972 &tmpAllocRequest.itemsToMakeLostCount,
5973 &tmpAllocRequest.sumFreeSize,
5974 &tmpAllocRequest.sumItemSize))
5976 tmpAllocRequest.item = suballocIt;
5978 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5980 *pAllocationRequest = tmpAllocRequest;
5986 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Executes the "make lost" part of a previously computed allocation
// request: walks forward from request->item, skipping free ranges, and
// makes each lost allocation actually lost, replacing it with a merged
// free range. Returns false (dropped line) if any MakeLost call fails.
5995 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5996 uint32_t currentFrameIndex,
5997 uint32_t frameInUseCount,
5998 VmaAllocationRequest* pAllocationRequest)
6000 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over free suballocations; only used ones can be made lost.
6002 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
6004 ++pAllocationRequest->item;
6006 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6007 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
6008 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
6009 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation merges with neighbors and returns the merged iterator,
// so request->item stays valid for the next loop iteration.
6011 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
6012 --pAllocationRequest->itemsToMakeLostCount;
6020 VMA_HEAVY_ASSERT(Validate());
6021 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6022 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that is allowed to become lost
// and whose last use is older than frameInUseCount frames. Returns how many
// were lost.
6027 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6029 uint32_t lostAllocationCount = 0;
6030 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6031 it != m_Suballocations.end();
6034 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
6035 it->hAllocation->CanBecomeLost() &&
6036 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
6038 it = FreeSuballocation(it);
6039 ++lostAllocationCount;
6042 return lostAllocationCount;
// Scans mapped block memory (pBlockData) and validates the magic values
// written in the VMA_DEBUG_MARGIN before and after every used allocation.
// Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin.
6045 VkResult VmaBlockMetadata::CheckCorruption(
const void* pBlockData)
6047 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6048 it != m_Suballocations.end();
6051 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
6053 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
6055 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
6056 return VK_ERROR_VALIDATION_FAILED_EXT;
6058 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
6060 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
6061 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: carves `allocSize`
// bytes out of the free suballocation at request.item, converting it to a
// used range and inserting new FREE ranges for any padding left at the
// end and at the beginning. Updates m_FreeCount / m_SumFreeSize caches.
6069 void VmaBlockMetadata::Alloc(
6070 const VmaAllocationRequest& request,
6071 VmaSuballocationType type,
6072 VkDeviceSize allocSize,
6075 VMA_ASSERT(request.item != m_Suballocations.end());
6076 VmaSuballocation& suballoc = *request.item;
6078 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// The aligned offset may sit inside the free range: paddingBegin is the
// gap before it, paddingEnd the leftover after the allocation.
6080 VMA_ASSERT(request.offset >= suballoc.offset);
6081 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
6082 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
6083 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Unregister first: the range's size is about to change.
6087 UnregisterFreeSuballocation(request.item);
6089 suballoc.offset = request.offset;
6090 suballoc.size = allocSize;
6091 suballoc.type = type;
6092 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new FREE range inserted after request.item.
6097 VmaSuballocation paddingSuballoc = {};
6098 paddingSuballoc.offset = request.offset + allocSize;
6099 paddingSuballoc.size = paddingEnd;
6100 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6101 VmaSuballocationList::iterator next = request.item;
// NOTE(review): a `++next;` (original ~6102) appears dropped — insert must
// happen after request.item, not before it.
6103 const VmaSuballocationList::iterator paddingEndItem =
6104 m_Suballocations.insert(next, paddingSuballoc);
6105 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new FREE range inserted before request.item.
6111 VmaSuballocation paddingSuballoc = {};
6112 paddingSuballoc.offset = request.offset - paddingBegin;
6113 paddingSuballoc.size = paddingBegin;
6114 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6115 const VmaSuballocationList::iterator paddingBeginItem =
6116 m_Suballocations.insert(request.item, paddingSuballoc);
6117 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; each padding range added back increments the
// count again (increments presumably on dropped lines — TODO confirm).
6121 m_FreeCount = m_FreeCount - 1;
6122 if(paddingBegin > 0)
6130 m_SumFreeSize -= allocSize;
// NOTE(review): the header of VmaBlockMetadata::Free(VmaAllocation) was
// dropped by extraction; the loop below is its body. Finds the
// suballocation owning `allocation` and frees it.
6135 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6136 suballocItem != m_Suballocations.end();
6139 VmaSuballocation& suballoc = *suballocItem;
6140 if(suballoc.hAllocation == allocation)
6142 FreeSuballocation(suballocItem);
6143 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the allocation was not in this block — a logic error.
6147 VMA_ASSERT(0 &&
"Not found!");
// Same as Free above, but looks the suballocation up by its byte offset.
6150 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
6152 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6153 suballocItem != m_Suballocations.end();
6156 VmaSuballocation& suballoc = *suballocItem;
6157 if(suballoc.offset == offset)
6159 FreeSuballocation(suballocItem);
6163 VMA_ASSERT(0 &&
"Not found!");
// Validates invariants of m_FreeSuballocationsBySize alone: every entry is
// FREE, at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the
// vector is sorted by size ascending. (Early-return bodies dropped by
// extraction; failures presumably return false.)
6166 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 6168 VkDeviceSize lastSize = 0;
6169 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
6171 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
6173 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
6178 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6183 if(it->size < lastSize)
6189 lastSize = it->size;
// Core fitting test: can an allocation of allocSize/allocAlignment/allocType
// be placed starting at `suballocItem`? Computes the final *pOffset
// (debug margin + alignment + granularity adjustments) and, in the
// canMakeOtherLost path, how many following allocations must be made lost
// (*itemsToMakeLostCount) plus the cost inputs *pSumFreeSize/*pSumItemSize.
// Two largely parallel code paths: with and without making others lost.
// NOTE(review): many early-return/brace lines were dropped by extraction;
// order of the surviving checks is preserved exactly.
6194 bool VmaBlockMetadata::CheckAllocation(
6195 uint32_t currentFrameIndex,
6196 uint32_t frameInUseCount,
6197 VkDeviceSize bufferImageGranularity,
6198 VkDeviceSize allocSize,
6199 VkDeviceSize allocAlignment,
6200 VmaSuballocationType allocType,
6201 VmaSuballocationList::const_iterator suballocItem,
6202 bool canMakeOtherLost,
6203 VkDeviceSize* pOffset,
6204 size_t* itemsToMakeLostCount,
6205 VkDeviceSize* pSumFreeSize,
6206 VkDeviceSize* pSumItemSize)
const 6208 VMA_ASSERT(allocSize > 0);
6209 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6210 VMA_ASSERT(suballocItem != m_Suballocations.cend());
6211 VMA_ASSERT(pOffset != VMA_NULL);
6213 *itemsToMakeLostCount = 0;
// ---- Path 1: allocation may span used ranges that can be made lost. ----
6217 if(canMakeOtherLost)
6219 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6221 *pSumFreeSize = suballocItem->size;
// Starting range is used: it must itself be lost-able and stale.
6225 if(suballocItem->hAllocation->CanBecomeLost() &&
6226 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6228 ++*itemsToMakeLostCount;
6229 *pSumItemSize = suballocItem->size;
// Remaining block space from this offset must fit the allocation at all.
6238 if(m_Size - suballocItem->offset < allocSize)
6244 *pOffset = suballocItem->offset;
// Reserve the leading debug margin, then align up.
6247 if(VMA_DEBUG_MARGIN > 0)
6249 *pOffset += VMA_DEBUG_MARGIN;
6253 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Buffer/image granularity: if any prior suballocation on the same "page"
// has a conflicting type, bump alignment up to the granularity.
6257 if(bufferImageGranularity > 1)
6259 bool bufferImageGranularityConflict =
false;
6260 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6261 while(prevSuballocItem != m_Suballocations.cbegin())
6264 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6265 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6267 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6269 bufferImageGranularityConflict =
true;
6277 if(bufferImageGranularityConflict)
6279 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
6285 if(*pOffset >= suballocItem->offset + suballocItem->size)
6291 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
6294 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
6296 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
6298 if(suballocItem->offset + totalSize > m_Size)
// Walk forward over as many suballocations as needed to cover totalSize,
// summing free bytes and counting lost-able used allocations.
6305 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
6306 if(totalSize > suballocItem->size)
6308 VkDeviceSize remainingSize = totalSize - suballocItem->size;
6309 while(remainingSize > 0)
6312 if(lastSuballocItem == m_Suballocations.cend())
6316 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6318 *pSumFreeSize += lastSuballocItem->size;
6322 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
6323 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
6324 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6326 ++*itemsToMakeLostCount;
6327 *pSumItemSize += lastSuballocItem->size;
6334 remainingSize = (lastSuballocItem->size < remainingSize) ?
6335 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following suballocations: conflicting
// neighbors on the same page must also be lost-able and stale.
6341 if(bufferImageGranularity > 1)
6343 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
6345 while(nextSuballocItem != m_Suballocations.cend())
6347 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6348 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6350 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
6352 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
6353 if(nextSuballoc.hAllocation->CanBecomeLost() &&
6354 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6356 ++*itemsToMakeLostCount;
// ---- Path 2: classic fit within a single FREE suballocation. ----
6375 const VmaSuballocation& suballoc = *suballocItem;
6376 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6378 *pSumFreeSize = suballoc.size;
6381 if(suballoc.size < allocSize)
6387 *pOffset = suballoc.offset;
6390 if(VMA_DEBUG_MARGIN > 0)
6392 *pOffset += VMA_DEBUG_MARGIN;
6396 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same granularity-vs-previous check as in path 1.
6400 if(bufferImageGranularity > 1)
6402 bool bufferImageGranularityConflict =
false;
6403 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6404 while(prevSuballocItem != m_Suballocations.cbegin())
6407 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6408 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6410 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6412 bufferImageGranularityConflict =
true;
6420 if(bufferImageGranularityConflict)
6422 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Fit test: leading padding + allocation + end margin within this range.
6427 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
6430 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
6433 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A conflicting next neighbor on the same page is a hard failure here
// (no "make lost" available in this path).
6440 if(bufferImageGranularity > 1)
6442 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
6444 while(nextSuballocItem != m_Suballocations.cend())
6446 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6447 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6449 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a FREE suballocation with its (also FREE) successor: sizes are
// summed into `item` and the successor is erased. Caller must have
// unregistered both from the by-size vector first (sizes change here).
// NOTE(review): a `++nextItem;` between lines 6473 and 6475 appears dropped.
6468 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
6470 VMA_ASSERT(item != m_Suballocations.end());
6471 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6473 VmaSuballocationList::iterator nextItem = item;
6475 VMA_ASSERT(nextItem != m_Suballocations.end());
6476 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6478 item->size += nextItem->size;
6480 m_Suballocations.erase(nextItem);
// Converts a used suballocation back to FREE, merges it with adjacent free
// neighbors (keeping the no-two-adjacent-free invariant), updates cached
// counters, and re-registers the merged range. Returns an iterator to the
// resulting free suballocation.
6483 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
6486 VmaSuballocation& suballoc = *suballocItem;
6487 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6488 suballoc.hAllocation = VK_NULL_HANDLE;
6492 m_SumFreeSize += suballoc.size;
// Determine whether the neighbors on either side are free.
6495 bool mergeWithNext =
false;
6496 bool mergeWithPrev =
false;
6498 VmaSuballocationList::iterator nextItem = suballocItem;
// NOTE(review): `++nextItem;` (original ~6499) appears dropped.
6500 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
6502 mergeWithNext =
true;
6505 VmaSuballocationList::iterator prevItem = suballocItem;
6506 if(suballocItem != m_Suballocations.begin())
6509 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6511 mergeWithPrev =
true;
// Merge right first, then left; neighbors are unregistered before their
// sizes change and the surviving range is re-registered once at the end.
6517 UnregisterFreeSuballocation(nextItem);
6518 MergeFreeWithNext(suballocItem);
6523 UnregisterFreeSuballocation(prevItem);
6524 MergeFreeWithNext(prevItem);
6525 RegisterFreeSuballocation(prevItem);
6530 RegisterFreeSuballocation(suballocItem);
6531 return suballocItem;
// Adds a FREE suballocation to the size-sorted vector, but only if it is
// large enough to be worth tracking
// (>= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER). Insertion keeps the
// vector sorted via VmaVectorInsertSorted with the size comparator.
6535 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
6537 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6538 VMA_ASSERT(item->size > 0);
6542 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6544 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6546 if(m_FreeSuballocationsBySize.empty())
6548 m_FreeSuballocationsBySize.push_back(item);
6552 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Inverse of RegisterFreeSuballocation: binary-search to the first entry
// with the same size, then scan forward through the equal-size run until
// the exact iterator is found and removed. Not finding it is a logic error.
6560 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
6562 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6563 VMA_ASSERT(item->size > 0);
6567 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6569 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6571 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6572 m_FreeSuballocationsBySize.data(),
6573 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
6575 VmaSuballocationItemSizeLess());
6576 for(
size_t index = it - m_FreeSuballocationsBySize.data();
6577 index < m_FreeSuballocationsBySize.size();
6580 if(m_FreeSuballocationsBySize[index] == item)
6582 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// While scanning, every entry must still have the searched size; otherwise
// the item is missing from the vector.
6585 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
6587 VMA_ASSERT(0 &&
"Not found.");
// VmaDeviceMemoryBlock wraps one VkDeviceMemory plus its VmaBlockMetadata.
// Constructed empty; actual resources are attached in Init().
6596 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
6597 m_Metadata(hAllocator),
6598 m_MemoryTypeIndex(UINT32_MAX),
6600 m_hMemory(VK_NULL_HANDLE),
6602 m_pMappedData(VMA_NULL)
// Attaches an already-allocated VkDeviceMemory of `newSize` bytes to this
// block and initializes the metadata as one big free range. Must only be
// called once (asserted via m_hMemory).
6606 void VmaDeviceMemoryBlock::Init(
6607 uint32_t newMemoryTypeIndex,
6608 VkDeviceMemory newMemory,
6609 VkDeviceSize newSize,
6612 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6614 m_MemoryTypeIndex = newMemoryTypeIndex;
6616 m_hMemory = newMemory;
6618 m_Metadata.Init(newSize);
// Releases the VkDeviceMemory back through the allocator. The block must
// be empty — leaking suballocations here means callers forgot to free.
6621 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
6625 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6627 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6628 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6629 m_hMemory = VK_NULL_HANDLE;
// Sanity check: block must hold live memory of nonzero size, then defer to
// the metadata's own Validate().
6632 bool VmaDeviceMemoryBlock::Validate()
const 6634 if((m_hMemory == VK_NULL_HANDLE) ||
6635 (m_Metadata.GetSize() == 0))
6640 return m_Metadata.Validate();
// Temporarily maps the whole block and runs the metadata's magic-value
// corruption scan over it; always unmaps before returning the result.
6643 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
6645 void* pData =
nullptr;
6646 VkResult res = Map(hAllocator, 1, &pData);
6647 if(res != VK_SUCCESS)
6652 res = m_Metadata.CheckCorruption(pData);
6654 Unmap(hAllocator, 1);
// Reference-counted map of the block's VkDeviceMemory, guarded by the
// block mutex (honoring hAllocator->m_UseMutex). If already mapped, just
// bumps m_MapCount and returns the cached pointer; otherwise calls
// vkMapMemory and caches the result. ppData may be null if the caller only
// wants to hold a mapping reference.
6659 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
6666 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: reuse the existing mapping.
6669 m_MapCount += count;
6670 VMA_ASSERT(m_pMappedData != VMA_NULL);
6671 if(ppData != VMA_NULL)
6673 *ppData = m_pMappedData;
// First mapping: call into Vulkan (arguments partially dropped by
// extraction; presumably offset 0, VK_WHOLE_SIZE — TODO confirm).
6679 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6680 hAllocator->m_hDevice,
6686 if(result == VK_SUCCESS)
6688 if(ppData != VMA_NULL)
6690 *ppData = m_pMappedData;
// Decrements the map reference count; when it reaches zero the memory is
// actually unmapped. Unbalanced Unmap is a caller bug (asserted below).
6698 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
6705 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6706 if(m_MapCount >= count)
6708 m_MapCount -= count;
// Last reference released: drop the cached pointer and unmap.
6711 m_pMappedData = VMA_NULL;
6712 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
6717 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Writes the debug magic values into the margins directly before and after
// an allocation (requires VMA_DEBUG_MARGIN and corruption detection on).
// Maps, writes both markers, unmaps.
6721 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
6723 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
6724 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
6727 VkResult res = Map(hAllocator, 1, &pData);
6728 if(res != VK_SUCCESS)
6733 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
6734 VmaWriteMagicValue(pData, allocOffset + allocSize);
6736 Unmap(hAllocator, 1);
// Counterpart of WriteMagicValueAroundAllocation, run when an allocation is
// freed: checks both margin markers and asserts loudly on corruption.
6741 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
6743 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
6744 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
6747 VkResult res = Map(hAllocator, 1, &pData);
6748 if(res != VK_SUCCESS)
6753 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
6755 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
6757 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
6759 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
6762 Unmap(hAllocator, 1);
// Binds a buffer to this block's memory at the allocation's offset. The
// mutex serializes binding with other map/bind operations on this block.
6767 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
6772 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6773 hAllocation->GetBlock() ==
this);
6775 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6776 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
6777 hAllocator->m_hDevice,
6780 hAllocation->GetOffset());
// Same as BindBufferMemory but for images, via vkBindImageMemory.
6783 VkResult VmaDeviceMemoryBlock::BindImageMemory(
6788 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6789 hAllocation->GetBlock() ==
this);
6791 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6792 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
6793 hAllocator->m_hDevice,
6796 hAllocation->GetOffset());
// NOTE(review): fragment — the enclosing function header (presumably a
// VmaInitStatInfo-style helper zeroing a VmaStatInfo) was dropped by
// extraction; only the memset survives.
6801 memset(&outInfo, 0,
sizeof(outInfo));
// Post-processing of accumulated stats (body dropped by extraction;
// presumably computes averages — TODO confirm).
6820 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T: a custom pool; its constructor forwards the VmaPoolCreateInfo
// fields into the embedded block vector.
6828 VmaPool_T::VmaPool_T(
6833 createInfo.memoryTypeIndex,
6834 createInfo.blockSize,
6835 createInfo.minBlockCount,
6836 createInfo.maxBlockCount,
6838 createInfo.frameInUseCount,
6844 VmaPool_T::~VmaPool_T()
// VmaBlockVector: sequence of VmaDeviceMemoryBlock of one memory type,
// kept incrementally sorted by free size. Holds pool-shaping parameters
// (preferred block size, min/max block count, granularity, frame-in-use
// count) and a defragmentator slot.
6848 #if VMA_STATS_STRING_ENABLED 6850 #endif // #if VMA_STATS_STRING_ENABLED 6852 VmaBlockVector::VmaBlockVector(
6854 uint32_t memoryTypeIndex,
6855 VkDeviceSize preferredBlockSize,
6856 size_t minBlockCount,
6857 size_t maxBlockCount,
6858 VkDeviceSize bufferImageGranularity,
6859 uint32_t frameInUseCount,
6860 bool isCustomPool) :
6861 m_hAllocator(hAllocator),
6862 m_MemoryTypeIndex(memoryTypeIndex),
6863 m_PreferredBlockSize(preferredBlockSize),
6864 m_MinBlockCount(minBlockCount),
6865 m_MaxBlockCount(maxBlockCount),
6866 m_BufferImageGranularity(bufferImageGranularity),
6867 m_FrameInUseCount(frameInUseCount),
6868 m_IsCustomPool(isCustomPool),
6869 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6870 m_HasEmptyBlock(false),
6871 m_pDefragmentator(VMA_NULL),
// Destructor: no defragmentation may be in progress; destroys and deletes
// every remaining block (reverse order).
6876 VmaBlockVector::~VmaBlockVector()
6878 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6880 for(
size_t i = m_Blocks.size(); i--; )
6882 m_Blocks[i]->Destroy(m_hAllocator);
6883 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size; bails out on
// the first failure (early return presumably on a dropped line).
6887 VkResult VmaBlockVector::CreateMinBlocks()
6889 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6891 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6892 if(res != VK_SUCCESS)
// Aggregates VmaPoolStats over all blocks, under the vector mutex.
6900 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6908 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6910 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6912 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6914 VMA_HEAVY_ASSERT(pBlock->Validate());
6915 pBlock->m_Metadata.AddPoolStats(*pStats);
// Corruption detection only applies when the build enables it, a debug
// margin exists, and this memory type is HOST_VISIBLE|HOST_COHERENT (the
// margins must be CPU-writable without explicit flushes).
6919 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 6921 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
6922 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
6923 (VMA_DEBUG_MARGIN > 0) &&
6924 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retries of the "make other allocations lost" loop below,
// guarding against livelock when allocations keep reappearing.
6927 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector in three phases, all under the vector
// mutex: (1) try every existing block; (2) create a new block (halving the
// preferred size up to NEW_BLOCK_SIZE_SHIFT_MAX times for non-custom pools)
// and allocate from it; (3) if allowed, find the cheapest request that
// makes other allocations lost and retry up to VMA_ALLOCATION_TRY_COUNT
// times. Persistent-map requests bump the block's map refcount before the
// suballocation is committed.
// NOTE(review): numerous argument lines and returns were dropped by
// extraction; flow comments below describe only what the surviving lines
// show.
6929 VkResult VmaBlockVector::Allocate(
6931 uint32_t currentFrameIndex,
6933 VkDeviceSize alignment,
6935 VmaSuballocationType suballocType,
// Reject requests that cannot fit any block of this vector even when empty.
6939 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
6941 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6947 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// ---- Phase 1: search existing blocks (sorted by free size). ----
6951 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6953 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6954 VMA_ASSERT(pCurrBlock);
6955 VmaAllocationRequest currRequest = {};
6956 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6959 m_BufferImageGranularity,
// canMakeOtherLost is false in this phase, so nothing may need to be lost.
6967 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistent mapping requested: hold a map reference before committing.
6971 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6972 if(res != VK_SUCCESS)
// Block is about to gain an allocation, so it can no longer be the
// cached "empty block".
6979 if(pCurrBlock->m_Metadata.IsEmpty())
6981 m_HasEmptyBlock =
false;
6984 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6985 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, size, *pAllocation);
6986 (*pAllocation)->InitBlockAllocation(
6995 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6996 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6997 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6998 if(IsCorruptionDetectionEnabled())
7000 VkResult res = pCurrBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
7001 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// ---- Phase 2: create a new block if the block-count limit allows. ----
7007 const bool canCreateNewBlock =
7009 (m_Blocks.size() < m_MaxBlockCount);
7012 if(canCreateNewBlock)
7015 VkDeviceSize newBlockSize = m_PreferredBlockSize;
7016 uint32_t newBlockSizeShift = 0;
7017 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic for default pools: start smaller than the preferred size if
// existing blocks are small and the request would still fit twice over.
7021 if(m_IsCustomPool ==
false)
7024 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
7025 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
7027 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
7028 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
7030 newBlockSize = smallerNewBlockSize;
7031 ++newBlockSizeShift;
7040 size_t newBlockIndex = 0;
7041 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On allocation failure, retry with progressively halved block sizes
// (again only for default pools, and only while the request still fits).
7043 if(m_IsCustomPool ==
false)
7045 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
7047 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
7048 if(smallerNewBlockSize >= size)
7050 newBlockSize = smallerNewBlockSize;
7051 ++newBlockSizeShift;
7052 res = CreateBlock(newBlockSize, &newBlockIndex);
7061 if(res == VK_SUCCESS)
7063 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
7064 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= size);
7068 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
7069 if(res != VK_SUCCESS)
// A brand-new block: the request is expected to succeed trivially.
7076 VmaAllocationRequest allocRequest;
7077 if(pBlock->m_Metadata.CreateAllocationRequest(
7080 m_BufferImageGranularity,
7087 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
7088 pBlock->m_Metadata.Alloc(allocRequest, suballocType, size, *pAllocation);
7089 (*pAllocation)->InitBlockAllocation(
7092 allocRequest.offset,
7098 VMA_HEAVY_ASSERT(pBlock->Validate());
7099 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
7100 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
7101 if(IsCorruptionDetectionEnabled())
7103 res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, allocRequest.offset, size);
7104 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
7111 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// ---- Phase 3: make other allocations lost (bounded retry loop). ----
7119 if(canMakeOtherLost)
7121 uint32_t tryIndex = 0;
7122 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
7124 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
7125 VmaAllocationRequest bestRequest = {};
7126 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Evaluate every block and keep the request with the lowest cost; a cost
// of 0 (nothing needs to be lost) cannot be beaten, so stop early.
7130 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
7132 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
7133 VMA_ASSERT(pCurrBlock);
7134 VmaAllocationRequest currRequest = {};
7135 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
7138 m_BufferImageGranularity,
7145 const VkDeviceSize currRequestCost = currRequest.CalcCost();
7146 if(pBestRequestBlock == VMA_NULL ||
7147 currRequestCost < bestRequestCost)
7149 pBestRequestBlock = pCurrBlock;
7150 bestRequest = currRequest;
7151 bestRequestCost = currRequestCost;
7153 if(bestRequestCost == 0)
7161 if(pBestRequestBlock != VMA_NULL)
7165 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
7166 if(res != VK_SUCCESS)
// The request may have been invalidated concurrently; only commit if all
// required allocations could actually be made lost, otherwise retry.
7172 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
7178 if(pBestRequestBlock->m_Metadata.IsEmpty())
7180 m_HasEmptyBlock =
false;
7183 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
7184 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, size, *pAllocation);
7185 (*pAllocation)->InitBlockAllocation(
7194 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
7195 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
7196 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
7197 if(IsCorruptionDetectionEnabled())
7199 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
7200 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Retry budget exhausted — give up with TOO_MANY_OBJECTS.
7215 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
7217 return VK_ERROR_TOO_MANY_OBJECTS;
7221 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a block allocation: validates corruption margins, drops any
// persistent mapping reference, returns the suballocation to the block's
// metadata, and applies the one-empty-block policy — at most one empty
// block is cached (m_HasEmptyBlock); a second empty block above
// m_MinBlockCount is destroyed. The actual destruction happens after the
// mutex is released to avoid calling into Vulkan under the lock.
7224 void VmaBlockVector::Free(
7227 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock; deletion is deferred to outside it.
7231 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7233 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
7235 if(IsCorruptionDetectionEnabled())
7237 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
7238 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Balance the Map() taken at allocation time for persistently mapped
// allocations.
7241 if(hAllocation->IsPersistentMap())
7243 pBlock->Unmap(m_hAllocator, 1);
7246 pBlock->m_Metadata.Free(hAllocation);
7247 VMA_HEAVY_ASSERT(pBlock->Validate());
7249 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Block became empty: either schedule it for deletion (if another empty
// block is already cached and we're above the minimum) or cache it.
7252 if(pBlock->m_Metadata.IsEmpty())
7255 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
7257 pBlockToDelete = pBlock;
7263 m_HasEmptyBlock =
true;
// Block not empty but an empty one is cached: if the cached (last) block
// is still empty and above the minimum, delete it instead.
7268 else if(m_HasEmptyBlock)
7270 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
7271 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
7273 pBlockToDelete = pLastBlock;
7274 m_Blocks.pop_back();
7275 m_HasEmptyBlock =
false;
7279 IncrementallySortBlocks();
// Destruction outside the lock.
7284 if(pBlockToDelete != VMA_NULL)
7286 VMA_DEBUG_LOG(
" Deleted empty allocation");
7287 pBlockToDelete->Destroy(m_hAllocator);
7288 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block in this vector.
// Iterates backwards (blocks are kept sorted by free size) and stops early once a
// block at least as large as m_PreferredBlockSize is found, since no larger answer
// matters to the caller. NOTE(review): extraction dropped the braces and the early
// `break`/return after the size check — confirm against upstream.
7292 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 7294 VkDeviceSize result = 0;
7295 for(
size_t i = m_Blocks.size(); i--; )
7297 result = VMA_MAX(result, m_Blocks[i]->m_Metadata.GetSize());
// Early-out: preferred block size is an upper bound of interest.
7298 if(result >= m_PreferredBlockSize)
// Removes pBlock from m_Blocks by linear search + VmaVectorRemove (order-preserving
// erase). Used by defragmentation when a block is destroyed externally.
// NOTE(review): the terminating `return`/`break` after removal is not visible in
// this garbled extraction — confirm upstream.
7306 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
7308 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7310 if(m_Blocks[blockIndex] == pBlock)
7312 VmaVectorRemove(m_Blocks, blockIndex);
// Performs one pass of adjacent-swap (bubble-style) sorting so blocks end up
// ordered by ascending sum of free space. Called after each alloc/free; a single
// incremental pass amortizes the sort cost instead of fully re-sorting every time.
7319 void VmaBlockVector::IncrementallySortBlocks()
7322 for(
size_t i = 1; i < m_Blocks.size(); ++i)
7324 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
7326 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally
// reports its index via pNewBlockIndex.
// NOTE(review): extraction dropped the error-return after AllocateVulkanMemory
// and part of the pBlock->Init(...) argument list — confirm upstream.
7332 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
7334 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7335 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
7336 allocInfo.allocationSize = blockSize;
7337 VkDeviceMemory mem = VK_NULL_HANDLE;
// Goes through the allocator wrapper so heap-size limits and device-memory
// callbacks are honored.
7338 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
7347 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
7351 allocInfo.allocationSize,
7354 m_Blocks.push_back(pBlock);
// Index of the block just appended.
7355 if(pNewBlockIndex != VMA_NULL)
7357 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector's state as JSON via VmaJsonWriter, under the vector
// mutex. Emits either pool-style fields (MemoryTypeIndex/BlockSize/BlockCount,
// FrameInUseCount) or default-pool fields (PreferredBlockSize), then a "Blocks"
// map keyed by block id with each block's detailed metadata.
// NOTE(review): the branch selecting pool vs. non-pool output and several
// Begin/End JSON calls are missing from this garbled extraction — confirm upstream.
#if VMA_STATS_STRING_ENABLED 7365 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
7367 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool style output.
7373 json.WriteString(
"MemoryTypeIndex");
7374 json.WriteNumber(m_MemoryTypeIndex);
7376 json.WriteString(
"BlockSize");
7377 json.WriteNumber(m_PreferredBlockSize);
7379 json.WriteString(
"BlockCount");
7380 json.BeginObject(
true);
7381 if(m_MinBlockCount > 0)
7383 json.WriteString(
"Min");
7384 json.WriteNumber((uint64_t)m_MinBlockCount);
7386 if(m_MaxBlockCount < SIZE_MAX)
7388 json.WriteString(
"Max");
7389 json.WriteNumber((uint64_t)m_MaxBlockCount);
7391 json.WriteString(
"Cur");
7392 json.WriteNumber((uint64_t)m_Blocks.size());
7395 if(m_FrameInUseCount > 0)
7397 json.WriteString(
"FrameInUseCount");
7398 json.WriteNumber(m_FrameInUseCount);
// Default-pool style output.
7403 json.WriteString(
"PreferredBlockSize");
7404 json.WriteNumber(m_PreferredBlockSize);
7407 json.WriteString(
"Blocks");
7409 for(
size_t i = 0; i < m_Blocks.size(); ++i)
// Block id is used as the JSON key for each block entry.
7412 json.ContinueString(m_Blocks[i]->GetId());
7415 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates (on first use) and returns the defragmentator bound to this block
// vector for the given frame index. NOTE(review): constructor argument list is
// truncated in this extraction — presumably (hAllocator, this, currentFrameIndex);
// confirm upstream.
7422 #endif // #if VMA_STATS_STRING_ENABLED 7424 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
7426 uint32_t currentFrameIndex)
7428 if(m_pDefragmentator == VMA_NULL)
7430 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
7436 return m_pDefragmentator;
// Runs this vector's defragmentator under the vector mutex, accumulates moved
// byte/allocation counts into pDefragmentationStats (and decrements the caller's
// remaining budgets maxBytesToMove / maxAllocationsToMove — both passed by
// reference), then frees all blocks that became empty, keeping m_MinBlockCount
// blocks alive and recording bytesFreed. m_HasEmptyBlock is recomputed during the
// sweep. NOTE(review): extraction dropped the budget-decrement statements and the
// stats accumulation lines between the asserts and the sweep — confirm upstream.
7439 VkResult VmaBlockVector::Defragment(
7441 VkDeviceSize& maxBytesToMove,
7442 uint32_t& maxAllocationsToMove)
// No defragmentator was ever created for this vector: nothing to do.
7444 if(m_pDefragmentator == VMA_NULL)
7449 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7452 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
7455 if(pDefragmentationStats != VMA_NULL)
7457 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
7458 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must never exceed the budgets it was given.
7461 VMA_ASSERT(bytesMoved <= maxBytesToMove);
7462 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Sweep: destroy blocks emptied by the moves (down to m_MinBlockCount).
7468 m_HasEmptyBlock =
false;
7469 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
7471 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
7472 if(pBlock->m_Metadata.IsEmpty())
7474 if(m_Blocks.size() > m_MinBlockCount)
7476 if(pDefragmentationStats != VMA_NULL)
7479 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
7482 VmaVectorRemove(m_Blocks, blockIndex);
7483 pBlock->Destroy(m_hAllocator);
7484 vma_delete(m_hAllocator, pBlock);
// Can't delete (at minimum block count): remember an empty block remains.
7488 m_HasEmptyBlock =
true;
// Destroys the lazily-created defragmentator, if any, and resets the pointer so a
// later EnsureDefragmentator() can recreate it.
7496 void VmaBlockVector::DestroyDefragmentator()
7498 if(m_pDefragmentator != VMA_NULL)
7500 vma_delete(m_hAllocator, m_pDefragmentator);
7501 m_pDefragmentator = VMA_NULL;
// Marks as "lost" every allocation in this vector that can become lost and has not
// been used within the last m_FrameInUseCount frames (relative to
// currentFrameIndex). Optionally reports the number of allocations lost.
7505 void VmaBlockVector::MakePoolAllocationsLost(
7506 uint32_t currentFrameIndex,
7507 size_t* pLostAllocationCount)
7509 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7510 size_t lostAllocationCount = 0;
7511 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7513 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7515 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
7517 if(pLostAllocationCount != VMA_NULL)
7519 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection magic values in every block of this vector.
// Returns VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is disabled;
// otherwise propagates the first per-block failure (the early-return on res is
// truncated in this extraction — confirm upstream) or VK_SUCCESS.
7523 VkResult VmaBlockVector::CheckCorruption()
7525 if(!IsCorruptionDetectionEnabled())
7527 return VK_ERROR_FEATURE_NOT_PRESENT;
7530 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7531 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7533 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7535 VkResult res = pBlock->CheckCorruption(m_hAllocator);
7536 if(res != VK_SUCCESS)
// Accumulates per-block statistics of this vector into pStats: each block's stat
// info is folded into the global total, the owning memory type's entry, and the
// owning memory heap's entry.
7544 void VmaBlockVector::AddStats(
VmaStats* pStats)
7546 const uint32_t memTypeIndex = m_MemoryTypeIndex;
7547 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
7549 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7551 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7553 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7555 VMA_HEAVY_ASSERT(pBlock->Validate());
// allocationStatInfo is declared just above this line in the full source.
7557 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
7558 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7559 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7560 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to its allocator, target block vector and
// current frame index; move counters start at zero and both working containers use
// the allocator's custom allocation callbacks.
7567 VmaDefragmentator::VmaDefragmentator(
7569 VmaBlockVector* pBlockVector,
7570 uint32_t currentFrameIndex) :
7571 m_hAllocator(hAllocator),
7572 m_pBlockVector(pBlockVector),
7573 m_CurrentFrameIndex(currentFrameIndex),
7575 m_AllocationsMoved(0),
7576 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
7577 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: releases the per-block BlockInfo objects created during Defragment().
7581 VmaDefragmentator::~VmaDefragmentator()
7583 for(
size_t i = m_Blocks.size(); i--; )
7585 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers an allocation as a defragmentation candidate. pChanged (may be null)
// is later set to VK_TRUE if the allocation is actually moved.
7589 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
7591 AllocationInfo allocInfo;
7592 allocInfo.m_hAllocation = hAlloc;
7593 allocInfo.m_pChanged = pChanged;
7594 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to this block's memory, mapping it only if necessary:
// 1) reuse a mapping already made for defragmentation, 2) reuse an existing
// persistent mapping on the block, 3) otherwise map once and remember the pointer
// so Unmap() can undo it. Returns the result of Map() in the last case.
7597 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
// Case 1: already mapped by a previous EnsureMapping call.
7600 if(m_pMappedDataForDefragmentation)
7602 *ppMappedData = m_pMappedDataForDefragmentation;
// Case 2: block is persistently mapped by its owner; borrow that pointer.
7607 if(m_pBlock->GetMappedData())
7609 *ppMappedData = m_pBlock->GetMappedData();
// Case 3: map now; remembered so this BlockInfo unmaps it later.
7614 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
7615 *ppMappedData = m_pMappedDataForDefragmentation;
// Undoes the mapping made by EnsureMapping case 3; a borrowed persistent mapping
// (m_pMappedDataForDefragmentation == null) is left untouched.
7619 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
7621 if(m_pMappedDataForDefragmentation != VMA_NULL)
7623 m_pBlock->Unmap(hAllocator, 1);
// One round of defragmentation: repeatedly takes the current source allocation
// (walking blocks from the back, allocations from the back within each block) and
// tries to re-place it in an earlier/lower-offset block via CreateAllocationRequest
// + MoveMakesSense. On success it memcpy's the data between mapped blocks, updates
// both blocks' metadata and the allocation's block/offset, bumps move counters,
// and stops with VK_INCOMPLETE once either budget (maxBytesToMove /
// maxAllocationsToMove) would be exceeded.
// NOTE(review): this extraction drops the enclosing while-loop header, several
// braces, `continue`/`break` statements and the final return — treat the flow
// description above as reconstructed intent; confirm upstream.
7627 VkResult VmaDefragmentator::DefragmentRound(
7628 VkDeviceSize maxBytesToMove,
7629 uint32_t maxAllocationsToMove)
7631 if(m_Blocks.empty())
// Source cursor: last block, last allocation (SIZE_MAX forces initial wrap).
7636 size_t srcBlockIndex = m_Blocks.size() - 1;
7637 size_t srcAllocIndex = SIZE_MAX;
// Advance the source cursor to the next existing allocation, moving to earlier
// blocks when the current one is exhausted.
7643 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
7645 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// Reached the first block with nothing left to move: round finished.
7648 if(srcBlockIndex == 0)
7655 srcAllocIndex = SIZE_MAX;
7660 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
7664 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
7665 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
7667 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
7668 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
7669 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
7670 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source block.
7673 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
7675 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
7676 VmaAllocationRequest dstAllocRequest;
7677 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
7678 m_CurrentFrameIndex,
7679 m_pBlockVector->GetFrameInUseCount(),
7680 m_pBlockVector->GetBufferImageGranularity(),
7685 &dstAllocRequest) &&
7687 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
// Defragmentation never sacrifices (makes lost) other allocations.
7689 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check before committing the move.
7692 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
7693 (m_BytesMoved + size > maxBytesToMove))
7695 return VK_INCOMPLETE;
7698 void* pDstMappedData = VMA_NULL;
7699 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
7700 if(res != VK_SUCCESS)
7705 void* pSrcMappedData = VMA_NULL;
7706 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
7707 if(res != VK_SUCCESS)
// Copy the allocation's bytes to its new home (memcpy call header is
// truncated by the extraction).
7714 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
7715 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
7716 static_cast<size_t>(size));
// Re-establish corruption-detection guards around the new location.
7718 if(VMA_DEBUG_MARGIN > 0)
7720 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
7721 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
7724 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
7725 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
7727 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
7729 if(allocInfo.m_pChanged != VMA_NULL)
7731 *allocInfo.m_pChanged = VK_TRUE;
7734 ++m_AllocationsMoved;
7735 m_BytesMoved += size;
7737 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// No destination found for this allocation: step the source cursor backwards.
7745 if(srcAllocIndex > 0)
7751 if(srcBlockIndex > 0)
7754 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver:
// 1) build a BlockInfo per block of the target vector and sort by block pointer,
// 2) distribute registered candidate allocations (skipping lost ones) into their
//    owning BlockInfo via binary search,
// 3) precompute per-block flags / sort allocations descending by size, then order
//    blocks by move-destination preference,
// 4) run up to two DefragmentRound passes within the given budgets,
// 5) unmap everything that was mapped for the copies.
// NOTE(review): extraction is missing braces, an early return for the empty case,
// and the final `return result;` — confirm upstream.
7764 VkResult VmaDefragmentator::Defragment(
7765 VkDeviceSize maxBytesToMove,
7766 uint32_t maxAllocationsToMove)
7768 if(m_Allocations.empty())
// Step 1: one BlockInfo wrapper per device-memory block.
7774 const size_t blockCount = m_pBlockVector->m_Blocks.size();
7775 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7777 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7778 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7779 m_Blocks.push_back(pBlockInfo);
// Sorted by raw block pointer so candidates can be matched by binary search.
7783 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Step 2: move each candidate allocation into its owning BlockInfo.
7786 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7788 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Allocations already lost are not moved.
7790 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7792 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7793 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7794 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7796 (*it)->m_Allocations.push_back(allocInfo);
7804 m_Allocations.clear();
// Step 3: per-block preprocessing and destination-preference ordering.
7806 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7808 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7809 pBlockInfo->CalcHasNonMovableAllocations();
7810 pBlockInfo->SortAllocationsBySizeDescecnding();
7814 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Step 4: at most two rounds, stopping early on VK_INCOMPLETE or error.
7817 VkResult result = VK_SUCCESS;
7818 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7820 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Step 5: release any mappings created for the copies.
7824 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7826 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic deciding whether moving an allocation from (srcBlockIndex, srcOffset)
// to (dstBlockIndex, dstOffset) improves packing: moves toward an earlier block are
// good, toward a later block bad, and within the same block only moves to a lower
// offset help. NOTE(review): the `return true/false` statements are stripped by the
// extraction — confirm upstream.
7832 bool VmaDefragmentator::MoveMakesSense(
7833 size_t dstBlockIndex, VkDeviceSize dstOffset,
7834 size_t srcBlockIndex, VkDeviceSize srcOffset)
7836 if(dstBlockIndex < srcBlockIndex)
7840 if(dstBlockIndex > srcBlockIndex)
7844 if(dstOffset < srcOffset)
7857 m_hDevice(pCreateInfo->device),
7858 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
7859 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7860 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7861 m_PreferredLargeHeapBlockSize(0),
7862 m_PhysicalDevice(pCreateInfo->physicalDevice),
7863 m_CurrentFrameIndex(0),
7864 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
7867 if(VMA_DEBUG_DETECT_CORRUPTION)
7870 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
7875 #if !(VMA_DEDICATED_ALLOCATION) 7878 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
7882 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7883 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7884 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7886 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7887 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
7889 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7891 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
7902 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7903 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
7910 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7912 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7913 if(limit != VK_WHOLE_SIZE)
7915 m_HeapSizeLimit[heapIndex] = limit;
7916 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7918 m_MemProps.memoryHeaps[heapIndex].size = limit;
7924 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7926 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7928 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7934 GetBufferImageGranularity(),
7939 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Allocator destructor: all user pools must already be destroyed; tears down the
// per-memory-type dedicated-allocation lists and default block vectors in reverse
// order of creation.
7944 VmaAllocator_T::~VmaAllocator_T()
7946 VMA_ASSERT(m_Pools.empty());
7948 for(
size_t i = GetMemoryTypeCount(); i--; )
7950 vma_delete(
this, m_pDedicatedAllocations[i]);
7951 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions in three layers:
// 1) when statically linking Vulkan (VMA_STATIC_VULKAN_FUNCTIONS == 1), take the
//    global prototypes, and fetch the KHR dedicated-allocation entry points via
//    vkGetDeviceProcAddr when that extension is enabled;
// 2) overlay any non-null pointers the user supplied in pVulkanFunctions;
// 3) assert that every required pointer ended up non-null (the two KHR pointers
//    only when m_UseKhrDedicatedAllocation is set).
7955 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
// Layer 1: static Vulkan prototypes.
7957 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7958 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7959 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7960 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7961 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7962 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7963 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7964 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
7965 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
7966 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7967 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7968 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7969 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7970 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7971 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7972 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7973 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// Extension entry points are not exported statically; resolve them at runtime.
7974 #if VMA_DEDICATED_ALLOCATION 7975 if(m_UseKhrDedicatedAllocation)
7977 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7978 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7979 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7980 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Layer 2: user-provided overrides win over the static defaults.
7982 #endif // #if VMA_DEDICATED_ALLOCATION 7983 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7985 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7986 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7988 if(pVulkanFunctions != VMA_NULL)
7990 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7991 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7992 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7993 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7994 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7995 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7996 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
7997 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
7998 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7999 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
8000 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
8001 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
8002 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
8003 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
8004 VMA_COPY_IF_NOT_NULL(vkCreateImage);
8005 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
8006 #if VMA_DEDICATED_ALLOCATION 8007 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
8008 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Layer 3: after static import + overrides, everything required must be set.
8012 #undef VMA_COPY_IF_NOT_NULL 8016 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
8017 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
8018 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
8019 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
8020 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
8021 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
8022 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
8023 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
8024 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
8025 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
8026 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
8027 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
8028 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
8029 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
8030 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
8031 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
8032 #if VMA_DEDICATED_ALLOCATION 8033 if(m_UseKhrDedicatedAllocation)
8035 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
8036 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// Chooses the default block size for a memory type: heaps at or below
// VMA_SMALL_HEAP_MAX_SIZE get 1/8 of the heap so a handful of blocks still fit;
// larger heaps use the configured m_PreferredLargeHeapBlockSize.
8041 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
8043 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
8044 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
8045 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
8046 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates from a specific memory type. Decides between a dedicated VkDeviceMemory
// and block-vector suballocation: dedicated is preferred when forced by debug
// macro, requested/required by the caller, or when the size exceeds half the
// preferred block size. Falls back to dedicated allocation if the block vector
// fails and the flags allow it.
// NOTE(review): multiple lines (flag checks, argument lists, returns) are dropped
// by the extraction; the flow description is reconstructed intent — confirm
// upstream.
8049 VkResult VmaAllocator_T::AllocateMemoryOfType(
8051 VkDeviceSize alignment,
8052 bool dedicatedAllocation,
8053 VkBuffer dedicatedBuffer,
8054 VkImage dedicatedImage,
8056 uint32_t memTypeIndex,
8057 VmaSuballocationType suballocType,
8060 VMA_ASSERT(pAllocation != VMA_NULL);
8061 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapped-bit handling depends on the memory type being HOST_VISIBLE.
8067 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
8072 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
8073 VMA_ASSERT(blockVector);
8075 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Dedicated-memory preference heuristic.
8076 bool preferDedicatedMemory =
8077 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
8078 dedicatedAllocation ||
// Large request relative to block size: suballocating would waste space.
8080 size > preferredBlockSize / 2;
// Dedicated path is only valid for the default pools (pool == VK_NULL_HANDLE).
8082 if(preferDedicatedMemory &&
8084 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids creating new device memory.
8093 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
8097 return AllocateDedicatedMemory(
// Primary path: suballocate from the memory type's block vector.
8111 VkResult res = blockVector->Allocate(
8113 m_CurrentFrameIndex.load(),
8119 if(res == VK_SUCCESS)
// Block vector failed and new allocations are forbidden: give up.
8127 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: try a dedicated allocation.
8131 res = AllocateDedicatedMemory(
8137 finalCreateInfo.pUserData,
8141 if(res == VK_SUCCESS)
8144 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
8150 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates a dedicated (non-suballocated) VkDeviceMemory for a single allocation:
// optionally chains VkMemoryDedicatedAllocateInfoKHR for the given buffer/image,
// allocates, optionally maps (persistent-map path truncated by extraction), builds
// the VmaAllocation_T object, and registers it in the per-memory-type sorted
// dedicated-allocation list under its mutex. On map failure the device memory is
// released before returning.
8157 VkResult VmaAllocator_T::AllocateDedicatedMemory(
8159 VmaSuballocationType suballocType,
8160 uint32_t memTypeIndex,
8162 bool isUserDataString,
8164 VkBuffer dedicatedBuffer,
8165 VkImage dedicatedImage,
8168 VMA_ASSERT(pAllocation);
8170 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
8171 allocInfo.memoryTypeIndex = memTypeIndex;
8172 allocInfo.allocationSize = size;
// Chain the KHR dedicated-allocation info so the driver can optimize placement.
8174 #if VMA_DEDICATED_ALLOCATION 8175 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
8176 if(m_UseKhrDedicatedAllocation)
8178 if(dedicatedBuffer != VK_NULL_HANDLE)
// Exactly one of buffer/image may be dedicated.
8180 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
8181 dedicatedAllocInfo.buffer = dedicatedBuffer;
8182 allocInfo.pNext = &dedicatedAllocInfo;
8184 else if(dedicatedImage != VK_NULL_HANDLE)
8186 dedicatedAllocInfo.image = dedicatedImage;
8187 allocInfo.pNext = &dedicatedAllocInfo;
8190 #endif // #if VMA_DEDICATED_ALLOCATION 8193 VkDeviceMemory hMemory = VK_NULL_HANDLE;
8194 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
8197 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistent mapping path (condition line truncated by extraction).
8201 void* pMappedData = VMA_NULL;
8204 res = (*m_VulkanFunctions.vkMapMemory)(
8213 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Map failed: undo the device-memory allocation before returning.
8214 FreeVulkanMemory(memTypeIndex, size, hMemory);
8219 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
8220 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
8221 (*pAllocation)->SetUserData(
this, pUserData);
// Register in the sorted per-type dedicated list (enables later lookup/removal).
8225 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8226 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8227 VMA_ASSERT(pDedicatedAllocations);
8228 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
8231 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled, uses vkGetBufferMemoryRequirements2KHR chaining
// VkMemoryDedicatedRequirementsKHR to also report whether the driver requires or
// prefers a dedicated allocation; otherwise falls back to the core query and
// reports false for both flags.
8236 void VmaAllocator_T::GetBufferMemoryRequirements(
8238 VkMemoryRequirements& memReq,
8239 bool& requiresDedicatedAllocation,
8240 bool& prefersDedicatedAllocation)
const 8242 #if VMA_DEDICATED_ALLOCATION 8243 if(m_UseKhrDedicatedAllocation)
8245 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
8246 memReqInfo.buffer = hBuffer;
8248 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
8250 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
8251 memReq2.pNext = &memDedicatedReq;
8253 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
8255 memReq = memReq2.memoryRequirements;
8256 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
8257 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query; no dedicated-allocation information available.
8260 #endif // #if VMA_DEDICATED_ALLOCATION 8262 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
8263 requiresDedicatedAllocation =
false;
8264 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR + VkMemoryDedicatedRequirementsKHR when the
// dedicated-allocation extension is in use, otherwise the core query with both
// dedicated flags reported false.
8268 void VmaAllocator_T::GetImageMemoryRequirements(
8270 VkMemoryRequirements& memReq,
8271 bool& requiresDedicatedAllocation,
8272 bool& prefersDedicatedAllocation)
const 8274 #if VMA_DEDICATED_ALLOCATION 8275 if(m_UseKhrDedicatedAllocation)
8277 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
8278 memReqInfo.image = hImage;
8280 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
8282 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
8283 memReq2.pNext = &memDedicatedReq;
8285 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
8287 memReq = memReq2.memoryRequirements;
8288 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
8289 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query.
8292 #endif // #if VMA_DEDICATED_ALLOCATION 8294 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
8295 requiresDedicatedAllocation =
false;
8296 prefersDedicatedAllocation =
false;
// Central allocation entry point. Validates mutually-exclusive create flags,
// routes pool allocations to the pool's block vector, and for default-pool
// allocations iterates memory types by preference (vmaFindMemoryTypeIndex-style
// search, lines truncated by extraction): try the best type, and on failure mask
// it out of memoryTypeBits and retry with the next candidate until none remain.
// NOTE(review): several flag-test lines, the memory-type search calls, and loop
// braces are missing from this extraction — confirm upstream.
8300 VkResult VmaAllocator_T::AllocateMemory(
8301 const VkMemoryRequirements& vkMemReq,
8302 bool requiresDedicatedAllocation,
8303 bool prefersDedicatedAllocation,
8304 VkBuffer dedicatedBuffer,
8305 VkImage dedicatedImage,
8307 VmaSuballocationType suballocType,
// Invalid flag combination: DEDICATED_MEMORY with NEVER_ALLOCATE.
8313 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
8314 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Invalid flag combination: MAPPED with CAN_BECOME_LOST.
8319 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
8320 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Driver-required dedicated allocation is incompatible with NEVER_ALLOCATE and
// with custom pools.
8322 if(requiresDedicatedAllocation)
8326 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
8327 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
8329 if(createInfo.
pool != VK_NULL_HANDLE)
8331 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
8332 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
8335 if((createInfo.
pool != VK_NULL_HANDLE) &&
8338 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
8339 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool path: delegate directly to the pool's block vector, respecting the pool
// memory type's minimum alignment requirement.
8342 if(createInfo.
pool != VK_NULL_HANDLE)
8344 const VkDeviceSize alignmentForPool = VMA_MAX(
8346 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
8347 return createInfo.
pool->m_BlockVector.Allocate(
8349 m_CurrentFrameIndex.load(),
// Default-pool path: search candidate memory types in preference order.
8359 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
8360 uint32_t memTypeIndex = UINT32_MAX;
8362 if(res == VK_SUCCESS)
8364 VkDeviceSize alignmentForMemType = VMA_MAX(
8366 GetMemoryTypeMinAlignment(memTypeIndex));
8368 res = AllocateMemoryOfType(
8370 alignmentForMemType,
8371 requiresDedicatedAllocation || prefersDedicatedAllocation,
8379 if(res == VK_SUCCESS)
// This memory type failed: exclude it and retry with the next-best type.
8389 memoryTypeBits &= ~(1u << memTypeIndex);
8392 if(res == VK_SUCCESS)
8394 alignmentForMemType = VMA_MAX(
8396 GetMemoryTypeMinAlignment(memTypeIndex));
8398 res = AllocateMemoryOfType(
8400 alignmentForMemType,
8401 requiresDedicatedAllocation || prefersDedicatedAllocation,
8409 if(res == VK_SUCCESS)
// All candidate memory types exhausted.
8419 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation created by AllocateMemory. Lost allocations need no memory
// release (their backing was already reclaimed); otherwise dispatch on allocation
// type: block allocations go back to their owning block vector (pool's, or the
// default per-memory-type vector), dedicated ones through FreeDedicatedMemory.
// Finally the user data is cleared and the VmaAllocation_T object itself deleted.
8430 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
8432 VMA_ASSERT(allocation);
// Skip memory release for allocations already marked lost.
8434 if(allocation->CanBecomeLost() ==
false ||
8435 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
8437 switch(allocation->GetType())
8439 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8441 VmaBlockVector* pBlockVector = VMA_NULL;
8442 VmaPool hPool = allocation->GetPool();
8443 if(hPool != VK_NULL_HANDLE)
8445 pBlockVector = &hPool->m_BlockVector;
8449 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
8450 pBlockVector = m_pBlockVectors[memTypeIndex];
8452 pBlockVector->Free(allocation);
8455 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8456 FreeDedicatedMemory(allocation);
// Clearing user data first lets SetUserData free an owned string copy.
8463 allocation->SetUserData(
this, VMA_NULL);
8464 vma_delete(
this, allocation);
// Builds full allocator statistics into pStats in four phases:
// 1) zero-initialize total / per-type / per-heap stat entries,
// 2) add stats from all default block vectors and (under the pools mutex) from
//    every custom pool's block vector,
// 3) add stats from every dedicated allocation, per memory type under that type's
//    dedicated-allocations mutex,
// 4) post-process (derive averages etc.) for total and each used type/heap entry.
8467 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Phase 1: reset all stat entries.
8470 InitStatInfo(pStats->
total);
8471 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
8473 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Phase 2a: default block vectors.
8477 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8479 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
8480 VMA_ASSERT(pBlockVector);
8481 pBlockVector->AddStats(pStats);
// Phase 2b: custom pools.
8486 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8487 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
8489 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Phase 3: dedicated allocations.
8494 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8496 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
8497 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8498 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8499 VMA_ASSERT(pDedicatedAllocVector);
8500 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
// allocationStatInfo is declared just above this line in the full source.
8503 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
8504 VmaAddStatInfo(pStats->
total, allocationStatInfo);
8505 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
8506 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Phase 4: derive summary values for each populated entry.
8511 VmaPostprocessCalcStatInfo(pStats->
total);
8512 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
8513 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
8514 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
8515 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (0x1002), used to enable vendor-specific behavior.
8518 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-wide defragmentation:
// 1) clear the caller's pAllocationsChanged / pDefragmentationStats outputs,
// 2) under the pools mutex, register each eligible allocation (block-type,
//    HOST_VISIBLE memory, not lost; extra eligibility lines truncated by the
//    extraction) with the defragmentator of its owning block vector,
// 3) run Defragment() on every HOST_VISIBLE default block vector and every pool,
//    sharing the maxBytesToMove/maxAllocationsToMove budgets across all of them,
// 4) destroy all defragmentators.
// NOTE(review): maxBytesToMove is initialized from SIZE_MAX rather than
// VK_WHOLE_SIZE; on 32-bit size_t this caps the VkDeviceSize budget at 2^32-1 —
// flag for review (do not change here; doc-only edit).
8520 VkResult VmaAllocator_T::Defragment(
8522 size_t allocationCount,
8523 VkBool32* pAllocationsChanged,
// Phase 1: zero the output arrays/stats so partial results are well-defined.
8527 if(pAllocationsChanged != VMA_NULL)
8529 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
8531 if(pDefragmentationStats != VMA_NULL)
8533 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
8536 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
8538 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
8540 const size_t poolCount = m_Pools.size();
// Phase 2: dispatch each candidate allocation to its block vector's
// defragmentator.
8543 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
8547 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only movable candidates: block-suballocated, host-visible, not lost.
8549 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
8551 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
8553 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
8555 VmaBlockVector* pAllocBlockVector = VMA_NULL;
8557 const VmaPool hAllocPool = hAlloc->GetPool();
// Pool allocation: use the pool's vector; otherwise the default vector.
8559 if(hAllocPool != VK_NULL_HANDLE)
8561 pAllocBlockVector = &hAllocPool->GetBlockVector();
8566 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
8569 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
8571 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
8572 &pAllocationsChanged[allocIndex] : VMA_NULL;
8573 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Phase 3: run defragmentation with shared budgets.
8577 VkResult result = VK_SUCCESS;
8581 VkDeviceSize maxBytesToMove = SIZE_MAX;
8582 uint32_t maxAllocationsToMove = UINT32_MAX;
8583 if(pDefragmentationInfo != VMA_NULL)
// Default vectors first...
8590 for(uint32_t memTypeIndex = 0;
8591 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
// Non-host-visible memory cannot be defragmented (needs CPU-side memcpy).
8595 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8597 result = m_pBlockVectors[memTypeIndex]->Defragment(
8598 pDefragmentationStats,
8600 maxAllocationsToMove);
// ...then custom pools.
8605 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
8607 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
8608 pDefragmentationStats,
8610 maxAllocationsToMove);
// Phase 4: tear down all defragmentators (pools and default vectors).
8616 for(
size_t poolIndex = poolCount; poolIndex--; )
8618 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
8622 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
8624 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8626 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
8635 if(hAllocation->CanBecomeLost())
8641 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8642 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8645 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8649 pAllocationInfo->
offset = 0;
8650 pAllocationInfo->
size = hAllocation->GetSize();
8652 pAllocationInfo->
pUserData = hAllocation->GetUserData();
8655 else if(localLastUseFrameIndex == localCurrFrameIndex)
8657 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8658 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8659 pAllocationInfo->
offset = hAllocation->GetOffset();
8660 pAllocationInfo->
size = hAllocation->GetSize();
8662 pAllocationInfo->
pUserData = hAllocation->GetUserData();
8667 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8669 localLastUseFrameIndex = localCurrFrameIndex;
8676 #if VMA_STATS_STRING_ENABLED 8677 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8678 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8681 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
8682 if(localLastUseFrameIndex == localCurrFrameIndex)
8688 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8690 localLastUseFrameIndex = localCurrFrameIndex;
8696 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8697 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8698 pAllocationInfo->
offset = hAllocation->GetOffset();
8699 pAllocationInfo->
size = hAllocation->GetSize();
8700 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
8701 pAllocationInfo->
pUserData = hAllocation->GetUserData();
8705 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
8708 if(hAllocation->CanBecomeLost())
8710 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8711 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8714 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8718 else if(localLastUseFrameIndex == localCurrFrameIndex)
8724 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8726 localLastUseFrameIndex = localCurrFrameIndex;
8733 #if VMA_STATS_STRING_ENABLED 8734 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8735 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8738 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
8739 if(localLastUseFrameIndex == localCurrFrameIndex)
8745 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8747 localLastUseFrameIndex = localCurrFrameIndex;
8759 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
8772 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
8774 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
8775 if(res != VK_SUCCESS)
8777 vma_delete(
this, *pPool);
8784 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8785 (*pPool)->SetId(m_NextPoolId++);
8786 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
8792 void VmaAllocator_T::DestroyPool(
VmaPool pool)
8796 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8797 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
8798 VMA_ASSERT(success &&
"Pool not found in Allocator.");
8801 vma_delete(
this, pool);
8806 pool->m_BlockVector.GetPoolStats(pPoolStats);
8809 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
8811 m_CurrentFrameIndex.store(frameIndex);
8814 void VmaAllocator_T::MakePoolAllocationsLost(
8816 size_t* pLostAllocationCount)
8818 hPool->m_BlockVector.MakePoolAllocationsLost(
8819 m_CurrentFrameIndex.load(),
8820 pLostAllocationCount);
8823 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
8825 return hPool->m_BlockVector.CheckCorruption();
8828 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
8830 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
8833 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8835 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
8837 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
8838 VMA_ASSERT(pBlockVector);
8839 VkResult localRes = pBlockVector->CheckCorruption();
8842 case VK_ERROR_FEATURE_NOT_PRESENT:
8845 finalRes = VK_SUCCESS;
8855 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8856 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
8858 if(((1u << m_Pools[poolIndex]->GetBlockVector().GetMemoryTypeIndex()) & memoryTypeBits) != 0)
8860 VkResult localRes = m_Pools[poolIndex]->GetBlockVector().CheckCorruption();
8863 case VK_ERROR_FEATURE_NOT_PRESENT:
8866 finalRes = VK_SUCCESS;
8878 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
8880 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
8881 (*pAllocation)->InitLost();
8884 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
8886 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
8889 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8891 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8892 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
8894 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8895 if(res == VK_SUCCESS)
8897 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
8902 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
8907 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8910 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
8912 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
8918 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
8920 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
8922 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
8925 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
8927 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
8928 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8930 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8931 m_HeapSizeLimit[heapIndex] += size;
8935 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
8937 if(hAllocation->CanBecomeLost())
8939 return VK_ERROR_MEMORY_MAP_FAILED;
8942 switch(hAllocation->GetType())
8944 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8946 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8947 char *pBytes = VMA_NULL;
8948 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
8949 if(res == VK_SUCCESS)
8951 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
8952 hAllocation->BlockAllocMap();
8956 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8957 return hAllocation->DedicatedAllocMap(
this, ppData);
8960 return VK_ERROR_MEMORY_MAP_FAILED;
8966 switch(hAllocation->GetType())
8968 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8970 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8971 hAllocation->BlockAllocUnmap();
8972 pBlock->Unmap(
this, 1);
8975 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8976 hAllocation->DedicatedAllocUnmap(
this);
8983 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
8985 VkResult res = VK_SUCCESS;
8986 switch(hAllocation->GetType())
8988 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8989 res = GetVulkanFunctions().vkBindBufferMemory(
8992 hAllocation->GetMemory(),
8995 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8997 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
8998 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
8999 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
9008 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
9010 VkResult res = VK_SUCCESS;
9011 switch(hAllocation->GetType())
9013 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
9014 res = GetVulkanFunctions().vkBindImageMemory(
9017 hAllocation->GetMemory(),
9020 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
9022 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
9023 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
9024 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
9033 void VmaAllocator_T::FlushOrInvalidateAllocation(
9035 VkDeviceSize offset, VkDeviceSize size,
9036 VMA_CACHE_OPERATION op)
9038 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
9039 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
9041 const VkDeviceSize allocationSize = hAllocation->GetSize();
9042 VMA_ASSERT(offset <= allocationSize);
9044 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
9046 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
9047 memRange.memory = hAllocation->GetMemory();
9049 switch(hAllocation->GetType())
9051 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
9052 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
9053 if(size == VK_WHOLE_SIZE)
9055 memRange.size = allocationSize - memRange.offset;
9059 VMA_ASSERT(offset + size <= allocationSize);
9060 memRange.size = VMA_MIN(
9061 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
9062 allocationSize - memRange.offset);
9066 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
9069 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
9070 if(size == VK_WHOLE_SIZE)
9072 size = allocationSize - offset;
9076 VMA_ASSERT(offset + size <= allocationSize);
9078 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
9081 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
9082 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
9083 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_Metadata.GetSize();
9084 memRange.offset += allocationOffset;
9085 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
9096 case VMA_CACHE_FLUSH:
9097 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
9099 case VMA_CACHE_INVALIDATE:
9100 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
9109 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
9111 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
9113 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
9115 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
9116 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
9117 VMA_ASSERT(pDedicatedAllocations);
9118 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
9119 VMA_ASSERT(success);
9122 VkDeviceMemory hMemory = allocation->GetMemory();
9124 if(allocation->GetMappedData() != VMA_NULL)
9126 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
9129 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
9131 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
9134 #if VMA_STATS_STRING_ENABLED 9136 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
9138 bool dedicatedAllocationsStarted =
false;
9139 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
9141 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
9142 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
9143 VMA_ASSERT(pDedicatedAllocVector);
9144 if(pDedicatedAllocVector->empty() ==
false)
9146 if(dedicatedAllocationsStarted ==
false)
9148 dedicatedAllocationsStarted =
true;
9149 json.WriteString(
"DedicatedAllocations");
9153 json.BeginString(
"Type ");
9154 json.ContinueString(memTypeIndex);
9159 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
9161 json.BeginObject(
true);
9163 hAlloc->PrintParameters(json);
9170 if(dedicatedAllocationsStarted)
9176 bool allocationsStarted =
false;
9177 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
9179 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
9181 if(allocationsStarted ==
false)
9183 allocationsStarted =
true;
9184 json.WriteString(
"DefaultPools");
9188 json.BeginString(
"Type ");
9189 json.ContinueString(memTypeIndex);
9192 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
9195 if(allocationsStarted)
9202 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
9203 const size_t poolCount = m_Pools.size();
9206 json.WriteString(
"Pools");
9208 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
9211 json.ContinueString(m_Pools[poolIndex]->GetId());
9214 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
9221 #endif // #if VMA_STATS_STRING_ENABLED 9223 static VkResult AllocateMemoryForImage(
9227 VmaSuballocationType suballocType,
9230 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
9232 VkMemoryRequirements vkMemReq = {};
9233 bool requiresDedicatedAllocation =
false;
9234 bool prefersDedicatedAllocation =
false;
9235 allocator->GetImageMemoryRequirements(image, vkMemReq,
9236 requiresDedicatedAllocation, prefersDedicatedAllocation);
9238 return allocator->AllocateMemory(
9240 requiresDedicatedAllocation,
9241 prefersDedicatedAllocation,
9244 *pAllocationCreateInfo,
9256 VMA_ASSERT(pCreateInfo && pAllocator);
9257 VMA_DEBUG_LOG(
"vmaCreateAllocator");
9265 if(allocator != VK_NULL_HANDLE)
9267 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
9268 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
9269 vma_delete(&allocationCallbacks, allocator);
9275 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
9277 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
9278 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
9283 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
9285 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
9286 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
9291 uint32_t memoryTypeIndex,
9292 VkMemoryPropertyFlags* pFlags)
9294 VMA_ASSERT(allocator && pFlags);
9295 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
9296 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
9301 uint32_t frameIndex)
9303 VMA_ASSERT(allocator);
9304 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
9306 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9308 allocator->SetCurrentFrameIndex(frameIndex);
9315 VMA_ASSERT(allocator && pStats);
9316 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9317 allocator->CalculateStats(pStats);
9320 #if VMA_STATS_STRING_ENABLED 9324 char** ppStatsString,
9325 VkBool32 detailedMap)
9327 VMA_ASSERT(allocator && ppStatsString);
9328 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9330 VmaStringBuilder sb(allocator);
9332 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
9336 allocator->CalculateStats(&stats);
9338 json.WriteString(
"Total");
9339 VmaPrintStatInfo(json, stats.
total);
9341 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
9343 json.BeginString(
"Heap ");
9344 json.ContinueString(heapIndex);
9348 json.WriteString(
"Size");
9349 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
9351 json.WriteString(
"Flags");
9352 json.BeginArray(
true);
9353 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
9355 json.WriteString(
"DEVICE_LOCAL");
9361 json.WriteString(
"Stats");
9362 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
9365 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
9367 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
9369 json.BeginString(
"Type ");
9370 json.ContinueString(typeIndex);
9375 json.WriteString(
"Flags");
9376 json.BeginArray(
true);
9377 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
9378 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
9380 json.WriteString(
"DEVICE_LOCAL");
9382 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
9384 json.WriteString(
"HOST_VISIBLE");
9386 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
9388 json.WriteString(
"HOST_COHERENT");
9390 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
9392 json.WriteString(
"HOST_CACHED");
9394 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
9396 json.WriteString(
"LAZILY_ALLOCATED");
9402 json.WriteString(
"Stats");
9403 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
9412 if(detailedMap == VK_TRUE)
9414 allocator->PrintDetailedMap(json);
9420 const size_t len = sb.GetLength();
9421 char*
const pChars = vma_new_array(allocator,
char, len + 1);
9424 memcpy(pChars, sb.GetData(), len);
9427 *ppStatsString = pChars;
9434 if(pStatsString != VMA_NULL)
9436 VMA_ASSERT(allocator);
9437 size_t len = strlen(pStatsString);
9438 vma_delete_array(allocator, pStatsString, len + 1);
9442 #endif // #if VMA_STATS_STRING_ENABLED 9449 uint32_t memoryTypeBits,
9451 uint32_t* pMemoryTypeIndex)
9453 VMA_ASSERT(allocator != VK_NULL_HANDLE);
9454 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
9455 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
9462 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
9468 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
9472 switch(pAllocationCreateInfo->
usage)
9477 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
9479 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
9483 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
9486 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
9487 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
9489 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
9493 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
9494 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
9500 *pMemoryTypeIndex = UINT32_MAX;
9501 uint32_t minCost = UINT32_MAX;
9502 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
9503 memTypeIndex < allocator->GetMemoryTypeCount();
9504 ++memTypeIndex, memTypeBit <<= 1)
9507 if((memTypeBit & memoryTypeBits) != 0)
9509 const VkMemoryPropertyFlags currFlags =
9510 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
9512 if((requiredFlags & ~currFlags) == 0)
9515 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
9517 if(currCost < minCost)
9519 *pMemoryTypeIndex = memTypeIndex;
9529 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
9534 const VkBufferCreateInfo* pBufferCreateInfo,
9536 uint32_t* pMemoryTypeIndex)
9538 VMA_ASSERT(allocator != VK_NULL_HANDLE);
9539 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
9540 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
9541 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
9543 const VkDevice hDev = allocator->m_hDevice;
9544 VkBuffer hBuffer = VK_NULL_HANDLE;
9545 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
9546 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
9547 if(res == VK_SUCCESS)
9549 VkMemoryRequirements memReq = {};
9550 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
9551 hDev, hBuffer, &memReq);
9555 memReq.memoryTypeBits,
9556 pAllocationCreateInfo,
9559 allocator->GetVulkanFunctions().vkDestroyBuffer(
9560 hDev, hBuffer, allocator->GetAllocationCallbacks());
9567 const VkImageCreateInfo* pImageCreateInfo,
9569 uint32_t* pMemoryTypeIndex)
9571 VMA_ASSERT(allocator != VK_NULL_HANDLE);
9572 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
9573 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
9574 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
9576 const VkDevice hDev = allocator->m_hDevice;
9577 VkImage hImage = VK_NULL_HANDLE;
9578 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
9579 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
9580 if(res == VK_SUCCESS)
9582 VkMemoryRequirements memReq = {};
9583 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
9584 hDev, hImage, &memReq);
9588 memReq.memoryTypeBits,
9589 pAllocationCreateInfo,
9592 allocator->GetVulkanFunctions().vkDestroyImage(
9593 hDev, hImage, allocator->GetAllocationCallbacks());
9603 VMA_ASSERT(allocator && pCreateInfo && pPool);
9605 VMA_DEBUG_LOG(
"vmaCreatePool");
9607 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9609 return allocator->CreatePool(pCreateInfo, pPool);
9616 VMA_ASSERT(allocator);
9618 if(pool == VK_NULL_HANDLE)
9623 VMA_DEBUG_LOG(
"vmaDestroyPool");
9625 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9627 allocator->DestroyPool(pool);
9635 VMA_ASSERT(allocator && pool && pPoolStats);
9637 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9639 allocator->GetPoolStats(pool, pPoolStats);
9645 size_t* pLostAllocationCount)
9647 VMA_ASSERT(allocator && pool);
9649 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9651 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
9656 VMA_ASSERT(allocator && pool);
9658 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9660 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
9662 return allocator->CheckPoolCorruption(pool);
9667 const VkMemoryRequirements* pVkMemoryRequirements,
9672 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
9674 VMA_DEBUG_LOG(
"vmaAllocateMemory");
9676 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9678 VkResult result = allocator->AllocateMemory(
9679 *pVkMemoryRequirements,
9685 VMA_SUBALLOCATION_TYPE_UNKNOWN,
9688 if(pAllocationInfo && result == VK_SUCCESS)
9690 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9703 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9705 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
9707 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9709 VkMemoryRequirements vkMemReq = {};
9710 bool requiresDedicatedAllocation =
false;
9711 bool prefersDedicatedAllocation =
false;
9712 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
9713 requiresDedicatedAllocation,
9714 prefersDedicatedAllocation);
9716 VkResult result = allocator->AllocateMemory(
9718 requiresDedicatedAllocation,
9719 prefersDedicatedAllocation,
9723 VMA_SUBALLOCATION_TYPE_BUFFER,
9726 if(pAllocationInfo && result == VK_SUCCESS)
9728 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9741 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9743 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
9745 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9747 VkResult result = AllocateMemoryForImage(
9751 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
9754 if(pAllocationInfo && result == VK_SUCCESS)
9756 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9766 VMA_ASSERT(allocator);
9767 VMA_DEBUG_LOG(
"vmaFreeMemory");
9768 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9769 if(allocation != VK_NULL_HANDLE)
9771 allocator->FreeMemory(allocation);
9780 VMA_ASSERT(allocator && allocation && pAllocationInfo);
9782 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9784 allocator->GetAllocationInfo(allocation, pAllocationInfo);
9791 VMA_ASSERT(allocator && allocation);
9793 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9795 return allocator->TouchAllocation(allocation);
9803 VMA_ASSERT(allocator && allocation);
9805 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9807 allocation->SetUserData(allocator, pUserData);
9814 VMA_ASSERT(allocator && pAllocation);
9816 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
9818 allocator->CreateLostAllocation(pAllocation);
9826 VMA_ASSERT(allocator && allocation && ppData);
9828 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9830 return allocator->Map(allocation, ppData);
9837 VMA_ASSERT(allocator && allocation);
9839 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9841 allocator->Unmap(allocation);
9846 VMA_ASSERT(allocator && allocation);
9848 VMA_DEBUG_LOG(
"vmaFlushAllocation");
9850 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9852 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
9857 VMA_ASSERT(allocator && allocation);
9859 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
9861 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9863 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
9868 VMA_ASSERT(allocator);
9870 VMA_DEBUG_LOG(
"vmaCheckCorruption");
9872 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9874 return allocator->CheckCorruption(memoryTypeBits);
9880 size_t allocationCount,
9881 VkBool32* pAllocationsChanged,
9885 VMA_ASSERT(allocator && pAllocations);
9887 VMA_DEBUG_LOG(
"vmaDefragment");
9889 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9891 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
9899 VMA_ASSERT(allocator && allocation && buffer);
9901 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
9903 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9905 return allocator->BindBufferMemory(allocation, buffer);
9913 VMA_ASSERT(allocator && allocation && image);
9915 VMA_DEBUG_LOG(
"vmaBindImageMemory");
9917 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9919 return allocator->BindImageMemory(allocation, image);
9924 const VkBufferCreateInfo* pBufferCreateInfo,
9930 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
9932 VMA_DEBUG_LOG(
"vmaCreateBuffer");
9934 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9936 *pBuffer = VK_NULL_HANDLE;
9937 *pAllocation = VK_NULL_HANDLE;
9940 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
9941 allocator->m_hDevice,
9943 allocator->GetAllocationCallbacks(),
9948 VkMemoryRequirements vkMemReq = {};
9949 bool requiresDedicatedAllocation =
false;
9950 bool prefersDedicatedAllocation =
false;
9951 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
9952 requiresDedicatedAllocation, prefersDedicatedAllocation);
9956 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
9958 VMA_ASSERT(vkMemReq.alignment %
9959 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
9961 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
9963 VMA_ASSERT(vkMemReq.alignment %
9964 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
9966 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
9968 VMA_ASSERT(vkMemReq.alignment %
9969 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
9973 res = allocator->AllocateMemory(
9975 requiresDedicatedAllocation,
9976 prefersDedicatedAllocation,
9979 *pAllocationCreateInfo,
9980 VMA_SUBALLOCATION_TYPE_BUFFER,
9985 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
9989 #if VMA_STATS_STRING_ENABLED 9990 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
9992 if(pAllocationInfo != VMA_NULL)
9994 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9998 allocator->FreeMemory(*pAllocation);
9999 *pAllocation = VK_NULL_HANDLE;
10000 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
10001 *pBuffer = VK_NULL_HANDLE;
10004 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
10005 *pBuffer = VK_NULL_HANDLE;
10016 VMA_ASSERT(allocator);
10017 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
10018 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10019 if(buffer != VK_NULL_HANDLE)
10021 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
10023 if(allocation != VK_NULL_HANDLE)
10025 allocator->FreeMemory(allocation);
10031 const VkImageCreateInfo* pImageCreateInfo,
10037 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
10039 VMA_DEBUG_LOG(
"vmaCreateImage");
10041 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10043 *pImage = VK_NULL_HANDLE;
10044 *pAllocation = VK_NULL_HANDLE;
10047 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
10048 allocator->m_hDevice,
10050 allocator->GetAllocationCallbacks(),
10054 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
10055 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
10056 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
10059 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
10063 res = allocator->BindImageMemory(*pAllocation, *pImage);
10067 #if VMA_STATS_STRING_ENABLED 10068 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
10070 if(pAllocationInfo != VMA_NULL)
10072 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
10076 allocator->FreeMemory(*pAllocation);
10077 *pAllocation = VK_NULL_HANDLE;
10078 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
10079 *pImage = VK_NULL_HANDLE;
10082 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
10083 *pImage = VK_NULL_HANDLE;
10094 VMA_ASSERT(allocator);
10095 VMA_DEBUG_LOG(
"vmaDestroyImage");
10096 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10097 if(image != VK_NULL_HANDLE)
10099 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
10101 if(allocation != VK_NULL_HANDLE)
10103 allocator->FreeMemory(allocation);
10107 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1244
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1510
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1200
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1273
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1183
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1256
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:1394
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1175
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1767
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1197
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1994
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1613
+
Definition: vk_mem_alloc.h:1467
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1248
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1856
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1270
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2101
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1686
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1667
-
Definition: vk_mem_alloc.h:1474
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1164
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1512
-
Definition: vk_mem_alloc.h:1421
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1209
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1740
+
Definition: vk_mem_alloc.h:1547
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1237
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1585
+
Definition: vk_mem_alloc.h:1494
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1282
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1262
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1194
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1335
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1267
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1425
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1498
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1327
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1180
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1326
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1998
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1400
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1253
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1399
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2105
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1226
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1336
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2006
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1496
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1989
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1181
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1106
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1299
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1409
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2113
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1569
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2096
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1254
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1179
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1203
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1276
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1621
-
Definition: vk_mem_alloc.h:1615
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1777
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1694
+
Definition: vk_mem_alloc.h:1688
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1866
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1176
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1533
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1637
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1673
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1249
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1606
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1710
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1746
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1162
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1624
+
Definition: vk_mem_alloc.h:1235
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1697
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:1372
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:1445
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1984
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2091
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2002
-
Definition: vk_mem_alloc.h:1411
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1520
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1179
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2109
+
Definition: vk_mem_alloc.h:1484
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1593
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1252
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1332
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1112
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1405
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1185
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1133
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1206
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1138
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2004
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1211
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2111
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1507
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1683
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1580
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1756
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1172
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1315
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1632
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1125
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1245
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1388
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1705
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1198
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:1481
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1328
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1129
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1627
-
Definition: vk_mem_alloc.h:1420
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1178
+
Definition: vk_mem_alloc.h:1554
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1401
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1202
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1700
+
Definition: vk_mem_alloc.h:1493
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1251
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1502
-
Definition: vk_mem_alloc.h:1493
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1575
+
Definition: vk_mem_alloc.h:1566
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1318
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1174
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1645
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1212
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1676
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1491
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1526
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1391
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1247
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1718
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1285
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1749
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1564
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1599
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1250
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1334
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1461
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1327
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1323
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1407
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1534
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1400
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1185
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1127
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1184
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1258
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1200
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1257
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1659
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1177
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1732
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1250
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1791
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1206
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1327
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1324
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1880
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1279
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1400
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1397
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1664
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1772
-
Definition: vk_mem_alloc.h:1489
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2000
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1170
+
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1737
+
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1861
+
Definition: vk_mem_alloc.h:1562
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2107
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1243
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1322
-
Definition: vk_mem_alloc.h:1377
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1617
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1395
+
Definition: vk_mem_alloc.h:1450
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1690
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1320
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1182
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1186
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1448
-
Definition: vk_mem_alloc.h:1404
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1786
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1393
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1255
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1259
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1521
+
Definition: vk_mem_alloc.h:1477
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1875
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1160
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1233
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1173
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1753
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1246
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1842
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1595
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1328
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1668
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1401
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
-
Definition: vk_mem_alloc.h:1487
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1335
+
Definition: vk_mem_alloc.h:1560
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1408
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1670
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1328
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1758
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1743
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1401
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1847