23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1342 #include <vulkan/vulkan.h> 1344 #if !defined(VMA_DEDICATED_ALLOCATION) 1345 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1346 #define VMA_DEDICATED_ALLOCATION 1 1348 #define VMA_DEDICATED_ALLOCATION 0 1366 uint32_t memoryType,
1367 VkDeviceMemory memory,
1372 uint32_t memoryType,
1373 VkDeviceMemory memory,
1445 #if VMA_DEDICATED_ALLOCATION 1446 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1447 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1468 #ifndef VMA_RECORDING_ENABLED 1470 #define VMA_RECORDING_ENABLED 1 1472 #define VMA_RECORDING_ENABLED 0 1585 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1593 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1603 uint32_t memoryTypeIndex,
1604 VkMemoryPropertyFlags* pFlags);
1616 uint32_t frameIndex);
1649 #define VMA_STATS_STRING_ENABLED 1 1651 #if VMA_STATS_STRING_ENABLED 1658 char** ppStatsString,
1659 VkBool32 detailedMap);
1663 char* pStatsString);
1665 #endif // #if VMA_STATS_STRING_ENABLED 1864 uint32_t memoryTypeBits,
1866 uint32_t* pMemoryTypeIndex);
1882 const VkBufferCreateInfo* pBufferCreateInfo,
1884 uint32_t* pMemoryTypeIndex);
1900 const VkImageCreateInfo* pImageCreateInfo,
1902 uint32_t* pMemoryTypeIndex);
2049 size_t* pLostAllocationCount);
2148 const VkMemoryRequirements* pVkMemoryRequirements,
2458 size_t allocationCount,
2459 VkBool32* pAllocationsChanged,
2525 const VkBufferCreateInfo* pBufferCreateInfo,
2550 const VkImageCreateInfo* pImageCreateInfo,
2576 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2579 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2580 #define VMA_IMPLEMENTATION 2583 #ifdef VMA_IMPLEMENTATION 2584 #undef VMA_IMPLEMENTATION 2606 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2607 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2619 #if VMA_USE_STL_CONTAINERS 2620 #define VMA_USE_STL_VECTOR 1 2621 #define VMA_USE_STL_UNORDERED_MAP 1 2622 #define VMA_USE_STL_LIST 1 2625 #if VMA_USE_STL_VECTOR 2629 #if VMA_USE_STL_UNORDERED_MAP 2630 #include <unordered_map> 2633 #if VMA_USE_STL_LIST 2642 #include <algorithm> 2648 #define VMA_NULL nullptr 2651 #if defined(__APPLE__) || defined(__ANDROID__) 2653 void *aligned_alloc(
size_t alignment,
size_t size)
2656 if(alignment <
sizeof(
void*))
2658 alignment =
sizeof(
void*);
2662 if(posix_memalign(&pointer, alignment, size) == 0)
2676 #define VMA_ASSERT(expr) assert(expr) 2678 #define VMA_ASSERT(expr) 2684 #ifndef VMA_HEAVY_ASSERT 2686 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2688 #define VMA_HEAVY_ASSERT(expr) 2692 #ifndef VMA_ALIGN_OF 2693 #define VMA_ALIGN_OF(type) (__alignof(type)) 2696 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2698 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2700 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2704 #ifndef VMA_SYSTEM_FREE 2706 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2708 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2713 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2717 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2721 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2725 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2728 #ifndef VMA_DEBUG_LOG 2729 #define VMA_DEBUG_LOG(format, ...) 2739 #if VMA_STATS_STRING_ENABLED 2740 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2742 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
2744 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
2746 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
2748 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
2750 snprintf(outStr, strLen,
"%p", ptr);
2760 void Lock() { m_Mutex.lock(); }
2761 void Unlock() { m_Mutex.unlock(); }
2765 #define VMA_MUTEX VmaMutex 2776 #ifndef VMA_ATOMIC_UINT32 2777 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2780 #ifndef VMA_BEST_FIT 2793 #define VMA_BEST_FIT (1) 2796 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2801 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2804 #ifndef VMA_DEBUG_ALIGNMENT 2809 #define VMA_DEBUG_ALIGNMENT (1) 2812 #ifndef VMA_DEBUG_MARGIN 2817 #define VMA_DEBUG_MARGIN (0) 2820 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2825 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2828 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2834 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2837 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2842 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2845 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2850 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2853 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2854 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2858 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2859 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2863 #ifndef VMA_CLASS_NO_COPY 2864 #define VMA_CLASS_NO_COPY(className) \ 2866 className(const className&) = delete; \ 2867 className& operator=(const className&) = delete; 2870 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2873 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
2875 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
2876 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
2882 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2883 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count).
// Classic SWAR bit-twiddling: pairwise sums, then nibble/byte/halfword folds.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);        // sums of adjacent bit pairs
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);  // 4-bit sums
    c = ((c >> 4) + c) & 0x0F0F0F0F;                 // 8-bit sums
    c = ((c >> 8) + c) & 0x00FF00FF;                 // 16-bit sums
    c = ((c >> 16) + c) & 0x0000FFFF;                // final 32-bit sum
    return c; // FIX: restored missing return of the computed count
}
// Rounds val up to the nearest multiple of align.
// Requires align > 0; align need not be a power of two (uses division, not masking).
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return (bumped / align) * align;
}
// Rounds val down to the nearest multiple of align. Requires align > 0.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    const T wholeUnits = val / align;
    return wholeUnits * align;
}
// Integer division of x by y with rounding to nearest (half rounds up).
// Intended for non-negative x and y > 0.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// True when pStr is a null pointer or points at an empty C string.
static inline bool VmaStrIsEmpty(const char* pStr)
{
    if(pStr == nullptr)
    {
        return true;
    }
    return *pStr == '\0';
}
// Lomuto-style partition used by VmaQuickSort: partitions [beg, end) around
// the last element (pivot) and returns the iterator to the pivot's final
// position. Requires a non-empty range.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex; // FIX: restored missing return of the pivot position
}
2948 template<
typename Iterator,
typename Compare>
2949 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2953 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2954 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2955 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2959 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2961 #endif // #ifndef VMA_SORT 2970 static inline bool VmaBlocksOnSamePage(
2971 VkDeviceSize resourceAOffset,
2972 VkDeviceSize resourceASize,
2973 VkDeviceSize resourceBOffset,
2974 VkDeviceSize pageSize)
2976 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2977 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2978 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2979 VkDeviceSize resourceBStart = resourceBOffset;
2980 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2981 return resourceAEndPage == resourceBStartPage;
// Category of a suballocation inside a memory block. Used to decide when
// bufferImageGranularity must separate neighboring resources.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // resource kind not known — treated conservatively
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image, tiling not known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3001 static inline bool VmaIsBufferImageGranularityConflict(
3002 VmaSuballocationType suballocType1,
3003 VmaSuballocationType suballocType2)
3005 if(suballocType1 > suballocType2)
3007 VMA_SWAP(suballocType1, suballocType2);
3010 switch(suballocType1)
3012 case VMA_SUBALLOCATION_TYPE_FREE:
3014 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3016 case VMA_SUBALLOCATION_TYPE_BUFFER:
3018 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3019 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3020 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3022 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3023 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3024 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3025 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3027 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3028 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3036 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3038 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3039 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3040 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3042 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3046 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3048 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3049 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3050 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3052 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3063 VMA_CLASS_NO_COPY(VmaMutexLock)
3065 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3066 m_pMutex(useMutex ? &mutex : VMA_NULL)
3083 VMA_MUTEX* m_pMutex;
3086 #if VMA_DEBUG_GLOBAL_MUTEX 3087 static VMA_MUTEX gDebugGlobalMutex;
3088 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3090 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3094 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over the sorted range [beg, end): returns an iterator to the
first element for which cmp(element, key) is false (i.e. the first element
not less than key), or end if every element is less. Equivalent to
std::lower_bound with a custom comparator.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up) // FIX: restored the missing search loop
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1; // FIX: restored both narrowing steps and final return
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3127 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3129 if((pAllocationCallbacks != VMA_NULL) &&
3130 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3132 return (*pAllocationCallbacks->pfnAllocation)(
3133 pAllocationCallbacks->pUserData,
3136 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3140 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3144 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3146 if((pAllocationCallbacks != VMA_NULL) &&
3147 (pAllocationCallbacks->pfnFree != VMA_NULL))
3149 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3153 VMA_SYSTEM_FREE(ptr);
3157 template<
typename T>
3158 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3160 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3163 template<
typename T>
3164 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3166 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3169 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3171 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3173 template<
typename T>
3174 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3177 VmaFree(pAllocationCallbacks, ptr);
3180 template<
typename T>
3181 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3185 for(
size_t i = count; i--; )
3189 VmaFree(pAllocationCallbacks, ptr);
3194 template<
typename T>
3195 class VmaStlAllocator
3198 const VkAllocationCallbacks*
const m_pCallbacks;
3199 typedef T value_type;
3201 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3202 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3204 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3205 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3207 template<
typename U>
3208 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3210 return m_pCallbacks == rhs.m_pCallbacks;
3212 template<
typename U>
3213 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3215 return m_pCallbacks != rhs.m_pCallbacks;
3218 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3221 #if VMA_USE_STL_VECTOR 3223 #define VmaVector std::vector 3225 template<
typename T,
typename allocatorT>
3226 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3228 vec.insert(vec.begin() + index, item);
// Erases the element at position index from an std::vector (STL-container mode).
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    const typename std::vector<T, allocatorT>::iterator pos = vec.begin() + index;
    vec.erase(pos);
}
3237 #else // #if VMA_USE_STL_VECTOR 3242 template<
typename T,
typename AllocatorT>
3246 typedef T value_type;
3248 VmaVector(
const AllocatorT& allocator) :
3249 m_Allocator(allocator),
3256 VmaVector(
size_t count,
const AllocatorT& allocator) :
3257 m_Allocator(allocator),
3258 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3264 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3265 m_Allocator(src.m_Allocator),
3266 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3267 m_Count(src.m_Count),
3268 m_Capacity(src.m_Count)
3272 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3278 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3281 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3285 resize(rhs.m_Count);
3288 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3294 bool empty()
const {
return m_Count == 0; }
3295 size_t size()
const {
return m_Count; }
3296 T* data() {
return m_pArray; }
3297 const T* data()
const {
return m_pArray; }
3299 T& operator[](
size_t index)
3301 VMA_HEAVY_ASSERT(index < m_Count);
3302 return m_pArray[index];
3304 const T& operator[](
size_t index)
const 3306 VMA_HEAVY_ASSERT(index < m_Count);
3307 return m_pArray[index];
3312 VMA_HEAVY_ASSERT(m_Count > 0);
3315 const T& front()
const 3317 VMA_HEAVY_ASSERT(m_Count > 0);
3322 VMA_HEAVY_ASSERT(m_Count > 0);
3323 return m_pArray[m_Count - 1];
3325 const T& back()
const 3327 VMA_HEAVY_ASSERT(m_Count > 0);
3328 return m_pArray[m_Count - 1];
3331 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3333 newCapacity = VMA_MAX(newCapacity, m_Count);
3335 if((newCapacity < m_Capacity) && !freeMemory)
3337 newCapacity = m_Capacity;
3340 if(newCapacity != m_Capacity)
3342 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3345 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3347 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3348 m_Capacity = newCapacity;
3349 m_pArray = newArray;
3353 void resize(
size_t newCount,
bool freeMemory =
false)
3355 size_t newCapacity = m_Capacity;
3356 if(newCount > m_Capacity)
3358 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3362 newCapacity = newCount;
3365 if(newCapacity != m_Capacity)
3367 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3368 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3369 if(elementsToCopy != 0)
3371 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3373 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3374 m_Capacity = newCapacity;
3375 m_pArray = newArray;
3381 void clear(
bool freeMemory =
false)
3383 resize(0, freeMemory);
3386 void insert(
size_t index,
const T& src)
3388 VMA_HEAVY_ASSERT(index <= m_Count);
3389 const size_t oldCount = size();
3390 resize(oldCount + 1);
3391 if(index < oldCount)
3393 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3395 m_pArray[index] = src;
3398 void remove(
size_t index)
3400 VMA_HEAVY_ASSERT(index < m_Count);
3401 const size_t oldCount = size();
3402 if(index < oldCount - 1)
3404 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3406 resize(oldCount - 1);
3409 void push_back(
const T& src)
3411 const size_t newIndex = size();
3412 resize(newIndex + 1);
3413 m_pArray[newIndex] = src;
3418 VMA_HEAVY_ASSERT(m_Count > 0);
3422 void push_front(
const T& src)
3429 VMA_HEAVY_ASSERT(m_Count > 0);
3433 typedef T* iterator;
3435 iterator begin() {
return m_pArray; }
3436 iterator end() {
return m_pArray + m_Count; }
3439 AllocatorT m_Allocator;
3445 template<
typename T,
typename allocatorT>
3446 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3448 vec.insert(index, item);
3451 template<
typename T,
typename allocatorT>
3452 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3457 #endif // #if VMA_USE_STL_VECTOR 3459 template<
typename CmpLess,
typename VectorT>
3460 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3462 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3464 vector.data() + vector.size(),
3466 CmpLess()) - vector.data();
3467 VmaVectorInsert(vector, indexToInsert, value);
3468 return indexToInsert;
// Binary-searches a CmpLess-sorted vector for an element equal to value and
// removes it. Returns true if an equal element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator; // FIX: restored comparator declaration and search arguments
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equality via the ordering: neither element is less than the other.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true; // FIX: restored missing returns
    }
    return false;
}
3489 template<
typename CmpLess,
typename IterT,
typename KeyT>
3490 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3493 typename IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3494 beg, end, value, comparator);
3496 !comparator(*it, value) && !comparator(value, *it))
3511 template<
typename T>
3512 class VmaPoolAllocator
3514 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3516 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3517 ~VmaPoolAllocator();
3525 uint32_t NextFreeIndex;
3532 uint32_t FirstFreeIndex;
3535 const VkAllocationCallbacks* m_pAllocationCallbacks;
3536 size_t m_ItemsPerBlock;
3537 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3539 ItemBlock& CreateNewBlock();
3542 template<
typename T>
3543 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3544 m_pAllocationCallbacks(pAllocationCallbacks),
3545 m_ItemsPerBlock(itemsPerBlock),
3546 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3548 VMA_ASSERT(itemsPerBlock > 0);
3551 template<
typename T>
3552 VmaPoolAllocator<T>::~VmaPoolAllocator()
3557 template<
typename T>
3558 void VmaPoolAllocator<T>::Clear()
3560 for(
size_t i = m_ItemBlocks.size(); i--; )
3561 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3562 m_ItemBlocks.clear();
3565 template<
typename T>
3566 T* VmaPoolAllocator<T>::Alloc()
3568 for(
size_t i = m_ItemBlocks.size(); i--; )
3570 ItemBlock& block = m_ItemBlocks[i];
3572 if(block.FirstFreeIndex != UINT32_MAX)
3574 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3575 block.FirstFreeIndex = pItem->NextFreeIndex;
3576 return &pItem->Value;
3581 ItemBlock& newBlock = CreateNewBlock();
3582 Item*
const pItem = &newBlock.pItems[0];
3583 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3584 return &pItem->Value;
3587 template<
typename T>
3588 void VmaPoolAllocator<T>::Free(T* ptr)
3591 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3593 ItemBlock& block = m_ItemBlocks[i];
3597 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3600 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3602 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3603 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3604 block.FirstFreeIndex = index;
3608 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3611 template<
typename T>
3612 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3614 ItemBlock newBlock = {
3615 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3617 m_ItemBlocks.push_back(newBlock);
3620 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3621 newBlock.pItems[i].NextFreeIndex = i + 1;
3622 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3623 return m_ItemBlocks.back();
3629 #if VMA_USE_STL_LIST 3631 #define VmaList std::list 3633 #else // #if VMA_USE_STL_LIST 3635 template<
typename T>
3644 template<
typename T>
3647 VMA_CLASS_NO_COPY(VmaRawList)
3649 typedef VmaListItem<T> ItemType;
3651 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3655 size_t GetCount()
const {
return m_Count; }
3656 bool IsEmpty()
const {
return m_Count == 0; }
3658 ItemType* Front() {
return m_pFront; }
3659 const ItemType* Front()
const {
return m_pFront; }
3660 ItemType* Back() {
return m_pBack; }
3661 const ItemType* Back()
const {
return m_pBack; }
3663 ItemType* PushBack();
3664 ItemType* PushFront();
3665 ItemType* PushBack(
const T& value);
3666 ItemType* PushFront(
const T& value);
3671 ItemType* InsertBefore(ItemType* pItem);
3673 ItemType* InsertAfter(ItemType* pItem);
3675 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3676 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3678 void Remove(ItemType* pItem);
3681 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3682 VmaPoolAllocator<ItemType> m_ItemAllocator;
3688 template<
typename T>
3689 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3690 m_pAllocationCallbacks(pAllocationCallbacks),
3691 m_ItemAllocator(pAllocationCallbacks, 128),
3698 template<
typename T>
3699 VmaRawList<T>::~VmaRawList()
3705 template<
typename T>
3706 void VmaRawList<T>::Clear()
3708 if(IsEmpty() ==
false)
3710 ItemType* pItem = m_pBack;
3711 while(pItem != VMA_NULL)
3713 ItemType*
const pPrevItem = pItem->pPrev;
3714 m_ItemAllocator.Free(pItem);
3717 m_pFront = VMA_NULL;
3723 template<
typename T>
3724 VmaListItem<T>* VmaRawList<T>::PushBack()
3726 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3727 pNewItem->pNext = VMA_NULL;
3730 pNewItem->pPrev = VMA_NULL;
3731 m_pFront = pNewItem;
3737 pNewItem->pPrev = m_pBack;
3738 m_pBack->pNext = pNewItem;
3745 template<
typename T>
3746 VmaListItem<T>* VmaRawList<T>::PushFront()
3748 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3749 pNewItem->pPrev = VMA_NULL;
3752 pNewItem->pNext = VMA_NULL;
3753 m_pFront = pNewItem;
3759 pNewItem->pNext = m_pFront;
3760 m_pFront->pPrev = pNewItem;
3761 m_pFront = pNewItem;
3767 template<
typename T>
3768 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3770 ItemType*
const pNewItem = PushBack();
3771 pNewItem->Value = value;
3775 template<
typename T>
3776 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3778 ItemType*
const pNewItem = PushFront();
3779 pNewItem->Value = value;
3783 template<
typename T>
3784 void VmaRawList<T>::PopBack()
3786 VMA_HEAVY_ASSERT(m_Count > 0);
3787 ItemType*
const pBackItem = m_pBack;
3788 ItemType*
const pPrevItem = pBackItem->pPrev;
3789 if(pPrevItem != VMA_NULL)
3791 pPrevItem->pNext = VMA_NULL;
3793 m_pBack = pPrevItem;
3794 m_ItemAllocator.Free(pBackItem);
3798 template<
typename T>
3799 void VmaRawList<T>::PopFront()
3801 VMA_HEAVY_ASSERT(m_Count > 0);
3802 ItemType*
const pFrontItem = m_pFront;
3803 ItemType*
const pNextItem = pFrontItem->pNext;
3804 if(pNextItem != VMA_NULL)
3806 pNextItem->pPrev = VMA_NULL;
3808 m_pFront = pNextItem;
3809 m_ItemAllocator.Free(pFrontItem);
3813 template<
typename T>
3814 void VmaRawList<T>::Remove(ItemType* pItem)
3816 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3817 VMA_HEAVY_ASSERT(m_Count > 0);
3819 if(pItem->pPrev != VMA_NULL)
3821 pItem->pPrev->pNext = pItem->pNext;
3825 VMA_HEAVY_ASSERT(m_pFront == pItem);
3826 m_pFront = pItem->pNext;
3829 if(pItem->pNext != VMA_NULL)
3831 pItem->pNext->pPrev = pItem->pPrev;
3835 VMA_HEAVY_ASSERT(m_pBack == pItem);
3836 m_pBack = pItem->pPrev;
3839 m_ItemAllocator.Free(pItem);
3843 template<
typename T>
3844 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3846 if(pItem != VMA_NULL)
3848 ItemType*
const prevItem = pItem->pPrev;
3849 ItemType*
const newItem = m_ItemAllocator.Alloc();
3850 newItem->pPrev = prevItem;
3851 newItem->pNext = pItem;
3852 pItem->pPrev = newItem;
3853 if(prevItem != VMA_NULL)
3855 prevItem->pNext = newItem;
3859 VMA_HEAVY_ASSERT(m_pFront == pItem);
3869 template<
typename T>
3870 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3872 if(pItem != VMA_NULL)
3874 ItemType*
const nextItem = pItem->pNext;
3875 ItemType*
const newItem = m_ItemAllocator.Alloc();
3876 newItem->pNext = nextItem;
3877 newItem->pPrev = pItem;
3878 pItem->pNext = newItem;
3879 if(nextItem != VMA_NULL)
3881 nextItem->pPrev = newItem;
3885 VMA_HEAVY_ASSERT(m_pBack == pItem);
3895 template<
typename T>
3896 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3898 ItemType*
const newItem = InsertBefore(pItem);
3899 newItem->Value = value;
3903 template<
typename T>
3904 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3906 ItemType*
const newItem = InsertAfter(pItem);
3907 newItem->Value = value;
3911 template<
typename T,
typename AllocatorT>
3914 VMA_CLASS_NO_COPY(VmaList)
3925 T& operator*()
const 3927 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3928 return m_pItem->Value;
3930 T* operator->()
const 3932 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3933 return &m_pItem->Value;
3936 iterator& operator++()
3938 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3939 m_pItem = m_pItem->pNext;
3942 iterator& operator--()
3944 if(m_pItem != VMA_NULL)
3946 m_pItem = m_pItem->pPrev;
3950 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3951 m_pItem = m_pList->Back();
3956 iterator operator++(
int)
3958 iterator result = *
this;
3962 iterator operator--(
int)
3964 iterator result = *
this;
3969 bool operator==(
const iterator& rhs)
const 3971 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3972 return m_pItem == rhs.m_pItem;
3974 bool operator!=(
const iterator& rhs)
const 3976 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3977 return m_pItem != rhs.m_pItem;
3981 VmaRawList<T>* m_pList;
3982 VmaListItem<T>* m_pItem;
3984 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3990 friend class VmaList<T, AllocatorT>;
3993 class const_iterator
4002 const_iterator(
const iterator& src) :
4003 m_pList(src.m_pList),
4004 m_pItem(src.m_pItem)
4008 const T& operator*()
const 4010 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4011 return m_pItem->Value;
4013 const T* operator->()
const 4015 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4016 return &m_pItem->Value;
4019 const_iterator& operator++()
4021 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4022 m_pItem = m_pItem->pNext;
4025 const_iterator& operator--()
4027 if(m_pItem != VMA_NULL)
4029 m_pItem = m_pItem->pPrev;
4033 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4034 m_pItem = m_pList->Back();
4039 const_iterator operator++(
int)
4041 const_iterator result = *
this;
4045 const_iterator operator--(
int)
4047 const_iterator result = *
this;
4052 bool operator==(
const const_iterator& rhs)
const 4054 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4055 return m_pItem == rhs.m_pItem;
4057 bool operator!=(
const const_iterator& rhs)
const 4059 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4060 return m_pItem != rhs.m_pItem;
4064 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4070 const VmaRawList<T>* m_pList;
4071 const VmaListItem<T>* m_pItem;
4073 friend class VmaList<T, AllocatorT>;
4076 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4078 bool empty()
const {
return m_RawList.IsEmpty(); }
4079 size_t size()
const {
return m_RawList.GetCount(); }
4081 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4082 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4084 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4085 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4087 void clear() { m_RawList.Clear(); }
4088 void push_back(
const T& value) { m_RawList.PushBack(value); }
4089 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4090 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4093 VmaRawList<T> m_RawList;
4096 #endif // #if VMA_USE_STL_LIST 4104 #if VMA_USE_STL_UNORDERED_MAP 4106 #define VmaPair std::pair 4108 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4109 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4111 #else // #if VMA_USE_STL_UNORDERED_MAP 4113 template<
typename T1,
typename T2>
4119 VmaPair() : first(), second() { }
4120 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4126 template<
typename KeyT,
typename ValueT>
4130 typedef VmaPair<KeyT, ValueT> PairType;
4131 typedef PairType* iterator;
4133 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4135 iterator begin() {
return m_Vector.begin(); }
4136 iterator end() {
return m_Vector.end(); }
4138 void insert(
const PairType& pair);
4139 iterator find(
const KeyT& key);
4140 void erase(iterator it);
4143 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4146 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4148 template<
typename FirstT,
typename SecondT>
4149 struct VmaPairFirstLess
4151 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4153 return lhs.first < rhs.first;
4155 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4157 return lhs.first < rhsFirst;
4161 template<
typename KeyT,
typename ValueT>
4162 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4164 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4166 m_Vector.data() + m_Vector.size(),
4168 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4169 VmaVectorInsert(m_Vector, indexToInsert, pair);
4172 template<
typename KeyT,
typename ValueT>
4173 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4175 PairType* it = VmaBinaryFindFirstNotLess(
4177 m_Vector.data() + m_Vector.size(),
4179 VmaPairFirstLess<KeyT, ValueT>());
4180 if((it != m_Vector.end()) && (it->first == key))
4186 return m_Vector.end();
4190 template<
typename KeyT,
typename ValueT>
4191 void VmaMap<KeyT, ValueT>::erase(iterator it)
4193 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4196 #endif // #if VMA_USE_STL_UNORDERED_MAP 4202 class VmaDeviceMemoryBlock;
// Direction selector for FlushOrInvalidateAllocation (see VmaAllocator_T).
4204 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
// Internal representation of a single allocation handed out to the user
// (VmaAllocation is a pointer to this). An allocation is either a
// suballocation inside a VmaDeviceMemoryBlock (ALLOCATION_TYPE_BLOCK) or its
// own dedicated VkDeviceMemory (ALLOCATION_TYPE_DEDICATED); the two cases
// store their state in the BlockAllocation / DedicatedAllocation members.
// NOTE(review): extraction garbled throughout this struct — braces, access
// specifiers, the anonymous union around the two *Allocation members, and
// several statements are elided; code lines below are byte-identical to the
// original extraction.
4206 struct VmaAllocation_T
4208 VMA_CLASS_NO_COPY(VmaAllocation_T)
// High bit of m_MapCount marks a persistently-mapped allocation; the low
// 7 bits count explicit Map() calls.
4210 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
// When set, m_pUserData is an internally-owned copy of a string
// (see SetUserData) rather than an opaque user pointer.
4214 FLAG_USER_DATA_STRING = 0x01,
4218 enum ALLOCATION_TYPE
4220 ALLOCATION_TYPE_NONE,
4221 ALLOCATION_TYPE_BLOCK,
4222 ALLOCATION_TYPE_DEDICATED,
// Constructor: starts as TYPE_NONE; Init*Allocation() must be called to
// make the object usable.
4225 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4228 m_pUserData(VMA_NULL),
4229 m_LastUseFrameIndex(currentFrameIndex),
4230 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4231 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4233 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4235 #if VMA_STATS_STRING_ENABLED 4236 m_CreationFrameIndex = currentFrameIndex;
4237 m_BufferImageUsage = 0;
// Destructor assertions: the user must balance Map/Unmap and clear user
// data before the allocation is destroyed.
4243 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4246 VMA_ASSERT(m_pUserData == VMA_NULL);
// Initialize as a suballocation inside `block` at `offset`. `mapped`
// seeds the persistent-map flag; canBecomeLost enables the lost-allocation
// mechanism driven by m_LastUseFrameIndex.
4249 void InitBlockAllocation(
4251 VmaDeviceMemoryBlock* block,
4252 VkDeviceSize offset,
4253 VkDeviceSize alignment,
4255 VmaSuballocationType suballocationType,
4259 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4260 VMA_ASSERT(block != VMA_NULL);
4261 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4262 m_Alignment = alignment;
4264 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4265 m_SuballocationType = (uint8_t)suballocationType;
4266 m_BlockAllocation.m_hPool = hPool;
4267 m_BlockAllocation.m_Block = block;
4268 m_BlockAllocation.m_Offset = offset;
4269 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Initialize as an already-lost block allocation (frame index must be
// VMA_FRAME_INDEX_LOST): null pool/block, offset 0, can-become-lost true.
4274 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4275 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4276 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4277 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4278 m_BlockAllocation.m_Block = VMA_NULL;
4279 m_BlockAllocation.m_Offset = 0;
4280 m_BlockAllocation.m_CanBecomeLost =
true;
// Move this allocation to another block/offset (used by defragmentation);
// defined out-of-line below.
4283 void ChangeBlockAllocation(
4285 VmaDeviceMemoryBlock* block,
4286 VkDeviceSize offset);
// Initialize as a dedicated VkDeviceMemory allocation; pMappedData != null
// marks it persistently mapped.
4289 void InitDedicatedAllocation(
4290 uint32_t memoryTypeIndex,
4291 VkDeviceMemory hMemory,
4292 VmaSuballocationType suballocationType,
4296 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4297 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4298 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4301 m_SuballocationType = (uint8_t)suballocationType;
4302 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4303 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4304 m_DedicatedAllocation.m_hMemory = hMemory;
4305 m_DedicatedAllocation.m_pMappedData = pMappedData;
// Simple accessors over the packed state fields.
4308 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4309 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4310 VkDeviceSize GetSize()
const {
return m_Size; }
4311 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4312 void* GetUserData()
const {
return m_pUserData; }
4313 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4314 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for block allocations (asserted).
4316 VmaDeviceMemoryBlock* GetBlock()
const 4318 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4319 return m_BlockAllocation.m_Block;
// Type-dispatching accessors, defined out-of-line near the end of this
// chunk (switch on m_Type).
4321 VkDeviceSize GetOffset()
const;
4322 VkDeviceMemory GetMemory()
const;
4323 uint32_t GetMemoryTypeIndex()
const;
4324 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4325 void* GetMappedData()
const;
4326 bool CanBecomeLost()
const;
// Lost-allocation support: m_LastUseFrameIndex is atomic so multiple
// threads can touch/lose allocations concurrently.
4329 uint32_t GetLastUseFrameIndex()
const 4331 return m_LastUseFrameIndex.load();
4333 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4335 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4345 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Fill a VmaStatInfo for a dedicated allocation (asserted type).
4347 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4349 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap helpers split per allocation type.
4360 void BlockAllocMap();
4361 void BlockAllocUnmap();
4362 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
// Statistics-only bookkeeping, compiled in when stats strings are enabled.
4365 #if VMA_STATS_STRING_ENABLED 4366 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4367 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4369 void InitBufferImageUsage(uint32_t bufferImageUsage)
4371 VMA_ASSERT(m_BufferImageUsage == 0);
4372 m_BufferImageUsage = bufferImageUsage;
4375 void PrintParameters(
class VmaJsonWriter& json)
const;
// Data members (packed small types; per-type state below).
4379 VkDeviceSize m_Alignment;
4380 VkDeviceSize m_Size;
4382 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4384 uint8_t m_SuballocationType;
// State used when m_Type == ALLOCATION_TYPE_BLOCK.
4391 struct BlockAllocation
4394 VmaDeviceMemoryBlock* m_Block;
4395 VkDeviceSize m_Offset;
4396 bool m_CanBecomeLost;
// State used when m_Type == ALLOCATION_TYPE_DEDICATED.
4400 struct DedicatedAllocation
4402 uint32_t m_MemoryTypeIndex;
4403 VkDeviceMemory m_hMemory;
4404 void* m_pMappedData;
// presumably these two members share an anonymous union in the original
// (only one is active depending on m_Type) — elided by extraction; verify.
4410 BlockAllocation m_BlockAllocation;
4412 DedicatedAllocation m_DedicatedAllocation;
4415 #if VMA_STATS_STRING_ENABLED 4416 uint32_t m_CreationFrameIndex;
4417 uint32_t m_BufferImageUsage;
// One region inside a device memory block: offset + type (free or the kind of
// resource occupying it). Blocks keep these in a list/vector sorted by offset.
4427 struct VmaSuballocation
4429 VkDeviceSize offset;
4432 VmaSuballocationType type;
// Ascending-offset ordering for suballocation containers.
4436 struct VmaSuballocationOffsetLess
4438 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4440 return lhs.offset < rhs.offset;
// Descending-offset ordering (used by the linear metadata's second vector).
4443 struct VmaSuballocationOffsetGreater
4445 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4447 return lhs.offset > rhs.offset;
4451 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost penalty (in bytes) charged per allocation that must be made lost to
// satisfy a request; biases CreateAllocationRequest against evicting.
4454 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Result of VmaBlockMetadata::CreateAllocationRequest: where the allocation
// would go (offset/item) and what it would cost if existing allocations must
// be made lost first.
4469 struct VmaAllocationRequest
4471 VkDeviceSize offset;
4472 VkDeviceSize sumFreeSize;
4473 VkDeviceSize sumItemSize;
4474 VmaSuballocationList::iterator item;
4475 size_t itemsToMakeLostCount;
// Total cost: bytes of allocations to destroy plus a fixed per-item
// penalty, used to pick the cheapest candidate across blocks.
4477 VkDeviceSize CalcCost()
const 4479 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Abstract strategy interface for managing suballocations within a single
// VkDeviceMemory block. Concrete implementations below: _Generic (free-list)
// and _Linear (stack/ring). Owns no memory itself — only bookkeeping of size
// m_Size, set via Init().
// NOTE(review): extraction garbled — braces, access specifiers and the Alloc()
// method's signature header are elided; code lines kept byte-identical.
4487 class VmaBlockMetadata
4490 VmaBlockMetadata() : m_Size(0) { }
4491 virtual ~VmaBlockMetadata() { }
4492 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Validates internal consistency; used in heavy-assert builds.
4495 virtual bool Validate()
const = 0;
4496 VkDeviceSize GetSize()
const {
return m_Size; }
4497 virtual size_t GetAllocationCount()
const = 0;
4498 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4499 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4501 virtual bool IsEmpty()
const = 0;
// Statistics reporting hooks.
4503 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4504 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4506 #if VMA_STATS_STRING_ENABLED 4507 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Core placement query: try to find space for (allocSize, allocAlignment)
// honoring bufferImageGranularity; may propose making other allocations
// lost when canMakeOtherLost is set. Returns success via bool and details
// in *pAllocationRequest.
4513 virtual bool CreateAllocationRequest(
4514 uint32_t currentFrameIndex,
4515 uint32_t frameInUseCount,
4516 VkDeviceSize bufferImageGranularity,
4517 VkDeviceSize allocSize,
4518 VkDeviceSize allocAlignment,
4520 VmaSuballocationType allocType,
4521 bool canMakeOtherLost,
4522 VmaAllocationRequest* pAllocationRequest) = 0;
// Actually evict the allocations a prior request said must be lost.
4524 virtual bool MakeRequestedAllocationsLost(
4525 uint32_t currentFrameIndex,
4526 uint32_t frameInUseCount,
4527 VmaAllocationRequest* pAllocationRequest) = 0;
4529 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
// Scan block contents for corruption-detection magic values.
4531 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
// Commit a previously computed request (Alloc); signature header elided
// by extraction — these are its parameters.
4535 const VmaAllocationRequest& request,
4536 VmaSuballocationType type,
4537 VkDeviceSize allocSize,
4543 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
// Shared JSON-printing helpers for derived classes' PrintDetailedMap.
4546 #if VMA_STATS_STRING_ENABLED 4547 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4548 VkDeviceSize unusedBytes,
4549 size_t allocationCount,
4550 size_t unusedRangeCount)
const;
4551 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4552 VkDeviceSize offset,
4554 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4555 VkDeviceSize offset,
4556 VkDeviceSize size)
const;
4557 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4561 VkDeviceSize m_Size;
// Default metadata implementation: a sorted suballocation list plus a
// size-sorted vector of iterators to free suballocations for best-fit search.
// NOTE(review): extraction garbled — braces/access specifiers and the Alloc()
// signature header are elided; code lines kept byte-identical.
4564 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4566 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4569 virtual ~VmaBlockMetadata_Generic();
4570 virtual void Init(VkDeviceSize size);
4572 virtual bool Validate()
const;
// Allocation count = total suballocations minus the free ones.
4573 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4574 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4575 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4576 virtual bool IsEmpty()
const;
4578 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4579 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4581 #if VMA_STATS_STRING_ENABLED 4582 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Overrides of the base placement/eviction interface (see VmaBlockMetadata).
4585 virtual bool CreateAllocationRequest(
4586 uint32_t currentFrameIndex,
4587 uint32_t frameInUseCount,
4588 VkDeviceSize bufferImageGranularity,
4589 VkDeviceSize allocSize,
4590 VkDeviceSize allocAlignment,
4592 VmaSuballocationType allocType,
4593 bool canMakeOtherLost,
4594 VmaAllocationRequest* pAllocationRequest);
4596 virtual bool MakeRequestedAllocationsLost(
4597 uint32_t currentFrameIndex,
4598 uint32_t frameInUseCount,
4599 VmaAllocationRequest* pAllocationRequest);
4601 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4603 virtual VkResult CheckCorruption(
const void* pBlockData);
// Alloc() parameters (signature header elided by extraction).
4606 const VmaAllocationRequest& request,
4607 VmaSuballocationType type,
4608 VkDeviceSize allocSize,
4613 virtual void FreeAtOffset(VkDeviceSize offset);
// Bookkeeping: free-region count, total free bytes, and the offset-sorted
// suballocation list.
4616 uint32_t m_FreeCount;
4617 VkDeviceSize m_SumFreeSize;
4618 VmaSuballocationList m_Suballocations;
// Iterators into m_Suballocations pointing at free regions, kept sorted by
// size for best-fit lookup.
4621 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4623 bool ValidateFreeSuballocationList()
const;
// Check whether a specific free suballocation can satisfy a request;
// outputs offset, eviction count and size sums for cost comparison.
4627 bool CheckAllocation(
4628 uint32_t currentFrameIndex,
4629 uint32_t frameInUseCount,
4630 VkDeviceSize bufferImageGranularity,
4631 VkDeviceSize allocSize,
4632 VkDeviceSize allocAlignment,
4633 VmaSuballocationType allocType,
4634 VmaSuballocationList::const_iterator suballocItem,
4635 bool canMakeOtherLost,
4636 VkDeviceSize* pOffset,
4637 size_t* itemsToMakeLostCount,
4638 VkDeviceSize* pSumFreeSize,
4639 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
4641 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4645 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4648 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4651 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Linear-algorithm metadata: allocations are appended in order using two
// suballocation vectors whose roles swap (m_1stVectorIndex), with the second
// vector acting as a ring buffer or double stack depending on
// m_2ndVectorMode. Freed slots become null items compacted lazily.
// NOTE(review): extraction garbled — braces/access specifiers and the Alloc()
// signature header are elided; code lines kept byte-identical.
4732 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
4734 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
4737 virtual ~VmaBlockMetadata_Linear();
4738 virtual void Init(VkDeviceSize size);
4740 virtual bool Validate()
const;
4741 virtual size_t GetAllocationCount()
const;
4742 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4743 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4744 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
4746 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4747 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4749 #if VMA_STATS_STRING_ENABLED 4750 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Overrides of the base placement/eviction interface (see VmaBlockMetadata).
4753 virtual bool CreateAllocationRequest(
4754 uint32_t currentFrameIndex,
4755 uint32_t frameInUseCount,
4756 VkDeviceSize bufferImageGranularity,
4757 VkDeviceSize allocSize,
4758 VkDeviceSize allocAlignment,
4760 VmaSuballocationType allocType,
4761 bool canMakeOtherLost,
4762 VmaAllocationRequest* pAllocationRequest);
4764 virtual bool MakeRequestedAllocationsLost(
4765 uint32_t currentFrameIndex,
4766 uint32_t frameInUseCount,
4767 VmaAllocationRequest* pAllocationRequest);
4769 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4771 virtual VkResult CheckCorruption(
const void* pBlockData);
// Alloc() parameters (signature header elided by extraction).
4774 const VmaAllocationRequest& request,
4775 VmaSuballocationType type,
4776 VkDeviceSize allocSize,
4781 virtual void FreeAtOffset(VkDeviceSize offset);
4791 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// How the 2nd vector is being used relative to the 1st.
4793 enum SECOND_VECTOR_MODE
4795 SECOND_VECTOR_EMPTY,
4800 SECOND_VECTOR_RING_BUFFER,
4806 SECOND_VECTOR_DOUBLE_STACK,
4809 VkDeviceSize m_SumFreeSize;
// Double-buffered suballocation storage; m_1stVectorIndex selects which of
// the two vectors currently plays the "1st" role.
4810 SuballocationVectorType m_Suballocations0, m_Suballocations1;
4811 uint32_t m_1stVectorIndex;
4812 SECOND_VECTOR_MODE m_2ndVectorMode;
4814 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
4815 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
4816 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
4817 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counts of freed ("null") items awaiting compaction.
4820 size_t m_1stNullItemsBeginCount;
4822 size_t m_1stNullItemsMiddleCount;
4824 size_t m_2ndNullItemsCount;
4826 bool ShouldCompact1st()
const;
4827 void CleanupAfterFree();
// Wraps one VkDeviceMemory handle plus its metadata strategy (m_pMetadata).
// Tracks a map reference count so nested Map()/Unmap() pairs share one
// vkMapMemory mapping.
// NOTE(review): extraction garbled — braces/access specifiers, Init()'s
// signature line, Unmap(), and the BindBufferMemory/BindImageMemory parameter
// lists are elided; code lines kept byte-identical.
4836 class VmaDeviceMemoryBlock
4838 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
4840 VmaBlockMetadata* m_pMetadata;
// Destructor asserts the block was fully unmapped and its memory already
// released (Destroy must run before destruction).
4844 ~VmaDeviceMemoryBlock()
4846 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
4847 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init() parameters (signature header elided by extraction):
// linearAlgorithm chooses between _Linear and _Generic metadata.
4853 uint32_t newMemoryTypeIndex,
4854 VkDeviceMemory newMemory,
4855 VkDeviceSize newSize,
4857 bool linearAlgorithm);
4861 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
4862 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4863 uint32_t GetId()
const {
return m_Id; }
4864 void* GetMappedData()
const {
return m_pMappedData; }
4867 bool Validate()
const;
// Reference-counted map: `count` is added to m_MapCount; ppData receives
// the mapped pointer.
4872 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Corruption detection: write/verify magic values around an allocation.
4875 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
4876 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
// Bind a buffer/image to this block's memory at an allocation's offset
// (parameter lists elided by extraction).
4878 VkResult BindBufferMemory(
4882 VkResult BindImageMemory(
4888 uint32_t m_MemoryTypeIndex;
4890 VkDeviceMemory m_hMemory;
// Map reference count and cached vkMapMemory pointer.
4895 uint32_t m_MapCount;
4896 void* m_pMappedData;
// Orders raw pointers for use in sorted containers (e.g. the allocator's
// dedicated-allocation vectors).
// NOTE(review): the comparison body is elided by extraction; only the
// signature survives, followed by a fused forward declaration.
4899 struct VmaPointerLess
4901 bool operator()(
const void* lhs,
const void* rhs)
const 4907 class VmaDefragmentator;
// A growable sequence of VmaDeviceMemoryBlock for one memory type — the
// backing store for both default heaps and custom pools (m_IsCustomPool).
// Handles block creation within [m_MinBlockCount, m_MaxBlockCount] and hosts
// the per-vector defragmentator.
// NOTE(review): extraction garbled — constructor header, Allocate()/Free()
// signatures and mutex members are partially elided; code lines kept
// byte-identical.
4915 struct VmaBlockVector
4917 VMA_CLASS_NO_COPY(VmaBlockVector)
// Constructor parameters (header line elided by extraction).
4921 uint32_t memoryTypeIndex,
4922 VkDeviceSize preferredBlockSize,
4923 size_t minBlockCount,
4924 size_t maxBlockCount,
4925 VkDeviceSize bufferImageGranularity,
4926 uint32_t frameInUseCount,
4928 bool linearAlgorithm);
// Pre-creates m_MinBlockCount empty blocks.
4931 VkResult CreateMinBlocks();
4933 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4934 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
4935 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
4936 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
4937 bool UsesLinearAlgorithm()
const {
return m_LinearAlgorithm; }
4941 bool IsEmpty()
const {
return m_Blocks.empty(); }
4942 bool IsCorruptionDetectionEnabled()
const;
// Allocate() parameters (signature header elided by extraction).
4946 uint32_t currentFrameIndex,
4948 VkDeviceSize alignment,
4950 VmaSuballocationType suballocType,
4959 #if VMA_STATS_STRING_ENABLED 4960 void PrintDetailedMap(
class VmaJsonWriter& json);
// Force-lose allocations unused for frameInUseCount frames (custom pools).
4963 void MakePoolAllocationsLost(
4964 uint32_t currentFrameIndex,
4965 size_t* pLostAllocationCount);
4966 VkResult CheckCorruption();
// Defragmentation lifecycle: lazily create, run, destroy.
4968 VmaDefragmentator* EnsureDefragmentator(
4970 uint32_t currentFrameIndex);
4972 VkResult Defragment(
4974 VkDeviceSize& maxBytesToMove,
4975 uint32_t& maxAllocationsToMove);
4977 void DestroyDefragmentator();
4980 friend class VmaDefragmentator;
// Immutable configuration captured at construction.
4983 const uint32_t m_MemoryTypeIndex;
4984 const VkDeviceSize m_PreferredBlockSize;
4985 const size_t m_MinBlockCount;
4986 const size_t m_MaxBlockCount;
4987 const VkDeviceSize m_BufferImageGranularity;
4988 const uint32_t m_FrameInUseCount;
4989 const bool m_IsCustomPool;
4990 const bool m_LinearAlgorithm;
// True while an empty block is retained to avoid alloc/free thrash.
4991 bool m_HasEmptyBlock;
// Blocks, incrementally kept sorted by free size (IncrementallySortBlocks).
4994 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
4998 VmaDefragmentator* m_pDefragmentator;
4999 uint32_t m_NextBlockId;
5001 VkDeviceSize CalcMaxBlockSize()
const;
5004 void Remove(VmaDeviceMemoryBlock* pBlock);
// One bubble-sort pass per call keeps m_Blocks roughly ordered cheaply.
5008 void IncrementallySortBlocks();
5010 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// VmaPool_T: a custom pool is essentially an owned VmaBlockVector plus an id
// assigned once (used by the recorder). Class header line elided by
// extraction.
5015 VMA_CLASS_NO_COPY(VmaPool_T)
5017 VmaBlockVector m_BlockVector;
5024 uint32_t GetId()
const {
return m_Id; }
// Id may be set exactly once (asserted zero before assignment).
5025 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
// Moves allocations between blocks of one VmaBlockVector to reduce
// fragmentation, respecting maxBytesToMove / maxAllocationsToMove budgets.
// NOTE(review): extraction garbled — braces, the AllocationInfo constructor
// header and several member lines are elided; code lines kept byte-identical.
5027 #if VMA_STATS_STRING_ENABLED 5035 class VmaDefragmentator
5037 VMA_CLASS_NO_COPY(VmaDefragmentator)
// Target block vector (const pointer: the defragmentator never reseats it).
5040 VmaBlockVector*
const m_pBlockVector;
5041 uint32_t m_CurrentFrameIndex;
// Running totals reported back through GetBytesMoved/GetAllocationsMoved.
5042 VkDeviceSize m_BytesMoved;
5043 uint32_t m_AllocationsMoved;
// One candidate allocation; m_pChanged (optional) is set when it moves.
5045 struct AllocationInfo
5048 VkBool32* m_pChanged;
5051 m_hAllocation(VK_NULL_HANDLE),
5052 m_pChanged(VMA_NULL)
// Sorts candidates largest-first so big allocations move first.
5057 struct AllocationInfoSizeGreater
5059 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5061 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5066 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state during a defragmentation pass.
5070 VmaDeviceMemoryBlock* m_pBlock;
5071 bool m_HasNonMovableAllocations;
5072 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5074 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5076 m_HasNonMovableAllocations(true),
5077 m_Allocations(pAllocationCallbacks),
5078 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when not all of its allocations were
// registered as defragmentation candidates.
5082 void CalcHasNonMovableAllocations()
5084 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5085 const size_t defragmentAllocCount = m_Allocations.size();
5086 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// (sic: "Descecnding" typo is in the original identifier.)
5089 void SortAllocationsBySizeDescecnding()
5091 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
// Lazily map the block for CPU-side memcpy of moved data.
5094 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5099 void* m_pMappedDataForDefragmentation;
// Comparators for binary search / sorting of BlockInfo pointers.
5102 struct BlockPointerLess
5104 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5106 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5108 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5110 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Move-destination preference: blocks with non-movable allocations first,
// then by smaller free size — packs immovable blocks tighter.
5116 struct BlockInfoCompareMoveDestination
5118 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5120 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5124 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5128 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5136 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5137 BlockInfoVector m_Blocks;
// One round of moving allocations under the given budgets.
5139 VkResult DefragmentRound(
5140 VkDeviceSize maxBytesToMove,
5141 uint32_t maxAllocationsToMove);
5143 static bool MoveMakesSense(
5144 size_t dstBlockIndex, VkDeviceSize dstOffset,
5145 size_t srcBlockIndex, VkDeviceSize srcOffset);
// Constructor parameters (header line elided by extraction).
5150 VmaBlockVector* pBlockVector,
5151 uint32_t currentFrameIndex);
5153 ~VmaDefragmentator();
5155 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5156 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
// Register an allocation as a movement candidate.
5158 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5160 VkResult Defragment(
5161 VkDeviceSize maxBytesToMove,
5162 uint32_t maxAllocationsToMove);
// VmaRecorder: writes a CSV-like trace of allocator API calls to a file when
// VMA_RECORDING_ENABLED. Class header line elided by extraction — only the
// Record* method declarations and a few members are visible.
5165 #if VMA_RECORDING_ENABLED 5172 void WriteConfiguration(
5173 const VkPhysicalDeviceProperties& devProps,
5174 const VkPhysicalDeviceMemoryProperties& memProps,
5175 bool dedicatedAllocationExtensionEnabled);
// One Record* entry point per public allocator operation; each takes the
// current frame index plus the call's key parameters.
5178 void RecordCreateAllocator(uint32_t frameIndex);
5179 void RecordDestroyAllocator(uint32_t frameIndex);
5180 void RecordCreatePool(uint32_t frameIndex,
5183 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5184 void RecordAllocateMemory(uint32_t frameIndex,
5185 const VkMemoryRequirements& vkMemReq,
5188 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5189 const VkMemoryRequirements& vkMemReq,
5190 bool requiresDedicatedAllocation,
5191 bool prefersDedicatedAllocation,
5194 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5195 const VkMemoryRequirements& vkMemReq,
5196 bool requiresDedicatedAllocation,
5197 bool prefersDedicatedAllocation,
5200 void RecordFreeMemory(uint32_t frameIndex,
5202 void RecordSetAllocationUserData(uint32_t frameIndex,
5204 const void* pUserData);
5205 void RecordCreateLostAllocation(uint32_t frameIndex,
5207 void RecordMapMemory(uint32_t frameIndex,
5209 void RecordUnmapMemory(uint32_t frameIndex,
5211 void RecordFlushAllocation(uint32_t frameIndex,
5212 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5213 void RecordInvalidateAllocation(uint32_t frameIndex,
5214 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5215 void RecordCreateBuffer(uint32_t frameIndex,
5216 const VkBufferCreateInfo& bufCreateInfo,
5219 void RecordCreateImage(uint32_t frameIndex,
5220 const VkImageCreateInfo& imageCreateInfo,
5223 void RecordDestroyBuffer(uint32_t frameIndex,
5225 void RecordDestroyImage(uint32_t frameIndex,
5227 void RecordTouchAllocation(uint32_t frameIndex,
5229 void RecordGetAllocationInfo(uint32_t frameIndex,
5231 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
// Helper that escapes/copies a user-data string for trace output.
5241 class UserDataString
5245 const char* GetString()
const {
return m_Str; }
// Serializes writes to the trace file; m_StartCounter anchors timestamps.
5255 VMA_MUTEX m_FileMutex;
5257 int64_t m_StartCounter;
5259 void GetBasicParams(CallParams& outParams);
// The allocator object behind the public VmaAllocator handle. Owns one
// VmaBlockVector per memory type, the dedicated-allocation registries, custom
// pools, and the dispatch table of Vulkan function pointers.
// NOTE(review): extraction garbled — braces/access specifiers and several
// signature headers (Free, GetVulkanFunctions, CreatePool, etc.) are elided;
// code lines kept byte-identical.
5263 #endif // #if VMA_RECORDING_ENABLED 5266 struct VmaAllocator_T
5268 VMA_CLASS_NO_COPY(VmaAllocator_T)
// Whether VK_KHR_dedicated_allocation is enabled for this allocator.
5271 bool m_UseKhrDedicatedAllocation;
5273 bool m_AllocationCallbacksSpecified;
5274 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap budget caps, guarded by their own mutex.
5278 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5279 VMA_MUTEX m_HeapSizeLimitMutex;
// Cached device properties queried at creation.
5281 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5282 VkPhysicalDeviceMemoryProperties m_MemProps;
// One default block vector per memory type.
5285 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Registry of dedicated allocations, one vector+mutex per memory type.
5288 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5289 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5290 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks if provided, else null (Vulkan default allocator).
5296 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5298 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5302 return m_VulkanFunctions;
// Effective granularity: device limit raised to the debug minimum.
5305 VkDeviceSize GetBufferImageGranularity()
const 5308 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5309 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5312 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5313 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5315 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5317 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5318 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Non-coherent == host-visible without host-coherent: such types need
// explicit flush/invalidate and nonCoherentAtomSize alignment.
5321 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5323 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5324 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5327 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5329 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5330 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5331 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5334 bool IsIntegratedGpu()
const 5336 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5339 #if VMA_RECORDING_ENABLED 5340 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
// Query memory requirements, including dedicated-allocation preference via
// VK_KHR_get_memory_requirements2 when available.
5343 void GetBufferMemoryRequirements(
5345 VkMemoryRequirements& memReq,
5346 bool& requiresDedicatedAllocation,
5347 bool& prefersDedicatedAllocation)
const;
5348 void GetImageMemoryRequirements(
5350 VkMemoryRequirements& memReq,
5351 bool& requiresDedicatedAllocation,
5352 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point: dispatches to block or dedicated paths.
5355 VkResult AllocateMemory(
5356 const VkMemoryRequirements& vkMemReq,
5357 bool requiresDedicatedAllocation,
5358 bool prefersDedicatedAllocation,
5359 VkBuffer dedicatedBuffer,
5360 VkImage dedicatedImage,
5362 VmaSuballocationType suballocType,
5368 void CalculateStats(
VmaStats* pStats);
5370 #if VMA_STATS_STRING_ENABLED 5371 void PrintDetailedMap(
class VmaJsonWriter& json);
5374 VkResult Defragment(
5376 size_t allocationCount,
5377 VkBool32* pAllocationsChanged,
5385 void DestroyPool(
VmaPool pool);
// Frame index drives the lost-allocation mechanism.
5388 void SetCurrentFrameIndex(uint32_t frameIndex);
5389 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5391 void MakePoolAllocationsLost(
5393 size_t* pLostAllocationCount);
5394 VkResult CheckPoolCorruption(
VmaPool hPool);
5395 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Thin wrappers over vkAllocateMemory/vkFreeMemory that also apply heap
// size limits and device-memory callbacks.
5399 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5400 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5405 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5406 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
// Flush or invalidate a mapped range per VMA_CACHE_OPERATION.
5408 void FlushOrInvalidateAllocation(
5410 VkDeviceSize offset, VkDeviceSize size,
5411 VMA_CACHE_OPERATION op);
// Debug: fill allocation bytes with a pattern (created/destroyed marks).
5413 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5416 VkDeviceSize m_PreferredLargeHeapBlockSize;
5418 VkPhysicalDevice m_PhysicalDevice;
5419 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools registry, guarded by m_PoolsMutex.
5421 VMA_MUTEX m_PoolsMutex;
5423 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5424 uint32_t m_NextPoolId;
5428 #if VMA_RECORDING_ENABLED 5429 VmaRecorder* m_pRecorder;
5434 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocate from a specific memory type: block vector first, dedicated as
// needed/preferred (signature partially elided by extraction).
5436 VkResult AllocateMemoryOfType(
5438 VkDeviceSize alignment,
5439 bool dedicatedAllocation,
5440 VkBuffer dedicatedBuffer,
5441 VkImage dedicatedImage,
5443 uint32_t memTypeIndex,
5444 VmaSuballocationType suballocType,
5448 VkResult AllocateDedicatedMemory(
5450 VmaSuballocationType suballocType,
5451 uint32_t memTypeIndex,
5453 bool isUserDataString,
5455 VkBuffer dedicatedBuffer,
5456 VkImage dedicatedImage,
// CPU-side allocation helpers: overloads taking a VmaAllocator forward to the
// callback-based VmaMalloc/VmaFree using the allocator's stored callbacks.
5466 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5468 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5471 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5473 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (vma_new-style helper; signature header
// elided by extraction): raw storage sized/aligned for T.
5476 template<
typename T>
5479 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Typed array allocation: `count` elements of T.
5482 template<
typename T>
5483 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5485 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroy + free a single object (destructor call elided by extraction).
5488 template<
typename T>
5489 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5494 VmaFree(hAllocator, ptr);
// Destroy + free an array: destructors run in reverse index order, then the
// storage is released once.
5498 template<
typename T>
5499 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5503 for(
size_t i = count; i--; )
5505 VmaFree(hAllocator, ptr);
// Minimal append-only string buffer backed by VmaVector<char>, used to build
// the JSON stats string. Not NUL-terminated internally — GetLength/GetData
// expose the raw buffer.
5512 #if VMA_STATS_STRING_ENABLED 5514 class VmaStringBuilder
5517 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5518 size_t GetLength()
const {
return m_Data.size(); }
5519 const char* GetData()
const {
return m_Data.data(); }
// Append a single character / a C string / a newline.
5521 void Add(
char ch) { m_Data.push_back(ch); }
5522 void Add(
const char* pStr);
5523 void AddNewLine() { Add(
'\n'); }
// Append decimal representations and pointer hex (defined below).
5524 void AddNumber(uint32_t num);
5525 void AddNumber(uint64_t num);
5526 void AddPointer(
const void* ptr);
5529 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Append a C string: grow the vector once, then memcpy (no NUL stored).
5532 void VmaStringBuilder::Add(
const char* pStr)
5534 const size_t strLen = strlen(pStr);
5537 const size_t oldCount = m_Data.size();
5538 m_Data.resize(oldCount + strLen);
5539 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer formatting: convert into a stack buffer, then append.
// NOTE(review): the buffer declarations and trailing Add(buf) calls are
// elided by extraction — only the conversion calls are visible.
5543 void VmaStringBuilder::AddNumber(uint32_t num)
5546 VmaUint32ToStr(buf,
sizeof(buf), num);
5550 void VmaStringBuilder::AddNumber(uint64_t num)
5553 VmaUint64ToStr(buf,
sizeof(buf), num);
5557 void VmaStringBuilder::AddPointer(
const void* ptr)
5560 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over a VmaStringBuilder. Maintains a stack of open
// objects/arrays; inside an object, values alternate key/value (enforced via
// valueCount parity in BeginValue). Class header line elided by extraction.
5564 #endif // #if VMA_STATS_STRING_ENABLED 5569 #if VMA_STATS_STRING_ENABLED 5573 VMA_CLASS_NO_COPY(VmaJsonWriter)
5575 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// Structural begin/end; singleLine suppresses indentation inside.
5578 void BeginObject(
bool singleLine =
false);
5581 void BeginArray(
bool singleLine =
false);
// String emission: WriteString is one-shot; Begin/Continue/EndString allow
// building a quoted string from pieces.
5584 void WriteString(
const char* pStr);
5585 void BeginString(
const char* pStr = VMA_NULL);
5586 void ContinueString(
const char* pStr);
5587 void ContinueString(uint32_t n);
5588 void ContinueString(uint64_t n);
5589 void ContinueString_Pointer(
const void* ptr);
5590 void EndString(
const char* pStr = VMA_NULL);
// Scalar value emission.
5592 void WriteNumber(uint32_t n);
5593 void WriteNumber(uint64_t n);
5594 void WriteBool(
bool b);
5598 static const char*
const INDENT;
5600 enum COLLECTION_TYPE
5602 COLLECTION_TYPE_OBJECT,
5603 COLLECTION_TYPE_ARRAY,
// Per-nesting-level state pushed on m_Stack.
5607 COLLECTION_TYPE type;
5608 uint32_t valueCount;
5609 bool singleLineMode;
5612 VmaStringBuilder& m_SB;
5613 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
5614 bool m_InsideString;
// Emits separators/indent before a value; isString flags key positions.
5616 void BeginValue(
bool isString);
5617 void WriteIndent(
bool oneLess =
false);
// Two-space indent per nesting level.
5620 const char*
const VmaJsonWriter::INDENT =
"  ";
// VmaJsonWriter method definitions. NOTE(review): extraction garbled — the
// actual character-emitting statements (m_SB.Add of braces, quotes, digits,
// commas) are largely elided; the surviving lines are mostly assertions and
// stack bookkeeping, kept byte-identical below.
5622 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
5624 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
5625 m_InsideString(false)
// Destructor: a well-formed document must have closed every string and
// every object/array.
5629 VmaJsonWriter::~VmaJsonWriter()
5631 VMA_ASSERT(!m_InsideString);
5632 VMA_ASSERT(m_Stack.empty());
// Open an object: push a fresh StackItem tracking value parity and the
// single-line flag.
5635 void VmaJsonWriter::BeginObject(
bool singleLine)
5637 VMA_ASSERT(!m_InsideString);
5643 item.type = COLLECTION_TYPE_OBJECT;
5644 item.valueCount = 0;
5645 item.singleLineMode = singleLine;
5646 m_Stack.push_back(item);
// Close the innermost collection; asserts it really is an object.
5649 void VmaJsonWriter::EndObject()
5651 VMA_ASSERT(!m_InsideString);
5656 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Array begin/end, symmetric with the object variants.
5660 void VmaJsonWriter::BeginArray(
bool singleLine)
5662 VMA_ASSERT(!m_InsideString);
5668 item.type = COLLECTION_TYPE_ARRAY;
5669 item.valueCount = 0;
5670 item.singleLineMode = singleLine;
5671 m_Stack.push_back(item);
5674 void VmaJsonWriter::EndArray()
5676 VMA_ASSERT(!m_InsideString);
5681 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// One-shot quoted string (Begin + Continue + End; body elided).
5685 void VmaJsonWriter::WriteString(
const char* pStr)
5691 void VmaJsonWriter::BeginString(
const char* pStr)
5693 VMA_ASSERT(!m_InsideString);
5697 m_InsideString =
true;
5698 if(pStr != VMA_NULL && pStr[0] !=
'\0')
5700 ContinueString(pStr);
// Append characters to an open string, escaping as needed (the per-char
// escape switch is elided by extraction; only the unsupported-character
// assertion survives).
5704 void VmaJsonWriter::ContinueString(
const char* pStr)
5706 VMA_ASSERT(m_InsideString);
5708 const size_t strLen = strlen(pStr);
5709 for(
size_t i = 0; i < strLen; ++i)
5742 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric/pointer continuations — only legal inside an open string.
5748 void VmaJsonWriter::ContinueString(uint32_t n)
5750 VMA_ASSERT(m_InsideString);
5754 void VmaJsonWriter::ContinueString(uint64_t n)
5756 VMA_ASSERT(m_InsideString);
5760 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
5762 VMA_ASSERT(m_InsideString);
5763 m_SB.AddPointer(ptr);
// Close an open string, optionally appending a final fragment first.
5766 void VmaJsonWriter::EndString(
const char* pStr)
5768 VMA_ASSERT(m_InsideString);
5769 if(pStr != VMA_NULL && pStr[0] !=
'\0')
5771 ContinueString(pStr);
5774 m_InsideString =
false;
// Bare numeric / boolean / null values (emission bodies elided).
5777 void VmaJsonWriter::WriteNumber(uint32_t n)
5779 VMA_ASSERT(!m_InsideString);
5784 void VmaJsonWriter::WriteNumber(uint64_t n)
5786 VMA_ASSERT(!m_InsideString);
5791 void VmaJsonWriter::WriteBool(
bool b)
5793 VMA_ASSERT(!m_InsideString);
5795 m_SB.Add(b ?
"true" :
"false");
5798 void VmaJsonWriter::WriteNull()
5800 VMA_ASSERT(!m_InsideString);
// Before each value: inside an object, even valueCount means a key is
// expected (must be a string); odd means a value follows its key. A comma
// is needed after any prior value (emission elided).
5805 void VmaJsonWriter::BeginValue(
bool isString)
5807 if(!m_Stack.empty())
5809 StackItem& currItem = m_Stack.back();
5810 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5811 currItem.valueCount % 2 == 0)
5813 VMA_ASSERT(isString);
5816 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5817 currItem.valueCount % 2 != 0)
5821 else if(currItem.valueCount > 0)
5830 ++currItem.valueCount;
// Newline + INDENT per stack level, skipped in single-line mode; oneLess
// is used when closing a collection.
5834 void VmaJsonWriter::WriteIndent(
bool oneLess)
5836 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
5840 size_t count = m_Stack.size();
5841 if(count > 0 && oneLess)
5845 for(
size_t i = 0; i < count; ++i)
5852 #endif // #if VMA_STATS_STRING_ENABLED 5856 void VmaAllocation_T::SetUserData(
// Attaches user data to this allocation. If the allocation was created with
// the "user data is a string" mode (IsUserDataString()), the incoming pointer
// is treated as a NUL-terminated C string and a private copy is made with the
// allocator's callbacks; otherwise the raw pointer is stored as-is.
VmaAllocator hAllocator,
void* pUserData)
5858 if(IsUserDataString())
// Caller must not pass the pointer we already own (it is about to be freed).
5860 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
// Release the previously owned string copy, if any.
5862 FreeUserDataString(hAllocator);
5864 if(pUserData != VMA_NULL)
5866 const char*
const newStrSrc = (
char*)pUserData;
5867 const size_t newStrLen = strlen(newStrSrc);
// +1 to copy the terminating NUL as well.
5868 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
5869 memcpy(newStrDst, newStrSrc, newStrLen + 1);
5870 m_pUserData = newStrDst;
// Non-string mode: store the opaque pointer without taking ownership.
5875 m_pUserData = pUserData;
// Moves this block-type allocation to a different device-memory block/offset
// (used by defragmentation). If the allocation is persistently mapped, the
// mapping reference count is transferred: the old block is unmapped and the
// new block mapped with the same refcount so the mapping state is preserved.
5879 void VmaAllocation_T::ChangeBlockAllocation(
5881 VmaDeviceMemoryBlock* block,
5882 VkDeviceSize offset)
5884 VMA_ASSERT(block != VMA_NULL);
5885 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
// Only remap when actually changing blocks.
5888 if(block != m_BlockAllocation.m_Block)
// Strip the persistent-map flag bit to get the plain map refcount.
5890 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
5891 if(IsPersistentMap())
5893 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
5894 block->Map(hAllocator, mapRefCount, VMA_NULL);
5897 m_BlockAllocation.m_Block = block;
5898 m_BlockAllocation.m_Offset = offset;
// --- VmaAllocation_T accessors ---
// Each accessor dispatches on m_Type: ALLOCATION_TYPE_BLOCK (sub-allocated
// from a shared VmaDeviceMemoryBlock) vs ALLOCATION_TYPE_DEDICATED (owns its
// own VkDeviceMemory). Switch scaffolding is partially missing from this view.

// Byte offset of the allocation inside its VkDeviceMemory
// (dedicated allocations start at offset 0 — handled in omitted lines).
5901 VkDeviceSize VmaAllocation_T::GetOffset()
const 5905 case ALLOCATION_TYPE_BLOCK:
5906 return m_BlockAllocation.m_Offset;
5907 case ALLOCATION_TYPE_DEDICATED:
// Underlying VkDeviceMemory handle.
5915 VkDeviceMemory VmaAllocation_T::GetMemory()
const 5919 case ALLOCATION_TYPE_BLOCK:
5920 return m_BlockAllocation.m_Block->GetDeviceMemory();
5921 case ALLOCATION_TYPE_DEDICATED:
5922 return m_DedicatedAllocation.m_hMemory;
5925 return VK_NULL_HANDLE;
// Vulkan memory type index this allocation came from.
5929 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 5933 case ALLOCATION_TYPE_BLOCK:
5934 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
5935 case ALLOCATION_TYPE_DEDICATED:
5936 return m_DedicatedAllocation.m_MemoryTypeIndex;
// Host-visible mapped pointer for this allocation, or null when unmapped.
// Block case: block base mapping plus this allocation's offset.
5943 void* VmaAllocation_T::GetMappedData()
const 5947 case ALLOCATION_TYPE_BLOCK:
5950 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
5951 VMA_ASSERT(pBlockData != VMA_NULL);
5952 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
5959 case ALLOCATION_TYPE_DEDICATED:
// Mapped pointer and map count must agree (both set or both clear).
5960 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
5961 return m_DedicatedAllocation.m_pMappedData;
// Whether this allocation participates in the "lost allocation" mechanism.
// Dedicated allocations never can (handled in omitted lines).
5968 bool VmaAllocation_T::CanBecomeLost()
const 5972 case ALLOCATION_TYPE_BLOCK:
5973 return m_BlockAllocation.m_CanBecomeLost;
5974 case ALLOCATION_TYPE_DEDICATED:
// Pool this block allocation belongs to; only valid for block allocations.
5982 VmaPool VmaAllocation_T::GetPool()
const 5984 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5985 return m_BlockAllocation.m_hPool;
// Attempts to atomically mark this allocation lost. Fails when the allocation
// was already lost, or was used too recently (within frameInUseCount frames).
// Uses compare-exchange on the last-use frame index to resolve races.
5988 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5990 VMA_ASSERT(CanBecomeLost());
5996 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
5999 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still considered in use by the GPU within the in-use frame window.
6004 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6010 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
6020 #if VMA_STATS_STRING_ENABLED 6023 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Serializes this allocation's parameters as key/value pairs into an
// already-open JSON object: Type, Size, optional UserData, creation and
// last-use frame indices, and buffer/image usage flags when present.
6032 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6034 json.WriteString(
"Type");
// Human-readable suballocation type name from the table above.
6035 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6037 json.WriteString(
"Size");
6038 json.WriteNumber(m_Size);
6040 if(m_pUserData != VMA_NULL)
6042 json.WriteString(
"UserData");
// String-mode user data is written as a JSON string; otherwise the raw
// pointer value is emitted (formatting lines omitted in this view).
6043 if(IsUserDataString())
6045 json.WriteString((
const char*)m_pUserData);
6050 json.ContinueString_Pointer(m_pUserData);
6055 json.WriteString(
"CreationFrameIndex");
6056 json.WriteNumber(m_CreationFrameIndex);
6058 json.WriteString(
"LastUseFrameIndex");
6059 json.WriteNumber(GetLastUseFrameIndex());
// Usage flags are only recorded for buffer/image allocations; 0 means none.
6061 if(m_BufferImageUsage != 0)
6063 json.WriteString(
"Usage");
6064 json.WriteNumber(m_BufferImageUsage);
// Frees the owned copy of the user-data string (allocated in SetUserData)
// and resets m_pUserData. Only legal in string-mode allocations.
6070 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6072 VMA_ASSERT(IsUserDataString());
6073 if(m_pUserData != VMA_NULL)
6075 char*
const oldStr = (
char*)m_pUserData;
6076 const size_t oldStrLen = strlen(oldStr);
// +1: the array was allocated including the terminating NUL.
6077 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6078 m_pUserData = VMA_NULL;
// Increments the map reference count for a block allocation.
// The low 7 bits of m_MapCount hold the refcount (max 0x7F); the top bit is
// the persistent-map flag and is masked out before the range check.
6082 void VmaAllocation_T::BlockAllocMap()
6084 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6086 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6092 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map reference count; asserts on unbalanced unmap.
6096 void VmaAllocation_T::BlockAllocUnmap()
6098 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6100 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6106 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation's VkDeviceMemory and returns the pointer in
// *ppData. If already mapped, reuses the cached pointer and bumps the
// refcount; on the first map it calls vkMapMemory through the allocator's
// function table and caches the result.
6110 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6112 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
// Already-mapped path: refcount must not exceed 0x7F.
6116 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6118 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6119 *ppData = m_DedicatedAllocation.m_pMappedData;
6125 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6126 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: call the real vkMapMemory (args partially omitted in this view).
6131 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6132 hAllocator->m_hDevice,
6133 m_DedicatedAllocation.m_hMemory,
6138 if(result == VK_SUCCESS)
// Cache the mapped pointer for subsequent Map calls.
6140 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation: decrements the refcount and, when it
// reaches zero, clears the cached pointer and calls vkUnmapMemory.
6147 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6149 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6151 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6156 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6157 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6158 hAllocator->m_hDevice,
6159 m_DedicatedAllocation.m_hMemory);
6164 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
6168 #if VMA_STATS_STRING_ENABLED 6170 static void VmaPrintStatInfo(VmaJsonWriter& json,
// Serializes one VmaStatInfo record as a JSON object: scalar counters first,
// then two nested single-line objects with Min/Avg/Max for allocation sizes
// and unused-range sizes. The WriteNumber calls pairing each key are in
// lines omitted from this extraction.
const VmaStatInfo& stat)
6174 json.WriteString(
"Blocks");
6177 json.WriteString(
"Allocations");
6180 json.WriteString(
"UnusedRanges");
6183 json.WriteString(
"UsedBytes");
6186 json.WriteString(
"UnusedBytes");
6191 json.WriteString(
"AllocationSize");
// Min/Avg/Max triple for allocation sizes, as a compact one-line object.
6192 json.BeginObject(
true);
6193 json.WriteString(
"Min");
6195 json.WriteString(
"Avg");
6197 json.WriteString(
"Max");
6204 json.WriteString(
"UnusedRangeSize");
// Min/Avg/Max triple for free-range sizes.
6205 json.BeginObject(
true);
6206 json.WriteString(
"Min");
6208 json.WriteString(
"Avg");
6210 json.WriteString(
"Max");
6218 #endif // #if VMA_STATS_STRING_ENABLED 6220 struct VmaSuballocationItemSizeLess
// Strict-weak-ordering comparator over suballocation-list iterators, ordered
// by suballocation size. The second overload compares against a bare size so
// the same functor works for binary searches keyed by VkDeviceSize.
6223 const VmaSuballocationList::iterator lhs,
6224 const VmaSuballocationList::iterator rhs)
const 6226 return lhs->size < rhs->size;
6229 const VmaSuballocationList::iterator lhs,
6230 VkDeviceSize rhsSize)
const 6232 return lhs->size < rhsSize;
6240 #if VMA_STATS_STRING_ENABLED 6242 void VmaBlockMetadata::PrintDetailedMap_Begin(
// Opens the per-block JSON map: writes the block-level summary fields and
// starts the "Suballocations" array that PrintDetailedMap_Allocation /
// PrintDetailedMap_UnusedRange entries will be appended to.
class VmaJsonWriter& json,
6243 VkDeviceSize unusedBytes,
6244 size_t allocationCount,
6245 size_t unusedRangeCount)
const 6249 json.WriteString(
"TotalBytes");
6250 json.WriteNumber(GetSize());
6252 json.WriteString(
"UnusedBytes");
6253 json.WriteNumber(unusedBytes);
6255 json.WriteString(
"Allocations");
// Cast to uint64_t to select the 64-bit WriteNumber overload portably.
6256 json.WriteNumber((uint64_t)allocationCount);
6258 json.WriteString(
"UnusedRanges");
6259 json.WriteNumber((uint64_t)unusedRangeCount);
6261 json.WriteString(
"Suballocations");
// Writes one occupied suballocation: a single-line object with its offset
// plus the allocation's own parameters (type, size, user data, ...).
6265 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6266 VkDeviceSize offset,
6269 json.BeginObject(
true);
6271 json.WriteString(
"Offset");
6272 json.WriteNumber(offset);
// Delegate the allocation-specific fields to the allocation itself.
6274 hAllocation->PrintParameters(json);
// Writes one free range: a single-line object with offset, the FREE type
// name, and the range size.
6279 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6280 VkDeviceSize offset,
6281 VkDeviceSize size)
const 6283 json.BeginObject(
true);
6285 json.WriteString(
"Offset");
6286 json.WriteNumber(offset);
6288 json.WriteString(
"Type");
6289 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6291 json.WriteString(
"Size");
6292 json.WriteNumber(size);
// Closes the "Suballocations" array and the enclosing block object
// (closing calls are in lines omitted from this extraction).
6297 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 6303 #endif // #if VMA_STATS_STRING_ENABLED 6308 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
// Generic (free-list) block metadata. Both containers share the allocator's
// CPU allocation callbacks via VmaStlAllocator.
VmaAllocator hAllocator) :
6311 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6312 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6316 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initializes metadata for a block of `size` bytes: the whole block starts
// as one FREE suballocation, registered in the by-size free list.
6320 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6322 VmaBlockMetadata::Init(size);
6324 m_SumFreeSize = size;
6326 VmaSuballocation suballoc = {};
6327 suballoc.offset = 0;
6328 suballoc.size = size;
6329 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6330 suballoc.hAllocation = VK_NULL_HANDLE;
6332 m_Suballocations.push_back(suballoc);
6333 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
// Register the single free range in the size-sorted index
// (the iterator is stepped back to the pushed element in an omitted line).
6335 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check of the metadata (debug aid): walks the
// suballocation list recomputing offsets, free counts, and free byte totals,
// then cross-checks them against the cached members and validates the
// size-sorted free list. Returns false on the first inconsistency
// (early-return lines are omitted from this extraction).
6338 bool VmaBlockMetadata_Generic::Validate()
const 6340 if(m_Suballocations.empty())
6346 VkDeviceSize calculatedOffset = 0;
6348 uint32_t calculatedFreeCount = 0;
6350 VkDeviceSize calculatedSumFreeSize = 0;
// Number of free ranges large enough to appear in m_FreeSuballocationsBySize.
6353 size_t freeSuballocationsToRegister = 0;
6355 bool prevFree =
false;
6357 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6358 suballocItem != m_Suballocations.cend();
6361 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must be contiguous: each starts where the previous ended.
6364 if(subAlloc.offset != calculatedOffset)
6369 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free ranges should have been merged.
6371 if(prevFree && currFree)
// Free <=> no allocation handle; used <=> valid handle.
6376 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
6383 calculatedSumFreeSize += subAlloc.size;
6384 ++calculatedFreeCount;
6385 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6387 ++freeSuballocationsToRegister;
// Free ranges must be able to hold at least the debug margin.
6391 if(subAlloc.size < VMA_DEBUG_MARGIN)
// Used suballocations must agree with their allocation object.
6398 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
6402 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
// With a debug margin, every used range must be preceded by a free one.
6408 if(VMA_DEBUG_MARGIN > 0 && !prevFree)
6414 calculatedOffset += subAlloc.size;
6415 prevFree = currFree;
6420 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
6425 VkDeviceSize lastSize = 0;
6426 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6428 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
// Every registered entry must be free and the vector sorted by size.
6431 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
6436 if(suballocItem->size < lastSize)
6441 lastSize = suballocItem->size;
// Final cross-check of recomputed totals against cached members.
6445 if(!ValidateFreeSuballocationList() ||
6446 (calculatedOffset != GetSize()) ||
6447 (calculatedSumFreeSize != m_SumFreeSize) ||
6448 (calculatedFreeCount != m_FreeCount))
// Largest free range in this block. m_FreeSuballocationsBySize is sorted
// ascending by size, so the back entry is the maximum; returns 0 when no
// free range is registered (omitted fallback line).
6456 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6458 if(!m_FreeSuballocationsBySize.empty())
6460 return m_FreeSuballocationsBySize.back()->size;
// A block is empty when it holds exactly one suballocation and it is free.
6468 bool VmaBlockMetadata_Generic::IsEmpty()
const 6470 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Accumulates per-block statistics (counts and min/max sizes for used and
// free ranges) into outInfo; the per-range accumulation statements are in
// lines omitted from this extraction.
6473 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6477 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6489 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6490 suballocItem != m_Suballocations.cend();
6493 const VmaSuballocation& suballoc = *suballocItem;
6494 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Adds this block's totals into a pool-wide VmaPoolStats accumulator.
6507 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6509 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6511 inoutStats.
size += GetSize();
6518 #if VMA_STATS_STRING_ENABLED 6520 void VmaBlockMetadata_Generic::PrintDetailedMap(
// Dumps this block's layout as JSON: summary header via
// PrintDetailedMap_Begin, then one entry per suballocation (free ranges and
// allocations alike), closed by PrintDetailedMap_End.
class VmaJsonWriter& json)
const 6522 PrintDetailedMap_Begin(json,
// Allocation count = total suballocations minus the free ones.
6524 m_Suballocations.size() - (size_t)m_FreeCount,
6528 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6529 suballocItem != m_Suballocations.cend();
6530 ++suballocItem, ++i)
6532 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6534 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6538 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6542 PrintDetailedMap_End(json);
6545 #endif // #if VMA_STATS_STRING_ENABLED 6557 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
// Tries to find space in this block for a new allocation of allocSize bytes
// with the given alignment and buffer/image granularity. Fills
// *pAllocationRequest and returns true on success (return statements are in
// omitted lines). Strategy:
//   1. Best-fit search over the size-sorted free list (binary search for the
//      first range big enough, then linear probing).
//   2. If canMakeOtherLost, additionally consider evicting lost-able
//      allocations, choosing the candidate with the lowest CalcCost().
6558 uint32_t currentFrameIndex,
6559 uint32_t frameInUseCount,
6560 VkDeviceSize bufferImageGranularity,
6561 VkDeviceSize allocSize,
6562 VkDeviceSize allocAlignment,
6564 VmaSuballocationType allocType,
6565 bool canMakeOtherLost,
6566 VmaAllocationRequest* pAllocationRequest)
6568 VMA_ASSERT(allocSize > 0);
6569 VMA_ASSERT(!upperAddress);
6570 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6571 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6572 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, total free space (with margins) must suffice.
6575 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6581 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6582 if(freeSuballocCount > 0)
// Best-fit path: locate the smallest free range >= allocSize + margins.
6587 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6588 m_FreeSuballocationsBySize.data(),
6589 m_FreeSuballocationsBySize.data() + freeSuballocCount,
6590 allocSize + 2 * VMA_DEBUG_MARGIN,
6591 VmaSuballocationItemSizeLess());
6592 size_t index = it - m_FreeSuballocationsBySize.data();
// Probe forward: a range can still fail CheckAllocation due to alignment
// or granularity constraints, so try successively larger ones.
6593 for(; index < freeSuballocCount; ++index)
6598 bufferImageGranularity,
6602 m_FreeSuballocationsBySize[index],
6604 &pAllocationRequest->offset,
6605 &pAllocationRequest->itemsToMakeLostCount,
6606 &pAllocationRequest->sumFreeSize,
6607 &pAllocationRequest->sumItemSize))
6609 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit fallback: scan free ranges from largest to smallest.
6617 for(
size_t index = freeSuballocCount; index--; )
6622 bufferImageGranularity,
6626 m_FreeSuballocationsBySize[index],
6628 &pAllocationRequest->offset,
6629 &pAllocationRequest->itemsToMakeLostCount,
6630 &pAllocationRequest->sumFreeSize,
6631 &pAllocationRequest->sumItemSize))
6633 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: consider starting the allocation at any suballocation that
// is free or could be made lost; keep the cheapest viable request.
6640 if(canMakeOtherLost)
// Seed with worst possible cost so any real candidate wins.
6644 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
6645 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
6647 VmaAllocationRequest tmpAllocRequest = {};
6648 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
6649 suballocIt != m_Suballocations.end();
6652 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
6653 suballocIt->hAllocation->CanBecomeLost())
6658 bufferImageGranularity,
6664 &tmpAllocRequest.offset,
6665 &tmpAllocRequest.itemsToMakeLostCount,
6666 &tmpAllocRequest.sumFreeSize,
6667 &tmpAllocRequest.sumItemSize))
6669 tmpAllocRequest.item = suballocIt;
6671 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
6673 *pAllocationRequest = tmpAllocRequest;
// Success if any candidate replaced the VK_WHOLE_SIZE sentinel.
6679 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Executes the eviction plan recorded in pAllocationRequest: walks forward
// from the request's starting suballocation, marking lost-able allocations
// lost and merging the freed ranges, until itemsToMakeLostCount reaches 0.
// Returns true once the request's item is a valid free range (return lines
// are omitted from this extraction).
6688 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
6689 uint32_t currentFrameIndex,
6690 uint32_t frameInUseCount,
6691 VmaAllocationRequest* pAllocationRequest)
6693 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip ranges that are already free.
6695 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
6697 ++pAllocationRequest->item;
6699 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6700 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
6701 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
6702 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the surviving iterator.
6704 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
6705 --pAllocationRequest->itemsToMakeLostCount;
6713 VMA_HEAVY_ASSERT(Validate());
6714 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6715 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Marks every lost-able, sufficiently old allocation in this block as lost,
// freeing (and merging) its range. Returns how many allocations were lost.
6720 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6722 uint32_t lostAllocationCount = 0;
6723 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6724 it != m_Suballocations.end();
6727 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
6728 it->hAllocation->CanBecomeLost() &&
6729 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Continue from the merged free range returned by FreeSuballocation.
6731 it = FreeSuballocation(it);
6732 ++lostAllocationCount;
6735 return lostAllocationCount;
// Scans all used suballocations and verifies the magic values written in the
// debug margins immediately before and after each allocation are intact.
// Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin.
6738 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
6740 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6741 it != m_Suballocations.end();
6744 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Margin before the allocation.
6746 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
6748 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
6749 return VK_ERROR_VALIDATION_FAILED_EXT;
// Margin after the allocation.
6751 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
6753 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
6754 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: carves allocSize bytes
// out of the free suballocation request.item at request.offset, converting
// the leftover space before/after into new FREE suballocations, and updates
// the free counters/indices.
6762 void VmaBlockMetadata_Generic::Alloc(
6763 const VmaAllocationRequest& request,
6764 VmaSuballocationType type,
6765 VkDeviceSize allocSize,
6769 VMA_ASSERT(!upperAddress);
6770 VMA_ASSERT(request.item != m_Suballocations.end());
6771 VmaSuballocation& suballoc = *request.item;
6773 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6775 VMA_ASSERT(request.offset >= suballoc.offset);
// Space left between the free range's start and the aligned offset.
6776 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
6777 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
// Space left after the end of the new allocation.
6778 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The range is leaving the free list before being repurposed.
6782 UnregisterFreeSuballocation(request.item);
6784 suballoc.offset = request.offset;
6785 suballoc.size = allocSize;
6786 suballoc.type = type;
6787 suballoc.hAllocation = hAllocation;
// Trailing free fragment, inserted after the allocation.
6792 VmaSuballocation paddingSuballoc = {};
6793 paddingSuballoc.offset = request.offset + allocSize;
6794 paddingSuballoc.size = paddingEnd;
6795 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6796 VmaSuballocationList::iterator next = request.item;
6798 const VmaSuballocationList::iterator paddingEndItem =
6799 m_Suballocations.insert(next, paddingSuballoc);
6800 RegisterFreeSuballocation(paddingEndItem);
// Leading free fragment, inserted before the allocation.
6806 VmaSuballocation paddingSuballoc = {};
6807 paddingSuballoc.offset = request.offset - paddingBegin;
6808 paddingSuballoc.size = paddingBegin;
6809 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6810 const VmaSuballocationList::iterator paddingBeginItem =
6811 m_Suballocations.insert(request.item, paddingSuballoc);
6812 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; padding fragments re-add to the count
// (increments are in omitted lines).
6816 m_FreeCount = m_FreeCount - 1;
6817 if(paddingBegin > 0)
6825 m_SumFreeSize -= allocSize;
// Frees the range owned by `allocation`: linear search of the suballocation
// list for the matching handle, then FreeSuballocation (which merges
// neighbors). Asserts if the allocation is not found in this block.
6828 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
6830 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6831 suballocItem != m_Suballocations.end();
6834 VmaSuballocation& suballoc = *suballocItem;
6835 if(suballoc.hAllocation == allocation)
6837 FreeSuballocation(suballocItem);
6838 VMA_HEAVY_ASSERT(Validate());
6842 VMA_ASSERT(0 &&
"Not found!");
// Same as Free, but identifies the range by its byte offset instead of the
// allocation handle.
6845 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
6847 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6848 suballocItem != m_Suballocations.end();
6851 VmaSuballocation& suballoc = *suballocItem;
6852 if(suballoc.offset == offset)
6854 FreeSuballocation(suballocItem);
6858 VMA_ASSERT(0 &&
"Not found!");
// Debug check of m_FreeSuballocationsBySize: every entry must be a FREE
// range at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the
// vector must be sorted ascending by size. Returns false on first violation
// (return lines are omitted from this extraction).
6861 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 6863 VkDeviceSize lastSize = 0;
6864 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
6866 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
6868 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
6873 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6878 if(it->size < lastSize)
6884 lastSize = it->size;
// Core placement test: decides whether an allocation of allocSize/allocAlignment
// bytes of allocType can start at suballocItem, honoring VMA_DEBUG_MARGIN and
// bufferImageGranularity. Outputs the final aligned *pOffset and, when
// canMakeOtherLost is set, how many existing allocations would need to be
// made lost (*itemsToMakeLostCount) plus the free/used byte sums used for
// cost comparison. Returns true when placement is possible (returns are in
// omitted lines). Two largely parallel branches: with and without eviction.
6889 bool VmaBlockMetadata_Generic::CheckAllocation(
6890 uint32_t currentFrameIndex,
6891 uint32_t frameInUseCount,
6892 VkDeviceSize bufferImageGranularity,
6893 VkDeviceSize allocSize,
6894 VkDeviceSize allocAlignment,
6895 VmaSuballocationType allocType,
6896 VmaSuballocationList::const_iterator suballocItem,
6897 bool canMakeOtherLost,
6898 VkDeviceSize* pOffset,
6899 size_t* itemsToMakeLostCount,
6900 VkDeviceSize* pSumFreeSize,
6901 VkDeviceSize* pSumItemSize)
const 6903 VMA_ASSERT(allocSize > 0);
6904 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6905 VMA_ASSERT(suballocItem != m_Suballocations.cend());
6906 VMA_ASSERT(pOffset != VMA_NULL);
6908 *itemsToMakeLostCount = 0;
// ---------- Branch 1: eviction allowed ----------
6912 if(canMakeOtherLost)
6914 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6916 *pSumFreeSize = suballocItem->size;
// Starting on a used range: it must be lost-able and old enough.
6920 if(suballocItem->hAllocation->CanBecomeLost() &&
6921 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6923 ++*itemsToMakeLostCount;
6924 *pSumItemSize = suballocItem->size;
// Quick reject: not enough room from this offset to the end of the block.
6933 if(GetSize() - suballocItem->offset < allocSize)
6939 *pOffset = suballocItem->offset;
// Reserve the leading debug margin, then align.
6942 if(VMA_DEBUG_MARGIN > 0)
6944 *pOffset += VMA_DEBUG_MARGIN;
6948 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Buffer/image granularity: if a conflicting-type neighbor shares the same
// granularity page before us, bump the offset to the next page.
6952 if(bufferImageGranularity > 1)
6954 bool bufferImageGranularityConflict =
false;
6955 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6956 while(prevSuballocItem != m_Suballocations.cbegin())
6959 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6960 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6962 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6964 bufferImageGranularityConflict =
true;
6972 if(bufferImageGranularityConflict)
6974 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
// Alignment pushed us completely past this suballocation: not viable here.
6980 if(*pOffset >= suballocItem->offset + suballocItem->size)
6986 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
6989 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
6991 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
6993 if(suballocItem->offset + totalSize > GetSize())
// Walk forward over additional suballocations the request would consume,
// accounting each as free space or as an item to make lost.
7000 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7001 if(totalSize > suballocItem->size)
7003 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7004 while(remainingSize > 0)
7007 if(lastSuballocItem == m_Suballocations.cend())
7011 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7013 *pSumFreeSize += lastSuballocItem->size;
7017 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7018 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7019 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7021 ++*itemsToMakeLostCount;
7022 *pSumItemSize += lastSuballocItem->size;
7029 remainingSize = (lastSuballocItem->size < remainingSize) ?
7030 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following allocations on the same page: any
// conflicting neighbor must itself be lost-able and old enough.
7036 if(bufferImageGranularity > 1)
7038 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7040 while(nextSuballocItem != m_Suballocations.cend())
7042 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7043 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7045 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7047 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7048 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7049 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7051 ++*itemsToMakeLostCount;
// ---------- Branch 2: no eviction — start range must already be free ----------
7070 const VmaSuballocation& suballoc = *suballocItem;
7071 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7073 *pSumFreeSize = suballoc.size;
// The free range alone must be big enough.
7076 if(suballoc.size < allocSize)
7082 *pOffset = suballoc.offset;
7085 if(VMA_DEBUG_MARGIN > 0)
7087 *pOffset += VMA_DEBUG_MARGIN;
7091 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backward granularity-conflict scan as in branch 1.
7095 if(bufferImageGranularity > 1)
7097 bool bufferImageGranularityConflict =
false;
7098 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7099 while(prevSuballocItem != m_Suballocations.cbegin())
7102 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7103 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7105 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7107 bufferImageGranularityConflict =
true;
7115 if(bufferImageGranularityConflict)
7117 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7122 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7125 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Padding + allocation + trailing margin must all fit in this free range.
7128 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity-conflict scan; any conflict here is fatal in this
// branch since nothing can be made lost.
7135 if(bufferImageGranularity > 1)
7137 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7139 while(nextSuballocItem != m_Suballocations.cend())
7141 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7142 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7144 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the free suballocation `item` with its (also free) successor:
// absorbs the successor's size and erases it from the list. Caller must have
// already unregistered the successor from the by-size free list.
7163 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7165 VMA_ASSERT(item != m_Suballocations.end());
7166 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7168 VmaSuballocationList::iterator nextItem = item;
// nextItem is advanced to the successor in an omitted line.
7170 VMA_ASSERT(nextItem != m_Suballocations.end());
7171 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7173 item->size += nextItem->size;
7175 m_Suballocations.erase(nextItem);
// Converts the given suballocation to FREE, updates the free byte/count
// totals, merges it with free neighbors on either side, and (re)registers
// the surviving range in the by-size free list. Returns an iterator to the
// merged free range.
7178 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7181 VmaSuballocation& suballoc = *suballocItem;
7182 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7183 suballoc.hAllocation = VK_NULL_HANDLE;
7187 m_SumFreeSize += suballoc.size;
// Detect free neighbors (iterator stepping lines are omitted in this view).
7190 bool mergeWithNext =
false;
7191 bool mergeWithPrev =
false;
7193 VmaSuballocationList::iterator nextItem = suballocItem;
7195 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7197 mergeWithNext =
true;
7200 VmaSuballocationList::iterator prevItem = suballocItem;
7201 if(suballocItem != m_Suballocations.begin())
7204 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7206 mergeWithPrev =
true;
// Each neighbor leaves the by-size index before merging, since its size
// (and identity) changes.
7212 UnregisterFreeSuballocation(nextItem);
7213 MergeFreeWithNext(suballocItem);
7218 UnregisterFreeSuballocation(prevItem);
7219 MergeFreeWithNext(prevItem);
7220 RegisterFreeSuballocation(prevItem);
// No merge with previous: register the range itself.
7225 RegisterFreeSuballocation(suballocItem);
7226 return suballocItem;
// Inserts a free range into m_FreeSuballocationsBySize, keeping the vector
// sorted by size. Ranges below VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER
// are deliberately not indexed (they are too small to be worth searching).
7230 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7232 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7233 VMA_ASSERT(item->size > 0);
7237 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7239 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7241 if(m_FreeSuballocationsBySize.empty())
7243 m_FreeSuballocationsBySize.push_back(item);
// Binary-search insertion preserves the size ordering.
7247 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free range from m_FreeSuballocationsBySize. Binary-searches to
// the first entry of equal size, then scans the run of equal-size entries
// for the exact iterator. Ranges below the registration threshold were never
// indexed and are skipped. Asserts if the item should be present but isn't.
7255 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7257 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7258 VMA_ASSERT(item->size > 0);
7262 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7264 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7266 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7267 m_FreeSuballocationsBySize.data(),
7268 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7270 VmaSuballocationItemSizeLess());
7271 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7272 index < m_FreeSuballocationsBySize.size();
7275 if(m_FreeSuballocationsBySize[index] == item)
7277 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run — keep scanning; leaving it means
// the item is missing from the index.
7280 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7282 VMA_ASSERT(0 &&
"Not found.");
7291 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
// Linear (ring-buffer / double-stack) block metadata. Two suballocation
// vectors are swapped via m_1stVectorIndex; m_2ndVectorMode selects between
// empty / ring-buffer / double-stack use of the second vector. The null-item
// counters track freed-but-not-yet-compacted entries.
VmaAllocator hAllocator) :
7293 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7294 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7295 m_1stVectorIndex(0),
7296 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7297 m_1stNullItemsBeginCount(0),
7298 m_1stNullItemsMiddleCount(0),
7299 m_2ndNullItemsCount(0)
7303 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes for a block of `size` bytes; the whole block starts free.
7307 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7309 VmaBlockMetadata::Init(size);
7310 m_SumFreeSize = size;
// Consistency check for the linear metadata: validates the invariants tying
// the two suballocation vectors to m_2ndVectorMode and the null-item
// counters, then walks the suballocations in address order (2nd vector in
// ring-buffer mode, then 1st, then 2nd in double-stack mode) recomputing the
// used byte total. Returns false on the first inconsistency (early-return
// lines are omitted from this extraction).
7313 bool VmaBlockMetadata_Linear::Validate()
const 7315 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7316 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is non-empty exactly when a second-vector mode is active.
7318 if(suballocations2nd.empty() != (m_2ndVectorMode == SECOND_VECTOR_EMPTY))
// Ring-buffer mode requires a non-empty 1st vector.
7322 if(suballocations1st.empty() && !suballocations2nd.empty() &&
7323 m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7327 if(!suballocations1st.empty())
// First non-trimmed entry and the last entry of the 1st vector must be live.
7330 if(suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
7335 if(suballocations1st.back().hAllocation == VK_NULL_HANDLE)
7340 if(!suballocations2nd.empty())
7343 if(suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
// Null-item counters cannot exceed their vectors' sizes.
7349 if(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount > suballocations1st.size())
7353 if(m_2ndNullItemsCount > suballocations2nd.size())
7358 VkDeviceSize sumUsedSize = 0;
7359 const size_t suballoc1stCount = suballocations1st.size();
// Running minimum offset; each suballocation must start at or after it.
7360 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Pass 1: in ring-buffer mode the 2nd vector occupies the low addresses.
7362 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7364 const size_t suballoc2ndCount = suballocations2nd.size();
7365 size_t nullItem2ndCount = 0;
7366 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7368 const VmaSuballocation& suballoc = suballocations2nd[i];
7369 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7371 if(currFree != (suballoc.hAllocation == VK_NULL_HANDLE))
7375 if(suballoc.offset < offset)
// Live entries must agree with their allocation objects.
7382 if(suballoc.hAllocation->GetOffset() != suballoc.offset)
7386 if(suballoc.hAllocation->GetSize() != suballoc.size)
7390 sumUsedSize += suballoc.size;
7397 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7400 if(nullItem2ndCount != m_2ndNullItemsCount)
// Leading entries of the 1st vector up to m_1stNullItemsBeginCount must all
// be null (trimmed) items.
7406 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7408 const VmaSuballocation& suballoc = suballocations1st[i];
7409 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE ||
7410 suballoc.hAllocation != VK_NULL_HANDLE)
7416 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Pass 2: remaining entries of the 1st vector.
7418 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7420 const VmaSuballocation& suballoc = suballocations1st[i];
7421 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7423 if(currFree != (suballoc.hAllocation == VK_NULL_HANDLE))
7427 if(suballoc.offset < offset)
7431 if(i < m_1stNullItemsBeginCount && !currFree)
7438 if(suballoc.hAllocation->GetOffset() != suballoc.offset)
7442 if(suballoc.hAllocation->GetSize() != suballoc.size)
7446 sumUsedSize += suballoc.size;
7453 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7455 if(nullItem1stCount != m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount)
// Pass 3: in double-stack mode the 2nd vector grows down from the top,
// so iterate it in reverse to stay in ascending address order.
7460 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7462 const size_t suballoc2ndCount = suballocations2nd.size();
7463 size_t nullItem2ndCount = 0;
7464 for(
size_t i = suballoc2ndCount; i--; )
7466 const VmaSuballocation& suballoc = suballocations2nd[i];
7467 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7469 if(currFree != (suballoc.hAllocation == VK_NULL_HANDLE))
7473 if(suballoc.offset < offset)
7480 if(suballoc.hAllocation->GetOffset() != suballoc.offset)
7484 if(suballoc.hAllocation->GetSize() != suballoc.size)
7488 sumUsedSize += suballoc.size;
7495 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7498 if(nullItem2ndCount != m_2ndNullItemsCount)
// Totals must stay within the block and match the cached free-size sum.
7504 if(offset > GetSize())
7508 if(m_SumFreeSize != GetSize() - sumUsedSize)
// Number of live allocations: both vectors' sizes minus their null
// (freed-but-uncompacted) items.
7516 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7518 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7519 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Largest contiguous free range, computed per second-vector mode:
// - EMPTY:        gap before the 1st vector vs gap after it to block end;
// - RING_BUFFER:  gap between the end of the 2nd vector and the start of
//                 the 1st (the ring's free wedge);
// - DOUBLE_STACK: gap between the top of the lower stack (1st) and the
//                 bottom of the upper stack (2nd).
// Early-out and empty-block paths are in lines omitted from this extraction.
7522 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7524 const VkDeviceSize size = GetSize();
7536 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7538 switch(m_2ndVectorMode)
7540 case SECOND_VECTOR_EMPTY:
7546 const size_t suballocations1stCount = suballocations1st.size();
7547 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
7548 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
7549 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
// Max of: space before the first live suballocation, space after the last.
7551 firstSuballoc.offset,
7552 size - (lastSuballoc.offset + lastSuballoc.size));
7556 case SECOND_VECTOR_RING_BUFFER:
7561 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7562 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
7563 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
7564 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
7568 case SECOND_VECTOR_DOUBLE_STACK:
7573 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7574 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
7575 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
7576 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Accumulates per-block statistics (outInfo) by walking all allocations in
// address order: wrapped 2nd-vector items first (ring-buffer mode), then the
// 1st vector, then the top stack (double-stack mode), accounting each gap
// between consecutive allocations as an unused range.
// NOTE(review): extraction dropped many lines (embedded numbers jump); the
// statements that actually write into outInfo are missing from this view.
7586 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7588 const VkDeviceSize size = GetSize();
7589 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7590 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7591 const size_t suballoc1stCount = suballocations1st.size();
7592 const size_t suballoc2ndCount = suballocations2nd.size();
7603 VkDeviceSize lastOffset = 0;
// Pass 1: items of the 2nd vector that wrapped around to low addresses
// (only present in ring-buffer mode), up to the start of the 1st vector.
7605 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7607 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7608 size_t nextAlloc2ndIndex = 0;
7609 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) entries.
7612 while(nextAlloc2ndIndex < suballoc2ndCount &&
7613 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7615 ++nextAlloc2ndIndex;
7619 if(nextAlloc2ndIndex < suballoc2ndCount)
7621 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7624 if(lastOffset < suballoc.offset)
7627 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7641 lastOffset = suballoc.offset + suballoc.size;
7642 ++nextAlloc2ndIndex;
7648 if(lastOffset < freeSpace2ndTo1stEnd)
7650 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7658 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: the 1st vector, up to the bottom of the top stack (double-stack
// mode) or the end of the block otherwise.
7663 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7664 const VkDeviceSize freeSpace1stTo2ndEnd =
7665 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7666 while(lastOffset < freeSpace1stTo2ndEnd)
7669 while(nextAlloc1stIndex < suballoc1stCount &&
7670 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
7672 ++nextAlloc1stIndex;
7676 if(nextAlloc1stIndex < suballoc1stCount)
7678 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7681 if(lastOffset < suballoc.offset)
7684 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7698 lastOffset = suballoc.offset + suballoc.size;
7699 ++nextAlloc1stIndex;
7705 if(lastOffset < freeSpace1stTo2ndEnd)
7707 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7715 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: the top stack in double-stack mode, iterated back-to-front so
// items come out in increasing address order.
7719 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7721 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7722 while(lastOffset < size)
7725 while(nextAlloc2ndIndex != SIZE_MAX &&
7726 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7728 --nextAlloc2ndIndex;
7732 if(nextAlloc2ndIndex != SIZE_MAX)
7734 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7737 if(lastOffset < suballoc.offset)
7740 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7754 lastOffset = suballoc.offset + suballoc.size;
7755 --nextAlloc2ndIndex;
7761 if(lastOffset < size)
7763 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Adds this block's usage figures into inoutStats. Same three-pass,
// address-ordered walk as CalcAllocationStatInfo (wrapped 2nd vector,
// 1st vector, then top stack), tracking gaps as unused ranges.
// NOTE(review): extraction dropped lines; the statements updating
// inoutStats counters inside the loops are missing from this view.
7779 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 7781 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7782 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7783 const VkDeviceSize size = GetSize();
7784 const size_t suballoc1stCount = suballocations1st.size();
7785 const size_t suballoc2ndCount = suballocations2nd.size();
7787 inoutStats.
size += size;
7789 VkDeviceSize lastOffset = 0;
// Pass 1: wrapped 2nd-vector items (ring-buffer mode).
// NOTE(review): nextAlloc2ndIndex starts at m_1stNullItemsBeginCount here,
// while the analogous loops elsewhere in this file start at 0 — looks
// inconsistent; confirm against upstream VMA before relying on it.
7791 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7793 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7794 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
7795 while(lastOffset < freeSpace2ndTo1stEnd)
7798 while(nextAlloc2ndIndex < suballoc2ndCount &&
7799 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7801 ++nextAlloc2ndIndex;
7805 if(nextAlloc2ndIndex < suballoc2ndCount)
7807 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7810 if(lastOffset < suballoc.offset)
7813 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7824 lastOffset = suballoc.offset + suballoc.size;
7825 ++nextAlloc2ndIndex;
7830 if(lastOffset < freeSpace2ndTo1stEnd)
7833 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7840 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: 1st vector, up to the top stack's bottom or the block end.
7845 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7846 const VkDeviceSize freeSpace1stTo2ndEnd =
7847 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7848 while(lastOffset < freeSpace1stTo2ndEnd)
7851 while(nextAlloc1stIndex < suballoc1stCount &&
7852 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
7854 ++nextAlloc1stIndex;
7858 if(nextAlloc1stIndex < suballoc1stCount)
7860 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7863 if(lastOffset < suballoc.offset)
7866 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7877 lastOffset = suballoc.offset + suballoc.size;
7878 ++nextAlloc1stIndex;
7883 if(lastOffset < freeSpace1stTo2ndEnd)
7886 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7893 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: top stack (double-stack mode), iterated back-to-front.
7897 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7899 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7900 while(lastOffset < size)
7903 while(nextAlloc2ndIndex != SIZE_MAX &&
7904 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7906 --nextAlloc2ndIndex;
7910 if(nextAlloc2ndIndex != SIZE_MAX)
7912 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7915 if(lastOffset < suballoc.offset)
7918 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7929 lastOffset = suballoc.offset + suballoc.size;
7930 --nextAlloc2ndIndex;
7935 if(lastOffset < size)
7938 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Emits a detailed JSON map of this block. Two phases:
//   1) a counting pass over all allocations (same address-ordered walk as the
//      stat functions) to compute alloc/unused-range counts and used bytes,
//   2) a second identical walk that writes each allocation and unused range
//      through PrintDetailedMap_* helpers between Begin/End.
// NOTE(review): extraction dropped lines (counter increments, braces, some
// else branches); code left byte-identical.
#if VMA_STATS_STRING_ENABLED 7952 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 7954 const VkDeviceSize size = GetSize();
7955 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7956 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7957 const size_t suballoc1stCount = suballocations1st.size();
7958 const size_t suballoc2ndCount = suballocations2nd.size();
// Phase 1: count allocations, unused ranges and used bytes.
7962 size_t unusedRangeCount = 0;
7963 VkDeviceSize usedBytes = 0;
7965 VkDeviceSize lastOffset = 0;
7967 size_t alloc2ndCount = 0;
7968 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7970 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7971 size_t nextAlloc2ndIndex = 0;
7972 while(lastOffset < freeSpace2ndTo1stEnd)
7975 while(nextAlloc2ndIndex < suballoc2ndCount &&
7976 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7978 ++nextAlloc2ndIndex;
7982 if(nextAlloc2ndIndex < suballoc2ndCount)
7984 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7987 if(lastOffset < suballoc.offset)
7996 usedBytes += suballoc.size;
7999 lastOffset = suballoc.offset + suballoc.size;
8000 ++nextAlloc2ndIndex;
8005 if(lastOffset < freeSpace2ndTo1stEnd)
8012 lastOffset = freeSpace2ndTo1stEnd;
8017 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8018 size_t alloc1stCount = 0;
8019 const VkDeviceSize freeSpace1stTo2ndEnd =
8020 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8021 while(lastOffset < freeSpace1stTo2ndEnd)
8024 while(nextAlloc1stIndex < suballoc1stCount &&
8025 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8027 ++nextAlloc1stIndex;
8031 if(nextAlloc1stIndex < suballoc1stCount)
8033 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8036 if(lastOffset < suballoc.offset)
8045 usedBytes += suballoc.size;
8048 lastOffset = suballoc.offset + suballoc.size;
8049 ++nextAlloc1stIndex;
8054 if(lastOffset < size)
8061 lastOffset = freeSpace1stTo2ndEnd;
8065 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8067 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8068 while(lastOffset < size)
8071 while(nextAlloc2ndIndex != SIZE_MAX &&
8072 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8074 --nextAlloc2ndIndex;
8078 if(nextAlloc2ndIndex != SIZE_MAX)
8080 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8083 if(lastOffset < suballoc.offset)
8092 usedBytes += suballoc.size;
8095 lastOffset = suballoc.offset + suballoc.size;
8096 --nextAlloc2ndIndex;
8101 if(lastOffset < size)
// Phase 2: re-walk and emit JSON entries.
8113 const VkDeviceSize unusedBytes = size - usedBytes;
8114 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
8119 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8121 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8122 size_t nextAlloc2ndIndex = 0;
8123 while(lastOffset < freeSpace2ndTo1stEnd)
8126 while(nextAlloc2ndIndex < suballoc2ndCount &&
8127 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8129 ++nextAlloc2ndIndex;
8133 if(nextAlloc2ndIndex < suballoc2ndCount)
8135 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8138 if(lastOffset < suballoc.offset)
8141 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8142 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8147 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8150 lastOffset = suballoc.offset + suballoc.size;
8151 ++nextAlloc2ndIndex;
8156 if(lastOffset < freeSpace2ndTo1stEnd)
8159 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8160 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8164 lastOffset = freeSpace2ndTo1stEnd;
8169 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8170 while(lastOffset < freeSpace1stTo2ndEnd)
8173 while(nextAlloc1stIndex < suballoc1stCount &&
8174 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8176 ++nextAlloc1stIndex;
8180 if(nextAlloc1stIndex < suballoc1stCount)
8182 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8185 if(lastOffset < suballoc.offset)
8188 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8189 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8194 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8197 lastOffset = suballoc.offset + suballoc.size;
8198 ++nextAlloc1stIndex;
8203 if(lastOffset < freeSpace1stTo2ndEnd)
8206 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8207 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8211 lastOffset = freeSpace1stTo2ndEnd;
8215 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8217 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8218 while(lastOffset < size)
8221 while(nextAlloc2ndIndex != SIZE_MAX &&
8222 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8224 --nextAlloc2ndIndex;
8228 if(nextAlloc2ndIndex != SIZE_MAX)
8230 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8233 if(lastOffset < suballoc.offset)
8236 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8237 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8242 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8245 lastOffset = suballoc.offset + suballoc.size;
8246 --nextAlloc2ndIndex;
8251 if(lastOffset < size)
8254 const VkDeviceSize unusedRangeSize = size - lastOffset;
8255 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8264 PrintDetailedMap_End(json);
// Tries to find space for a new allocation of allocSize/allocAlignment and
// fills *pAllocationRequest on success. Three placement strategies:
//   - upper address (double stack): place below the current top stack,
//   - end of the 1st vector (normal/double-stack modes),
//   - wrap-around into the 2nd vector (ring-buffer mode), optionally making
//     existing allocations lost (canMakeOtherLost).
// bufferImageGranularity conflicts between linear and non-linear resources
// force extra alignment (Vulkan spec requirement).
// NOTE(review): extraction dropped lines — the "upper address" branch guard,
// several returns and closing braces are missing from this view.
#endif // #if VMA_STATS_STRING_ENABLED 8268 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8269 uint32_t currentFrameIndex,
8270 uint32_t frameInUseCount,
8271 VkDeviceSize bufferImageGranularity,
8272 VkDeviceSize allocSize,
8273 VkDeviceSize allocAlignment,
8275 VmaSuballocationType allocType,
8276 bool canMakeOtherLost,
8277 VmaAllocationRequest* pAllocationRequest)
8279 VMA_ASSERT(allocSize > 0);
8280 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8281 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8282 VMA_HEAVY_ASSERT(Validate());
8284 const VkDeviceSize size = GetSize();
8285 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8286 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Strategy A: allocate at the upper address (grows the top stack downward).
// Incompatible with ring-buffer usage of the 2nd vector.
8290 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8292 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8297 if(allocSize > size)
8301 VkDeviceSize resultBaseOffset = size - allocSize;
8302 if(!suballocations2nd.empty())
8304 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8305 resultBaseOffset = lastSuballoc.offset - allocSize;
8306 if(allocSize > lastSuballoc.offset)
// Start from the base offset and move DOWN for margin/alignment (top stack).
8313 VkDeviceSize resultOffset = resultBaseOffset;
8316 if(VMA_DEBUG_MARGIN > 0)
8318 if(resultOffset < VMA_DEBUG_MARGIN)
8322 resultOffset -= VMA_DEBUG_MARGIN;
8326 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Check granularity conflicts against neighbors already in the top stack.
8330 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8332 bool bufferImageGranularityConflict =
false;
8333 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8335 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8336 if(VmaBlocksOnSamePage(nextSuballoc.offset, nextSuballoc.size, resultOffset, bufferImageGranularity))
8338 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8340 bufferImageGranularityConflict =
true;
8348 if(bufferImageGranularityConflict)
8350 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Fits only if it stays above the end of the bottom (1st) stack.
8355 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8356 suballocations1st.back().offset + suballocations1st.back().size :
8358 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
8362 if(bufferImageGranularity > 1)
8364 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8366 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8367 if(VmaBlocksOnSamePage(resultOffset, allocSize, prevSuballoc.offset, bufferImageGranularity))
8369 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
8383 pAllocationRequest->offset = resultOffset;
8384 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8385 pAllocationRequest->sumItemSize = 0;
8387 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy B: allocate at the end of the 1st vector, moving UP for
// margin/alignment; limited by the block end or the bottom of the top stack.
8393 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8397 VkDeviceSize resultBaseOffset = 0;
8398 if(!suballocations1st.empty())
8400 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8401 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8405 VkDeviceSize resultOffset = resultBaseOffset;
8408 if(VMA_DEBUG_MARGIN > 0)
8410 resultOffset += VMA_DEBUG_MARGIN;
8414 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8418 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8420 bool bufferImageGranularityConflict =
false;
8421 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8423 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8424 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8426 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8428 bufferImageGranularityConflict =
true;
8436 if(bufferImageGranularityConflict)
8438 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8442 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8443 suballocations2nd.back().offset : size;
8446 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
8450 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8452 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8454 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8455 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8457 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8471 pAllocationRequest->offset = resultOffset;
8472 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8473 pAllocationRequest->sumItemSize = 0;
8475 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy C: wrap around to the start of the block using the 2nd vector
// as a ring buffer, possibly making lost allocations from the 1st vector
// that stand in the way (canMakeOtherLost).
8482 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8484 VMA_ASSERT(!suballocations1st.empty());
8486 VkDeviceSize resultBaseOffset = 0;
8487 if(!suballocations2nd.empty())
8489 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8490 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8494 VkDeviceSize resultOffset = resultBaseOffset;
8497 if(VMA_DEBUG_MARGIN > 0)
8499 resultOffset += VMA_DEBUG_MARGIN;
8503 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8507 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8509 bool bufferImageGranularityConflict =
false;
8510 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8512 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8513 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8515 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8517 bufferImageGranularityConflict =
true;
8525 if(bufferImageGranularityConflict)
8527 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8531 pAllocationRequest->itemsToMakeLostCount = 0;
8532 pAllocationRequest->sumItemSize = 0;
8533 size_t index1st = m_1stNullItemsBeginCount;
// Count 1st-vector allocations overlapping the candidate range that can be
// made lost (lost-allocation mechanism: CanBecomeLost + frame-age check).
8535 if(canMakeOtherLost)
8537 while(index1st < suballocations1st.size() &&
8538 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8541 const VmaSuballocation& suballoc = suballocations1st[index1st];
8542 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
8548 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8549 if(suballoc.hAllocation->CanBecomeLost() &&
8550 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8552 ++pAllocationRequest->itemsToMakeLostCount;
8553 pAllocationRequest->sumItemSize += suballoc.size;
// Also make lost any granularity-conflicting allocations on the same page.
8565 if(bufferImageGranularity > 1)
8567 while(index1st < suballocations1st.size())
8569 const VmaSuballocation& suballoc = suballocations1st[index1st];
8570 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
8572 if(suballoc.hAllocation != VK_NULL_HANDLE)
8575 if(suballoc.hAllocation->CanBecomeLost() &&
8576 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8578 ++pAllocationRequest->itemsToMakeLostCount;
8579 pAllocationRequest->sumItemSize += suballoc.size;
// Success if the candidate range fits before the next surviving 1st item
// (or before the block end when the whole 1st vector was consumed).
8598 if(index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size ||
8599 index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset)
8603 if(bufferImageGranularity > 1)
8605 for(
size_t nextSuballocIndex = index1st;
8606 nextSuballocIndex < suballocations1st.size();
8607 nextSuballocIndex++)
8609 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
8610 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8612 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8626 pAllocationRequest->offset = resultOffset;
8627 pAllocationRequest->sumFreeSize =
8628 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
8630 - pAllocationRequest->sumItemSize;
// Makes lost the allocations counted by CreateAllocationRequest
// (itemsToMakeLostCount) by scanning the 1st vector from the null-prefix
// and calling MakeLost on each non-free entry; freed slots are turned into
// FREE/null entries and the free-size counter updated.
// NOTE(review): extraction dropped the loop's tail (index1st advance and the
// final return) — code left byte-identical.
8640 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
8641 uint32_t currentFrameIndex,
8642 uint32_t frameInUseCount,
8643 VmaAllocationRequest* pAllocationRequest)
// Nothing to do when the request required no evictions.
8645 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Lost-allocation eviction only applies to ring-buffer style usage.
8650 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
8652 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8653 size_t index1st = m_1stNullItemsBeginCount;
8654 size_t madeLostCount = 0;
8655 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
8657 VMA_ASSERT(index1st < suballocations1st.size());
8658 VmaSuballocation& suballoc = suballocations1st[index1st];
8659 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8661 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8662 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
8663 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8665 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8666 suballoc.hAllocation = VK_NULL_HANDLE;
8667 m_SumFreeSize += suballoc.size;
8668 ++m_1stNullItemsMiddleCount;
// Makes lost every allocation in this block that is old enough
// (last use + frameInUseCount < currentFrameIndex) and returns how many
// were evicted. Walks both suballocation vectors, converting evicted
// entries to FREE/null and updating the null-item counters.
// NOTE(review): extraction dropped lines around 8720 (likely a cleanup call
// after evictions) — code left byte-identical.
8685 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8687 uint32_t lostAllocationCount = 0;
// 1st vector: skip the already-null prefix.
8689 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8690 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8692 VmaSuballocation& suballoc = suballocations1st[i];
8693 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8694 suballoc.hAllocation->CanBecomeLost() &&
8695 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8697 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8698 suballoc.hAllocation = VK_NULL_HANDLE;
8699 ++m_1stNullItemsMiddleCount;
8700 m_SumFreeSize += suballoc.size;
8701 ++lostAllocationCount;
// 2nd vector: full scan.
8705 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8706 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8708 VmaSuballocation& suballoc = suballocations2nd[i];
8709 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8710 suballoc.hAllocation->CanBecomeLost() &&
8711 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8713 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8714 suballoc.hAllocation = VK_NULL_HANDLE;
8715 ++m_2ndNullItemsCount;
8716 ++lostAllocationCount;
8720 if(lostAllocationCount)
8725 return lostAllocationCount;
// Validates the magic-value guard bytes written before and after each live
// allocation in the mapped block memory (pBlockData). Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard.
// NOTE(review): the success return at the end was dropped by extraction —
// code left byte-identical.
8728 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
8730 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8731 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8733 const VmaSuballocation& suballoc = suballocations1st[i];
8734 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard region preceding the allocation (VMA_DEBUG_MARGIN bytes).
8736 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
8738 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8739 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard region immediately after the allocation.
8741 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8743 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8744 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector.
8749 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8750 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8752 const VmaSuballocation& suballoc = suballocations2nd[i];
8753 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8755 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
8757 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8758 return VK_ERROR_VALIDATION_FAILED_EXT;
8760 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8762 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8763 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: appends the new
// suballocation to the appropriate vector (top stack, end of 1st vector, or
// wrap-around ring buffer), updates the 2nd-vector mode, and decreases
// m_SumFreeSize.
// NOTE(review): extraction dropped the branch selector near the top (the
// upper-address condition before line 8782) and several braces/breaks —
// code left byte-identical.
8771 void VmaBlockMetadata_Linear::Alloc(
8772 const VmaAllocationRequest& request,
8773 VmaSuballocationType type,
8774 VkDeviceSize allocSize,
8778 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address placement: push onto the top stack, switching the 2nd
// vector into double-stack mode.
8782 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
8783 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
8784 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8785 suballocations2nd.push_back(newSuballoc);
8786 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
8790 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// First allocation in an empty block.
8793 if(suballocations1st.empty())
8795 suballocations1st.push_back(newSuballoc);
// Fits after the last item of the 1st vector.
8800 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
8803 VMA_ASSERT(request.offset + allocSize <= GetSize())
8804 suballocations1st.push_back(newSuballoc);
// Fits before the first live item of the 1st vector: wrap around into the
// 2nd vector (ring-buffer mode).
8807 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
8809 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8811 switch(m_2ndVectorMode)
8813 case SECOND_VECTOR_EMPTY:
8815 VMA_ASSERT(suballocations2nd.empty());
8816 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
8818 case SECOND_VECTOR_RING_BUFFER:
8820 VMA_ASSERT(!suballocations2nd.empty());
8822 case SECOND_VECTOR_DOUBLE_STACK:
8823 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
8829 suballocations2nd.push_back(newSuballoc);
8833 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
8838 m_SumFreeSize -= newSuballoc.size;
// Frees an allocation by delegating to FreeAtOffset using the allocation's
// recorded offset within this block.
8841 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
8843 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at the given offset. Fast paths first (first item
// of the 1st vector, last item of whichever vector holds the top), then
// binary search in the sorted vectors. Freed entries are marked FREE/null
// rather than erased; compaction happens later (CleanupAfterFree).
// NOTE(review): extraction dropped returns/braces and, in the sorted-search
// calls, the refSuballoc argument line — code left byte-identical.
8846 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
8848 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8849 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8851 if(!suballocations1st.empty())
// Fast path: freeing the first live item of the 1st vector just extends
// the null prefix.
8854 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8855 if(firstSuballoc.offset == offset)
8857 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8858 firstSuballoc.hAllocation = VK_NULL_HANDLE;
8859 m_SumFreeSize += firstSuballoc.size;
8860 ++m_1stNullItemsBeginCount;
// Fast path: freeing the most recent item — pop from the back of the
// vector that owns the "top" in the current mode.
8867 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
8868 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8870 VmaSuballocation& lastSuballoc = suballocations2nd.back();
8871 if(lastSuballoc.offset == offset)
8873 m_SumFreeSize += lastSuballoc.size;
8874 suballocations2nd.pop_back();
8880 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
8882 VmaSuballocation& lastSuballoc = suballocations1st.back();
8883 if(lastSuballoc.offset == offset)
8885 m_SumFreeSize += lastSuballoc.size;
8886 suballocations1st.pop_back();
// Slow path: binary search by offset in the 1st vector (sorted ascending).
8894 VmaSuballocation refSuballoc;
8895 refSuballoc.offset = offset;
8897 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
8898 suballocations1st.begin() + m_1stNullItemsBeginCount,
8899 suballocations1st.end(),
8901 if(it != suballocations1st.end())
8903 it->type = VMA_SUBALLOCATION_TYPE_FREE;
8904 it->hAllocation = VK_NULL_HANDLE;
8905 ++m_1stNullItemsMiddleCount;
8906 m_SumFreeSize += it->size;
// Slow path: search the 2nd vector — ascending order in ring-buffer mode,
// descending (OffsetGreater) in double-stack mode.
8912 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
8915 VmaSuballocation refSuballoc;
8916 refSuballoc.offset = offset;
8918 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
8919 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
8920 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
8921 if(it != suballocations2nd.end())
8923 it->type = VMA_SUBALLOCATION_TYPE_FREE;
8924 it->hAllocation = VK_NULL_HANDLE;
8925 ++m_2ndNullItemsCount;
8926 m_SumFreeSize += it->size;
8932 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic: compact the 1st vector when it has grown past 32 entries and
// null (freed) entries are at least 1.5x the live ones, i.e.
// nullItemCount * 2 >= liveItemCount * 3.
8935 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 8937 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
8938 const size_t suballocCount = AccessSuballocations1st().size();
8939 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping after a free: reset everything when the block is empty;
// otherwise advance the 1st vector's null prefix, pop trailing null items
// from both vectors, optionally compact the 1st vector, and — when the 1st
// vector empties in ring-buffer mode — swap the vectors' roles
// (m_1stVectorIndex ^= 1) so the ring content becomes the new 1st vector.
// NOTE(review): extraction dropped the IsEmpty() condition before line 8949
// and several braces/else lines — code left byte-identical.
8942 void VmaBlockMetadata_Linear::CleanupAfterFree()
8944 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8945 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Block fully empty: reset both vectors and all counters.
8949 suballocations1st.clear();
8950 suballocations2nd.clear();
8951 m_1stNullItemsBeginCount = 0;
8952 m_1stNullItemsMiddleCount = 0;
8953 m_2ndNullItemsCount = 0;
8954 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
8958 const size_t suballoc1stCount = suballocations1st.size();
8959 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
8960 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the null prefix over leading freed entries.
8963 while(m_1stNullItemsBeginCount < suballoc1stCount &&
8964 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
8966 ++m_1stNullItemsBeginCount;
8967 --m_1stNullItemsMiddleCount;
// Drop freed entries from the back of the 1st vector.
8971 while(m_1stNullItemsMiddleCount > 0 &&
8972 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
8974 --m_1stNullItemsMiddleCount;
8975 suballocations1st.pop_back();
// Drop freed entries from the back of the 2nd vector.
8979 while(m_2ndNullItemsCount > 0 &&
8980 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
8982 --m_2ndNullItemsCount;
8983 suballocations2nd.pop_back();
// Compact: slide live entries to the front and shrink.
8986 if(ShouldCompact1st())
8988 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
8989 size_t srcIndex = m_1stNullItemsBeginCount;
8990 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
8992 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
8996 if(dstIndex != srcIndex)
8998 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9002 suballocations1st.resize(nonNullItemCount);
9003 m_1stNullItemsBeginCount = 0;
9004 m_1stNullItemsMiddleCount = 0;
9008 if(suballocations2nd.empty())
9010 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector fully freed: clear it, and if the 2nd vector was acting as a
// ring buffer, promote it to be the new 1st vector by flipping the index.
9014 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9016 suballocations1st.clear();
9017 m_1stNullItemsBeginCount = 0;
9019 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9022 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9023 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9024 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9025 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9027 ++m_1stNullItemsBeginCount;
9028 --m_1stNullItemsMiddleCount;
9030 m_2ndNullItemsCount = 0;
9031 m_1stVectorIndex ^= 1;
9036 VMA_HEAVY_ASSERT(Validate());
// Constructor: leaves the block uninitialized (no metadata, no VkDeviceMemory);
// real initialization happens in Init(). hAllocator is accepted but not
// stored here, per the visible initializer list.
9043 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
9044 m_pMetadata(VMA_NULL),
9045 m_MemoryTypeIndex(UINT32_MAX),
9047 m_hMemory(VK_NULL_HANDLE),
9049 m_pMappedData(VMA_NULL)
// Initializes the block with freshly allocated device memory and creates the
// matching metadata object: linear metadata when linearAlgorithm is set,
// generic (TLSF-style free-list) otherwise.
// NOTE(review): the if/else around lines 9069/9073 (selecting on
// linearAlgorithm) was dropped by extraction — code left byte-identical.
9053 void VmaDeviceMemoryBlock::Init(
9055 uint32_t newMemoryTypeIndex,
9056 VkDeviceMemory newMemory,
9057 VkDeviceSize newSize,
9059 bool linearAlgorithm)
// Init must not be called twice on the same block.
9061 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
9063 m_MemoryTypeIndex = newMemoryTypeIndex;
9065 m_hMemory = newMemory;
9069 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
9073 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
9075 m_pMetadata->Init(newSize);
// Releases the block's VkDeviceMemory back through the allocator and deletes
// the metadata. All suballocations must already be freed (asserted).
9078 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
9082 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
9084 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
9085 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
9086 m_hMemory = VK_NULL_HANDLE;
9088 vma_delete(allocator, m_pMetadata);
9089 m_pMetadata = VMA_NULL;
// Sanity check: the block must hold live device memory of nonzero size,
// then defer to the metadata's own Validate().
// NOTE(review): the early "return false" body between the condition and the
// final return was dropped by extraction — code left byte-identical.
9092 bool VmaDeviceMemoryBlock::Validate()
const 9094 if((m_hMemory == VK_NULL_HANDLE) ||
9095 (m_pMetadata->GetSize() == 0))
9100 return m_pMetadata->Validate();
// CheckCorruption: temporarily maps the whole block so the metadata can scan the
// debug margins for overwrites; always unmaps before returning.
9103 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
9105 void* pData =
nullptr;
9106 VkResult res = Map(hAllocator, 1, &pData);
9107 if(res != VK_SUCCESS)
9112 res = m_pMetadata->CheckCorruption(pData);
9114 Unmap(hAllocator, 1);
// Map: reference-counted persistent mapping. If already mapped, only bumps
// m_MapCount and returns the cached pointer; otherwise calls vkMapMemory.
// Serialized by m_Mutex when the allocator was created with mutexes enabled.
9119 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
9126 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Fast path: block already mapped, reuse m_pMappedData.
9129 m_MapCount += count;
9130 VMA_ASSERT(m_pMappedData != VMA_NULL);
9131 if(ppData != VMA_NULL)
9133 *ppData = m_pMappedData;
// First mapping: NOTE(review) the vkMapMemory argument list (offset/size/flags)
// was lost in extraction (gap 9141-9145) — presumably offset 0, VK_WHOLE_SIZE;
// confirm against upstream before relying on this.
9139 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9140 hAllocator->m_hDevice,
9146 if(result == VK_SUCCESS)
9148 if(ppData != VMA_NULL)
9150 *ppData = m_pMappedData;
// Unmap: decrements the map reference count by `count`; the real vkUnmapMemory
// happens only when the count reaches zero. Unbalanced calls hit the assert.
9158 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
9165 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9166 if(m_MapCount >= count)
9168 m_MapCount -= count;
// NOTE(review): the `if(m_MapCount == 0)` guard around the actual unmap was
// lost in extraction (gap 9169-9170).
9171 m_pMappedData = VMA_NULL;
9172 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
9177 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// WriteMagicValueAroundAllocation: writes the debug magic number into the margins
// just before and just after an allocation (corruption-detection builds only).
9181 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
9183 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
9184 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
9187 VkResult res = Map(hAllocator, 1, &pData);
9188 if(res != VK_SUCCESS)
9193 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
9194 VmaWriteMagicValue(pData, allocOffset + allocSize);
9196 Unmap(hAllocator, 1);
// ValidateMagicValueAroundAllocation: counterpart of WriteMagicValueAroundAllocation;
// maps the block and checks that both margins still hold the magic number.
// Asserts (does not return an error code) on detected corruption.
9201 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
9203 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
9204 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
9207 VkResult res = Map(hAllocator, 1, &pData);
9208 if(res != VK_SUCCESS)
9213 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
9215 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
9217 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
9219 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
9222 Unmap(hAllocator, 1);
// BindBufferMemory: binds a caller-supplied VkBuffer to this block's memory at the
// allocation's offset. The mutex serializes with other map/bind operations because
// multiple allocations share one VkDeviceMemory.
9227 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
9232 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
9233 hAllocation->GetBlock() ==
this);
9235 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9236 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
9237 hAllocator->m_hDevice,
9240 hAllocation->GetOffset());
// BindImageMemory: same contract as BindBufferMemory, for VkImage.
9243 VkResult VmaDeviceMemoryBlock::BindImageMemory(
9248 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
9249 hAllocation->GetBlock() ==
this);
9251 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9252 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
9253 hAllocator->m_hDevice,
9256 hAllocation->GetOffset());
9261 memset(&outInfo, 0,
sizeof(outInfo));
9280 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the user's VmaPoolCreateInfo fields into the
// pool's internal VmaBlockVector. NOTE(review): extraction dropped the parameter
// list and several initializer lines (gaps 9289-9292, 9297, 9299-9304).
9288 VmaPool_T::VmaPool_T(
9293 createInfo.memoryTypeIndex,
9294 createInfo.blockSize,
9295 createInfo.minBlockCount,
9296 createInfo.maxBlockCount,
9298 createInfo.frameInUseCount,
// VmaPool_T destructor — body lost in extraction (gap 9306-9308).
9305 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores configuration only; no Vulkan memory is
// allocated here (see CreateMinBlocks / Allocate).
9309 #if VMA_STATS_STRING_ENABLED 9311 #endif // #if VMA_STATS_STRING_ENABLED 9313 VmaBlockVector::VmaBlockVector(
9315 uint32_t memoryTypeIndex,
9316 VkDeviceSize preferredBlockSize,
9317 size_t minBlockCount,
9318 size_t maxBlockCount,
9319 VkDeviceSize bufferImageGranularity,
9320 uint32_t frameInUseCount,
9322 bool linearAlgorithm) :
9323 m_hAllocator(hAllocator),
9324 m_MemoryTypeIndex(memoryTypeIndex),
9325 m_PreferredBlockSize(preferredBlockSize),
9326 m_MinBlockCount(minBlockCount),
9327 m_MaxBlockCount(maxBlockCount),
9328 m_BufferImageGranularity(bufferImageGranularity),
9329 m_FrameInUseCount(frameInUseCount),
9330 m_IsCustomPool(isCustomPool),
9331 m_LinearAlgorithm(linearAlgorithm),
9332 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
9333 m_HasEmptyBlock(false),
9334 m_pDefragmentator(VMA_NULL),
// Destructor: destroys all remaining device memory blocks. A live defragmentator
// at this point would be a usage error (asserted).
9339 VmaBlockVector::~VmaBlockVector()
9341 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
// Iterate backwards so vector shrinking (if any) stays cheap.
9343 for(
size_t i = m_Blocks.size(); i--; )
9345 m_Blocks[i]->Destroy(m_hAllocator);
9346 vma_delete(m_hAllocator, m_Blocks[i]);
// CreateMinBlocks: eagerly allocates m_MinBlockCount blocks of the preferred
// size; bails out on the first failure.
9350 VkResult VmaBlockVector::CreateMinBlocks()
9352 for(
size_t i = 0; i < m_MinBlockCount; ++i)
9354 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
9355 if(res != VK_SUCCESS)
// GetPoolStats: accumulates per-block metadata statistics into *pStats under the
// vector's mutex. NOTE(review): the pStats zero-initialization lines were lost in
// extraction (gap 9364-9370).
9363 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
9371 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
9373 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
9375 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
9377 VMA_HEAVY_ASSERT(pBlock->Validate());
9378 pBlock->m_pMetadata->AddPoolStats(*pStats);
// IsCorruptionDetectionEnabled: corruption checks require the debug macros to be
// active AND the memory type to be HOST_VISIBLE|HOST_COHERENT (the margins are
// written/read through a CPU mapping).
9382 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 9384 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
9385 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
9386 (VMA_DEBUG_MARGIN > 0) &&
9387 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
9390 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocate: main suballocation entry point for this block vector. Strategy:
//   1) try to place the request in an existing block;
//   2) otherwise create a new block (with size back-off for default pools);
//   3) otherwise, if permitted, evict ("make lost") other allocations and retry.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when nothing fits.
// NOTE(review): many argument/brace lines were dropped in extraction throughout
// this function (large gaps in the original numbering) — comments below describe
// the visible control flow only.
9392 VkResult VmaBlockVector::Allocate(
9394 uint32_t currentFrameIndex,
9396 VkDeviceSize alignment,
9398 VmaSuballocationType suballocType,
9405 const bool canCreateNewBlock =
9407 (m_Blocks.size() < m_MaxBlockCount);
// Upper-address allocation is only meaningful for the linear algorithm.
9410 if(isUpperAddress && !m_LinearAlgorithm)
9412 return VK_ERROR_FEATURE_NOT_PRESENT;
// Request (plus both debug margins) larger than a whole block can never fit.
9416 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
9418 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
9421 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// ---- Pass 1: search existing blocks (no eviction). ----
9428 if(!canMakeOtherLost || canCreateNewBlock)
9432 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
9434 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
9435 VMA_ASSERT(pCurrBlock);
9436 VmaAllocationRequest currRequest = {};
9437 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
9440 m_BufferImageGranularity,
9449 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations bump the block's map refcount up front.
9453 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
9454 if(res != VK_SUCCESS)
9461 if(pCurrBlock->m_pMetadata->IsEmpty())
9463 m_HasEmptyBlock =
false;
9466 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
9467 pCurrBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
9468 (*pAllocation)->InitBlockAllocation(
9477 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
9478 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
9479 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
9480 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
9482 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
9484 if(IsCorruptionDetectionEnabled())
9486 VkResult res = pCurrBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
9487 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// ---- Pass 2: create a new block. ----
9494 if(canCreateNewBlock)
9497 VkDeviceSize newBlockSize = m_PreferredBlockSize;
9498 uint32_t newBlockSizeShift = 0;
9499 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default pools start small: pick the smallest power-of-two fraction of the
// preferred size that still fits the request with room to spare.
9503 if(m_IsCustomPool ==
false)
9506 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
9507 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
9509 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
9510 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
9512 newBlockSize = smallerNewBlockSize;
9513 ++newBlockSizeShift;
9522 size_t newBlockIndex = 0;
9523 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On vkAllocateMemory failure, retry with progressively halved block sizes
// (default pools only) while the request still fits.
9525 if(m_IsCustomPool ==
false)
9527 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
9529 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
9530 if(smallerNewBlockSize >= size)
9532 newBlockSize = smallerNewBlockSize;
9533 ++newBlockSizeShift;
9534 res = CreateBlock(newBlockSize, &newBlockIndex);
9543 if(res == VK_SUCCESS)
9545 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
9546 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
9550 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
9551 if(res != VK_SUCCESS)
// A brand-new block must satisfy the request; allocate from it directly.
9558 VmaAllocationRequest allocRequest;
9559 if(pBlock->m_pMetadata->CreateAllocationRequest(
9562 m_BufferImageGranularity,
9570 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
9571 pBlock->m_pMetadata->Alloc(allocRequest, suballocType, size, isUpperAddress, *pAllocation);
9572 (*pAllocation)->InitBlockAllocation(
9575 allocRequest.offset,
9581 VMA_HEAVY_ASSERT(pBlock->Validate());
9582 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
9583 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
9584 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
9586 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
9588 if(IsCorruptionDetectionEnabled())
9590 res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, allocRequest.offset, size);
9591 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
9598 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// ---- Pass 3: evict lost-able allocations and retry, bounded by
// VMA_ALLOCATION_TRY_COUNT to guarantee termination. ----
9605 if(canMakeOtherLost)
9607 uint32_t tryIndex = 0;
9608 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
9610 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
9611 VmaAllocationRequest bestRequest = {};
9612 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Choose the candidate whose eviction cost (bytes made lost) is lowest.
9616 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
9618 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
9619 VMA_ASSERT(pCurrBlock);
9620 VmaAllocationRequest currRequest = {};
9621 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
9624 m_BufferImageGranularity,
9632 const VkDeviceSize currRequestCost = currRequest.CalcCost();
9633 if(pBestRequestBlock == VMA_NULL ||
9634 currRequestCost < bestRequestCost)
9636 pBestRequestBlock = pCurrBlock;
9637 bestRequest = currRequest;
9638 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be evicted — cannot do better, stop searching.
9640 if(bestRequestCost == 0)
9648 if(pBestRequestBlock != VMA_NULL)
9652 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
9653 if(res != VK_SUCCESS)
// The eviction may fail if frame-in-use constraints changed; then retry the loop.
9659 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
9665 if(pBestRequestBlock->m_pMetadata->IsEmpty())
9667 m_HasEmptyBlock =
false;
9670 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
9671 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
9672 (*pAllocation)->InitBlockAllocation(
9681 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
9682 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
9683 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
9684 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
9686 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
9688 if(IsCorruptionDetectionEnabled())
9690 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
9691 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
9706 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
9708 return VK_ERROR_TOO_MANY_OBJECTS;
9712 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Free: returns an allocation to its block's metadata; may retire at most one
// empty block per call (the vector keeps one empty block cached as a heuristic).
// The actual vkFreeMemory (Destroy) is done outside the mutex scope.
9715 void VmaBlockVector::Free(
9718 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock: everything except the final block destruction.
9722 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
9724 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
9726 if(IsCorruptionDetectionEnabled())
9728 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
9729 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Balance the Map() done at allocation time for persistently mapped allocations.
9732 if(hAllocation->IsPersistentMap())
9734 pBlock->Unmap(m_hAllocator, 1);
9737 pBlock->m_pMetadata->Free(hAllocation);
9738 VMA_HEAVY_ASSERT(pBlock->Validate());
9740 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// If this free emptied the block: keep one empty block around, delete a second.
9743 if(pBlock->m_pMetadata->IsEmpty())
9746 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
9748 pBlockToDelete = pBlock;
9754 m_HasEmptyBlock =
true;
// Otherwise, if an empty block exists and is last, it may now be retired.
9759 else if(m_HasEmptyBlock)
9761 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
9762 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
9764 pBlockToDelete = pLastBlock;
9765 m_Blocks.pop_back();
9766 m_HasEmptyBlock =
false;
9770 IncrementallySortBlocks();
// Destruction happens outside the lock — it calls vkFreeMemory, which may be slow.
9775 if(pBlockToDelete != VMA_NULL)
9777 VMA_DEBUG_LOG(
" Deleted empty allocation");
9778 pBlockToDelete->Destroy(m_hAllocator);
9779 vma_delete(m_hAllocator, pBlockToDelete);
// CalcMaxBlockSize: largest existing block size; iterates backwards (larger blocks
// tend to be at the end) and stops early once the preferred size is reached.
9783 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 9785 VkDeviceSize result = 0;
9786 for(
size_t i = m_Blocks.size(); i--; )
9788 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
9789 if(result >= m_PreferredBlockSize)
// Remove: unregisters (does not destroy) a block from the vector.
9797 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
9799 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
9801 if(m_Blocks[blockIndex] == pBlock)
9803 VmaVectorRemove(m_Blocks, blockIndex)
// IncrementallySortBlocks: single bubble pass ordering blocks by ascending free
// space — cheap amortized sorting done once per Free().
9810 void VmaBlockVector::IncrementallySortBlocks()
9813 for(
size_t i = 1; i < m_Blocks.size(); ++i)
9815 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
9817 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// CreateBlock: allocates a new VkDeviceMemory of `blockSize`, wraps it in a
// VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally reports its index.
// NOTE(review): the error-return path after AllocateVulkanMemory and part of the
// pBlock->Init() call were lost in extraction (gaps 9830-9837, 9839-9842).
9823 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
9825 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
9826 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
9827 allocInfo.allocationSize = blockSize;
9828 VkDeviceMemory mem = VK_NULL_HANDLE;
9829 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
9838 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
9843 allocInfo.allocationSize,
9847 m_Blocks.push_back(pBlock);
9848 if(pNewBlockIndex != VMA_NULL)
9850 *pNewBlockIndex = m_Blocks.size() - 1;
// PrintDetailedMap: serializes this block vector as JSON for vmaBuildStatsString.
// Custom pools emit Min/Max/Cur block counts and pool settings; default pools
// emit only PreferredBlockSize. Then each block's metadata map is dumped keyed
// by block id.
9856 #if VMA_STATS_STRING_ENABLED 9858 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
9860 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool branch: full configuration.
9866 json.WriteString(
"MemoryTypeIndex");
9867 json.WriteNumber(m_MemoryTypeIndex);
9869 json.WriteString(
"BlockSize");
9870 json.WriteNumber(m_PreferredBlockSize);
9872 json.WriteString(
"BlockCount");
9873 json.BeginObject(
true);
9874 if(m_MinBlockCount > 0)
9876 json.WriteString(
"Min");
9877 json.WriteNumber((uint64_t)m_MinBlockCount);
9879 if(m_MaxBlockCount < SIZE_MAX)
9881 json.WriteString(
"Max");
9882 json.WriteNumber((uint64_t)m_MaxBlockCount);
9884 json.WriteString(
"Cur");
9885 json.WriteNumber((uint64_t)m_Blocks.size());
9888 if(m_FrameInUseCount > 0)
9890 json.WriteString(
"FrameInUseCount");
9891 json.WriteNumber(m_FrameInUseCount);
9894 if(m_LinearAlgorithm)
9896 json.WriteString(
"LinearAlgorithm");
9897 json.WriteBool(
true);
// Default-pool branch: preferred size only.
9902 json.WriteString(
"PreferredBlockSize");
9903 json.WriteNumber(m_PreferredBlockSize);
9906 json.WriteString(
"Blocks");
9908 for(
size_t i = 0; i < m_Blocks.size(); ++i)
9911 json.ContinueString(m_Blocks[i]->GetId());
9914 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// EnsureDefragmentator: lazily creates the per-vector defragmentator (at most one).
9921 #endif // #if VMA_STATS_STRING_ENABLED 9923 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
9925 uint32_t currentFrameIndex)
9927 if(m_pDefragmentator == VMA_NULL)
9929 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
9935 return m_pDefragmentator;
// Defragment: runs the defragmentator, accounts moved bytes/allocations against
// the caller's budgets (passed by reference and decremented), then frees any
// blocks that became empty — keeping at most one cached empty block.
9938 VkResult VmaBlockVector::Defragment(
9940 VkDeviceSize& maxBytesToMove,
9941 uint32_t& maxAllocationsToMove)
9943 if(m_pDefragmentator == VMA_NULL)
9948 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
9951 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Accumulate statistics and reduce remaining budgets for subsequent vectors.
9954 if(pDefragmentationStats != VMA_NULL)
9956 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
9957 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
9960 VMA_ASSERT(bytesMoved <= maxBytesToMove);
9961 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free empty blocks above m_MinBlockCount; remember if one empty block remains.
9967 m_HasEmptyBlock =
false;
9968 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
9970 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
9971 if(pBlock->m_pMetadata->IsEmpty())
9973 if(m_Blocks.size() > m_MinBlockCount)
9975 if(pDefragmentationStats != VMA_NULL)
9978 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
9981 VmaVectorRemove(m_Blocks, blockIndex);
9982 pBlock->Destroy(m_hAllocator);
9983 vma_delete(m_hAllocator, pBlock);
9987 m_HasEmptyBlock =
true;
// DestroyDefragmentator: releases the defragmentator created by EnsureDefragmentator.
9995 void VmaBlockVector::DestroyDefragmentator()
9997 if(m_pDefragmentator != VMA_NULL)
9999 vma_delete(m_hAllocator, m_pDefragmentator);
10000 m_pDefragmentator = VMA_NULL;
// MakePoolAllocationsLost: marks every lost-able allocation in every block as
// lost for the given frame; optionally reports how many were affected.
10004 void VmaBlockVector::MakePoolAllocationsLost(
10005 uint32_t currentFrameIndex,
10006 size_t* pLostAllocationCount)
10008 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10009 size_t lostAllocationCount = 0;
10010 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10012 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10013 VMA_ASSERT(pBlock);
10014 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
10016 if(pLostAllocationCount != VMA_NULL)
10018 *pLostAllocationCount = lostAllocationCount;
// CheckCorruption: validates the debug margins of every block; only meaningful
// when corruption detection is possible for this memory type.
10022 VkResult VmaBlockVector::CheckCorruption()
10024 if(!IsCorruptionDetectionEnabled())
10026 return VK_ERROR_FEATURE_NOT_PRESENT;
10029 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10030 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10032 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10033 VMA_ASSERT(pBlock);
10034 VkResult res = pBlock->CheckCorruption(m_hAllocator);
10035 if(res != VK_SUCCESS)
// AddStats: folds every block's statistics into the global, per-memory-type, and
// per-heap buckets of *pStats.
10043 void VmaBlockVector::AddStats(
VmaStats* pStats)
10045 const uint32_t memTypeIndex = m_MemoryTypeIndex;
10046 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
10048 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10050 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10052 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10053 VMA_ASSERT(pBlock);
10054 VMA_HEAVY_ASSERT(pBlock->Validate());
10056 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
10057 VmaAddStatInfo(pStats->
total, allocationStatInfo);
10058 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
10059 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// VmaDefragmentator constructor: captures the owning allocator, the block vector
// to compact, and the frame index used for lost-allocation bookkeeping.
// Defragmentation is not supported for linear-algorithm vectors (asserted).
10066 VmaDefragmentator::VmaDefragmentator(
10068 VmaBlockVector* pBlockVector,
10069 uint32_t currentFrameIndex) :
10070 m_hAllocator(hAllocator),
10071 m_pBlockVector(pBlockVector),
10072 m_CurrentFrameIndex(currentFrameIndex),
10074 m_AllocationsMoved(0),
10075 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
10076 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
10078 VMA_ASSERT(!pBlockVector->UsesLinearAlgorithm());
// Destructor: deletes the BlockInfo wrappers created during Defragment().
10081 VmaDefragmentator::~VmaDefragmentator()
10083 for(
size_t i = m_Blocks.size(); i--; )
10085 vma_delete(m_hAllocator, m_Blocks[i]);
// AddAllocation: registers one allocation as a defragmentation candidate;
// *pChanged is set to VK_TRUE later if the allocation actually moves.
10089 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
10091 AllocationInfo allocInfo;
10092 allocInfo.m_hAllocation = hAlloc;
10093 allocInfo.m_pChanged = pChanged;
10094 m_Allocations.push_back(allocInfo);
// EnsureMapping: returns a CPU pointer to this block's memory, in priority order:
// an existing defrag mapping, the block's persistent mapping, or a new Map()
// whose pointer is cached so BlockInfo::Unmap can balance it.
10097 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
10100 if(m_pMappedDataForDefragmentation)
10102 *ppMappedData = m_pMappedDataForDefragmentation;
// Reuse the block's persistent mapping if it is already mapped.
10107 if(m_pBlock->GetMappedData())
10109 *ppMappedData = m_pBlock->GetMappedData();
// Map for the duration of defragmentation; released in Unmap() below.
10114 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
10115 *ppMappedData = m_pMappedDataForDefragmentation;
// Unmap: balances the Map() done by EnsureMapping (no-op if we never mapped).
10119 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
10121 if(m_pMappedDataForDefragmentation != VMA_NULL)
10123 m_pBlock->Unmap(hAllocator, 1);
// DefragmentRound: one pass of the compaction loop. Walks candidate allocations
// from the last block backwards and tries to move each into an earlier block (or
// an earlier offset in the same block), memcpy-ing the data through CPU mappings.
// Returns VK_INCOMPLETE when a budget (bytes or allocation count) would be
// exceeded. NOTE(review): several loop-control and return lines were dropped in
// extraction (gaps throughout) — the skeleton below shows only visible logic.
10127 VkResult VmaDefragmentator::DefragmentRound(
10128 VkDeviceSize maxBytesToMove,
10129 uint32_t maxAllocationsToMove)
10131 if(m_Blocks.empty())
// Cursor over (block, allocation) pairs, starting at the very end.
10136 size_t srcBlockIndex = m_Blocks.size() - 1;
10137 size_t srcAllocIndex = SIZE_MAX;
// Advance the cursor to the next existing source allocation, moving to earlier
// blocks when the current one is exhausted.
10143 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
10145 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
10148 if(srcBlockIndex == 0)
10155 srcAllocIndex = SIZE_MAX;
10160 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
10164 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
10165 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
10167 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
10168 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
10169 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
10170 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source block.
10173 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
10175 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
10176 VmaAllocationRequest dstAllocRequest;
10177 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
10178 m_CurrentFrameIndex,
10179 m_pBlockVector->GetFrameInUseCount(),
10180 m_pBlockVector->GetBufferImageGranularity(),
10186 &dstAllocRequest) &&
10188 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
10190 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop (resumable) when either budget would be exceeded by this move.
10193 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
10194 (m_BytesMoved + size > maxBytesToMove))
10196 return VK_INCOMPLETE;
10199 void* pDstMappedData = VMA_NULL;
10200 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
10201 if(res != VK_SUCCESS)
10206 void* pSrcMappedData = VMA_NULL;
10207 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
10208 if(res != VK_SUCCESS)
// The actual data move (memcpy call line lost in extraction, gap 10210-10214).
10215 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
10216 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
10217 static_cast<size_t>(size));
// Re-stamp the debug margins at the new location.
10219 if(VMA_DEBUG_MARGIN > 0)
10221 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
10222 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Update metadata: allocate at destination, free at source, retarget the handle.
10225 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
10230 allocInfo.m_hAllocation);
10231 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
10233 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
10235 if(allocInfo.m_pChanged != VMA_NULL)
10237 *allocInfo.m_pChanged = VK_TRUE;
10240 ++m_AllocationsMoved;
10241 m_BytesMoved += size;
10243 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// No destination found for this allocation — step the cursor backwards.
10251 if(srcAllocIndex > 0)
10257 if(srcBlockIndex > 0)
10260 srcAllocIndex = SIZE_MAX;
// Defragment: top-level driver. Builds per-block candidate lists from the
// registered allocations, sorts blocks into destination-preference order, then
// runs DefragmentRound repeatedly until done or a budget is hit, finally
// unmapping everything that EnsureMapping mapped.
10270 VkResult VmaDefragmentator::Defragment(
10271 VkDeviceSize maxBytesToMove,
10272 uint32_t maxAllocationsToMove)
10274 if(m_Allocations.empty())
// Wrap every block of the vector in a BlockInfo (owned by this defragmentator).
10280 const size_t blockCount = m_pBlockVector->m_Blocks.size();
10281 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10283 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
10284 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
10285 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so candidates can be matched via binary search below.
10289 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Distribute registered allocations into their owning BlockInfo; allocations
// already lost are skipped.
10292 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
10294 AllocationInfo& allocInfo = m_Allocations[blockIndex];
10296 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
10298 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
10299 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
10300 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
10302 (*it)->m_Allocations.push_back(allocInfo);
10310 m_Allocations.clear();
10312 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10314 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
10315 pBlockInfo->CalcHasNonMovableAllocations();
10316 pBlockInfo->SortAllocationsBySizeDescecnding();
// Preferred destinations first (blocks with non-movable allocations, etc.).
10320 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Execute a bounded number of rounds.
10323 VkResult result = VK_SUCCESS;
10324 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
10326 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Balance all temporary mappings made during the rounds.
10330 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10332 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// MoveMakesSense: a move is worthwhile only if it lands in an earlier block, or
// at a lower offset within the same block. NOTE(review): the return statements
// of each branch were lost in extraction (gaps after 10342/10346/10350).
10338 bool VmaDefragmentator::MoveMakesSense(
10339 size_t dstBlockIndex, VkDeviceSize dstOffset,
10340 size_t srcBlockIndex, VkDeviceSize srcOffset)
10342 if(dstBlockIndex < srcBlockIndex)
10346 if(dstBlockIndex > srcBlockIndex)
10350 if(dstOffset < srcOffset)
// VmaRecorder (Windows-only recording backend): writes every allocator call as a
// CSV line to the file configured in VmaRecordSettings. Constructor only primes
// the QPC baseline; file opening happens in Init (header line lost in extraction,
// gap 10368-10372).
10360 #if VMA_RECORDING_ENABLED 10362 VmaRecorder::VmaRecorder() :
10367 m_StartCounter(INT64_MAX)
10373 m_UseMutex = useMutex;
10374 m_Flags = settings.
flags;
// High-resolution timestamps for the `time` column of every record.
10376 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
10377 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
// Binary mode ("wb") keeps line endings byte-exact across platforms.
10380 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
10383 return VK_ERROR_INITIALIZATION_FAILED;
// File header: magic string plus format version "1,3".
10387 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
10388 fprintf(m_File,
"%s\n",
"1,3");
// Destructor: closes the recording file if Init succeeded.
10393 VmaRecorder::~VmaRecorder()
10395 if(m_File != VMA_NULL)
// RecordCreateAllocator: one CSV row (thread id, timestamp, frame, call name).
10401 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
10403 CallParams callParams;
10404 GetBasicParams(callParams);
10406 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10407 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// RecordDestroyAllocator: CSV row for vmaDestroyAllocator.
10411 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
10413 CallParams callParams;
10414 GetBasicParams(callParams);
10416 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10417 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// RecordCreatePool: logs the pool's create-info fields plus the returned handle.
// NOTE(review): the function signature line was lost in extraction (gap 10419-10422).
10423 CallParams callParams;
10424 GetBasicParams(callParams);
10426 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10427 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordDestroyPool: CSV row identifying the destroyed pool by pointer.
10438 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
10440 CallParams callParams;
10441 GetBasicParams(callParams);
10443 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10444 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordAllocateMemory: logs the VkMemoryRequirements, the VmaAllocationCreateInfo
// fields, and the returned allocation handle plus its (stringified) user data.
10449 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
10450 const VkMemoryRequirements& vkMemReq,
10454 CallParams callParams;
10455 GetBasicParams(callParams);
10457 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10458 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
10459 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10461 vkMemReq.alignment,
10462 vkMemReq.memoryTypeBits,
10470 userDataStr.GetString());
// RecordAllocateMemoryForBuffer: same as above plus the dedicated-allocation
// hints reported by vkGetBufferMemoryRequirements2.
10474 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
10475 const VkMemoryRequirements& vkMemReq,
10476 bool requiresDedicatedAllocation,
10477 bool prefersDedicatedAllocation,
10481 CallParams callParams;
10482 GetBasicParams(callParams);
10484 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10485 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
10486 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10488 vkMemReq.alignment,
10489 vkMemReq.memoryTypeBits,
10490 requiresDedicatedAllocation ? 1 : 0,
10491 prefersDedicatedAllocation ? 1 : 0,
10499 userDataStr.GetString());
// RecordAllocateMemoryForImage: image-flavored variant of the previous record.
10503 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
10504 const VkMemoryRequirements& vkMemReq,
10505 bool requiresDedicatedAllocation,
10506 bool prefersDedicatedAllocation,
10510 CallParams callParams;
10511 GetBasicParams(callParams);
10513 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10514 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
10515 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10517 vkMemReq.alignment,
10518 vkMemReq.memoryTypeBits,
10519 requiresDedicatedAllocation ? 1 : 0,
10520 prefersDedicatedAllocation ? 1 : 0,
10528 userDataStr.GetString());
// RecordFreeMemory: CSV row identifying the freed allocation by pointer.
10532 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
10535 CallParams callParams;
10536 GetBasicParams(callParams);
10538 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10539 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordSetAllocationUserData: logs the allocation and its new user data
// (stringified via UserDataString).
10544 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
10546 const void* pUserData)
10548 CallParams callParams;
10549 GetBasicParams(callParams);
10551 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10552 UserDataString userDataStr(
10555 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10557 userDataStr.GetString());
// RecordCreateLostAllocation: CSV row for vmaCreateLostAllocation.
10561 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
10564 CallParams callParams;
10565 GetBasicParams(callParams);
10567 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10568 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordMapMemory / RecordUnmapMemory: map lifecycle rows.
10573 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
10576 CallParams callParams;
10577 GetBasicParams(callParams);
10579 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10580 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
10585 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
10588 CallParams callParams;
10589 GetBasicParams(callParams);
10591 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10592 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordFlushAllocation / RecordInvalidateAllocation: cache-maintenance rows
// including the offset and size arguments.
10597 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
10598 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
10600 CallParams callParams;
10601 GetBasicParams(callParams);
10603 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10604 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
10611 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
10612 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
10614 CallParams callParams;
10615 GetBasicParams(callParams);
10617 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10618 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// RecordCreateBuffer: logs the VkBufferCreateInfo fields, the allocation
// create-info, and the resulting allocation handle + user data.
10625 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
10626 const VkBufferCreateInfo& bufCreateInfo,
10630 CallParams callParams;
10631 GetBasicParams(callParams);
10633 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10634 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
10635 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10636 bufCreateInfo.flags,
10637 bufCreateInfo.size,
10638 bufCreateInfo.usage,
10639 bufCreateInfo.sharingMode,
10640 allocCreateInfo.
flags,
10641 allocCreateInfo.
usage,
10645 allocCreateInfo.
pool,
10647 userDataStr.GetString());
// RecordCreateImage: same idea for VkImageCreateInfo — all image parameters are
// flattened into one CSV row.
10651 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
10652 const VkImageCreateInfo& imageCreateInfo,
10656 CallParams callParams;
10657 GetBasicParams(callParams);
10659 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10660 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
10661 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10662 imageCreateInfo.flags,
10663 imageCreateInfo.imageType,
10664 imageCreateInfo.format,
10665 imageCreateInfo.extent.width,
10666 imageCreateInfo.extent.height,
10667 imageCreateInfo.extent.depth,
10668 imageCreateInfo.mipLevels,
10669 imageCreateInfo.arrayLayers,
10670 imageCreateInfo.samples,
10671 imageCreateInfo.tiling,
10672 imageCreateInfo.usage,
10673 imageCreateInfo.sharingMode,
10674 imageCreateInfo.initialLayout,
10675 allocCreateInfo.
flags,
10676 allocCreateInfo.
usage,
10680 allocCreateInfo.
pool,
10682 userDataStr.GetString());
// RecordDestroyBuffer / RecordDestroyImage: one CSV row naming the destroyed
// resource's allocation by pointer.
10686 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
10689 CallParams callParams;
10690 GetBasicParams(callParams);
10692 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10693 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
10698 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
10701 CallParams callParams;
10702 GetBasicParams(callParams);
10704 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10705 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordTouchAllocation / RecordGetAllocationInfo: read-only query rows.
10710 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
10713 CallParams callParams;
10714 GetBasicParams(callParams);
10716 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10717 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
10722 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
10725 CallParams callParams;
10726 GetBasicParams(callParams);
10728 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10729 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordMakePoolAllocationsLost: CSV row naming the affected pool.
10734 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
10737 CallParams callParams;
10738 GetBasicParams(callParams);
10740 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10741 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// UserDataString helper: if user data is a string (flag-dependent), use it
// directly; otherwise render the raw pointer as "%p". NOTE(review): the
// constructor signature line was lost in extraction (gap 10745-10747).
10748 if(pUserData != VMA_NULL)
10752 m_Str = (
const char*)pUserData;
10756 sprintf_s(m_PtrStr,
"%p", pUserData);
10766 void VmaRecorder::WriteConfiguration(
10767 const VkPhysicalDeviceProperties& devProps,
10768 const VkPhysicalDeviceMemoryProperties& memProps,
10769 bool dedicatedAllocationExtensionEnabled)
10771 fprintf(m_File,
"Config,Begin\n");
10773 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
10774 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
10775 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
10776 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
10777 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
10778 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
10780 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
10781 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
10782 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
10784 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
10785 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
10787 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
10788 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
10790 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
10791 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
10793 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
10794 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
10797 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
10799 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
10800 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
10801 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
10802 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
10803 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
10804 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
10805 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
10806 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
10807 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
10809 fprintf(m_File,
"Config,End\n");
10812 void VmaRecorder::GetBasicParams(CallParams& outParams)
10814 outParams.threadId = GetCurrentThreadId();
10816 LARGE_INTEGER counter;
10817 QueryPerformanceCounter(&counter);
10818 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
10821 void VmaRecorder::Flush()
10829 #endif // #if VMA_RECORDING_ENABLED 10837 m_hDevice(pCreateInfo->device),
10838 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
10839 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
10840 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
10841 m_PreferredLargeHeapBlockSize(0),
10842 m_PhysicalDevice(pCreateInfo->physicalDevice),
10843 m_CurrentFrameIndex(0),
10844 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
10847 ,m_pRecorder(VMA_NULL)
10850 if(VMA_DEBUG_DETECT_CORRUPTION)
10853 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
10858 #if !(VMA_DEDICATED_ALLOCATION) 10861 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
10865 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
10866 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
10867 memset(&m_MemProps, 0,
sizeof(m_MemProps));
10869 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
10870 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
10872 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
10874 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
10885 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
10886 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
10893 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
10895 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
10896 if(limit != VK_WHOLE_SIZE)
10898 m_HeapSizeLimit[heapIndex] = limit;
10899 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
10901 m_MemProps.memoryHeaps[heapIndex].size = limit;
10907 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
10909 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
10911 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
10914 preferredBlockSize,
10917 GetBufferImageGranularity(),
10923 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
10930 VkResult res = VK_SUCCESS;
10935 #if VMA_RECORDING_ENABLED 10936 m_pRecorder = vma_new(
this, VmaRecorder)();
10938 if(res != VK_SUCCESS)
10942 m_pRecorder->WriteConfiguration(
10943 m_PhysicalDeviceProperties,
10945 m_UseKhrDedicatedAllocation);
10946 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
10948 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
10949 return VK_ERROR_FEATURE_NOT_PRESENT;
10956 VmaAllocator_T::~VmaAllocator_T()
10958 #if VMA_RECORDING_ENABLED 10959 if(m_pRecorder != VMA_NULL)
10961 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
10962 vma_delete(
this, m_pRecorder);
10966 VMA_ASSERT(m_Pools.empty());
10968 for(
size_t i = GetMemoryTypeCount(); i--; )
10970 vma_delete(
this, m_pDedicatedAllocations[i]);
10971 vma_delete(
this, m_pBlockVectors[i]);
10975 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
10977 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 10978 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
10979 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
10980 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
10981 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
10982 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
10983 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
10984 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
10985 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
10986 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
10987 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
10988 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
10989 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
10990 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
10991 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
10992 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
10993 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
10994 #if VMA_DEDICATED_ALLOCATION 10995 if(m_UseKhrDedicatedAllocation)
10997 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
10998 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
10999 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
11000 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
11002 #endif // #if VMA_DEDICATED_ALLOCATION 11003 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11005 #define VMA_COPY_IF_NOT_NULL(funcName) \ 11006 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 11008 if(pVulkanFunctions != VMA_NULL)
11010 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
11011 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
11012 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
11013 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
11014 VMA_COPY_IF_NOT_NULL(vkMapMemory);
11015 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
11016 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
11017 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
11018 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
11019 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
11020 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
11021 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
11022 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
11023 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
11024 VMA_COPY_IF_NOT_NULL(vkCreateImage);
11025 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
11026 #if VMA_DEDICATED_ALLOCATION 11027 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
11028 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
11032 #undef VMA_COPY_IF_NOT_NULL 11036 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
11037 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
11038 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
11039 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
11040 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
11041 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
11042 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
11043 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
11044 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
11045 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
11046 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
11047 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
11048 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
11049 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
11050 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
11051 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
11052 #if VMA_DEDICATED_ALLOCATION 11053 if(m_UseKhrDedicatedAllocation)
11055 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
11056 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
11061 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
11063 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
11064 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
11065 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
11066 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
11069 VkResult VmaAllocator_T::AllocateMemoryOfType(
11071 VkDeviceSize alignment,
11072 bool dedicatedAllocation,
11073 VkBuffer dedicatedBuffer,
11074 VkImage dedicatedImage,
11076 uint32_t memTypeIndex,
11077 VmaSuballocationType suballocType,
11080 VMA_ASSERT(pAllocation != VMA_NULL);
11081 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
11087 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
11092 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
11093 VMA_ASSERT(blockVector);
11095 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
11096 bool preferDedicatedMemory =
11097 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
11098 dedicatedAllocation ||
11100 size > preferredBlockSize / 2;
11102 if(preferDedicatedMemory &&
11104 finalCreateInfo.
pool == VK_NULL_HANDLE)
11113 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11117 return AllocateDedicatedMemory(
11131 VkResult res = blockVector->Allocate(
11133 m_CurrentFrameIndex.load(),
11139 if(res == VK_SUCCESS)
11147 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11151 res = AllocateDedicatedMemory(
11157 finalCreateInfo.pUserData,
11161 if(res == VK_SUCCESS)
11164 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
11170 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
11177 VkResult VmaAllocator_T::AllocateDedicatedMemory(
11179 VmaSuballocationType suballocType,
11180 uint32_t memTypeIndex,
11182 bool isUserDataString,
11184 VkBuffer dedicatedBuffer,
11185 VkImage dedicatedImage,
11188 VMA_ASSERT(pAllocation);
11190 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
11191 allocInfo.memoryTypeIndex = memTypeIndex;
11192 allocInfo.allocationSize = size;
11194 #if VMA_DEDICATED_ALLOCATION 11195 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
11196 if(m_UseKhrDedicatedAllocation)
11198 if(dedicatedBuffer != VK_NULL_HANDLE)
11200 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
11201 dedicatedAllocInfo.buffer = dedicatedBuffer;
11202 allocInfo.pNext = &dedicatedAllocInfo;
11204 else if(dedicatedImage != VK_NULL_HANDLE)
11206 dedicatedAllocInfo.image = dedicatedImage;
11207 allocInfo.pNext = &dedicatedAllocInfo;
11210 #endif // #if VMA_DEDICATED_ALLOCATION 11213 VkDeviceMemory hMemory = VK_NULL_HANDLE;
11214 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
11217 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
11221 void* pMappedData = VMA_NULL;
11224 res = (*m_VulkanFunctions.vkMapMemory)(
11233 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
11234 FreeVulkanMemory(memTypeIndex, size, hMemory);
11239 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
11240 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
11241 (*pAllocation)->SetUserData(
this, pUserData);
11242 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11244 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11249 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
11250 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
11251 VMA_ASSERT(pDedicatedAllocations);
11252 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
11255 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
11260 void VmaAllocator_T::GetBufferMemoryRequirements(
11262 VkMemoryRequirements& memReq,
11263 bool& requiresDedicatedAllocation,
11264 bool& prefersDedicatedAllocation)
const 11266 #if VMA_DEDICATED_ALLOCATION 11267 if(m_UseKhrDedicatedAllocation)
11269 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
11270 memReqInfo.buffer = hBuffer;
11272 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
11274 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
11275 memReq2.pNext = &memDedicatedReq;
11277 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
11279 memReq = memReq2.memoryRequirements;
11280 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
11281 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
11284 #endif // #if VMA_DEDICATED_ALLOCATION 11286 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
11287 requiresDedicatedAllocation =
false;
11288 prefersDedicatedAllocation =
false;
11292 void VmaAllocator_T::GetImageMemoryRequirements(
11294 VkMemoryRequirements& memReq,
11295 bool& requiresDedicatedAllocation,
11296 bool& prefersDedicatedAllocation)
const 11298 #if VMA_DEDICATED_ALLOCATION 11299 if(m_UseKhrDedicatedAllocation)
11301 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
11302 memReqInfo.image = hImage;
11304 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
11306 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
11307 memReq2.pNext = &memDedicatedReq;
11309 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
11311 memReq = memReq2.memoryRequirements;
11312 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
11313 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
11316 #endif // #if VMA_DEDICATED_ALLOCATION 11318 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
11319 requiresDedicatedAllocation =
false;
11320 prefersDedicatedAllocation =
false;
11324 VkResult VmaAllocator_T::AllocateMemory(
11325 const VkMemoryRequirements& vkMemReq,
11326 bool requiresDedicatedAllocation,
11327 bool prefersDedicatedAllocation,
11328 VkBuffer dedicatedBuffer,
11329 VkImage dedicatedImage,
11331 VmaSuballocationType suballocType,
11337 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
11338 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11343 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
11344 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11346 if(requiresDedicatedAllocation)
11350 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
11351 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11353 if(createInfo.
pool != VK_NULL_HANDLE)
11355 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
11356 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11359 if((createInfo.
pool != VK_NULL_HANDLE) &&
11362 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
11363 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11366 if(createInfo.
pool != VK_NULL_HANDLE)
11368 const VkDeviceSize alignmentForPool = VMA_MAX(
11369 vkMemReq.alignment,
11370 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
11371 return createInfo.
pool->m_BlockVector.Allocate(
11373 m_CurrentFrameIndex.load(),
11383 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
11384 uint32_t memTypeIndex = UINT32_MAX;
11386 if(res == VK_SUCCESS)
11388 VkDeviceSize alignmentForMemType = VMA_MAX(
11389 vkMemReq.alignment,
11390 GetMemoryTypeMinAlignment(memTypeIndex));
11392 res = AllocateMemoryOfType(
11394 alignmentForMemType,
11395 requiresDedicatedAllocation || prefersDedicatedAllocation,
11403 if(res == VK_SUCCESS)
11413 memoryTypeBits &= ~(1u << memTypeIndex);
11416 if(res == VK_SUCCESS)
11418 alignmentForMemType = VMA_MAX(
11419 vkMemReq.alignment,
11420 GetMemoryTypeMinAlignment(memTypeIndex));
11422 res = AllocateMemoryOfType(
11424 alignmentForMemType,
11425 requiresDedicatedAllocation || prefersDedicatedAllocation,
11433 if(res == VK_SUCCESS)
11443 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11454 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
11456 VMA_ASSERT(allocation);
11458 if(allocation->CanBecomeLost() ==
false ||
11459 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11461 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11463 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
11466 switch(allocation->GetType())
11468 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
11470 VmaBlockVector* pBlockVector = VMA_NULL;
11471 VmaPool hPool = allocation->GetPool();
11472 if(hPool != VK_NULL_HANDLE)
11474 pBlockVector = &hPool->m_BlockVector;
11478 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
11479 pBlockVector = m_pBlockVectors[memTypeIndex];
11481 pBlockVector->Free(allocation);
11484 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
11485 FreeDedicatedMemory(allocation);
11492 allocation->SetUserData(
this, VMA_NULL);
11493 vma_delete(
this, allocation);
11496 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
11499 InitStatInfo(pStats->
total);
11500 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
11502 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
11506 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11508 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
11509 VMA_ASSERT(pBlockVector);
11510 pBlockVector->AddStats(pStats);
11515 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
11516 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
11518 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
11523 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11525 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
11526 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
11527 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
11528 VMA_ASSERT(pDedicatedAllocVector);
11529 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
11532 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
11533 VmaAddStatInfo(pStats->
total, allocationStatInfo);
11534 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
11535 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
11540 VmaPostprocessCalcStatInfo(pStats->
total);
11541 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
11542 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
11543 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
11544 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
11547 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
11549 VkResult VmaAllocator_T::Defragment(
11551 size_t allocationCount,
11552 VkBool32* pAllocationsChanged,
11556 if(pAllocationsChanged != VMA_NULL)
11558 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
11560 if(pDefragmentationStats != VMA_NULL)
11562 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
11565 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
11567 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
11569 const size_t poolCount = m_Pools.size();
11572 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11575 VMA_ASSERT(hAlloc);
11576 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
11578 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11579 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
11581 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
11583 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
11585 VmaBlockVector* pAllocBlockVector = VMA_NULL;
11587 const VmaPool hAllocPool = hAlloc->GetPool();
11589 if(hAllocPool != VK_NULL_HANDLE)
11592 if(!hAllocPool->m_BlockVector.UsesLinearAlgorithm())
11594 pAllocBlockVector = &hAllocPool->m_BlockVector;
11600 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
11603 if(pAllocBlockVector != VMA_NULL)
11605 VmaDefragmentator*
const pDefragmentator =
11606 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
11607 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
11608 &pAllocationsChanged[allocIndex] : VMA_NULL;
11609 pDefragmentator->AddAllocation(hAlloc, pChanged);
11614 VkResult result = VK_SUCCESS;
11618 VkDeviceSize maxBytesToMove = SIZE_MAX;
11619 uint32_t maxAllocationsToMove = UINT32_MAX;
11620 if(pDefragmentationInfo != VMA_NULL)
11627 for(uint32_t memTypeIndex = 0;
11628 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
11632 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
11634 result = m_pBlockVectors[memTypeIndex]->Defragment(
11635 pDefragmentationStats,
11637 maxAllocationsToMove);
11642 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
11644 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
11645 pDefragmentationStats,
11647 maxAllocationsToMove);
11653 for(
size_t poolIndex = poolCount; poolIndex--; )
11655 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
11659 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
11661 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
11663 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
11672 if(hAllocation->CanBecomeLost())
11678 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
11679 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
11682 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
11686 pAllocationInfo->
offset = 0;
11687 pAllocationInfo->
size = hAllocation->GetSize();
11689 pAllocationInfo->
pUserData = hAllocation->GetUserData();
11692 else if(localLastUseFrameIndex == localCurrFrameIndex)
11694 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
11695 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
11696 pAllocationInfo->
offset = hAllocation->GetOffset();
11697 pAllocationInfo->
size = hAllocation->GetSize();
11699 pAllocationInfo->
pUserData = hAllocation->GetUserData();
11704 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
11706 localLastUseFrameIndex = localCurrFrameIndex;
11713 #if VMA_STATS_STRING_ENABLED 11714 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
11715 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
11718 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
11719 if(localLastUseFrameIndex == localCurrFrameIndex)
11725 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
11727 localLastUseFrameIndex = localCurrFrameIndex;
11733 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
11734 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
11735 pAllocationInfo->
offset = hAllocation->GetOffset();
11736 pAllocationInfo->
size = hAllocation->GetSize();
11737 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
11738 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame without retrieving its
// info. For allocations that can become lost this is a lock-free
// compare-exchange loop on the last-use frame index; returns whether the
// allocation is still valid.
// NOTE(review): this copy of the function is damaged — braces, return
// statements and the retry-loop construct were lost in extraction; the CAS
// structure below is left byte-identical rather than reconstructed.
11742 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
// Allocations created with CAN_BECOME_LOST must be revalidated per frame.
11745 if(hAllocation->CanBecomeLost())
11747 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
11748 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: nothing to touch.
11751 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Already touched this frame: done.
11755 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Otherwise try to publish the current frame index; retry on contention.
11761 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
11763 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: only maintain the frame index for statistics.
11770 #if VMA_STATS_STRING_ENABLED 11771 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
11772 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// A non-lost-capable allocation can never be in the LOST state.
11775 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
11776 if(localLastUseFrameIndex == localCurrFrameIndex)
11782 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
11784 localLastUseFrameIndex = localCurrFrameIndex;
11796 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
11804 newCreateInfo.
maxBlockCount = isLinearAlgorithm ? 1 : SIZE_MAX;
11809 return VK_ERROR_INITIALIZATION_FAILED;
11816 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
11818 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
11819 if(res != VK_SUCCESS)
11821 vma_delete(
this, *pPool);
11828 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
11829 (*pPool)->SetId(m_NextPoolId++);
11830 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
11836 void VmaAllocator_T::DestroyPool(
VmaPool pool)
11840 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
11841 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
11842 VMA_ASSERT(success &&
"Pool not found in Allocator.");
11845 vma_delete(
this, pool);
11850 pool->m_BlockVector.GetPoolStats(pPoolStats);
11853 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
11855 m_CurrentFrameIndex.store(frameIndex);
11858 void VmaAllocator_T::MakePoolAllocationsLost(
11860 size_t* pLostAllocationCount)
11862 hPool->m_BlockVector.MakePoolAllocationsLost(
11863 m_CurrentFrameIndex.load(),
11864 pLostAllocationCount);
11867 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
11869 return hPool->m_BlockVector.CheckCorruption();
11872 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
11874 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
11877 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11879 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
11881 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
11882 VMA_ASSERT(pBlockVector);
11883 VkResult localRes = pBlockVector->CheckCorruption();
11886 case VK_ERROR_FEATURE_NOT_PRESENT:
11889 finalRes = VK_SUCCESS;
11899 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
11900 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
11902 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
11904 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
11907 case VK_ERROR_FEATURE_NOT_PRESENT:
11910 finalRes = VK_SUCCESS;
11922 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
11924 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
11925 (*pAllocation)->InitLost();
11928 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
11930 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
11933 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
11935 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
11936 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
11938 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
11939 if(res == VK_SUCCESS)
11941 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
11946 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
11951 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
11954 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
11956 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
11962 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
11964 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
11966 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
11969 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
11971 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
11972 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
11974 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
11975 m_HeapSizeLimit[heapIndex] += size;
11979 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
11981 if(hAllocation->CanBecomeLost())
11983 return VK_ERROR_MEMORY_MAP_FAILED;
11986 switch(hAllocation->GetType())
11988 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
11990 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
11991 char *pBytes = VMA_NULL;
11992 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
11993 if(res == VK_SUCCESS)
11995 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
11996 hAllocation->BlockAllocMap();
12000 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12001 return hAllocation->DedicatedAllocMap(
this, ppData);
12004 return VK_ERROR_MEMORY_MAP_FAILED;
// Interior of VmaAllocator_T::Unmap: reverses a previous Map by decrementing
// the allocation's map count and unmapping block or dedicated memory.
// NOTE(review): the function signature and braces were lost in this copy of
// the file; the statements below are left byte-identical.
12010 switch(hAllocation->GetType())
12012 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block allocation: update the allocation's map count, then release one
// reference on the shared block mapping.
12014 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12015 hAllocation->BlockAllocUnmap();
12016 pBlock->Unmap(
this, 1);
// Dedicated allocation: it owns its VkDeviceMemory, unmap directly.
12019 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12020 hAllocation->DedicatedAllocUnmap(
this);
// Binds a VkBuffer to the allocation's memory. Dedicated allocations call
// vkBindBufferMemory directly on their own VkDeviceMemory; block
// sub-allocations delegate to the owning block, which applies the offset.
// NOTE(review): some argument lines of the vkBindBufferMemory call (device,
// buffer, offset) were dropped by this extraction; code kept byte-identical.
12027 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
12029 VkResult res = VK_SUCCESS;
12030 switch(hAllocation->GetType())
12032 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12033 res = GetVulkanFunctions().vkBindBufferMemory(
12036 hAllocation->GetMemory(),
12039 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// A null block here means the allocation was lost — asserted, not handled.
12041 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12042 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
12043 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Binds a VkImage to the allocation's memory. Structure parallels
// BindBufferMemory: dedicated allocations use vkBindImageMemory on their own
// VkDeviceMemory; block sub-allocations delegate to the owning block.
// NOTE(review): some argument lines of the vkBindImageMemory call were
// dropped by this extraction; code kept byte-identical.
12052 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
12054 VkResult res = VK_SUCCESS;
12055 switch(hAllocation->GetType())
12057 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12058 res = GetVulkanFunctions().vkBindImageMemory(
12061 hAllocation->GetMemory(),
12064 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// A null block here means the allocation was lost — asserted, not handled.
12066 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12067 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
12068 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a range of the allocation's memory (op selects
// vkFlushMappedMemoryRanges vs vkInvalidateMappedMemoryRanges). Only acts on
// non-coherent memory types and non-empty ranges. The range is expanded to
// nonCoherentAtomSize boundaries as the Vulkan spec requires for
// VkMappedMemoryRange, then clamped to the allocation/block size.
// NOTE(review): the first parameter line (VmaAllocation hAllocation) and the
// braces/switch-on-op header were dropped by this extraction; code below is
// byte-identical to what survived.
12077 void VmaAllocator_T::FlushOrInvalidateAllocation(
12079 VkDeviceSize offset, VkDeviceSize size,
12080 VMA_CACHE_OPERATION op)
12082 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
// Coherent memory needs no explicit flush/invalidate; size==0 is a no-op.
12083 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
12085 const VkDeviceSize allocationSize = hAllocation->GetSize();
12086 VMA_ASSERT(offset <= allocationSize);
12088 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12090 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12091 memRange.memory = hAllocation->GetMemory();
12093 switch(hAllocation->GetType())
12095 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Align the start down to an atom boundary; VK_WHOLE_SIZE covers the rest
// of the allocation, otherwise align the (start-adjusted) size up and clamp.
12096 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
12097 if(size == VK_WHOLE_SIZE)
12099 memRange.size = allocationSize - memRange.offset;
12103 VMA_ASSERT(offset + size <= allocationSize);
12104 memRange.size = VMA_MIN(
12105 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
12106 allocationSize - memRange.offset);
12110 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Same alignment logic, but the range is first computed relative to the
// sub-allocation, then shifted by the allocation's offset within the block
// and clamped to the block size.
12113 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
12114 if(size == VK_WHOLE_SIZE)
12116 size = allocationSize - offset;
12120 VMA_ASSERT(offset + size <= allocationSize);
12122 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
12125 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
// Sub-allocation offsets are expected to already be atom-aligned.
12126 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
12127 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
12128 memRange.offset += allocationOffset;
12129 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch on the requested cache operation (switch header lost in
// extraction).
12140 case VMA_CACHE_FLUSH:
12141 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
12143 case VMA_CACHE_INVALIDATE:
12144 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated (non-block) allocation: removes it from the per-memory-
// type dedicated-allocation registry (under that type's mutex), unmaps it if
// it is still mapped, and releases the VkDeviceMemory via FreeVulkanMemory.
12153 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
12155 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
12157 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
// Registry removal is scoped under the per-type mutex (RAII lock; the brace
// that closed this scope was lost in extraction).
12159 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12160 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12161 VMA_ASSERT(pDedicatedAllocations);
// The vector is kept sorted; removal must find the allocation or the
// registry is corrupt.
12162 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
12163 VMA_ASSERT(success);
12166 VkDeviceMemory hMemory = allocation->GetMemory();
// A persistently mapped dedicated allocation must be unmapped before free.
12168 if(allocation->GetMappedData() != VMA_NULL)
12170 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
12173 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
12175 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills the allocation's memory with a byte pattern when
// VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled. Only applies to host-visible,
// non-lost-capable allocations; maps, memsets, flushes, and unmaps. If the
// map fails, it asserts (debug feature, so failure is loud, not handled).
12178 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
12180 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
12181 !hAllocation->CanBecomeLost() &&
12182 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12184 void* pData = VMA_NULL;
12185 VkResult res = Map(hAllocation, &pData);
12186 if(res == VK_SUCCESS)
12188 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush so the pattern is visible on non-coherent memory before unmapping.
12189 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
12190 Unmap(hAllocation);
12194 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Emits the allocator's detailed state as JSON: three optional sections —
// "DedicatedAllocations" (per memory type, under each type's mutex),
// "DefaultPools" (per-type block vectors), and "Pools" (user-created pools,
// under the pools mutex). Section headers are written lazily, only once the
// first non-empty entry is found.
// NOTE(review): the #if guard is fused onto the signature line, and many
// json.EndString()/EndObject() and brace lines were dropped by this
// extraction; code below is byte-identical to what survived.
12199 #if VMA_STATS_STRING_ENABLED 12201 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
12203 bool dedicatedAllocationsStarted =
false;
12204 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12206 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12207 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12208 VMA_ASSERT(pDedicatedAllocVector);
12209 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object only when the first non-empty
// per-type vector is encountered.
12211 if(dedicatedAllocationsStarted ==
false)
12213 dedicatedAllocationsStarted =
true;
12214 json.WriteString(
"DedicatedAllocations");
12215 json.BeginObject();
12218 json.BeginString(
"Type ");
12219 json.ContinueString(memTypeIndex);
12224 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
12226 json.BeginObject(
true);
12228 hAlloc->PrintParameters(json);
12235 if(dedicatedAllocationsStarted)
// Second section: default (non-pool) block vectors per memory type.
12241 bool allocationsStarted =
false;
12242 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12244 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
12246 if(allocationsStarted ==
false)
12248 allocationsStarted =
true;
12249 json.WriteString(
"DefaultPools");
12250 json.BeginObject();
12253 json.BeginString(
"Type ");
12254 json.ContinueString(memTypeIndex);
12257 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
12260 if(allocationsStarted)
// Third section: user-created custom pools, keyed by pool id, guarded by
// the global pools mutex.
12268 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12269 const size_t poolCount = m_Pools.size();
12272 json.WriteString(
"Pools");
12273 json.BeginObject();
12274 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
12276 json.BeginString();
12277 json.ContinueString(m_Pools[poolIndex]->GetId());
12280 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
12287 #endif // #if VMA_STATS_STRING_ENABLED 12296 VMA_ASSERT(pCreateInfo && pAllocator);
12297 VMA_DEBUG_LOG(
"vmaCreateAllocator");
12299 return (*pAllocator)->Init(pCreateInfo);
12305 if(allocator != VK_NULL_HANDLE)
12307 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
12308 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
12309 vma_delete(&allocationCallbacks, allocator);
12315 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
12317 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
12318 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
12323 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
12325 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
12326 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
12331 uint32_t memoryTypeIndex,
12332 VkMemoryPropertyFlags* pFlags)
12334 VMA_ASSERT(allocator && pFlags);
12335 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
12336 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
12341 uint32_t frameIndex)
12343 VMA_ASSERT(allocator);
12344 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
12346 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12348 allocator->SetCurrentFrameIndex(frameIndex);
12355 VMA_ASSERT(allocator && pStats);
12356 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12357 allocator->CalculateStats(pStats);
12360 #if VMA_STATS_STRING_ENABLED 12364 char** ppStatsString,
12365 VkBool32 detailedMap)
12367 VMA_ASSERT(allocator && ppStatsString);
12368 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12370 VmaStringBuilder sb(allocator);
12372 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
12373 json.BeginObject();
12376 allocator->CalculateStats(&stats);
12378 json.WriteString(
"Total");
12379 VmaPrintStatInfo(json, stats.
total);
12381 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
12383 json.BeginString(
"Heap ");
12384 json.ContinueString(heapIndex);
12386 json.BeginObject();
12388 json.WriteString(
"Size");
12389 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
12391 json.WriteString(
"Flags");
12392 json.BeginArray(
true);
12393 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
12395 json.WriteString(
"DEVICE_LOCAL");
12401 json.WriteString(
"Stats");
12402 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
12405 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
12407 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
12409 json.BeginString(
"Type ");
12410 json.ContinueString(typeIndex);
12413 json.BeginObject();
12415 json.WriteString(
"Flags");
12416 json.BeginArray(
true);
12417 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
12418 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
12420 json.WriteString(
"DEVICE_LOCAL");
12422 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12424 json.WriteString(
"HOST_VISIBLE");
12426 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
12428 json.WriteString(
"HOST_COHERENT");
12430 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
12432 json.WriteString(
"HOST_CACHED");
12434 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
12436 json.WriteString(
"LAZILY_ALLOCATED");
12442 json.WriteString(
"Stats");
12443 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
12452 if(detailedMap == VK_TRUE)
12454 allocator->PrintDetailedMap(json);
12460 const size_t len = sb.GetLength();
12461 char*
const pChars = vma_new_array(allocator,
char, len + 1);
12464 memcpy(pChars, sb.GetData(), len);
12466 pChars[len] =
'\0';
12467 *ppStatsString = pChars;
12472 char* pStatsString)
12474 if(pStatsString != VMA_NULL)
12476 VMA_ASSERT(allocator);
12477 size_t len = strlen(pStatsString);
12478 vma_delete_array(allocator, pStatsString, len + 1);
12482 #endif // #if VMA_STATS_STRING_ENABLED 12489 uint32_t memoryTypeBits,
12491 uint32_t* pMemoryTypeIndex)
12493 VMA_ASSERT(allocator != VK_NULL_HANDLE);
12494 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
12495 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
12502 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
12503 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
12508 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
12512 switch(pAllocationCreateInfo->
usage)
12517 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12519 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
12523 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12526 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
12527 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12529 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
12533 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
12534 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
12540 *pMemoryTypeIndex = UINT32_MAX;
12541 uint32_t minCost = UINT32_MAX;
12542 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
12543 memTypeIndex < allocator->GetMemoryTypeCount();
12544 ++memTypeIndex, memTypeBit <<= 1)
12547 if((memTypeBit & memoryTypeBits) != 0)
12549 const VkMemoryPropertyFlags currFlags =
12550 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
12552 if((requiredFlags & ~currFlags) == 0)
12555 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
12557 if(currCost < minCost)
12559 *pMemoryTypeIndex = memTypeIndex;
12564 minCost = currCost;
12569 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
12574 const VkBufferCreateInfo* pBufferCreateInfo,
12576 uint32_t* pMemoryTypeIndex)
12578 VMA_ASSERT(allocator != VK_NULL_HANDLE);
12579 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
12580 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
12581 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
12583 const VkDevice hDev = allocator->m_hDevice;
12584 VkBuffer hBuffer = VK_NULL_HANDLE;
12585 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
12586 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
12587 if(res == VK_SUCCESS)
12589 VkMemoryRequirements memReq = {};
12590 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
12591 hDev, hBuffer, &memReq);
12595 memReq.memoryTypeBits,
12596 pAllocationCreateInfo,
12599 allocator->GetVulkanFunctions().vkDestroyBuffer(
12600 hDev, hBuffer, allocator->GetAllocationCallbacks());
12607 const VkImageCreateInfo* pImageCreateInfo,
12609 uint32_t* pMemoryTypeIndex)
12611 VMA_ASSERT(allocator != VK_NULL_HANDLE);
12612 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
12613 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
12614 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
12616 const VkDevice hDev = allocator->m_hDevice;
12617 VkImage hImage = VK_NULL_HANDLE;
12618 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
12619 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
12620 if(res == VK_SUCCESS)
12622 VkMemoryRequirements memReq = {};
12623 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
12624 hDev, hImage, &memReq);
12628 memReq.memoryTypeBits,
12629 pAllocationCreateInfo,
12632 allocator->GetVulkanFunctions().vkDestroyImage(
12633 hDev, hImage, allocator->GetAllocationCallbacks());
12643 VMA_ASSERT(allocator && pCreateInfo && pPool);
12645 VMA_DEBUG_LOG(
"vmaCreatePool");
12647 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12649 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
12651 #if VMA_RECORDING_ENABLED 12652 if(allocator->GetRecorder() != VMA_NULL)
12654 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
12665 VMA_ASSERT(allocator);
12667 if(pool == VK_NULL_HANDLE)
12672 VMA_DEBUG_LOG(
"vmaDestroyPool");
12674 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12676 #if VMA_RECORDING_ENABLED 12677 if(allocator->GetRecorder() != VMA_NULL)
12679 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
12683 allocator->DestroyPool(pool);
12691 VMA_ASSERT(allocator && pool && pPoolStats);
12693 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12695 allocator->GetPoolStats(pool, pPoolStats);
12701 size_t* pLostAllocationCount)
12703 VMA_ASSERT(allocator && pool);
12705 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12707 #if VMA_RECORDING_ENABLED 12708 if(allocator->GetRecorder() != VMA_NULL)
12710 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
12714 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
12719 VMA_ASSERT(allocator && pool);
12721 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12723 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
12725 return allocator->CheckPoolCorruption(pool);
12730 const VkMemoryRequirements* pVkMemoryRequirements,
12735 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
12737 VMA_DEBUG_LOG(
"vmaAllocateMemory");
12739 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12741 VkResult result = allocator->AllocateMemory(
12742 *pVkMemoryRequirements,
12748 VMA_SUBALLOCATION_TYPE_UNKNOWN,
12751 #if VMA_RECORDING_ENABLED 12752 if(allocator->GetRecorder() != VMA_NULL)
12754 allocator->GetRecorder()->RecordAllocateMemory(
12755 allocator->GetCurrentFrameIndex(),
12756 *pVkMemoryRequirements,
12762 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
12764 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
12777 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
12779 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
12781 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12783 VkMemoryRequirements vkMemReq = {};
12784 bool requiresDedicatedAllocation =
false;
12785 bool prefersDedicatedAllocation =
false;
12786 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
12787 requiresDedicatedAllocation,
12788 prefersDedicatedAllocation);
12790 VkResult result = allocator->AllocateMemory(
12792 requiresDedicatedAllocation,
12793 prefersDedicatedAllocation,
12797 VMA_SUBALLOCATION_TYPE_BUFFER,
12800 #if VMA_RECORDING_ENABLED 12801 if(allocator->GetRecorder() != VMA_NULL)
12803 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
12804 allocator->GetCurrentFrameIndex(),
12806 requiresDedicatedAllocation,
12807 prefersDedicatedAllocation,
12813 if(pAllocationInfo && result == VK_SUCCESS)
12815 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
12828 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
12830 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
12832 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12834 VkMemoryRequirements vkMemReq = {};
12835 bool requiresDedicatedAllocation =
false;
12836 bool prefersDedicatedAllocation =
false;
12837 allocator->GetImageMemoryRequirements(image, vkMemReq,
12838 requiresDedicatedAllocation, prefersDedicatedAllocation);
12840 VkResult result = allocator->AllocateMemory(
12842 requiresDedicatedAllocation,
12843 prefersDedicatedAllocation,
12847 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
12850 #if VMA_RECORDING_ENABLED 12851 if(allocator->GetRecorder() != VMA_NULL)
12853 allocator->GetRecorder()->RecordAllocateMemoryForImage(
12854 allocator->GetCurrentFrameIndex(),
12856 requiresDedicatedAllocation,
12857 prefersDedicatedAllocation,
12863 if(pAllocationInfo && result == VK_SUCCESS)
12865 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
12875 VMA_ASSERT(allocator);
12877 if(allocation == VK_NULL_HANDLE)
12882 VMA_DEBUG_LOG(
"vmaFreeMemory");
12884 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12886 #if VMA_RECORDING_ENABLED 12887 if(allocator->GetRecorder() != VMA_NULL)
12889 allocator->GetRecorder()->RecordFreeMemory(
12890 allocator->GetCurrentFrameIndex(),
12895 allocator->FreeMemory(allocation);
12903 VMA_ASSERT(allocator && allocation && pAllocationInfo);
12905 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12907 #if VMA_RECORDING_ENABLED 12908 if(allocator->GetRecorder() != VMA_NULL)
12910 allocator->GetRecorder()->RecordGetAllocationInfo(
12911 allocator->GetCurrentFrameIndex(),
12916 allocator->GetAllocationInfo(allocation, pAllocationInfo);
12923 VMA_ASSERT(allocator && allocation);
12925 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12927 #if VMA_RECORDING_ENABLED 12928 if(allocator->GetRecorder() != VMA_NULL)
12930 allocator->GetRecorder()->RecordTouchAllocation(
12931 allocator->GetCurrentFrameIndex(),
12936 return allocator->TouchAllocation(allocation);
12944 VMA_ASSERT(allocator && allocation);
12946 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12948 allocation->SetUserData(allocator, pUserData);
12950 #if VMA_RECORDING_ENABLED 12951 if(allocator->GetRecorder() != VMA_NULL)
12953 allocator->GetRecorder()->RecordSetAllocationUserData(
12954 allocator->GetCurrentFrameIndex(),
12965 VMA_ASSERT(allocator && pAllocation);
12967 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
12969 allocator->CreateLostAllocation(pAllocation);
12971 #if VMA_RECORDING_ENABLED 12972 if(allocator->GetRecorder() != VMA_NULL)
12974 allocator->GetRecorder()->RecordCreateLostAllocation(
12975 allocator->GetCurrentFrameIndex(),
12986 VMA_ASSERT(allocator && allocation && ppData);
12988 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12990 VkResult res = allocator->Map(allocation, ppData);
12992 #if VMA_RECORDING_ENABLED 12993 if(allocator->GetRecorder() != VMA_NULL)
12995 allocator->GetRecorder()->RecordMapMemory(
12996 allocator->GetCurrentFrameIndex(),
13008 VMA_ASSERT(allocator && allocation);
13010 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13012 #if VMA_RECORDING_ENABLED 13013 if(allocator->GetRecorder() != VMA_NULL)
13015 allocator->GetRecorder()->RecordUnmapMemory(
13016 allocator->GetCurrentFrameIndex(),
13021 allocator->Unmap(allocation);
13026 VMA_ASSERT(allocator && allocation);
13028 VMA_DEBUG_LOG(
"vmaFlushAllocation");
13030 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13032 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
13034 #if VMA_RECORDING_ENABLED 13035 if(allocator->GetRecorder() != VMA_NULL)
13037 allocator->GetRecorder()->RecordFlushAllocation(
13038 allocator->GetCurrentFrameIndex(),
13039 allocation, offset, size);
13046 VMA_ASSERT(allocator && allocation);
13048 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
13050 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13052 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
13054 #if VMA_RECORDING_ENABLED 13055 if(allocator->GetRecorder() != VMA_NULL)
13057 allocator->GetRecorder()->RecordInvalidateAllocation(
13058 allocator->GetCurrentFrameIndex(),
13059 allocation, offset, size);
13066 VMA_ASSERT(allocator);
13068 VMA_DEBUG_LOG(
"vmaCheckCorruption");
13070 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13072 return allocator->CheckCorruption(memoryTypeBits);
13078 size_t allocationCount,
13079 VkBool32* pAllocationsChanged,
13083 VMA_ASSERT(allocator && pAllocations);
13085 VMA_DEBUG_LOG(
"vmaDefragment");
13087 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13089 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
13097 VMA_ASSERT(allocator && allocation && buffer);
13099 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
13101 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13103 return allocator->BindBufferMemory(allocation, buffer);
13111 VMA_ASSERT(allocator && allocation && image);
13113 VMA_DEBUG_LOG(
"vmaBindImageMemory");
13115 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13117 return allocator->BindImageMemory(allocation, image);
13122 const VkBufferCreateInfo* pBufferCreateInfo,
13128 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
13130 VMA_DEBUG_LOG(
"vmaCreateBuffer");
13132 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13134 *pBuffer = VK_NULL_HANDLE;
13135 *pAllocation = VK_NULL_HANDLE;
13138 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
13139 allocator->m_hDevice,
13141 allocator->GetAllocationCallbacks(),
13146 VkMemoryRequirements vkMemReq = {};
13147 bool requiresDedicatedAllocation =
false;
13148 bool prefersDedicatedAllocation =
false;
13149 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
13150 requiresDedicatedAllocation, prefersDedicatedAllocation);
13154 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
13156 VMA_ASSERT(vkMemReq.alignment %
13157 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
13159 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
13161 VMA_ASSERT(vkMemReq.alignment %
13162 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
13164 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
13166 VMA_ASSERT(vkMemReq.alignment %
13167 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
13171 res = allocator->AllocateMemory(
13173 requiresDedicatedAllocation,
13174 prefersDedicatedAllocation,
13177 *pAllocationCreateInfo,
13178 VMA_SUBALLOCATION_TYPE_BUFFER,
13181 #if VMA_RECORDING_ENABLED 13182 if(allocator->GetRecorder() != VMA_NULL)
13184 allocator->GetRecorder()->RecordCreateBuffer(
13185 allocator->GetCurrentFrameIndex(),
13186 *pBufferCreateInfo,
13187 *pAllocationCreateInfo,
13195 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
13199 #if VMA_STATS_STRING_ENABLED 13200 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
13202 if(pAllocationInfo != VMA_NULL)
13204 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13209 allocator->FreeMemory(*pAllocation);
13210 *pAllocation = VK_NULL_HANDLE;
13211 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
13212 *pBuffer = VK_NULL_HANDLE;
13215 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
13216 *pBuffer = VK_NULL_HANDLE;
13227 VMA_ASSERT(allocator);
13229 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
13234 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
13236 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13238 #if VMA_RECORDING_ENABLED 13239 if(allocator->GetRecorder() != VMA_NULL)
13241 allocator->GetRecorder()->RecordDestroyBuffer(
13242 allocator->GetCurrentFrameIndex(),
13247 if(buffer != VK_NULL_HANDLE)
13249 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
13252 if(allocation != VK_NULL_HANDLE)
13254 allocator->FreeMemory(allocation);
13260 const VkImageCreateInfo* pImageCreateInfo,
13266 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
13268 VMA_DEBUG_LOG(
"vmaCreateImage");
13270 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13272 *pImage = VK_NULL_HANDLE;
13273 *pAllocation = VK_NULL_HANDLE;
13276 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
13277 allocator->m_hDevice,
13279 allocator->GetAllocationCallbacks(),
13283 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
13284 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
13285 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
13288 VkMemoryRequirements vkMemReq = {};
13289 bool requiresDedicatedAllocation =
false;
13290 bool prefersDedicatedAllocation =
false;
13291 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
13292 requiresDedicatedAllocation, prefersDedicatedAllocation);
13294 res = allocator->AllocateMemory(
13296 requiresDedicatedAllocation,
13297 prefersDedicatedAllocation,
13300 *pAllocationCreateInfo,
13304 #if VMA_RECORDING_ENABLED 13305 if(allocator->GetRecorder() != VMA_NULL)
13307 allocator->GetRecorder()->RecordCreateImage(
13308 allocator->GetCurrentFrameIndex(),
13310 *pAllocationCreateInfo,
13318 res = allocator->BindImageMemory(*pAllocation, *pImage);
13322 #if VMA_STATS_STRING_ENABLED 13323 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
13325 if(pAllocationInfo != VMA_NULL)
13327 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13332 allocator->FreeMemory(*pAllocation);
13333 *pAllocation = VK_NULL_HANDLE;
13334 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
13335 *pImage = VK_NULL_HANDLE;
13338 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
13339 *pImage = VK_NULL_HANDLE;
13350 VMA_ASSERT(allocator);
13352 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
13357 VMA_DEBUG_LOG(
"vmaDestroyImage");
13359 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13361 #if VMA_RECORDING_ENABLED 13362 if(allocator->GetRecorder() != VMA_NULL)
13364 allocator->GetRecorder()->RecordDestroyImage(
13365 allocator->GetCurrentFrameIndex(),
13370 if(image != VK_NULL_HANDLE)
13372 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
13374 if(allocation != VK_NULL_HANDLE)
13376 allocator->FreeMemory(allocation);
13380 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1429
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1742
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1414
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1498
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
Definition: vk_mem_alloc.h:1376
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1357
+
Definition: vk_mem_alloc.h:1460
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1441
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:1615
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1349
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2004
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1411
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2249
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1834
-
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1388
+
Definition: vk_mem_alloc.h:1699
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1433
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2109
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1495
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2354
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1923
+
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1472
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1888
-
Definition: vk_mem_alloc.h:1695
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1338
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1733
-
Definition: vk_mem_alloc.h:1642
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1423
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1993
+
Definition: vk_mem_alloc.h:1779
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1422
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1822
+
Definition: vk_mem_alloc.h:1726
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1507
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1476
-
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:1408
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1560
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1492
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1646
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1730
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1548
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1354
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1547
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2253
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1632
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1438
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1631
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2358
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1440
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1557
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2261
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1717
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2244
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1355
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1280
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1524
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1641
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2366
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1806
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2349
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1439
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1364
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1417
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1501
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1842
-
Definition: vk_mem_alloc.h:1836
-
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1483
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2014
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1946
+
Definition: vk_mem_alloc.h:1940
+
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1567
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2119
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1350
-
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1374
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1754
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1858
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1894
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1434
+
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1458
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1843
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1962
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1999
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1336
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1845
+
Definition: vk_mem_alloc.h:1420
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1949
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:1593
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:1677
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2239
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2344
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2257
-
Definition: vk_mem_alloc.h:1632
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1741
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1353
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2362
+
Definition: vk_mem_alloc.h:1716
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1830
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1437
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1553
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1286
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1637
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1370
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1307
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1391
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1378
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1312
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2259
+
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1462
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1396
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2364
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1728
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1904
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1817
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:2009
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1346
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1536
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1853
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1299
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1430
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1620
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1957
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1383
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:1702
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1549
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1303
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1848
-
Definition: vk_mem_alloc.h:1641
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1352
+
Definition: vk_mem_alloc.h:1786
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1633
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1387
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1952
+
Definition: vk_mem_alloc.h:1725
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1436
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1723
-
Definition: vk_mem_alloc.h:1714
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1812
+
Definition: vk_mem_alloc.h:1803
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1539
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1348
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1866
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1426
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1897
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1712
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1747
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1623
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1432
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1971
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1510
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2002
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1801
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1836
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1464
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1555
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1682
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1548
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1548
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1639
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1766
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1632
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1359
-
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1396
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1301
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1358
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1443
+
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1480
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1385
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1442
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1880
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1351
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1985
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1435
+
Definition: vk_mem_alloc.h:1797
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1404
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2028
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1420
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1548
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1545
+
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1488
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2133
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1504
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1632
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1629
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1885
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1990
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2009
-
Definition: vk_mem_alloc.h:1710
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2255
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1344
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2114
+
Definition: vk_mem_alloc.h:1799
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2360
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1428
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1543
-
Definition: vk_mem_alloc.h:1598
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1838
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1627
+
Definition: vk_mem_alloc.h:1682
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1942
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1393
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1541
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1356
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1360
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1669
-
Definition: vk_mem_alloc.h:1625
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2023
+
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1477
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1625
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1440
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1444
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1753
+
Definition: vk_mem_alloc.h:1709
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2128
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1334
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1418
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1347
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1990
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1431
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:1938
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2095
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1816
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1549
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1905
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1633
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
-
Definition: vk_mem_alloc.h:1708
-
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1368
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1556
+
Definition: vk_mem_alloc.h:1792
+
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1452
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1640
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1891
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1549
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1996
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1633
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1995
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2100