23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1343 #include <vulkan/vulkan.h> 1345 #if !defined(VMA_DEDICATED_ALLOCATION) 1346 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1347 #define VMA_DEDICATED_ALLOCATION 1 1349 #define VMA_DEDICATED_ALLOCATION 0 1367 uint32_t memoryType,
1368 VkDeviceMemory memory,
1373 uint32_t memoryType,
1374 VkDeviceMemory memory,
1446 #if VMA_DEDICATED_ALLOCATION 1447 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1448 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1469 #ifndef VMA_RECORDING_ENABLED 1471 #define VMA_RECORDING_ENABLED 1 1473 #define VMA_RECORDING_ENABLED 0 1586 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1594 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1604 uint32_t memoryTypeIndex,
1605 VkMemoryPropertyFlags* pFlags);
1617 uint32_t frameIndex);
1650 #define VMA_STATS_STRING_ENABLED 1 1652 #if VMA_STATS_STRING_ENABLED 1659 char** ppStatsString,
1660 VkBool32 detailedMap);
1664 char* pStatsString);
1666 #endif // #if VMA_STATS_STRING_ENABLED 1865 uint32_t memoryTypeBits,
1867 uint32_t* pMemoryTypeIndex);
1883 const VkBufferCreateInfo* pBufferCreateInfo,
1885 uint32_t* pMemoryTypeIndex);
1901 const VkImageCreateInfo* pImageCreateInfo,
1903 uint32_t* pMemoryTypeIndex);
2050 size_t* pLostAllocationCount);
2149 const VkMemoryRequirements* pVkMemoryRequirements,
2459 size_t allocationCount,
2460 VkBool32* pAllocationsChanged,
2526 const VkBufferCreateInfo* pBufferCreateInfo,
2551 const VkImageCreateInfo* pImageCreateInfo,
2577 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2580 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2581 #define VMA_IMPLEMENTATION 2584 #ifdef VMA_IMPLEMENTATION 2585 #undef VMA_IMPLEMENTATION 2607 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2608 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2620 #if VMA_USE_STL_CONTAINERS 2621 #define VMA_USE_STL_VECTOR 1 2622 #define VMA_USE_STL_UNORDERED_MAP 1 2623 #define VMA_USE_STL_LIST 1 2626 #if VMA_USE_STL_VECTOR 2630 #if VMA_USE_STL_UNORDERED_MAP 2631 #include <unordered_map> 2634 #if VMA_USE_STL_LIST 2643 #include <algorithm> 2649 #define VMA_NULL nullptr 2652 #if defined(__APPLE__) || defined(__ANDROID__) 2654 void *aligned_alloc(
size_t alignment,
size_t size)
2657 if(alignment <
sizeof(
void*))
2659 alignment =
sizeof(
void*);
2663 if(posix_memalign(&pointer, alignment, size) == 0)
2677 #define VMA_ASSERT(expr) assert(expr) 2679 #define VMA_ASSERT(expr) 2685 #ifndef VMA_HEAVY_ASSERT 2687 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2689 #define VMA_HEAVY_ASSERT(expr) 2693 #ifndef VMA_ALIGN_OF 2694 #define VMA_ALIGN_OF(type) (__alignof(type)) 2697 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2699 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2701 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2705 #ifndef VMA_SYSTEM_FREE 2707 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2709 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2714 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2718 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2722 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2726 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2729 #ifndef VMA_DEBUG_LOG 2730 #define VMA_DEBUG_LOG(format, ...) 2740 #if VMA_STATS_STRING_ENABLED 2741 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2743 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
2745 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
2747 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
2749 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
2751 snprintf(outStr, strLen,
"%p", ptr);
    // Thin wrappers over std::mutex; exposed through the VMA_MUTEX macro so the
    // user can substitute a custom mutex implementation.
    void Lock() { m_Mutex.lock(); }
    void Unlock() { m_Mutex.unlock(); }
2766 #define VMA_MUTEX VmaMutex 2777 #ifndef VMA_ATOMIC_UINT32 2778 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2781 #ifndef VMA_BEST_FIT 2794 #define VMA_BEST_FIT (1) 2797 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2802 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2805 #ifndef VMA_DEBUG_ALIGNMENT 2810 #define VMA_DEBUG_ALIGNMENT (1) 2813 #ifndef VMA_DEBUG_MARGIN 2818 #define VMA_DEBUG_MARGIN (0) 2821 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2826 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2829 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2835 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2838 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2843 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2846 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2851 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2854 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2855 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2859 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2860 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2864 #ifndef VMA_CLASS_NO_COPY 2865 #define VMA_CLASS_NO_COPY(className) \ 2867 className(const className&) = delete; \ 2868 className& operator=(const className&) = delete; 2871 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// Magic number written into the debug margins around allocations; checked later
// to detect heap corruption (see VmaWriteMagicValue / VmaValidateMagicValue).
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

// Byte patterns used to fill allocation memory on creation/destruction when
// VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled.
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

// All-null VkAllocationCallbacks: passing this makes the CPU allocation helpers
// fall back to the system aligned malloc/free.
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in v (parallel bit-count, no loop).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns given value up to nearest multiply of align value.
// For example: VmaAlignUp(11, 8) = 16. Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T units = (val + align - 1) / align;
    return units * align;
}
// Aligns given value down to nearest multiply of align value.
// For example: VmaAlignDown(11, 8) = 8. Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val - (val % align);
}
// Division with mathematical rounding to nearest number.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
2919 static inline bool VmaStrIsEmpty(
const char* pStr)
2921 return pStr == VMA_NULL || *pStr ==
'\0';
// Lomuto-style partition used by VmaQuickSort: the last element is the pivot.
// Returns the iterator at the pivot's final, sorted position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
2949 template<
typename Iterator,
typename Compare>
2950 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2954 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2955 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2956 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2960 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2962 #endif // #ifndef VMA_SORT 2971 static inline bool VmaBlocksOnSamePage(
2972 VkDeviceSize resourceAOffset,
2973 VkDeviceSize resourceASize,
2974 VkDeviceSize resourceBOffset,
2975 VkDeviceSize pageSize)
2977 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2978 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2979 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2980 VkDeviceSize resourceBStart = resourceBOffset;
2981 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2982 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation; the relative order of the values
// matters for buffer-image granularity conflict checks (see
// VmaIsBufferImageGranularityConflict).
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3002 static inline bool VmaIsBufferImageGranularityConflict(
3003 VmaSuballocationType suballocType1,
3004 VmaSuballocationType suballocType2)
3006 if(suballocType1 > suballocType2)
3008 VMA_SWAP(suballocType1, suballocType2);
3011 switch(suballocType1)
3013 case VMA_SUBALLOCATION_TYPE_FREE:
3015 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3017 case VMA_SUBALLOCATION_TYPE_BUFFER:
3019 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3020 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3021 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3023 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3024 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3025 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3026 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3028 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3029 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3037 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3039 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3040 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3041 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3043 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3047 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3049 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3050 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3051 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3053 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3064 VMA_CLASS_NO_COPY(VmaMutexLock)
3066 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3067 m_pMutex(useMutex ? &mutex : VMA_NULL)
3084 VMA_MUTEX* m_pMutex;
3087 #if VMA_DEBUG_GLOBAL_MUTEX 3088 static VMA_MUTEX gDebugGlobalMutex;
3089 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3091 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3095 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search over a sorted sequence and returns iterator to the first
element that is not less than key (lower bound). The returned iterator equals
end if all elements compare less than key.

Cmp should return true if first argument is less than second argument.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3128 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3130 if((pAllocationCallbacks != VMA_NULL) &&
3131 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3133 return (*pAllocationCallbacks->pfnAllocation)(
3134 pAllocationCallbacks->pUserData,
3137 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3141 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3145 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3147 if((pAllocationCallbacks != VMA_NULL) &&
3148 (pAllocationCallbacks->pfnFree != VMA_NULL))
3150 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3154 VMA_SYSTEM_FREE(ptr);
3158 template<
typename T>
3159 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3161 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3164 template<
typename T>
3165 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3167 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3170 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3172 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3174 template<
typename T>
3175 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3178 VmaFree(pAllocationCallbacks, ptr);
3181 template<
typename T>
3182 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3186 for(
size_t i = count; i--; )
3190 VmaFree(pAllocationCallbacks, ptr);
3195 template<
typename T>
3196 class VmaStlAllocator
3199 const VkAllocationCallbacks*
const m_pCallbacks;
3200 typedef T value_type;
3202 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3203 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3205 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3206 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3208 template<
typename U>
3209 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3211 return m_pCallbacks == rhs.m_pCallbacks;
3213 template<
typename U>
3214 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3216 return m_pCallbacks != rhs.m_pCallbacks;
3219 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3222 #if VMA_USE_STL_VECTOR 3224 #define VmaVector std::vector 3226 template<
typename T,
typename allocatorT>
3227 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3229 vec.insert(vec.begin() + index, item);
// Removes the element at position index from vec.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
3238 #else // #if VMA_USE_STL_VECTOR 3243 template<
typename T,
typename AllocatorT>
3247 typedef T value_type;
3249 VmaVector(
const AllocatorT& allocator) :
3250 m_Allocator(allocator),
3257 VmaVector(
size_t count,
const AllocatorT& allocator) :
3258 m_Allocator(allocator),
3259 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3265 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3266 m_Allocator(src.m_Allocator),
3267 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3268 m_Count(src.m_Count),
3269 m_Capacity(src.m_Count)
3273 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3279 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3282 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3286 resize(rhs.m_Count);
3289 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3295 bool empty()
const {
return m_Count == 0; }
3296 size_t size()
const {
return m_Count; }
3297 T* data() {
return m_pArray; }
3298 const T* data()
const {
return m_pArray; }
3300 T& operator[](
size_t index)
3302 VMA_HEAVY_ASSERT(index < m_Count);
3303 return m_pArray[index];
3305 const T& operator[](
size_t index)
const 3307 VMA_HEAVY_ASSERT(index < m_Count);
3308 return m_pArray[index];
3313 VMA_HEAVY_ASSERT(m_Count > 0);
3316 const T& front()
const 3318 VMA_HEAVY_ASSERT(m_Count > 0);
3323 VMA_HEAVY_ASSERT(m_Count > 0);
3324 return m_pArray[m_Count - 1];
3326 const T& back()
const 3328 VMA_HEAVY_ASSERT(m_Count > 0);
3329 return m_pArray[m_Count - 1];
3332 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3334 newCapacity = VMA_MAX(newCapacity, m_Count);
3336 if((newCapacity < m_Capacity) && !freeMemory)
3338 newCapacity = m_Capacity;
3341 if(newCapacity != m_Capacity)
3343 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3346 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3348 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3349 m_Capacity = newCapacity;
3350 m_pArray = newArray;
3354 void resize(
size_t newCount,
bool freeMemory =
false)
3356 size_t newCapacity = m_Capacity;
3357 if(newCount > m_Capacity)
3359 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3363 newCapacity = newCount;
3366 if(newCapacity != m_Capacity)
3368 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3369 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3370 if(elementsToCopy != 0)
3372 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3374 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3375 m_Capacity = newCapacity;
3376 m_pArray = newArray;
3382 void clear(
bool freeMemory =
false)
3384 resize(0, freeMemory);
3387 void insert(
size_t index,
const T& src)
3389 VMA_HEAVY_ASSERT(index <= m_Count);
3390 const size_t oldCount = size();
3391 resize(oldCount + 1);
3392 if(index < oldCount)
3394 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3396 m_pArray[index] = src;
3399 void remove(
size_t index)
3401 VMA_HEAVY_ASSERT(index < m_Count);
3402 const size_t oldCount = size();
3403 if(index < oldCount - 1)
3405 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3407 resize(oldCount - 1);
3410 void push_back(
const T& src)
3412 const size_t newIndex = size();
3413 resize(newIndex + 1);
3414 m_pArray[newIndex] = src;
3419 VMA_HEAVY_ASSERT(m_Count > 0);
3423 void push_front(
const T& src)
3430 VMA_HEAVY_ASSERT(m_Count > 0);
3434 typedef T* iterator;
3436 iterator begin() {
return m_pArray; }
3437 iterator end() {
return m_pArray + m_Count; }
3440 AllocatorT m_Allocator;
3446 template<
typename T,
typename allocatorT>
3447 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3449 vec.insert(index, item);
3452 template<
typename T,
typename allocatorT>
3453 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3458 #endif // #if VMA_USE_STL_VECTOR 3460 template<
typename CmpLess,
typename VectorT>
3461 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3463 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3465 vector.data() + vector.size(),
3467 CmpLess()) - vector.data();
3468 VmaVectorInsert(vector, indexToInsert, value);
3469 return indexToInsert;
3472 template<
typename CmpLess,
typename VectorT>
3473 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3476 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3481 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3483 size_t indexToRemove = it - vector.begin();
3484 VmaVectorRemove(vector, indexToRemove);
3490 template<
typename CmpLess,
typename IterT,
typename KeyT>
3491 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3494 typename IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3495 beg, end, value, comparator);
3497 !comparator(*it, value) && !comparator(value, *it))
3512 template<
typename T>
3513 class VmaPoolAllocator
3515 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3517 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3518 ~VmaPoolAllocator();
3526 uint32_t NextFreeIndex;
3533 uint32_t FirstFreeIndex;
3536 const VkAllocationCallbacks* m_pAllocationCallbacks;
3537 size_t m_ItemsPerBlock;
3538 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3540 ItemBlock& CreateNewBlock();
3543 template<
typename T>
3544 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3545 m_pAllocationCallbacks(pAllocationCallbacks),
3546 m_ItemsPerBlock(itemsPerBlock),
3547 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3549 VMA_ASSERT(itemsPerBlock > 0);
3552 template<
typename T>
3553 VmaPoolAllocator<T>::~VmaPoolAllocator()
3558 template<
typename T>
3559 void VmaPoolAllocator<T>::Clear()
3561 for(
size_t i = m_ItemBlocks.size(); i--; )
3562 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3563 m_ItemBlocks.clear();
3566 template<
typename T>
3567 T* VmaPoolAllocator<T>::Alloc()
3569 for(
size_t i = m_ItemBlocks.size(); i--; )
3571 ItemBlock& block = m_ItemBlocks[i];
3573 if(block.FirstFreeIndex != UINT32_MAX)
3575 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3576 block.FirstFreeIndex = pItem->NextFreeIndex;
3577 return &pItem->Value;
3582 ItemBlock& newBlock = CreateNewBlock();
3583 Item*
const pItem = &newBlock.pItems[0];
3584 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3585 return &pItem->Value;
3588 template<
typename T>
3589 void VmaPoolAllocator<T>::Free(T* ptr)
3592 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3594 ItemBlock& block = m_ItemBlocks[i];
3598 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3601 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3603 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3604 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3605 block.FirstFreeIndex = index;
3609 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3612 template<
typename T>
3613 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3615 ItemBlock newBlock = {
3616 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3618 m_ItemBlocks.push_back(newBlock);
3621 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3622 newBlock.pItems[i].NextFreeIndex = i + 1;
3623 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3624 return m_ItemBlocks.back();
3630 #if VMA_USE_STL_LIST 3632 #define VmaList std::list 3634 #else // #if VMA_USE_STL_LIST 3636 template<
typename T>
3645 template<
typename T>
3648 VMA_CLASS_NO_COPY(VmaRawList)
3650 typedef VmaListItem<T> ItemType;
3652 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3656 size_t GetCount()
const {
return m_Count; }
3657 bool IsEmpty()
const {
return m_Count == 0; }
3659 ItemType* Front() {
return m_pFront; }
3660 const ItemType* Front()
const {
return m_pFront; }
3661 ItemType* Back() {
return m_pBack; }
3662 const ItemType* Back()
const {
return m_pBack; }
3664 ItemType* PushBack();
3665 ItemType* PushFront();
3666 ItemType* PushBack(
const T& value);
3667 ItemType* PushFront(
const T& value);
3672 ItemType* InsertBefore(ItemType* pItem);
3674 ItemType* InsertAfter(ItemType* pItem);
3676 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3677 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3679 void Remove(ItemType* pItem);
3682 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3683 VmaPoolAllocator<ItemType> m_ItemAllocator;
3689 template<
typename T>
3690 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3691 m_pAllocationCallbacks(pAllocationCallbacks),
3692 m_ItemAllocator(pAllocationCallbacks, 128),
3699 template<
typename T>
3700 VmaRawList<T>::~VmaRawList()
3706 template<
typename T>
3707 void VmaRawList<T>::Clear()
3709 if(IsEmpty() ==
false)
3711 ItemType* pItem = m_pBack;
3712 while(pItem != VMA_NULL)
3714 ItemType*
const pPrevItem = pItem->pPrev;
3715 m_ItemAllocator.Free(pItem);
3718 m_pFront = VMA_NULL;
3724 template<
typename T>
3725 VmaListItem<T>* VmaRawList<T>::PushBack()
3727 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3728 pNewItem->pNext = VMA_NULL;
3731 pNewItem->pPrev = VMA_NULL;
3732 m_pFront = pNewItem;
3738 pNewItem->pPrev = m_pBack;
3739 m_pBack->pNext = pNewItem;
3746 template<
typename T>
3747 VmaListItem<T>* VmaRawList<T>::PushFront()
3749 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3750 pNewItem->pPrev = VMA_NULL;
3753 pNewItem->pNext = VMA_NULL;
3754 m_pFront = pNewItem;
3760 pNewItem->pNext = m_pFront;
3761 m_pFront->pPrev = pNewItem;
3762 m_pFront = pNewItem;
3768 template<
typename T>
3769 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3771 ItemType*
const pNewItem = PushBack();
3772 pNewItem->Value = value;
3776 template<
typename T>
3777 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3779 ItemType*
const pNewItem = PushFront();
3780 pNewItem->Value = value;
3784 template<
typename T>
3785 void VmaRawList<T>::PopBack()
3787 VMA_HEAVY_ASSERT(m_Count > 0);
3788 ItemType*
const pBackItem = m_pBack;
3789 ItemType*
const pPrevItem = pBackItem->pPrev;
3790 if(pPrevItem != VMA_NULL)
3792 pPrevItem->pNext = VMA_NULL;
3794 m_pBack = pPrevItem;
3795 m_ItemAllocator.Free(pBackItem);
3799 template<
typename T>
3800 void VmaRawList<T>::PopFront()
3802 VMA_HEAVY_ASSERT(m_Count > 0);
3803 ItemType*
const pFrontItem = m_pFront;
3804 ItemType*
const pNextItem = pFrontItem->pNext;
3805 if(pNextItem != VMA_NULL)
3807 pNextItem->pPrev = VMA_NULL;
3809 m_pFront = pNextItem;
3810 m_ItemAllocator.Free(pFrontItem);
3814 template<
typename T>
3815 void VmaRawList<T>::Remove(ItemType* pItem)
3817 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3818 VMA_HEAVY_ASSERT(m_Count > 0);
3820 if(pItem->pPrev != VMA_NULL)
3822 pItem->pPrev->pNext = pItem->pNext;
3826 VMA_HEAVY_ASSERT(m_pFront == pItem);
3827 m_pFront = pItem->pNext;
3830 if(pItem->pNext != VMA_NULL)
3832 pItem->pNext->pPrev = pItem->pPrev;
3836 VMA_HEAVY_ASSERT(m_pBack == pItem);
3837 m_pBack = pItem->pPrev;
3840 m_ItemAllocator.Free(pItem);
3844 template<
typename T>
3845 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3847 if(pItem != VMA_NULL)
3849 ItemType*
const prevItem = pItem->pPrev;
3850 ItemType*
const newItem = m_ItemAllocator.Alloc();
3851 newItem->pPrev = prevItem;
3852 newItem->pNext = pItem;
3853 pItem->pPrev = newItem;
3854 if(prevItem != VMA_NULL)
3856 prevItem->pNext = newItem;
3860 VMA_HEAVY_ASSERT(m_pFront == pItem);
3870 template<
typename T>
3871 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3873 if(pItem != VMA_NULL)
3875 ItemType*
const nextItem = pItem->pNext;
3876 ItemType*
const newItem = m_ItemAllocator.Alloc();
3877 newItem->pNext = nextItem;
3878 newItem->pPrev = pItem;
3879 pItem->pNext = newItem;
3880 if(nextItem != VMA_NULL)
3882 nextItem->pPrev = newItem;
3886 VMA_HEAVY_ASSERT(m_pBack == pItem);
3896 template<
typename T>
3897 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3899 ItemType*
const newItem = InsertBefore(pItem);
3900 newItem->Value = value;
3904 template<
typename T>
3905 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3907 ItemType*
const newItem = InsertAfter(pItem);
3908 newItem->Value = value;
3912 template<
typename T,
typename AllocatorT>
3915 VMA_CLASS_NO_COPY(VmaList)
3926 T& operator*()
const 3928 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3929 return m_pItem->Value;
3931 T* operator->()
const 3933 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3934 return &m_pItem->Value;
3937 iterator& operator++()
3939 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3940 m_pItem = m_pItem->pNext;
3943 iterator& operator--()
3945 if(m_pItem != VMA_NULL)
3947 m_pItem = m_pItem->pPrev;
3951 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3952 m_pItem = m_pList->Back();
3957 iterator operator++(
int)
3959 iterator result = *
this;
3963 iterator operator--(
int)
3965 iterator result = *
this;
3970 bool operator==(
const iterator& rhs)
const 3972 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3973 return m_pItem == rhs.m_pItem;
3975 bool operator!=(
const iterator& rhs)
const 3977 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3978 return m_pItem != rhs.m_pItem;
3982 VmaRawList<T>* m_pList;
3983 VmaListItem<T>* m_pItem;
3985 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3991 friend class VmaList<T, AllocatorT>;
3994 class const_iterator
4003 const_iterator(
const iterator& src) :
4004 m_pList(src.m_pList),
4005 m_pItem(src.m_pItem)
4009 const T& operator*()
const 4011 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4012 return m_pItem->Value;
4014 const T* operator->()
const 4016 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4017 return &m_pItem->Value;
4020 const_iterator& operator++()
4022 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4023 m_pItem = m_pItem->pNext;
4026 const_iterator& operator--()
4028 if(m_pItem != VMA_NULL)
4030 m_pItem = m_pItem->pPrev;
4034 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4035 m_pItem = m_pList->Back();
4040 const_iterator operator++(
int)
4042 const_iterator result = *
this;
4046 const_iterator operator--(
int)
4048 const_iterator result = *
this;
4053 bool operator==(
const const_iterator& rhs)
const 4055 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4056 return m_pItem == rhs.m_pItem;
4058 bool operator!=(
const const_iterator& rhs)
const 4060 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4061 return m_pItem != rhs.m_pItem;
4065 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4071 const VmaRawList<T>* m_pList;
4072 const VmaListItem<T>* m_pItem;
4074 friend class VmaList<T, AllocatorT>;
4077 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4079 bool empty()
const {
return m_RawList.IsEmpty(); }
4080 size_t size()
const {
return m_RawList.GetCount(); }
4082 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4083 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4085 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4086 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4088 void clear() { m_RawList.Clear(); }
4089 void push_back(
const T& value) { m_RawList.PushBack(value); }
4090 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4091 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4094 VmaRawList<T> m_RawList;
4097 #endif // #if VMA_USE_STL_LIST 4105 #if VMA_USE_STL_UNORDERED_MAP 4107 #define VmaPair std::pair 4109 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4110 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4112 #else // #if VMA_USE_STL_UNORDERED_MAP 4114 template<
typename T1,
typename T2>
4120 VmaPair() : first(), second() { }
4121 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4127 template<
typename KeyT,
typename ValueT>
4131 typedef VmaPair<KeyT, ValueT> PairType;
4132 typedef PairType* iterator;
4134 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4136 iterator begin() {
return m_Vector.begin(); }
4137 iterator end() {
return m_Vector.end(); }
4139 void insert(
const PairType& pair);
4140 iterator find(
const KeyT& key);
4141 void erase(iterator it);
4144 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4147 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4149 template<
typename FirstT,
typename SecondT>
4150 struct VmaPairFirstLess
4152 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4154 return lhs.first < rhs.first;
4156 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4158 return lhs.first < rhsFirst;
4162 template<
typename KeyT,
typename ValueT>
4163 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4165 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4167 m_Vector.data() + m_Vector.size(),
4169 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4170 VmaVectorInsert(m_Vector, indexToInsert, pair);
4173 template<
typename KeyT,
typename ValueT>
4174 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4176 PairType* it = VmaBinaryFindFirstNotLess(
4178 m_Vector.data() + m_Vector.size(),
4180 VmaPairFirstLess<KeyT, ValueT>());
4181 if((it != m_Vector.end()) && (it->first == key))
4187 return m_Vector.end();
4191 template<
typename KeyT,
typename ValueT>
4192 void VmaMap<KeyT, ValueT>::erase(iterator it)
4194 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4197 #endif // #if VMA_USE_STL_UNORDERED_MAP 4203 class VmaDeviceMemoryBlock;
4205 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
// Represents a single memory allocation handed out by the allocator. It is
// either a suballocation inside a VmaDeviceMemoryBlock (ALLOCATION_TYPE_BLOCK,
// data in m_BlockAllocation) or its own dedicated VkDeviceMemory
// (ALLOCATION_TYPE_DEDICATED, data in m_DedicatedAllocation). Tracks a map
// reference count whose high bit (MAP_COUNT_FLAG_PERSISTENT_MAP) marks a
// persistently mapped allocation, an atomic last-use frame index used by the
// lost-allocation mechanism, and optional user data (raw pointer, or an owned
// string copy when FLAG_USER_DATA_STRING is set).
// NOTE(review): this declaration is garbled by text extraction -- stray
// original line numbers are fused into lines and several member/brace lines
// are missing. Restore from the upstream header before compiling.
4207 struct VmaAllocation_T
4209 VMA_CLASS_NO_COPY(VmaAllocation_T)
4211 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4215 FLAG_USER_DATA_STRING = 0x01,
4219 enum ALLOCATION_TYPE
4221 ALLOCATION_TYPE_NONE,
4222 ALLOCATION_TYPE_BLOCK,
4223 ALLOCATION_TYPE_DEDICATED,
// Constructor leaves the allocation in NONE state; a subsequent
// InitBlockAllocation/InitDedicatedAllocation/InitLost call completes setup.
4226 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4229 m_pUserData(VMA_NULL),
4230 m_LastUseFrameIndex(currentFrameIndex),
4231 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4232 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4234 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4236 #if VMA_STATS_STRING_ENABLED 4237 m_CreationFrameIndex = currentFrameIndex;
4238 m_BufferImageUsage = 0;
// Destructor-time sanity checks: all Map() calls must be balanced by Unmap(),
// and string user data must already be freed.
4244 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4247 VMA_ASSERT(m_pUserData == VMA_NULL);
// Turns a NONE allocation into a block suballocation.
4250 void InitBlockAllocation(
4252 VmaDeviceMemoryBlock* block,
4253 VkDeviceSize offset,
4254 VkDeviceSize alignment,
4256 VmaSuballocationType suballocationType,
4260 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4261 VMA_ASSERT(block != VMA_NULL);
4262 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4263 m_Alignment = alignment;
4265 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4266 m_SuballocationType = (uint8_t)suballocationType;
4267 m_BlockAllocation.m_hPool = hPool;
4268 m_BlockAllocation.m_Block = block;
4269 m_BlockAllocation.m_Offset = offset;
4270 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Initializes an allocation created in the already-lost state
// (m_LastUseFrameIndex must be VMA_FRAME_INDEX_LOST).
4275 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4276 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4277 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4278 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4279 m_BlockAllocation.m_Block = VMA_NULL;
4280 m_BlockAllocation.m_Offset = 0;
4281 m_BlockAllocation.m_CanBecomeLost =
true;
// Rebinds a block allocation to a different block/offset (defragmentation).
4284 void ChangeBlockAllocation(
4286 VmaDeviceMemoryBlock* block,
4287 VkDeviceSize offset);
// Turns a NONE allocation into a dedicated VkDeviceMemory allocation.
4290 void InitDedicatedAllocation(
4291 uint32_t memoryTypeIndex,
4292 VkDeviceMemory hMemory,
4293 VmaSuballocationType suballocationType,
4297 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4298 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4299 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4302 m_SuballocationType = (uint8_t)suballocationType;
4303 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4304 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4305 m_DedicatedAllocation.m_hMemory = hMemory;
4306 m_DedicatedAllocation.m_pMappedData = pMappedData;
4309 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4310 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4311 VkDeviceSize GetSize()
const {
return m_Size; }
4312 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4313 void* GetUserData()
const {
return m_pUserData; }
4314 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4315 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for block allocations.
4317 VmaDeviceMemoryBlock* GetBlock()
const 4319 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4320 return m_BlockAllocation.m_Block;
4322 VkDeviceSize GetOffset()
const;
4323 VkDeviceMemory GetMemory()
const;
4324 uint32_t GetMemoryTypeIndex()
const;
4325 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4326 void* GetMappedData()
const;
4327 bool CanBecomeLost()
const;
// Atomic accessors for the lost-allocation frame-index protocol.
4330 uint32_t GetLastUseFrameIndex()
const 4332 return m_LastUseFrameIndex.load();
4334 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4336 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4346 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Only valid for dedicated allocations.
4348 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4350 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4361 void BlockAllocMap();
4362 void BlockAllocUnmap();
4363 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4366 #if VMA_STATS_STRING_ENABLED 4367 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4368 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
// May be set at most once (asserted), right after creation.
4370 void InitBufferImageUsage(uint32_t bufferImageUsage)
4372 VMA_ASSERT(m_BufferImageUsage == 0);
4373 m_BufferImageUsage = bufferImageUsage;
4376 void PrintParameters(
class VmaJsonWriter& json)
const;
4380 VkDeviceSize m_Alignment;
4381 VkDeviceSize m_Size;
4383 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4385 uint8_t m_SuballocationType;
// Data specific to a block suballocation.
4392 struct BlockAllocation
4395 VmaDeviceMemoryBlock* m_Block;
4396 VkDeviceSize m_Offset;
4397 bool m_CanBecomeLost;
// Data specific to a dedicated VkDeviceMemory allocation.
4401 struct DedicatedAllocation
4403 uint32_t m_MemoryTypeIndex;
4404 VkDeviceMemory m_hMemory;
4405 void* m_pMappedData;
4411 BlockAllocation m_BlockAllocation;
4413 DedicatedAllocation m_DedicatedAllocation;
4416 #if VMA_STATS_STRING_ENABLED 4417 uint32_t m_CreationFrameIndex;
4418 uint32_t m_BufferImageUsage;
// Describes one region inside a VmaDeviceMemoryBlock: its offset and the kind
// of content placed there (free, buffer, image, ...).
// NOTE(review): extraction dropped lines here (e.g. a size member and the
// closing brace are not visible) -- restore from the upstream header.
4428 struct VmaSuballocation
4430 VkDeviceSize offset;
4433 VmaSuballocationType type;
4437 struct VmaSuballocationOffsetLess
4439 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4441 return lhs.offset < rhs.offset;
4444 struct VmaSuballocationOffsetGreater
4446 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4448 return lhs.offset > rhs.offset;
// List of suballocations describing the contents of a memory block.
4452 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Extra cost in bytes charged per allocation that must be made "lost" to
// satisfy a new request -- see VmaAllocationRequest::CalcCost().
4455 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4470 struct VmaAllocationRequest
4472 VkDeviceSize offset;
4473 VkDeviceSize sumFreeSize;
4474 VkDeviceSize sumItemSize;
4475 VmaSuballocationList::iterator item;
4476 size_t itemsToMakeLostCount;
4478 VkDeviceSize CalcCost()
const 4480 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Abstract interface for the bookkeeping of a single VkDeviceMemory block:
// which byte ranges are allocated and which are free. Concrete strategies are
// VmaBlockMetadata_Generic (free-list) and VmaBlockMetadata_Linear below.
// NOTE(review): extraction dropped lines here (e.g. the pure virtual Alloc()
// declaration around the `request`/`type`/`allocSize` parameters, and closing
// braces) -- restore from the upstream header.
4488 class VmaBlockMetadata
4491 VmaBlockMetadata() : m_Size(0) { }
4492 virtual ~VmaBlockMetadata() { }
4493 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Validates internal consistency of the structure (debug aid).
4496 virtual bool Validate()
const = 0;
4497 VkDeviceSize GetSize()
const {
return m_Size; }
4498 virtual size_t GetAllocationCount()
const = 0;
4499 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4500 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4502 virtual bool IsEmpty()
const = 0;
4504 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4505 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4507 #if VMA_STATS_STRING_ENABLED 4508 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Tries to find a place for an allocation of given size/alignment; may plan
// to make other allocations lost when canMakeOtherLost is true.
4514 virtual bool CreateAllocationRequest(
4515 uint32_t currentFrameIndex,
4516 uint32_t frameInUseCount,
4517 VkDeviceSize bufferImageGranularity,
4518 VkDeviceSize allocSize,
4519 VkDeviceSize allocAlignment,
4521 VmaSuballocationType allocType,
4522 bool canMakeOtherLost,
4523 VmaAllocationRequest* pAllocationRequest) = 0;
4525 virtual bool MakeRequestedAllocationsLost(
4526 uint32_t currentFrameIndex,
4527 uint32_t frameInUseCount,
4528 VmaAllocationRequest* pAllocationRequest) = 0;
4530 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4532 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
4536 const VmaAllocationRequest& request,
4537 VmaSuballocationType type,
4538 VkDeviceSize allocSize,
4544 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
// Shared JSON-printing helpers for derived classes.
4547 #if VMA_STATS_STRING_ENABLED 4548 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4549 VkDeviceSize unusedBytes,
4550 size_t allocationCount,
4551 size_t unusedRangeCount)
const;
4552 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4553 VkDeviceSize offset,
4555 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4556 VkDeviceSize offset,
4557 VkDeviceSize size)
const;
4558 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4562 VkDeviceSize m_Size;
// General-purpose block metadata: keeps all suballocations in an
// offset-ordered list (m_Suballocations) plus a size-sorted vector of free
// suballocations (m_FreeSuballocationsBySize) for best-fit searches.
// NOTE(review): extraction dropped lines (constructor, Alloc() override head,
// braces); restore from the upstream header.
4565 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4567 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4570 virtual ~VmaBlockMetadata_Generic();
4571 virtual void Init(VkDeviceSize size);
4573 virtual bool Validate()
const;
// Allocation count = total list entries minus entries marked free.
4574 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4575 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4576 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4577 virtual bool IsEmpty()
const;
4579 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4580 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4582 #if VMA_STATS_STRING_ENABLED 4583 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4586 virtual bool CreateAllocationRequest(
4587 uint32_t currentFrameIndex,
4588 uint32_t frameInUseCount,
4589 VkDeviceSize bufferImageGranularity,
4590 VkDeviceSize allocSize,
4591 VkDeviceSize allocAlignment,
4593 VmaSuballocationType allocType,
4594 bool canMakeOtherLost,
4595 VmaAllocationRequest* pAllocationRequest);
4597 virtual bool MakeRequestedAllocationsLost(
4598 uint32_t currentFrameIndex,
4599 uint32_t frameInUseCount,
4600 VmaAllocationRequest* pAllocationRequest);
4602 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4604 virtual VkResult CheckCorruption(
const void* pBlockData);
4607 const VmaAllocationRequest& request,
4608 VmaSuballocationType type,
4609 VkDeviceSize allocSize,
4614 virtual void FreeAtOffset(VkDeviceSize offset);
4617 uint32_t m_FreeCount;
4618 VkDeviceSize m_SumFreeSize;
4619 VmaSuballocationList m_Suballocations;
// Free suballocations sorted by size; accelerates best-fit lookup.
4622 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4624 bool ValidateFreeSuballocationList()
const;
// Checks whether the request can be satisfied at suballocItem; outputs the
// resulting offset and the set of allocations that would have to be lost.
4628 bool CheckAllocation(
4629 uint32_t currentFrameIndex,
4630 uint32_t frameInUseCount,
4631 VkDeviceSize bufferImageGranularity,
4632 VkDeviceSize allocSize,
4633 VkDeviceSize allocAlignment,
4634 VmaSuballocationType allocType,
4635 VmaSuballocationList::const_iterator suballocItem,
4636 bool canMakeOtherLost,
4637 VkDeviceSize* pOffset,
4638 size_t* itemsToMakeLostCount,
4639 VkDeviceSize* pSumFreeSize,
4640 VkDeviceSize* pSumItemSize)
const;
4642 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4646 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4649 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4652 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Linear-allocator block metadata: suballocations live in two vectors whose
// roles (1st/2nd) swap via m_1stVectorIndex. The second vector operates as a
// ring buffer or a double stack depending on m_2ndVectorMode.
// NOTE(review): extraction dropped lines (constructor, Alloc() override head,
// braces); restore from the upstream header.
4733 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
4735 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
4738 virtual ~VmaBlockMetadata_Linear();
4739 virtual void Init(VkDeviceSize size);
4741 virtual bool Validate()
const;
4742 virtual size_t GetAllocationCount()
const;
4743 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4744 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4745 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
4747 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4748 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4750 #if VMA_STATS_STRING_ENABLED 4751 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4754 virtual bool CreateAllocationRequest(
4755 uint32_t currentFrameIndex,
4756 uint32_t frameInUseCount,
4757 VkDeviceSize bufferImageGranularity,
4758 VkDeviceSize allocSize,
4759 VkDeviceSize allocAlignment,
4761 VmaSuballocationType allocType,
4762 bool canMakeOtherLost,
4763 VmaAllocationRequest* pAllocationRequest);
4765 virtual bool MakeRequestedAllocationsLost(
4766 uint32_t currentFrameIndex,
4767 uint32_t frameInUseCount,
4768 VmaAllocationRequest* pAllocationRequest);
4770 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4772 virtual VkResult CheckCorruption(
const void* pBlockData);
4775 const VmaAllocationRequest& request,
4776 VmaSuballocationType type,
4777 VkDeviceSize allocSize,
4782 virtual void FreeAtOffset(VkDeviceSize offset);
4792 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// Operating mode of the second suballocation vector.
4794 enum SECOND_VECTOR_MODE
4796 SECOND_VECTOR_EMPTY,
4801 SECOND_VECTOR_RING_BUFFER,
4807 SECOND_VECTOR_DOUBLE_STACK,
4810 VkDeviceSize m_SumFreeSize;
4811 SuballocationVectorType m_Suballocations0, m_Suballocations1;
4812 uint32_t m_1stVectorIndex;
4813 SECOND_VECTOR_MODE m_2ndVectorMode;
// Accessors resolving which physical vector currently plays the 1st/2nd role.
4815 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
4816 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
4817 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
4818 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counts of null (freed) items kept in the vectors before compaction.
4821 size_t m_1stNullItemsBeginCount;
4823 size_t m_1stNullItemsMiddleCount;
4825 size_t m_2ndNullItemsCount;
4827 bool ShouldCompact1st()
const;
4828 void CleanupAfterFree();
// Wraps one VkDeviceMemory object plus the metadata (m_pMetadata) that tracks
// suballocations within it. Maintains a shared map reference count
// (m_MapCount / m_pMappedData) so multiple allocations can map the same block.
// NOTE(review): extraction dropped lines (constructor, Init()/Destroy() heads,
// Unmap(), Bind* parameter lists, braces); restore from the upstream header.
4837 class VmaDeviceMemoryBlock
4839 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
4841 VmaBlockMetadata* m_pMetadata;
// Destructor checks the block was unmapped and its memory already released.
4845 ~VmaDeviceMemoryBlock()
4847 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
4848 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4854 uint32_t newMemoryTypeIndex,
4855 VkDeviceMemory newMemory,
4856 VkDeviceSize newSize,
4858 bool linearAlgorithm);
4862 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
4863 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4864 uint32_t GetId()
const {
return m_Id; }
4865 void* GetMappedData()
const {
return m_pMappedData; }
4868 bool Validate()
const;
// Reference-counted map: `count` references are added per call.
4873 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Corruption-detection helpers writing/checking guard values around an
// allocation's byte range.
4876 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
4877 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
4879 VkResult BindBufferMemory(
4883 VkResult BindImageMemory(
4889 uint32_t m_MemoryTypeIndex;
4891 VkDeviceMemory m_hMemory;
// Protected by a mutex in the upstream header (not visible here).
4896 uint32_t m_MapCount;
4897 void* m_pMappedData;
// Comparator ordering raw pointers by address; used for sorted containers and
// binary searches over pointer elements.
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

class VmaDefragmentator;
// Sequence of VmaDeviceMemoryBlock instances for one memory type, serving
// allocations either for the default pools or a custom VmaPool. Owns block
// creation/destruction and an optional defragmentator.
// NOTE(review): extraction dropped lines (constructor head, Allocate()/Free()
// declarations, mutex member, braces); restore from the upstream header.
4916 struct VmaBlockVector
4918 VMA_CLASS_NO_COPY(VmaBlockVector)
4922 uint32_t memoryTypeIndex,
4923 VkDeviceSize preferredBlockSize,
4924 size_t minBlockCount,
4925 size_t maxBlockCount,
4926 VkDeviceSize bufferImageGranularity,
4927 uint32_t frameInUseCount,
4929 bool linearAlgorithm);
4932 VkResult CreateMinBlocks();
4934 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4935 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
4936 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
4937 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
4938 bool UsesLinearAlgorithm()
const {
return m_LinearAlgorithm; }
4942 bool IsEmpty()
const {
return m_Blocks.empty(); }
4943 bool IsCorruptionDetectionEnabled()
const;
4947 uint32_t currentFrameIndex,
4949 VkDeviceSize alignment,
4951 VmaSuballocationType suballocType,
4960 #if VMA_STATS_STRING_ENABLED 4961 void PrintDetailedMap(
class VmaJsonWriter& json);
4964 void MakePoolAllocationsLost(
4965 uint32_t currentFrameIndex,
4966 size_t* pLostAllocationCount);
4967 VkResult CheckCorruption();
4969 VmaDefragmentator* EnsureDefragmentator(
4971 uint32_t currentFrameIndex);
4973 VkResult Defragment(
4975 VkDeviceSize& maxBytesToMove,
4976 uint32_t& maxAllocationsToMove);
4978 void DestroyDefragmentator();
4981 friend class VmaDefragmentator;
// Immutable configuration captured at construction.
4984 const uint32_t m_MemoryTypeIndex;
4985 const VkDeviceSize m_PreferredBlockSize;
4986 const size_t m_MinBlockCount;
4987 const size_t m_MaxBlockCount;
4988 const VkDeviceSize m_BufferImageGranularity;
4989 const uint32_t m_FrameInUseCount;
4990 const bool m_IsCustomPool;
4991 const bool m_LinearAlgorithm;
4992 bool m_HasEmptyBlock;
4995 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
4999 VmaDefragmentator* m_pDefragmentator;
5000 uint32_t m_NextBlockId;
5002 VkDeviceSize CalcMaxBlockSize()
const;
5005 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks approximately sorted with bounded work per call.
5009 void IncrementallySortBlocks();
5011 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Interior of the VmaPool_T class (custom memory pool): wraps a dedicated
// VmaBlockVector plus a numeric id assigned once via SetId().
// NOTE(review): the `class VmaPool_T` header and constructor were dropped by
// text extraction -- restore from the upstream header.
5016 VMA_CLASS_NO_COPY(VmaPool_T)
5018 VmaBlockVector m_BlockVector;
5025 uint32_t GetId()
const {
return m_Id; }
// Id may be set exactly once (asserted to still be 0).
5026 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
// Performs defragmentation of one VmaBlockVector: moves allocations between
// blocks (bounded by maxBytesToMove / maxAllocationsToMove) to empty blocks so
// they can be released. Tracks statistics in m_BytesMoved/m_AllocationsMoved.
// NOTE(review): extraction dropped lines (BlockInfo struct header, return
// statements inside the comparators, braces); restore from upstream.
5028 #if VMA_STATS_STRING_ENABLED 5036 class VmaDefragmentator
5038 VMA_CLASS_NO_COPY(VmaDefragmentator)
5041 VmaBlockVector*
const m_pBlockVector;
5042 uint32_t m_CurrentFrameIndex;
5043 VkDeviceSize m_BytesMoved;
5044 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; m_pChanged (optional)
// receives whether the allocation was actually moved.
5046 struct AllocationInfo
5049 VkBool32* m_pChanged;
5052 m_hAllocation(VK_NULL_HANDLE),
5053 m_pChanged(VMA_NULL)
// Orders registered allocations largest-first.
5058 struct AllocationInfoSizeGreater
5060 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5062 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5067 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state during a defragmentation pass.
5071 VmaDeviceMemoryBlock* m_pBlock;
5072 bool m_HasNonMovableAllocations;
5073 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5075 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5077 m_HasNonMovableAllocations(true),
5078 m_Allocations(pAllocationCallbacks),
5079 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when not all of its allocations were
// registered for defragmentation.
5083 void CalcHasNonMovableAllocations()
5085 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5086 const size_t defragmentAllocCount = m_Allocations.size();
5087 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5090 void SortAllocationsBySizeDescecnding()
5092 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5095 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5100 void* m_pMappedDataForDefragmentation;
// Orders BlockInfo entries by the address of their underlying block; the
// heterogeneous overload supports lookup by raw block pointer.
5103 struct BlockPointerLess
5105 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5107 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5109 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5111 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Ranks blocks as move destinations: movable-only blocks first, then by
// free-space criteria (comparison bodies partially lost to extraction).
5117 struct BlockInfoCompareMoveDestination
5119 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5121 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5125 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5129 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5137 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5138 BlockInfoVector m_Blocks;
5140 VkResult DefragmentRound(
5141 VkDeviceSize maxBytesToMove,
5142 uint32_t maxAllocationsToMove);
5144 static bool MoveMakesSense(
5145 size_t dstBlockIndex, VkDeviceSize dstOffset,
5146 size_t srcBlockIndex, VkDeviceSize srcOffset);
5151 VmaBlockVector* pBlockVector,
5152 uint32_t currentFrameIndex);
5154 ~VmaDefragmentator();
5156 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5157 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5159 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5161 VkResult Defragment(
5162 VkDeviceSize maxBytesToMove,
5163 uint32_t maxAllocationsToMove);
// Interior of the VmaRecorder class (compiled only when VMA_RECORDING_ENABLED):
// records every allocator API call, tagged with a frame index, presumably into
// a file guarded by m_FileMutex -- the class header, file handle member and
// Record* parameter lists were dropped by extraction; restore from upstream.
5166 #if VMA_RECORDING_ENABLED 5173 void WriteConfiguration(
5174 const VkPhysicalDeviceProperties& devProps,
5175 const VkPhysicalDeviceMemoryProperties& memProps,
5176 bool dedicatedAllocationExtensionEnabled);
5179 void RecordCreateAllocator(uint32_t frameIndex);
5180 void RecordDestroyAllocator(uint32_t frameIndex);
5181 void RecordCreatePool(uint32_t frameIndex,
5184 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5185 void RecordAllocateMemory(uint32_t frameIndex,
5186 const VkMemoryRequirements& vkMemReq,
5189 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5190 const VkMemoryRequirements& vkMemReq,
5191 bool requiresDedicatedAllocation,
5192 bool prefersDedicatedAllocation,
5195 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5196 const VkMemoryRequirements& vkMemReq,
5197 bool requiresDedicatedAllocation,
5198 bool prefersDedicatedAllocation,
5201 void RecordFreeMemory(uint32_t frameIndex,
5203 void RecordSetAllocationUserData(uint32_t frameIndex,
5205 const void* pUserData);
5206 void RecordCreateLostAllocation(uint32_t frameIndex,
5208 void RecordMapMemory(uint32_t frameIndex,
5210 void RecordUnmapMemory(uint32_t frameIndex,
5212 void RecordFlushAllocation(uint32_t frameIndex,
5213 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5214 void RecordInvalidateAllocation(uint32_t frameIndex,
5215 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5216 void RecordCreateBuffer(uint32_t frameIndex,
5217 const VkBufferCreateInfo& bufCreateInfo,
5220 void RecordCreateImage(uint32_t frameIndex,
5221 const VkImageCreateInfo& imageCreateInfo,
5224 void RecordDestroyBuffer(uint32_t frameIndex,
5226 void RecordDestroyImage(uint32_t frameIndex,
5228 void RecordTouchAllocation(uint32_t frameIndex,
5230 void RecordGetAllocationInfo(uint32_t frameIndex,
5232 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
// Helper formatting a user-data value as a printable string.
5242 class UserDataString
5246 const char* GetString()
const {
return m_Str; }
5256 VMA_MUTEX m_FileMutex;
// Start time reference for timestamps -- TODO confirm against upstream.
5258 int64_t m_StartCounter;
5260 void GetBasicParams(CallParams& outParams);
// Central allocator object behind the public VmaAllocator handle. Owns the
// per-memory-type default block vectors, the lists of dedicated allocations,
// custom pools, cached device properties, and the heap size limits. Most
// public vmaXxx() entry points forward to methods of this struct.
// NOTE(review): extraction dropped lines (constructor/destructor, device/
// instance handles, m_VulkanFunctions member, several parameter lists,
// braces); restore from the upstream header.
5264 #endif // #if VMA_RECORDING_ENABLED 5267 struct VmaAllocator_T
5269 VMA_CLASS_NO_COPY(VmaAllocator_T)
5272 bool m_UseKhrDedicatedAllocation;
5274 bool m_AllocationCallbacksSpecified;
5275 VkAllocationCallbacks m_AllocationCallbacks;
// Per-heap budget limits, guarded by m_HeapSizeLimitMutex.
5279 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5280 VMA_MUTEX m_HeapSizeLimitMutex;
5282 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5283 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default block vectors, one per memory type.
5286 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated allocations per memory type, each guarded by its own mutex.
5289 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5290 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5291 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks when specified, otherwise null (Vulkan default).
5297 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5299 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5303 return m_VulkanFunctions;
// Effective granularity: device limit clamped below by the debug minimum.
5306 VkDeviceSize GetBufferImageGranularity()
const 5309 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5310 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5313 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5314 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5316 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5318 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5319 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// True for HOST_VISIBLE but not HOST_COHERENT memory types.
5322 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5324 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5325 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Non-coherent types additionally respect nonCoherentAtomSize alignment.
5328 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5330 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5331 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5332 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5335 bool IsIntegratedGpu()
const 5337 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5340 #if VMA_RECORDING_ENABLED 5341 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
// Queries memory requirements including dedicated-allocation preferences
// (via VK_KHR_dedicated_allocation when enabled).
5344 void GetBufferMemoryRequirements(
5346 VkMemoryRequirements& memReq,
5347 bool& requiresDedicatedAllocation,
5348 bool& prefersDedicatedAllocation)
const;
5349 void GetImageMemoryRequirements(
5351 VkMemoryRequirements& memReq,
5352 bool& requiresDedicatedAllocation,
5353 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point used by vmaAllocateMemory*.
5356 VkResult AllocateMemory(
5357 const VkMemoryRequirements& vkMemReq,
5358 bool requiresDedicatedAllocation,
5359 bool prefersDedicatedAllocation,
5360 VkBuffer dedicatedBuffer,
5361 VkImage dedicatedImage,
5363 VmaSuballocationType suballocType,
5369 void CalculateStats(
VmaStats* pStats);
5371 #if VMA_STATS_STRING_ENABLED 5372 void PrintDetailedMap(
class VmaJsonWriter& json);
5375 VkResult Defragment(
5377 size_t allocationCount,
5378 VkBool32* pAllocationsChanged,
5386 void DestroyPool(
VmaPool pool);
5389 void SetCurrentFrameIndex(uint32_t frameIndex);
5390 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5392 void MakePoolAllocationsLost(
5394 size_t* pLostAllocationCount);
5395 VkResult CheckPoolCorruption(
VmaPool hPool);
5396 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Raw vkAllocateMemory/vkFreeMemory wrappers honoring heap size limits.
5400 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5401 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5406 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5407 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
// Implements vmaFlushAllocation/vmaInvalidateAllocation via the op selector.
5409 void FlushOrInvalidateAllocation(
5411 VkDeviceSize offset, VkDeviceSize size,
5412 VMA_CACHE_OPERATION op);
// Debug feature: fills allocation memory with a byte pattern.
5414 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5417 VkDeviceSize m_PreferredLargeHeapBlockSize;
5419 VkPhysicalDevice m_PhysicalDevice;
5420 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by m_PoolsMutex.
5422 VMA_MUTEX m_PoolsMutex;
5424 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5425 uint32_t m_NextPoolId;
5429 #if VMA_RECORDING_ENABLED 5430 VmaRecorder* m_pRecorder;
5435 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocates from a specific memory type, falling back between block and
// dedicated strategies.
5437 VkResult AllocateMemoryOfType(
5439 VkDeviceSize alignment,
5440 bool dedicatedAllocation,
5441 VkBuffer dedicatedBuffer,
5442 VkImage dedicatedImage,
5444 uint32_t memTypeIndex,
5445 VmaSuballocationType suballocType,
5449 VkResult AllocateDedicatedMemory(
5451 VmaSuballocationType suballocType,
5452 uint32_t memTypeIndex,
5454 bool isUserDataString,
5456 VkBuffer dedicatedBuffer,
5457 VkImage dedicatedImage,
5467 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5469 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5472 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5474 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Allocates raw, suitably aligned storage for one object of T using the
// allocator's callbacks. Does NOT run T's constructor.
// NOTE(review): the function signature line was dropped by text extraction --
// restore from the upstream header.
5477 template<
typename T>
5480 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
5483 template<
typename T>
5484 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5486 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
5489 template<
typename T>
5490 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5495 VmaFree(hAllocator, ptr);
5499 template<
typename T>
5500 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5504 for(
size_t i = count; i--; )
5506 VmaFree(hAllocator, ptr);
5513 #if VMA_STATS_STRING_ENABLED 5515 class VmaStringBuilder
5518 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5519 size_t GetLength()
const {
return m_Data.size(); }
5520 const char* GetData()
const {
return m_Data.data(); }
5522 void Add(
char ch) { m_Data.push_back(ch); }
5523 void Add(
const char* pStr);
5524 void AddNewLine() { Add(
'\n'); }
5525 void AddNumber(uint32_t num);
5526 void AddNumber(uint64_t num);
5527 void AddPointer(
const void* ptr);
5530 VmaVector< char, VmaStlAllocator<char> > m_Data;
5533 void VmaStringBuilder::Add(
const char* pStr)
5535 const size_t strLen = strlen(pStr);
5538 const size_t oldCount = m_Data.size();
5539 m_Data.resize(oldCount + strLen);
5540 memcpy(m_Data.data() + oldCount, pStr, strLen);
5544 void VmaStringBuilder::AddNumber(uint32_t num)
5547 VmaUint32ToStr(buf,
sizeof(buf), num);
5551 void VmaStringBuilder::AddNumber(uint64_t num)
5554 VmaUint64ToStr(buf,
sizeof(buf), num);
5558 void VmaStringBuilder::AddPointer(
const void* ptr)
5561 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer used to emit the detailed statistics map. Collections
// are opened/closed with BeginObject/EndObject and BeginArray/EndArray; a
// stack of StackItem entries tracks nesting, per-collection value counts
// (used to alternate key/value inside objects) and single-line formatting.
// NOTE(review): the `class VmaJsonWriter` header, WriteNull() declaration,
// StackItem struct header and braces were dropped by extraction -- restore
// from the upstream header.
5565 #endif // #if VMA_STATS_STRING_ENABLED 5570 #if VMA_STATS_STRING_ENABLED 5574 VMA_CLASS_NO_COPY(VmaJsonWriter)
5576 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
5579 void BeginObject(
bool singleLine =
false);
5582 void BeginArray(
bool singleLine =
false);
5585 void WriteString(
const char* pStr);
// Strings can also be built incrementally: BeginString, ContinueString*,
// EndString.
5586 void BeginString(
const char* pStr = VMA_NULL);
5587 void ContinueString(
const char* pStr);
5588 void ContinueString(uint32_t n);
5589 void ContinueString(uint64_t n);
5590 void ContinueString_Pointer(
const void* ptr);
5591 void EndString(
const char* pStr = VMA_NULL);
5593 void WriteNumber(uint32_t n);
5594 void WriteNumber(uint64_t n);
5595 void WriteBool(
bool b);
5599 static const char*
const INDENT;
5601 enum COLLECTION_TYPE
5603 COLLECTION_TYPE_OBJECT,
5604 COLLECTION_TYPE_ARRAY,
// Per-nesting-level state.
5608 COLLECTION_TYPE type;
5609 uint32_t valueCount;
5610 bool singleLineMode;
5613 VmaStringBuilder& m_SB;
5614 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
5615 bool m_InsideString;
5617 void BeginValue(
bool isString);
5618 void WriteIndent(
bool oneLess =
false);
// Indentation unit emitted once per open collection level by WriteIndent().
5621 const char*
const VmaJsonWriter::INDENT =
"  ";
5623 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
5625 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
5626 m_InsideString(false)
5630 VmaJsonWriter::~VmaJsonWriter()
5632 VMA_ASSERT(!m_InsideString);
5633 VMA_ASSERT(m_Stack.empty());
5636 void VmaJsonWriter::BeginObject(
bool singleLine)
5638 VMA_ASSERT(!m_InsideString);
5644 item.type = COLLECTION_TYPE_OBJECT;
5645 item.valueCount = 0;
5646 item.singleLineMode = singleLine;
5647 m_Stack.push_back(item);
5650 void VmaJsonWriter::EndObject()
5652 VMA_ASSERT(!m_InsideString);
5657 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
5661 void VmaJsonWriter::BeginArray(
bool singleLine)
5663 VMA_ASSERT(!m_InsideString);
5669 item.type = COLLECTION_TYPE_ARRAY;
5670 item.valueCount = 0;
5671 item.singleLineMode = singleLine;
5672 m_Stack.push_back(item);
5675 void VmaJsonWriter::EndArray()
5677 VMA_ASSERT(!m_InsideString);
5682 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
5686 void VmaJsonWriter::WriteString(
const char* pStr)
5692 void VmaJsonWriter::BeginString(
const char* pStr)
5694 VMA_ASSERT(!m_InsideString);
5698 m_InsideString =
true;
5699 if(pStr != VMA_NULL && pStr[0] !=
'\0')
5701 ContinueString(pStr);
// Appends pStr to the string value currently being built, escaping characters
// as required by JSON.
// NOTE(review): the per-character escaping switch was dropped by text
// extraction (only the loop head and the unsupported-character assert remain);
// restore from the upstream header.
5705 void VmaJsonWriter::ContinueString(
const char* pStr)
5707 VMA_ASSERT(m_InsideString);
5709 const size_t strLen = strlen(pStr);
5710 for(
size_t i = 0; i < strLen; ++i)
5743 VMA_ASSERT(0 &&
"Character not currently supported.");
5749 void VmaJsonWriter::ContinueString(uint32_t n)
5751 VMA_ASSERT(m_InsideString);
5755 void VmaJsonWriter::ContinueString(uint64_t n)
5757 VMA_ASSERT(m_InsideString);
5761 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
5763 VMA_ASSERT(m_InsideString);
5764 m_SB.AddPointer(ptr);
5767 void VmaJsonWriter::EndString(
const char* pStr)
5769 VMA_ASSERT(m_InsideString);
5770 if(pStr != VMA_NULL && pStr[0] !=
'\0')
5772 ContinueString(pStr);
5775 m_InsideString =
false;
5778 void VmaJsonWriter::WriteNumber(uint32_t n)
5780 VMA_ASSERT(!m_InsideString);
5785 void VmaJsonWriter::WriteNumber(uint64_t n)
5787 VMA_ASSERT(!m_InsideString);
5792 void VmaJsonWriter::WriteBool(
bool b)
5794 VMA_ASSERT(!m_InsideString);
5796 m_SB.Add(b ?
"true" :
"false");
5799 void VmaJsonWriter::WriteNull()
5801 VMA_ASSERT(!m_InsideString);
// Called before emitting any value. Inside an object, even-indexed values
// must be strings (keys, asserted); this function also emits the appropriate
// separator/indentation before the value and bumps the collection's value
// count.
// NOTE(review): the separator-emission statements between the branches were
// dropped by text extraction -- restore from the upstream header.
5806 void VmaJsonWriter::BeginValue(
bool isString)
5808 if(!m_Stack.empty())
5810 StackItem& currItem = m_Stack.back();
5811 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5812 currItem.valueCount % 2 == 0)
5814 VMA_ASSERT(isString);
5817 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5818 currItem.valueCount % 2 != 0)
5822 else if(currItem.valueCount > 0)
5831 ++currItem.valueCount;
5835 void VmaJsonWriter::WriteIndent(
bool oneLess)
5837 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
5841 size_t count = m_Stack.size();
5842 if(count > 0 && oneLess)
5846 for(
size_t i = 0; i < count; ++i)
// Sets the allocation's pUserData. When the allocation was created with the
// "user data is a string" flag, the old string is freed and the new one is
// deep-copied with the allocator's callbacks; otherwise the raw pointer is
// stored as-is (caller retains ownership).
5853 #endif // #if VMA_STATS_STRING_ENABLED 5857 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
5859 if(IsUserDataString())
// Passing the currently-stored string back in would be freed below — forbidden.
5861 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
5863 FreeUserDataString(hAllocator);
5865 if(pUserData != VMA_NULL)
5867 const char*
const newStrSrc = (
char*)pUserData;
5868 const size_t newStrLen = strlen(newStrSrc);
// +1 copies the terminating NUL as well.
char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
5870 memcpy(newStrDst, newStrSrc, newStrLen + 1);
5871 m_pUserData = newStrDst;
// Non-string mode: just store the opaque pointer.
5876 m_pUserData = pUserData;
// Moves this block-suballocated allocation to a new block/offset (used by
// defragmentation). If the allocation is persistently mapped, the mapping is
// migrated: unmap the old block, map the new one with the same ref count.
5880 void VmaAllocation_T::ChangeBlockAllocation(
5882 VmaDeviceMemoryBlock* block,
5883 VkDeviceSize offset)
5885 VMA_ASSERT(block != VMA_NULL);
5886 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5889 if(block != m_BlockAllocation.m_Block)
// Strip the persistent-map flag to get the plain map reference count.
5891 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
5892 if(IsPersistentMap())
5894 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
5895 block->Map(hAllocator, mapRefCount, VMA_NULL);
5898 m_BlockAllocation.m_Block = block;
5899 m_BlockAllocation.m_Offset = offset;
// Accessors that dispatch on the allocation type (block suballocation vs.
// dedicated VkDeviceMemory). Bodies are partially elided in this extract.

// Offset within the owning block; dedicated allocations presumably return 0
// (branch body not visible here — TODO confirm).
5902 VkDeviceSize VmaAllocation_T::GetOffset()
const 5906 case ALLOCATION_TYPE_BLOCK:
5907 return m_BlockAllocation.m_Offset;
5908 case ALLOCATION_TYPE_DEDICATED:

// Underlying VkDeviceMemory handle.
5916 VkDeviceMemory VmaAllocation_T::GetMemory()
const 5920 case ALLOCATION_TYPE_BLOCK:
5921 return m_BlockAllocation.m_Block->GetDeviceMemory();
5922 case ALLOCATION_TYPE_DEDICATED:
5923 return m_DedicatedAllocation.m_hMemory;
5926 return VK_NULL_HANDLE;

// Vulkan memory type index the allocation lives in.
5930 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 5934 case ALLOCATION_TYPE_BLOCK:
5935 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
5936 case ALLOCATION_TYPE_DEDICATED:
5937 return m_DedicatedAllocation.m_MemoryTypeIndex;

// CPU pointer for a mapped allocation. For block allocations the block's base
// mapping is offset by this allocation's offset; for dedicated allocations the
// stored pointer must agree with the map count.
5944 void* VmaAllocation_T::GetMappedData()
const 5948 case ALLOCATION_TYPE_BLOCK:
5951 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
5952 VMA_ASSERT(pBlockData != VMA_NULL);
5953 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
5960 case ALLOCATION_TYPE_DEDICATED:
5961 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
5962 return m_DedicatedAllocation.m_pMappedData;

// Whether this allocation participates in the lost-allocation mechanism
// (only meaningful for block allocations; dedicated path not visible here).
5969 bool VmaAllocation_T::CanBecomeLost()
const 5973 case ALLOCATION_TYPE_BLOCK:
5974 return m_BlockAllocation.m_CanBecomeLost;
5975 case ALLOCATION_TYPE_DEDICATED:

// Pool handle — only valid for block allocations.
5983 VmaPool VmaAllocation_T::GetPool()
const 5985 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5986 return m_BlockAllocation.m_hPool;

// Tries to mark the allocation lost via CAS on its last-use frame index.
// Fails if already lost or still in use within frameInUseCount frames; retries
// (loop structure elided in extract) when the CAS races with another thread.
5989 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5991 VMA_ASSERT(CanBecomeLost());
5997 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6000 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6005 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6011 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names indexed by VmaSuballocationType (stats JSON only).
6021 #if VMA_STATS_STRING_ENABLED 6024 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {

// Serializes this allocation's parameters as key/value pairs into an already
// open JSON object: Type, Size, optional UserData (string or pointer),
// CreationFrameIndex, LastUseFrameIndex, and Usage when nonzero.
6033 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6035 json.WriteString(
"Type");
6036 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6038 json.WriteString(
"Size");
6039 json.WriteNumber(m_Size);
6041 if(m_pUserData != VMA_NULL)
6043 json.WriteString(
"UserData");
// String user data is emitted verbatim; otherwise the raw pointer value.
6044 if(IsUserDataString())
6046 json.WriteString((
const char*)m_pUserData);
6051 json.ContinueString_Pointer(m_pUserData);
6056 json.WriteString(
"CreationFrameIndex");
6057 json.WriteNumber(m_CreationFrameIndex);
6059 json.WriteString(
"LastUseFrameIndex");
6060 json.WriteNumber(GetLastUseFrameIndex());
// Buffer/image usage flags are only recorded when known (nonzero).
6062 if(m_BufferImageUsage != 0)
6064 json.WriteString(
"Usage");
6065 json.WriteNumber(m_BufferImageUsage);
// Frees the deep-copied user-data string (allocated in SetUserData with
// vma_new_array of length strlen+1) and clears the pointer. Only valid in
// "user data is string" mode.
6071 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6073 VMA_ASSERT(IsUserDataString());
6074 if(m_pUserData != VMA_NULL)
6076 char*
const oldStr = (
char*)m_pUserData;
6077 const size_t oldStrLen = strlen(oldStr);
// Array length must match the allocation: strlen + 1 for the NUL.
6078 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6079 m_pUserData = VMA_NULL;
// Increments the map reference count of a block allocation. The count is the
// low 7 bits of m_MapCount (0x7F cap); the top bit is the persistent-map flag.
6083 void VmaAllocation_T::BlockAllocMap()
6085 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6087 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6093 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");

// Decrements the map reference count; asserts on unmap without a prior map.
6097 void VmaAllocation_T::BlockAllocUnmap()
6099 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6101 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6107 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation. If already mapped, bumps the ref count (capped
// at 0x7F) and returns the cached pointer; on first map calls vkMapMemory via
// the allocator's dispatch table and caches the result.
6111 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6113 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6117 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
// Already mapped: the cached pointer must be valid.
6119 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6120 *ppData = m_DedicatedAllocation.m_pMappedData;
6126 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6127 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: go through the (possibly user-supplied) function pointers.
6132 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6133 hAllocator->m_hDevice,
6134 m_DedicatedAllocation.m_hMemory,
6139 if(result == VK_SUCCESS)
6141 m_DedicatedAllocation.m_pMappedData = *ppData;

// Unmaps a dedicated allocation when its map ref count drops; clears the
// cached pointer before calling vkUnmapMemory. Asserts on unbalanced unmap.
6148 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6150 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6152 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6157 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6158 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6159 hAllocator->m_hDevice,
6160 m_DedicatedAllocation.m_hMemory);
6165 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Emits one VmaStatInfo as a JSON object: scalar counters plus two nested
// single-line objects (AllocationSize, UnusedRangeSize) with Min/Avg/Max.
// The numeric WriteNumber arguments are elided in this extract.
6169 #if VMA_STATS_STRING_ENABLED 6171 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6175 json.WriteString(
"Blocks");
6178 json.WriteString(
"Allocations");
6181 json.WriteString(
"UnusedRanges");
6184 json.WriteString(
"UsedBytes");
6187 json.WriteString(
"UnusedBytes");
// Allocation-size distribution, compact (single-line) object.
6192 json.WriteString(
"AllocationSize");
6193 json.BeginObject(
true);
6194 json.WriteString(
"Min");
6196 json.WriteString(
"Avg");
6198 json.WriteString(
"Max");
// Unused-range-size distribution, compact (single-line) object.
6205 json.WriteString(
"UnusedRangeSize");
6206 json.BeginObject(
true);
6207 json.WriteString(
"Min");
6209 json.WriteString(
"Avg");
6211 json.WriteString(
"Max");
// Comparator ordering free-suballocation list iterators by suballocation size.
// The second overload (iterator vs. plain size) enables heterogeneous binary
// search (e.g. VmaBinaryFindFirstNotLess) without constructing a dummy item.
6219 #endif // #if VMA_STATS_STRING_ENABLED 6221 struct VmaSuballocationItemSizeLess
6224 const VmaSuballocationList::iterator lhs,
6225 const VmaSuballocationList::iterator rhs)
const 6227 return lhs->size < rhs->size;
6230 const VmaSuballocationList::iterator lhs,
6231 VkDeviceSize rhsSize)
const 6233 return lhs->size < rhsSize;
// Shared JSON helpers used by the concrete metadata classes to serialize a
// block's detailed map: header totals, then one entry per (sub)allocation or
// unused range, then a footer (End).

// Writes the block header: TotalBytes/UnusedBytes/Allocations/UnusedRanges and
// opens the "Suballocations" collection.
6241 #if VMA_STATS_STRING_ENABLED 6243 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6244 VkDeviceSize unusedBytes,
6245 size_t allocationCount,
6246 size_t unusedRangeCount)
const 6250 json.WriteString(
"TotalBytes");
6251 json.WriteNumber(GetSize());
6253 json.WriteString(
"UnusedBytes");
6254 json.WriteNumber(unusedBytes);
6256 json.WriteString(
"Allocations");
// Cast to uint64_t selects the 64-bit WriteNumber overload portably.
6257 json.WriteNumber((uint64_t)allocationCount);
6259 json.WriteString(
"UnusedRanges");
6260 json.WriteNumber((uint64_t)unusedRangeCount);
6262 json.WriteString(
"Suballocations");

// Writes one occupied range as a compact object: Offset plus the allocation's
// own parameters (via PrintParameters).
6266 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6267 VkDeviceSize offset,
6270 json.BeginObject(
true);
6272 json.WriteString(
"Offset");
6273 json.WriteNumber(offset);
6275 hAllocation->PrintParameters(json);

// Writes one free range as a compact object: Offset, Type=FREE, Size.
6280 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6281 VkDeviceSize offset,
6282 VkDeviceSize size)
const 6284 json.BeginObject(
true);
6286 json.WriteString(
"Offset");
6287 json.WriteNumber(offset);
6289 json.WriteString(
"Type");
6290 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6292 json.WriteString(
"Size");
6293 json.WriteNumber(size);

// Closes the collections opened by PrintDetailedMap_Begin (body elided).
6298 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 6304 #endif // #if VMA_STATS_STRING_ENABLED 6309 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
// Generic metadata: suballocation list plus a size-sorted vector of iterators
// to free suballocations, both using the allocator's allocation callbacks.
VmaAllocator hAllocator) :
6312 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6313 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6317 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()

// Initializes metadata for a block of the given size: the whole block becomes
// a single FREE suballocation, registered in the by-size vector.
6321 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6323 VmaBlockMetadata::Init(size);
6325 m_SumFreeSize = size;
6327 VmaSuballocation suballoc = {};
6328 suballoc.offset = 0;
6329 suballoc.size = size;
6330 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6331 suballoc.hAllocation = VK_NULL_HANDLE;
6333 m_Suballocations.push_back(suballoc);
6334 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6336 m_FreeSuballocationsBySize.push_back(suballocItem);
// Consistency check over the whole suballocation list (debug aid): verifies
// contiguity of offsets, no two adjacent FREE items, hAllocation presence
// matching type, per-allocation offset/size agreement, and that the derived
// counters (free count, free size, registered-free count) match cached state.
6339 bool VmaBlockMetadata_Generic::Validate()
const 6341 if(m_Suballocations.empty())
6347 VkDeviceSize calculatedOffset = 0;
6349 uint32_t calculatedFreeCount = 0;
6351 VkDeviceSize calculatedSumFreeSize = 0;
// Free items large enough to appear in m_FreeSuballocationsBySize.
6354 size_t freeSuballocationsToRegister = 0;
6356 bool prevFree =
false;
6358 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6359 suballocItem != m_Suballocations.cend();
6362 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must tile the block with no gaps.
6365 if(subAlloc.offset != calculatedOffset)
6370 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two consecutive free ranges should have been merged.
6372 if(prevFree && currFree)
6377 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
6384 calculatedSumFreeSize += subAlloc.size;
6385 ++calculatedFreeCount;
6386 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6388 ++freeSuballocationsToRegister;
6392 if(subAlloc.size < VMA_DEBUG_MARGIN)
// Occupied items: allocation handle must agree with the list entry.
6399 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
6403 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
6409 if(VMA_DEBUG_MARGIN > 0 && !prevFree)
6415 calculatedOffset += subAlloc.size;
6416 prevFree = currFree;
6421 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size vector must contain only FREE items, sorted ascending by size.
6426 VkDeviceSize lastSize = 0;
6427 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6429 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6432 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
6437 if(suballocItem->size < lastSize)
6442 lastSize = suballocItem->size;
6446 if(!ValidateFreeSuballocationList() ||
6447 (calculatedOffset != GetSize()) ||
6448 (calculatedSumFreeSize != m_SumFreeSize) ||
6449 (calculatedFreeCount != m_FreeCount))

// Largest free range: last entry of the ascending by-size vector.
6457 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6459 if(!m_FreeSuballocationsBySize.empty())
6461 return m_FreeSuballocationsBySize.back()->size;

// Empty means exactly one suballocation and it is free.
6469 bool VmaBlockMetadata_Generic::IsEmpty()
const 6471 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Accumulates per-block statistics into outInfo by walking all suballocations
// (per-item accumulation statements elided in this extract).
6474 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6478 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6490 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6491 suballocItem != m_Suballocations.cend();
6494 const VmaSuballocation& suballoc = *suballocItem;
6495 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)

// Adds this block's totals to a VmaPoolStats accumulator.
6508 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6510 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6512 inoutStats.
size += GetSize();

// Serializes the block's suballocation map to JSON: header (Begin), then one
// entry per item (free -> UnusedRange, occupied -> Allocation), then End.
6519 #if VMA_STATS_STRING_ENABLED 6521 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6523 PrintDetailedMap_Begin(json,
// allocationCount = total items minus free items.
6525 m_Suballocations.size() - (size_t)m_FreeCount,
6529 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6530 suballocItem != m_Suballocations.cend();
6531 ++suballocItem, ++i)
6533 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6535 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6539 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6543 PrintDetailedMap_End(json);
// Searches this block for a place to put an allocation of allocSize/allocAlignment.
// Strategy (visible here): first try registered free ranges — either the smallest
// sufficient one (binary search, best-fit path) or from largest down (worst-fit
// path); if allowed, additionally consider making existing lost-capable
// allocations lost, choosing the candidate with the lowest CalcCost().
// Returns true and fills *pAllocationRequest on success.
6546 #endif // #if VMA_STATS_STRING_ENABLED 6558 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6559 uint32_t currentFrameIndex,
6560 uint32_t frameInUseCount,
6561 VkDeviceSize bufferImageGranularity,
6562 VkDeviceSize allocSize,
6563 VkDeviceSize allocAlignment,
6565 VmaSuballocationType allocType,
6566 bool canMakeOtherLost,
6567 VmaAllocationRequest* pAllocationRequest)
6569 VMA_ASSERT(allocSize > 0);
6570 VMA_ASSERT(!upperAddress);
6571 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6572 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6573 VMA_HEAVY_ASSERT(Validate());
// Early out: without the lost mechanism, total free space (including both
// debug margins) must at least cover the request.
6576 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6582 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6583 if(freeSuballocCount > 0)
// Best-fit: binary-search the ascending by-size vector for the first free
// range that can hold size + 2 * margin, then scan forward until one passes
// the full CheckAllocation (alignment/granularity constraints).
VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6589 m_FreeSuballocationsBySize.data(),
6590 m_FreeSuballocationsBySize.data() + freeSuballocCount,
6591 allocSize + 2 * VMA_DEBUG_MARGIN,
6592 VmaSuballocationItemSizeLess());
6593 size_t index = it - m_FreeSuballocationsBySize.data();
6594 for(; index < freeSuballocCount; ++index)
6599 bufferImageGranularity,
6603 m_FreeSuballocationsBySize[index],
6605 &pAllocationRequest->offset,
6606 &pAllocationRequest->itemsToMakeLostCount,
6607 &pAllocationRequest->sumFreeSize,
6608 &pAllocationRequest->sumItemSize))
6610 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternative path: iterate free ranges from largest to smallest.
6618 for(
size_t index = freeSuballocCount; index--; )
6623 bufferImageGranularity,
6627 m_FreeSuballocationsBySize[index],
6629 &pAllocationRequest->offset,
6630 &pAllocationRequest->itemsToMakeLostCount,
6631 &pAllocationRequest->sumFreeSize,
6632 &pAllocationRequest->sumItemSize))
6634 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Lost-allocation path: brute-force over all suballocations, keeping the
// cheapest viable request (CalcCost compares what would have to be lost).
6641 if(canMakeOtherLost)
6645 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
6646 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
6648 VmaAllocationRequest tmpAllocRequest = {};
6649 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
6650 suballocIt != m_Suballocations.end();
6653 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
6654 suballocIt->hAllocation->CanBecomeLost())
6659 bufferImageGranularity,
6665 &tmpAllocRequest.offset,
6666 &tmpAllocRequest.itemsToMakeLostCount,
6667 &tmpAllocRequest.sumFreeSize,
6668 &tmpAllocRequest.sumItemSize))
6670 tmpAllocRequest.item = suballocIt;
6672 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
6674 *pAllocationRequest = tmpAllocRequest;
// sumItemSize still at sentinel VK_WHOLE_SIZE => no candidate was found.
6680 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Executes the "make lost" part of a previously computed allocation request:
// walks forward from request->item, skipping free items, marking lost-capable
// allocations lost and freeing them until itemsToMakeLostCount reaches zero.
// On success request->item ends on a FREE suballocation.
6689 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
6690 uint32_t currentFrameIndex,
6691 uint32_t frameInUseCount,
6692 VmaAllocationRequest* pAllocationRequest)
6694 while(pAllocationRequest->itemsToMakeLostCount > 0)
6696 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
6698 ++pAllocationRequest->item;
6700 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end())
;
6701 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
6702 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
6703 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Freeing may merge with neighbors; the returned iterator stays valid.
6705 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
6706 --pAllocationRequest->itemsToMakeLostCount;
6714 VMA_HEAVY_ASSERT(Validate());
6715 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6716 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

// Makes every lost-capable, expired allocation in this block lost and frees
// its range; returns how many were lost.
6721 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6723 uint32_t lostAllocationCount = 0;
6724 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6725 it != m_Suballocations.end();
6728 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
6729 it->hAllocation->CanBecomeLost() &&
6730 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
6732 it = FreeSuballocation(it);
6733 ++lostAllocationCount;
6736 return lostAllocationCount;
// Validates the magic values written into the debug margins around every
// occupied suballocation in the mapped block data; returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first mismatch.
6739 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
6741 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6742 it != m_Suballocations.end();
6745 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Margin immediately before the allocation.
6747 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
6749 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
6750 return VK_ERROR_VALIDATION_FAILED_EXT;
// Margin immediately after the allocation.
6752 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
6754 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
6755 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: the chosen FREE
// suballocation is unregistered, shrunk to exactly [request.offset, +allocSize)
// and typed, and new FREE suballocations are inserted for any leftover padding
// after and before it; counters are updated last.
6763 void VmaBlockMetadata_Generic::Alloc(
6764 const VmaAllocationRequest& request,
6765 VmaSuballocationType type,
6766 VkDeviceSize allocSize,
6770 VMA_ASSERT(!upperAddress);
6771 VMA_ASSERT(request.item != m_Suballocations.end());
6772 VmaSuballocation& suballoc = *request.item;
6774 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6776 VMA_ASSERT(request.offset >= suballoc.offset);
// Space before/after the placed allocation inside the chosen free range.
6777 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
6778 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
6779 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the by-size vector before mutating size.
6783 UnregisterFreeSuballocation(request.item);
6785 suballoc.offset = request.offset;
6786 suballoc.size = allocSize;
6787 suballoc.type = type;
6788 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new FREE item inserted after the allocation.
6793 VmaSuballocation paddingSuballoc = {};
6794 paddingSuballoc.offset = request.offset + allocSize;
6795 paddingSuballoc.size = paddingEnd;
6796 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6797 VmaSuballocationList::iterator next = request.item;
6799 const VmaSuballocationList::iterator paddingEndItem =
6800 m_Suballocations.insert(next, paddingSuballoc);
6801 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new FREE item inserted before the allocation.
6807 VmaSuballocation paddingSuballoc = {};
6808 paddingSuballoc.offset = request.offset - paddingBegin;
6809 paddingSuballoc.size = paddingBegin;
6810 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6811 const VmaSuballocationList::iterator paddingBeginItem =
6812 m_Suballocations.insert(request.item, paddingSuballoc);
6813 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; paddings (if any) add back to the free count.
6817 m_FreeCount = m_FreeCount - 1;
6818 if(paddingBegin > 0)
6826 m_SumFreeSize -= allocSize;
// Frees the suballocation holding the given allocation handle — linear scan
// of the list; asserts if the handle is not found in this block.
6829 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
6831 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6832 suballocItem != m_Suballocations.end();
6835 VmaSuballocation& suballoc = *suballocItem;
6836 if(suballoc.hAllocation == allocation)
6838 FreeSuballocation(suballocItem);
6839 VMA_HEAVY_ASSERT(Validate());
6843 VMA_ASSERT(0 &&
"Not found!");

// Same as Free but keyed by the suballocation's starting offset.
6846 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
6848 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6849 suballocItem != m_Suballocations.end();
6852 VmaSuballocation& suballoc = *suballocItem;
6853 if(suballoc.offset == offset)
6855 FreeSuballocation(suballocItem);
6859 VMA_ASSERT(0 &&
"Not found!");
// Invariant check for m_FreeSuballocationsBySize: every entry is FREE, at
// least the registration threshold in size, and the vector is sorted by size
// ascending (lastSize tracks the previous element).
6862 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 6864 VkDeviceSize lastSize = 0;
6865 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
6867 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
6869 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
6874 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6879 if(it->size < lastSize)
6885 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment/allocType can start
// in the range beginning at suballocItem, honoring VMA_DEBUG_MARGIN and
// Vulkan's bufferImageGranularity (no linear/optimal mixing within a "page").
// Two major paths: canMakeOtherLost (may span several items, counting lost
// candidates into *itemsToMakeLostCount / *pSumItemSize) and the simple path
// (suballocItem itself must be a sufficiently large FREE range).
// Outputs the chosen *pOffset; returns success/failure (returns elided here).
6890 bool VmaBlockMetadata_Generic::CheckAllocation(
6891 uint32_t currentFrameIndex,
6892 uint32_t frameInUseCount,
6893 VkDeviceSize bufferImageGranularity,
6894 VkDeviceSize allocSize,
6895 VkDeviceSize allocAlignment,
6896 VmaSuballocationType allocType,
6897 VmaSuballocationList::const_iterator suballocItem,
6898 bool canMakeOtherLost,
6899 VkDeviceSize* pOffset,
6900 size_t* itemsToMakeLostCount,
6901 VkDeviceSize* pSumFreeSize,
6902 VkDeviceSize* pSumItemSize)
const 6904 VMA_ASSERT(allocSize > 0);
6905 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6906 VMA_ASSERT(suballocItem != m_Suballocations.cend());
6907 VMA_ASSERT(pOffset != VMA_NULL);
6909 *itemsToMakeLostCount = 0;
// ---- Path 1: allowed to make other allocations lost. ----
6913 if(canMakeOtherLost)
6915 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6917 *pSumFreeSize = suballocItem->size;
// Occupied start item only usable if it can be lost and is expired.
6921 if(suballocItem->hAllocation->CanBecomeLost() &&
6922 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6924 ++*itemsToMakeLostCount;
6925 *pSumItemSize = suballocItem->size;
// Remaining block space from this item must cover the request at all.
6934 if(GetSize() - suballocItem->offset < allocSize)
6940 *pOffset = suballocItem->offset;
// Leave room for the leading debug margin, then align.
6943 if(VMA_DEBUG_MARGIN > 0)
6945 *pOffset += VMA_DEBUG_MARGIN;
6949 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Respect bufferImageGranularity against preceding items of conflicting type.
6953 if(bufferImageGranularity > 1)
6955 bool bufferImageGranularityConflict =
false;
6956 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6957 while(prevSuballocItem != m_Suballocations.cbegin())
6960 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6961 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6963 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6965 bufferImageGranularityConflict =
true;
6973 if(bufferImageGranularityConflict)
// Push the start onto the next granularity page.
6975 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Degenerate case: alignment pushed us past the whole starting item.
6981 if(*pOffset >= suballocItem->offset + suballocItem->size)
6987 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
6990 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
6992 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
6994 if(suballocItem->offset + totalSize > GetSize())
// May need to consume following items: free ones add to *pSumFreeSize,
// lost-capable expired ones are counted as candidates to make lost.
7001 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7002 if(totalSize > suballocItem->size)
7004 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7005 while(remainingSize > 0)
7008 if(lastSuballocItem == m_Suballocations.cend())
7012 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7014 *pSumFreeSize += lastSuballocItem->size;
7018 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7019 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7020 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7022 ++*itemsToMakeLostCount;
7023 *pSumItemSize += lastSuballocItem->size;
7030 remainingSize = (lastSuballocItem->size < remainingSize) ?
7031 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following items; conflicting neighbors must
// themselves be lost-capable and expired, otherwise the spot is unusable.
7037 if(bufferImageGranularity > 1)
7039 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7041 while(nextSuballocItem != m_Suballocations.cend())
7043 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7044 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7046 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7048 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7049 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7050 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7052 ++*itemsToMakeLostCount;
// ---- Path 2: simple case — the item itself must be a FREE range. ----
7071 const VmaSuballocation& suballoc = *suballocItem;
7072 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7074 *pSumFreeSize = suballoc.size;
7077 if(suballoc.size < allocSize)
7083 *pOffset = suballoc.offset;
7086 if(VMA_DEBUG_MARGIN > 0)
7088 *pOffset += VMA_DEBUG_MARGIN;
7092 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backward granularity scan as in path 1.
7096 if(bufferImageGranularity > 1)
7098 bool bufferImageGranularityConflict =
false;
7099 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7100 while(prevSuballocItem != m_Suballocations.cbegin())
7103 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7104 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7106 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7108 bufferImageGranularityConflict =
true;
7116 if(bufferImageGranularityConflict)
7118 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7123 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7126 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// The aligned allocation plus end margin must fit inside this free range.
7129 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan — in this path any conflict is fatal.
7136 if(bufferImageGranularity > 1)
7138 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7140 while(nextSuballocItem != m_Suballocations.cend())
7142 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7143 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7145 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a FREE item with its (also FREE) successor: sizes are combined into
// item and the successor is erased from the list.
7164 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7166 VMA_ASSERT(item != m_Suballocations.end());
7167 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7169 VmaSuballocationList::iterator nextItem = item;
7171 VMA_ASSERT(nextItem != m_Suballocations.end());
7172 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7174 item->size += nextItem->size;
7176 m_Suballocations.erase(nextItem);

// Turns an occupied suballocation into a FREE one, coalesces it with free
// neighbors (next and/or previous), and keeps the by-size registration
// consistent. Returns an iterator to the resulting merged FREE item.
7179 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7182 VmaSuballocation& suballoc = *suballocItem;
7183 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7184 suballoc.hAllocation = VK_NULL_HANDLE;
7188 m_SumFreeSize += suballoc.size;
// Determine which neighbors are free and therefore mergeable.
7191 bool mergeWithNext =
false;
7192 bool mergeWithPrev =
false;
7194 VmaSuballocationList::iterator nextItem = suballocItem;
7196 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7198 mergeWithNext =
true;
7201 VmaSuballocationList::iterator prevItem = suballocItem;
7202 if(suballocItem != m_Suballocations.begin())
7205 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7207 mergeWithPrev =
true;
// Neighbors must be unregistered before their size changes via merging.
7213 UnregisterFreeSuballocation(nextItem);
7214 MergeFreeWithNext(suballocItem);
// Merging into prev: re-register prev with its new (larger) size.
7219 UnregisterFreeSuballocation(prevItem);
7220 MergeFreeWithNext(prevItem);
7221 RegisterFreeSuballocation(prevItem);
7226 RegisterFreeSuballocation(suballocItem);
7227 return suballocItem;
// Inserts a FREE item into m_FreeSuballocationsBySize (kept sorted by size)
// if it meets the registration size threshold.
7231 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7233 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7234 VMA_ASSERT(item->size > 0);
// Heavy (debug-only) invariant check before mutating the vector.
7238 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7240 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7242 if(m_FreeSuballocationsBySize.empty())
7244 m_FreeSuballocationsBySize.push_back(item);
7248 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);

// Removes a FREE item from the by-size vector: binary-search to the first
// entry of equal size, then scan forward through the equal-size run until the
// exact iterator is found; asserts if it is missing.
7256 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7258 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7259 VMA_ASSERT(item->size > 0);
7263 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7265 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7268 m_FreeSuballocationsBySize.data(),
7269 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7271 VmaSuballocationItemSizeLess());
7272 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7273 index < m_FreeSuballocationsBySize.size();
7276 if(m_FreeSuballocationsBySize[index] == item)
7278 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the run of equal sizes, otherwise the item was absent.
7281 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7283 VMA_ASSERT(0 &&
"Not found.");
// Linear metadata: two suballocation vectors (double-buffered roles selected
// by m_1stVectorIndex), a mode for the second vector (empty / ring buffer /
// double stack), and counts of null (freed placeholder) items.
7292 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7294 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7295 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7296 m_1stVectorIndex(0),
7297 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7298 m_1stNullItemsBeginCount(0),
7299 m_1stNullItemsMiddleCount(0),
7300 m_2ndNullItemsCount(0)
7304 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()

// Initializes an empty linear block: everything is free.
7308 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7310 VmaBlockMetadata::Init(size);
7311 m_SumFreeSize = size;
// Debug consistency check for the linear metadata: verifies vector/mode
// agreement, null-item bookkeeping, monotonically increasing offsets across
// (2nd-ring, then 1st, then 2nd-stack) traversal order, per-allocation
// offset/size agreement, and that m_SumFreeSize matches size - used.
7314 bool VmaBlockMetadata_Linear::Validate()
const 7316 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7317 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is empty exactly when its mode says so.
7319 if(suballocations2nd.empty() != (m_2ndVectorMode == SECOND_VECTOR_EMPTY))
// Ring buffer requires a non-empty 1st vector when 2nd has content.
7323 if(suballocations1st.empty() && !suballocations2nd.empty() &&
7324 m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7328 if(!suballocations1st.empty())
// First non-null item must be a real allocation; last must be non-null.
7331 if(suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
7336 if(suballocations1st.back().hAllocation == VK_NULL_HANDLE)
7341 if(!suballocations2nd.empty())
7344 if(suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
7350 if(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount > suballocations1st.size())
7354 if(m_2ndNullItemsCount > suballocations2nd.size())
7359 VkDeviceSize sumUsedSize = 0;
7360 const size_t suballoc1stCount = suballocations1st.size();
7361 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the low end, walked first.
7363 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7365 const size_t suballoc2ndCount = suballocations2nd.size();
7366 size_t nullItem2ndCount = 0;
7367 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7369 const VmaSuballocation& suballoc = suballocations2nd[i];
7370 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7372 if(currFree != (suballoc.hAllocation == VK_NULL_HANDLE))
7376 if(suballoc.offset < offset)
7383 if(suballoc.hAllocation->GetOffset() != suballoc.offset)
7387 if(suballoc.hAllocation->GetSize() != suballoc.size)
7391 sumUsedSize += suballoc.size;
7398 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7401 if(nullItem2ndCount != m_2ndNullItemsCount)
// Leading null items of the 1st vector must all be FREE placeholders.
7407 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7409 const VmaSuballocation& suballoc = suballocations1st[i];
7410 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE ||
7411 suballoc.hAllocation != VK_NULL_HANDLE)
7417 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the remainder of the 1st vector.
7419 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7421 const VmaSuballocation& suballoc = suballocations1st[i];
7422 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7424 if(currFree != (suballoc.hAllocation == VK_NULL_HANDLE))
7428 if(suballoc.offset < offset)
7432 if(i < m_1stNullItemsBeginCount && !currFree)
7439 if(suballoc.hAllocation->GetOffset() != suballoc.offset)
7443 if(suballoc.hAllocation->GetSize() != suballoc.size)
7447 sumUsedSize += suballoc.size;
7454 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7456 if(nullItem1stCount != m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount)
// Double-stack mode: 2nd vector occupies the high end, walked back-to-front.
7461 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7463 const size_t suballoc2ndCount = suballocations2nd.size();
7464 size_t nullItem2ndCount = 0;
7465 for(
size_t i = suballoc2ndCount; i--; )
7467 const VmaSuballocation& suballoc = suballocations2nd[i];
7468 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7470 if(currFree != (suballoc.hAllocation == VK_NULL_HANDLE))
7474 if(suballoc.offset < offset)
7481 if(suballoc.hAllocation->GetOffset() != suballoc.offset)
7485 if(suballoc.hAllocation->GetSize() != suballoc.size)
7489 sumUsedSize += suballoc.size;
7496 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7499 if(nullItem2ndCount != m_2ndNullItemsCount)
7505 if(offset > GetSize())
7509 if(m_SumFreeSize != GetSize() - sumUsedSize)
// Live allocation count = items in both vectors minus the tracked null
// (freed placeholder) items.
7517 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7519 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7520 AccessSuballocations2nd().size() - m_2ndNullItemsCount;

// Largest contiguous free range, depending on the 2nd-vector mode:
// - EMPTY: gap before the 1st vector vs. gap after it (max of the two);
// - RING_BUFFER: gap between the end of the 2nd vector and the start of 1st;
// - DOUBLE_STACK: gap between the top of the 1st stack and the 2nd stack.
7523 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7525 const VkDeviceSize size = GetSize();
7537 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7539 switch(m_2ndVectorMode)
7541 case SECOND_VECTOR_EMPTY:
7547 const size_t suballocations1stCount = suballocations1st.size();
7548 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
7549 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
7550 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
7552 firstSuballoc.offset,
7553 size - (lastSuballoc.offset + lastSuballoc.size));
7557 case SECOND_VECTOR_RING_BUFFER:
7562 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7563 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
7564 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
7565 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
7569 case SECOND_VECTOR_DOUBLE_STACK:
7574 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7575 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
7576 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
7577 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with allocation statistics (counts and used/unused range sizes)
// by walking all three regions of the linear block in address order:
// 1) 2nd vector as the lower part of a ring buffer, 2) the 1st vector,
// 3) 2nd vector as an upper stack (iterated from the top downward).
// NOTE(review): this region is extraction-garbled — original source line
// numbers are fused into the code and many interior lines (braces, the
// VmaAddStatInfo* accumulation calls) are missing. Comments describe the
// intent of the surviving fragments only.
7587 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7589 const VkDeviceSize size = GetSize();
7590 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7591 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7592 const size_t suballoc1stCount = suballocations1st.size();
7593 const size_t suballoc2ndCount = suballocations2nd.size();
// Running cursor: end offset of the last range already accounted for.
7604 VkDeviceSize lastOffset = 0;
// Region 1: 2nd vector used as a ring buffer occupies [0, start of 1st).
7606 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7608 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7609 size_t nextAlloc2ndIndex = 0;
7610 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip entries already freed (null hAllocation).
7613 while(nextAlloc2ndIndex < suballoc2ndCount &&
7614 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7616 ++nextAlloc2ndIndex;
7620 if(nextAlloc2ndIndex < suballoc2ndCount)
7622 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
7625 if(lastOffset < suballoc.offset)
7628 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7642 lastOffset = suballoc.offset + suballoc.size;
7643 ++nextAlloc2ndIndex;
// Trailing free space up to the start of the 1st vector.
7649 if(lastOffset < freeSpace2ndTo1stEnd)
7651 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7659 lastOffset = freeSpace2ndTo1stEnd;
// Region 2: the 1st vector, up to either the bottom of the upper stack or
// the end of the block.
7664 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7665 const VkDeviceSize freeSpace1stTo2ndEnd =
7666 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7667 while(lastOffset < freeSpace1stTo2ndEnd)
7670 while(nextAlloc1stIndex < suballoc1stCount &&
7671 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
7673 ++nextAlloc1stIndex;
7677 if(nextAlloc1stIndex < suballoc1stCount)
7679 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7682 if(lastOffset < suballoc.offset)
7685 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7699 lastOffset = suballoc.offset + suballoc.size;
7700 ++nextAlloc1stIndex;
7706 if(lastOffset < freeSpace1stTo2ndEnd)
7708 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7716 lastOffset = freeSpace1stTo2ndEnd;
// Region 3: 2nd vector used as an upper stack — walk from the top entry
// (highest index, lowest offset first is reversed, hence the decrement).
7720 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7722 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7723 while(lastOffset < size)
// SIZE_MAX results from decrementing index 0, i.e. the vector is exhausted.
7726 while(nextAlloc2ndIndex != SIZE_MAX &&
7727 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7729 --nextAlloc2ndIndex;
7733 if(nextAlloc2ndIndex != SIZE_MAX)
7735 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7738 if(lastOffset < suballoc.offset)
7741 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7755 lastOffset = suballoc.offset + suballoc.size;
7756 --nextAlloc2ndIndex;
// Free space after the last allocation up to the end of the block.
7762 if(lastOffset < size)
7764 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's statistics into inoutStats (pool-level totals).
// Walks the same three address-ordered regions as CalcAllocationStatInfo:
// ring-buffer part of 2nd, then 1st, then 2nd as an upper stack.
// NOTE(review): extraction-garbled region — embedded original line numbers,
// missing braces and missing accumulation statements. Comments describe the
// intent of the surviving fragments only.
7780 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 7782 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7783 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7784 const VkDeviceSize size = GetSize();
7785 const size_t suballoc1stCount = suballocations1st.size();
7786 const size_t suballoc2ndCount = suballocations2nd.size();
// Whole block size contributes to the pool total.
7788 inoutStats.
size += size;
7790 VkDeviceSize lastOffset = 0;
// Region 1: 2nd vector as ring buffer, occupying [0, start of 1st).
7792 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7794 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): starting at m_1stNullItemsBeginCount here (the sibling
// functions start at 0 for the 2nd vector) — looks suspicious; confirm
// against upstream VMA before relying on this.
7795 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
7796 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed entries.
7799 while(nextAlloc2ndIndex < suballoc2ndCount &&
7800 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7802 ++nextAlloc2ndIndex;
7806 if(nextAlloc2ndIndex < suballoc2ndCount)
7808 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation is an unused range.
7811 if(lastOffset < suballoc.offset)
7814 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7825 lastOffset = suballoc.offset + suballoc.size;
7826 ++nextAlloc2ndIndex;
7831 if(lastOffset < freeSpace2ndTo1stEnd)
7834 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7841 lastOffset = freeSpace2ndTo1stEnd;
// Region 2: the 1st vector.
7846 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7847 const VkDeviceSize freeSpace1stTo2ndEnd =
7848 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7849 while(lastOffset < freeSpace1stTo2ndEnd)
7852 while(nextAlloc1stIndex < suballoc1stCount &&
7853 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
7855 ++nextAlloc1stIndex;
7859 if(nextAlloc1stIndex < suballoc1stCount)
7861 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7864 if(lastOffset < suballoc.offset)
7867 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7878 lastOffset = suballoc.offset + suballoc.size;
7879 ++nextAlloc1stIndex;
7884 if(lastOffset < freeSpace1stTo2ndEnd)
7887 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7894 lastOffset = freeSpace1stTo2ndEnd;
// Region 3: 2nd vector as an upper stack, iterated from the top entry down.
7898 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7900 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7901 while(lastOffset < size)
// SIZE_MAX means the index wrapped below zero — vector exhausted.
7904 while(nextAlloc2ndIndex != SIZE_MAX &&
7905 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7907 --nextAlloc2ndIndex;
7911 if(nextAlloc2ndIndex != SIZE_MAX)
7913 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7916 if(lastOffset < suballoc.offset)
7919 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7930 lastOffset = suballoc.offset + suballoc.size;
7931 --nextAlloc2ndIndex;
// Trailing free space up to the end of the block.
7936 if(lastOffset < size)
7939 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Emits a detailed JSON map of this block: first a counting pass (allocations,
// unused ranges, used bytes) to write the summary header, then a second pass
// that emits each allocation / unused range in address order. Both passes walk
// the same three regions: ring-buffer part of 2nd, then 1st, then 2nd as an
// upper stack.
// NOTE(review): extraction-garbled region — embedded original line numbers,
// missing braces and several missing statements (e.g. counter increments).
// Comments describe intent of the surviving fragments only.
7952 #if VMA_STATS_STRING_ENABLED 7953 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 7955 const VkDeviceSize size = GetSize();
7956 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7957 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7958 const size_t suballoc1stCount = suballocations1st.size();
7959 const size_t suballoc2ndCount = suballocations2nd.size();
// ---- PASS 1: count allocations / unused ranges and sum used bytes ----
7963 size_t unusedRangeCount = 0;
7964 VkDeviceSize usedBytes = 0;
7966 VkDeviceSize lastOffset = 0;
7968 size_t alloc2ndCount = 0;
// Region 1: 2nd vector as ring buffer in [0, start of 1st).
7969 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7971 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7972 size_t nextAlloc2ndIndex = 0;
7973 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed entries.
7976 while(nextAlloc2ndIndex < suballoc2ndCount &&
7977 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7979 ++nextAlloc2ndIndex;
7983 if(nextAlloc2ndIndex < suballoc2ndCount)
7985 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7988 if(lastOffset < suballoc.offset)
7997 usedBytes += suballoc.size;
8000 lastOffset = suballoc.offset + suballoc.size;
8001 ++nextAlloc2ndIndex;
8006 if(lastOffset < freeSpace2ndTo1stEnd)
8013 lastOffset = freeSpace2ndTo1stEnd;
// Region 2: the 1st vector.
8018 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8019 size_t alloc1stCount = 0;
8020 const VkDeviceSize freeSpace1stTo2ndEnd =
8021 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8022 while(lastOffset < freeSpace1stTo2ndEnd)
8025 while(nextAlloc1stIndex < suballoc1stCount &&
8026 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8028 ++nextAlloc1stIndex;
8032 if(nextAlloc1stIndex < suballoc1stCount)
8034 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8037 if(lastOffset < suballoc.offset)
8046 usedBytes += suballoc.size;
8049 lastOffset = suballoc.offset + suballoc.size;
8050 ++nextAlloc1stIndex;
8055 if(lastOffset < size)
8062 lastOffset = freeSpace1stTo2ndEnd;
// Region 3: 2nd vector as upper stack, top entry first.
8066 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8068 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8069 while(lastOffset < size)
// SIZE_MAX: index decremented past 0, vector exhausted.
8072 while(nextAlloc2ndIndex != SIZE_MAX &&
8073 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8075 --nextAlloc2ndIndex;
8079 if(nextAlloc2ndIndex != SIZE_MAX)
8081 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8084 if(lastOffset < suballoc.offset)
8093 usedBytes += suballoc.size;
8096 lastOffset = suballoc.offset + suballoc.size;
8097 --nextAlloc2ndIndex;
8102 if(lastOffset < size)
// ---- Emit JSON header from the pass-1 totals ----
8114 const VkDeviceSize unusedBytes = size - usedBytes;
8115 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// ---- PASS 2: emit every allocation and unused range in address order ----
// (lastOffset is presumably reset to 0 in an elided line — TODO confirm.)
// Region 1 again: ring-buffer part of 2nd.
8120 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8122 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8123 size_t nextAlloc2ndIndex = 0;
8124 while(lastOffset < freeSpace2ndTo1stEnd)
8127 while(nextAlloc2ndIndex < suballoc2ndCount &&
8128 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8130 ++nextAlloc2ndIndex;
8134 if(nextAlloc2ndIndex < suballoc2ndCount)
8136 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation → emit an unused-range node.
8139 if(lastOffset < suballoc.offset)
8142 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8143 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8148 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8151 lastOffset = suballoc.offset + suballoc.size;
8152 ++nextAlloc2ndIndex;
8157 if(lastOffset < freeSpace2ndTo1stEnd)
8160 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8161 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8165 lastOffset = freeSpace2ndTo1stEnd;
// Region 2 again: the 1st vector.
8170 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8171 while(lastOffset < freeSpace1stTo2ndEnd)
8174 while(nextAlloc1stIndex < suballoc1stCount &&
8175 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8177 ++nextAlloc1stIndex;
8181 if(nextAlloc1stIndex < suballoc1stCount)
8183 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8186 if(lastOffset < suballoc.offset)
8189 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8190 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8195 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8198 lastOffset = suballoc.offset + suballoc.size;
8199 ++nextAlloc1stIndex;
8204 if(lastOffset < freeSpace1stTo2ndEnd)
8207 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8208 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8212 lastOffset = freeSpace1stTo2ndEnd;
// Region 3 again: upper stack of 2nd.
8216 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8218 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8219 while(lastOffset < size)
8222 while(nextAlloc2ndIndex != SIZE_MAX &&
8223 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8225 --nextAlloc2ndIndex;
8229 if(nextAlloc2ndIndex != SIZE_MAX)
8231 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8234 if(lastOffset < suballoc.offset)
8237 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8238 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8243 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8246 lastOffset = suballoc.offset + suballoc.size;
8247 --nextAlloc2ndIndex;
8252 if(lastOffset < size)
8255 const VkDeviceSize unusedRangeSize = size - lastOffset;
8256 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
// Close the JSON object.
8265 PrintDetailedMap_End(json);
// Tries to find space for a new allocation of allocSize/allocAlignment in this
// linear block and fills *pAllocationRequest on success. Three placement
// strategies, depending on request direction and current vector mode:
// 1) upper address → push down from the top of the block (double stack),
// 2) lower address with room after 1st → append to end of 1st,
// 3) otherwise → wrap around before the start of 1st (ring buffer), possibly
//    making existing lost-capable allocations lost (canMakeOtherLost).
// Returns false when the request cannot be satisfied.
// NOTE(review): extraction-garbled region — embedded original line numbers,
// missing braces/returns and an elided upperAddress parameter line. Comments
// describe intent of the surviving fragments only.
8267 #endif // #if VMA_STATS_STRING_ENABLED 8269 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8270 uint32_t currentFrameIndex,
8271 uint32_t frameInUseCount,
8272 VkDeviceSize bufferImageGranularity,
8273 VkDeviceSize allocSize,
8274 VkDeviceSize allocAlignment,
8276 VmaSuballocationType allocType,
8277 bool canMakeOtherLost,
8278 VmaAllocationRequest* pAllocationRequest)
8280 VMA_ASSERT(allocSize > 0);
8281 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8282 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8283 VMA_HEAVY_ASSERT(Validate());
8285 const VkDeviceSize size = GetSize();
8286 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8287 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// --- Strategy 1: allocate at the upper address (grow 2nd vector downward) ---
// Double-stack use is incompatible with ring-buffer use of the 2nd vector.
8291 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8293 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8298 if(allocSize > size)
// Start from the top of the block, or just below the current top of 2nd.
8302 VkDeviceSize resultBaseOffset = size - allocSize;
8303 if(!suballocations2nd.empty())
8305 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8306 resultBaseOffset = lastSuballoc.offset - allocSize;
8307 if(allocSize > lastSuballoc.offset)
8314 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin below the allocation (growing downward).
8317 if(VMA_DEBUG_MARGIN > 0)
8319 if(resultOffset < VMA_DEBUG_MARGIN)
8323 resultOffset -= VMA_DEBUG_MARGIN;
// Align downward (allocation grows toward lower addresses).
8327 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Respect bufferImageGranularity against neighbors above (in 2nd vector).
8331 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8333 bool bufferImageGranularityConflict =
false;
8334 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8336 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8337 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8339 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8341 bufferImageGranularityConflict =
true;
8349 if(bufferImageGranularityConflict)
8351 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Check there is still free space between end of 1st and the new allocation.
8356 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8357 suballocations1st.back().offset + suballocations1st.back().size :
8359 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity conflicts against the end of the 1st vector.
8363 if(bufferImageGranularity > 1)
8365 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8367 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8368 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8370 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill out the request (no items to make lost on this path).
8384 pAllocationRequest->offset = resultOffset;
8385 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8386 pAllocationRequest->sumItemSize = 0;
8388 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy 2: append after the end of the 1st vector ---
8394 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8398 VkDeviceSize resultBaseOffset = 0;
8399 if(!suballocations1st.empty())
8401 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8402 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8406 VkDeviceSize resultOffset = resultBaseOffset;
// Debug margin before the allocation.
8409 if(VMA_DEBUG_MARGIN > 0)
8411 resultOffset += VMA_DEBUG_MARGIN;
// Align upward (allocation grows toward higher addresses).
8415 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflicts against previous allocations in 1st.
8419 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8421 bool bufferImageGranularityConflict =
false;
8422 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8424 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8425 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8427 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8429 bufferImageGranularityConflict =
true;
8437 if(bufferImageGranularityConflict)
8439 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the upper stack, or at the block end.
8443 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8444 suballocations2nd.back().offset : size;
8447 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Granularity conflicts against allocations in the upper stack above us.
8451 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8453 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8455 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8456 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8458 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: fill out the request (no items to make lost on this path).
8472 pAllocationRequest->offset = resultOffset;
8473 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8474 pAllocationRequest->sumItemSize = 0;
8476 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy 3: wrap around — allocate before the start of 1st (ring) ---
8483 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// Wrapping only makes sense when the 1st vector is not empty.
8485 VMA_ASSERT(!suballocations1st.empty());
8487 VkDeviceSize resultBaseOffset = 0;
8488 if(!suballocations2nd.empty())
8490 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8491 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8495 VkDeviceSize resultOffset = resultBaseOffset;
8498 if(VMA_DEBUG_MARGIN > 0)
8500 resultOffset += VMA_DEBUG_MARGIN;
8504 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflicts against previous allocations in 2nd (ring part).
8508 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8510 bool bufferImageGranularityConflict =
false;
8511 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8513 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8514 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8516 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8518 bufferImageGranularityConflict =
true;
8526 if(bufferImageGranularityConflict)
8528 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8532 pAllocationRequest->itemsToMakeLostCount = 0;
8533 pAllocationRequest->sumItemSize = 0;
8534 size_t index1st = m_1stNullItemsBeginCount;
// Optionally make overlapping lost-capable allocations from 1st lost to
// make room for the wrapped allocation.
8536 if(canMakeOtherLost)
8538 while(index1st < suballocations1st.size() &&
8539 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
// Next collision: if free, nothing to do; if lost-capable and expired,
// count it; otherwise the request fails.
8542 const VmaSuballocation& suballoc = suballocations1st[index1st];
8543 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
8549 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8550 if(suballoc.hAllocation->CanBecomeLost() &&
8551 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8553 ++pAllocationRequest->itemsToMakeLostCount;
8554 pAllocationRequest->sumItemSize += suballoc.size;
// Also make lost any allocation sharing a granularity page with us.
8566 if(bufferImageGranularity > 1)
8568 while(index1st < suballocations1st.size())
8570 const VmaSuballocation& suballoc = suballocations1st[index1st];
8571 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
8573 if(suballoc.hAllocation != VK_NULL_HANDLE)
8576 if(suballoc.hAllocation->CanBecomeLost() &&
8577 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8579 ++pAllocationRequest->itemsToMakeLostCount;
8580 pAllocationRequest->sumItemSize += suballoc.size;
// The wrapped allocation must fit before index1st (or before block end).
8599 if(index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size ||
8600 index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset)
// Final granularity check against following allocations in 1st.
8604 if(bufferImageGranularity > 1)
8606 for(
size_t nextSuballocIndex = index1st;
8607 nextSuballocIndex < suballocations1st.size();
8608 nextSuballocIndex++)
8610 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
8611 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8613 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: free size is the span up to the blocking item, minus the
// bytes we would reclaim by making items lost.
8627 pAllocationRequest->offset = resultOffset;
8628 pAllocationRequest->sumFreeSize =
8629 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
8631 - pAllocationRequest->sumItemSize;
8641 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
8642 uint32_t currentFrameIndex,
8643 uint32_t frameInUseCount,
8644 VmaAllocationRequest* pAllocationRequest)
8646 if(pAllocationRequest->itemsToMakeLostCount == 0)
8651 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
8653 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8654 size_t index1st = m_1stNullItemsBeginCount;
8655 size_t madeLostCount = 0;
8656 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
8658 VMA_ASSERT(index1st < suballocations1st.size());
8659 VmaSuballocation& suballoc = suballocations1st[index1st];
8660 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8662 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8663 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
8664 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8666 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8667 suballoc.hAllocation = VK_NULL_HANDLE;
8668 m_SumFreeSize += suballoc.size;
8669 ++m_1stNullItemsMiddleCount;
8686 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8688 uint32_t lostAllocationCount = 0;
8690 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8691 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8693 VmaSuballocation& suballoc = suballocations1st[i];
8694 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8695 suballoc.hAllocation->CanBecomeLost() &&
8696 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8698 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8699 suballoc.hAllocation = VK_NULL_HANDLE;
8700 ++m_1stNullItemsMiddleCount;
8701 m_SumFreeSize += suballoc.size;
8702 ++lostAllocationCount;
8706 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8707 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8709 VmaSuballocation& suballoc = suballocations2nd[i];
8710 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8711 suballoc.hAllocation->CanBecomeLost() &&
8712 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8714 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8715 suballoc.hAllocation = VK_NULL_HANDLE;
8716 ++m_2ndNullItemsCount;
8717 ++lostAllocationCount;
8721 if(lostAllocationCount)
8726 return lostAllocationCount;
8729 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
8731 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8732 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8734 const VmaSuballocation& suballoc = suballocations1st[i];
8735 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8737 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
8739 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8740 return VK_ERROR_VALIDATION_FAILED_EXT;
8742 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8744 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8745 return VK_ERROR_VALIDATION_FAILED_EXT;
8750 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8751 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8753 const VmaSuballocation& suballoc = suballocations2nd[i];
8754 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8756 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
8758 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8759 return VK_ERROR_VALIDATION_FAILED_EXT;
8761 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8763 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8764 return VK_ERROR_VALIDATION_FAILED_EXT;
8772 void VmaBlockMetadata_Linear::Alloc(
8773 const VmaAllocationRequest& request,
8774 VmaSuballocationType type,
8775 VkDeviceSize allocSize,
8779 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
8783 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
8784 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
8785 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8786 suballocations2nd.push_back(newSuballoc);
8787 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
8791 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8794 if(suballocations1st.empty())
8796 suballocations1st.push_back(newSuballoc);
8801 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
8804 VMA_ASSERT(request.offset + allocSize <= GetSize());
8805 suballocations1st.push_back(newSuballoc);
8808 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
8810 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8812 switch(m_2ndVectorMode)
8814 case SECOND_VECTOR_EMPTY:
8816 VMA_ASSERT(suballocations2nd.empty());
8817 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
8819 case SECOND_VECTOR_RING_BUFFER:
8821 VMA_ASSERT(!suballocations2nd.empty());
8823 case SECOND_VECTOR_DOUBLE_STACK:
8824 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
8830 suballocations2nd.push_back(newSuballoc);
8834 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
8839 m_SumFreeSize -= newSuballoc.size;
8842 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
8844 FreeAtOffset(allocation->GetOffset());
8847 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
8849 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8850 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8852 if(!suballocations1st.empty())
8855 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8856 if(firstSuballoc.offset == offset)
8858 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8859 firstSuballoc.hAllocation = VK_NULL_HANDLE;
8860 m_SumFreeSize += firstSuballoc.size;
8861 ++m_1stNullItemsBeginCount;
8868 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
8869 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8871 VmaSuballocation& lastSuballoc = suballocations2nd.back();
8872 if(lastSuballoc.offset == offset)
8874 m_SumFreeSize += lastSuballoc.size;
8875 suballocations2nd.pop_back();
8881 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
8883 VmaSuballocation& lastSuballoc = suballocations1st.back();
8884 if(lastSuballoc.offset == offset)
8886 m_SumFreeSize += lastSuballoc.size;
8887 suballocations1st.pop_back();
8895 VmaSuballocation refSuballoc;
8896 refSuballoc.offset = offset;
8898 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
8899 suballocations1st.begin() + m_1stNullItemsBeginCount,
8900 suballocations1st.end(),
8902 if(it != suballocations1st.end())
8904 it->type = VMA_SUBALLOCATION_TYPE_FREE;
8905 it->hAllocation = VK_NULL_HANDLE;
8906 ++m_1stNullItemsMiddleCount;
8907 m_SumFreeSize += it->size;
8913 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
8916 VmaSuballocation refSuballoc;
8917 refSuballoc.offset = offset;
8919 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
8920 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
8921 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
8922 if(it != suballocations2nd.end())
8924 it->type = VMA_SUBALLOCATION_TYPE_FREE;
8925 it->hAllocation = VK_NULL_HANDLE;
8926 ++m_2ndNullItemsCount;
8927 m_SumFreeSize += it->size;
8933 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
8936 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 8938 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
8939 const size_t suballocCount = AccessSuballocations1st().size();
8940 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
8943 void VmaBlockMetadata_Linear::CleanupAfterFree()
8945 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8946 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8950 suballocations1st.clear();
8951 suballocations2nd.clear();
8952 m_1stNullItemsBeginCount = 0;
8953 m_1stNullItemsMiddleCount = 0;
8954 m_2ndNullItemsCount = 0;
8955 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
8959 const size_t suballoc1stCount = suballocations1st.size();
8960 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
8961 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
8964 while(m_1stNullItemsBeginCount < suballoc1stCount &&
8965 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
8967 ++m_1stNullItemsBeginCount;
8968 --m_1stNullItemsMiddleCount;
8972 while(m_1stNullItemsMiddleCount > 0 &&
8973 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
8975 --m_1stNullItemsMiddleCount;
8976 suballocations1st.pop_back();
8980 while(m_2ndNullItemsCount > 0 &&
8981 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
8983 --m_2ndNullItemsCount;
8984 suballocations2nd.pop_back();
8987 if(ShouldCompact1st())
8989 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
8990 size_t srcIndex = m_1stNullItemsBeginCount;
8991 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
8993 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
8997 if(dstIndex != srcIndex)
8999 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9003 suballocations1st.resize(nonNullItemCount);
9004 m_1stNullItemsBeginCount = 0;
9005 m_1stNullItemsMiddleCount = 0;
9009 if(suballocations2nd.empty())
9011 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9015 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9017 suballocations1st.clear();
9018 m_1stNullItemsBeginCount = 0;
9020 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9023 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9024 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9025 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9026 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9028 ++m_1stNullItemsBeginCount;
9029 --m_1stNullItemsMiddleCount;
9031 m_2ndNullItemsCount = 0;
9032 m_1stVectorIndex ^= 1;
9037 VMA_HEAVY_ASSERT(Validate());
// Constructor: initializes members to empty/null defaults; real setup happens
// in Init(). NOTE(review): extraction-garbled — two initializer lines (original
// lines 9047 and 9049, member identities unknown from here) are missing, so
// the full member-initializer list cannot be confirmed from this view.
9044 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
9045 m_pMetadata(VMA_NULL),
9046 m_MemoryTypeIndex(UINT32_MAX),
9048 m_hMemory(VK_NULL_HANDLE),
9050 m_pMappedData(VMA_NULL)
// One-time initialization of a freshly constructed block: stores the memory
// type and VkDeviceMemory handle, then creates the metadata object — linear
// or generic depending on the linearAlgorithm flag — and sizes it.
// NOTE(review): extraction-garbled — parameter lines 9055/9059 and the
// if/else around the two vma_new calls are missing from this view.
9054 void VmaDeviceMemoryBlock::Init(
9056 uint32_t newMemoryTypeIndex,
9057 VkDeviceMemory newMemory,
9058 VkDeviceSize newSize,
9060 bool linearAlgorithm)
// Init must only run once per block.
9062 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
9064 m_MemoryTypeIndex = newMemoryTypeIndex;
9066 m_hMemory = newMemory;
// Metadata strategy selection (condition elided by extraction):
// linear allocator...
9070 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
// ...or the default generic (free-list) allocator.
9074 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
9076 m_pMetadata->Init(newSize);
9079 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
9083 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
9085 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
9086 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
9087 m_hMemory = VK_NULL_HANDLE;
9089 vma_delete(allocator, m_pMetadata);
9090 m_pMetadata = VMA_NULL;
9093 bool VmaDeviceMemoryBlock::Validate()
const 9095 if((m_hMemory == VK_NULL_HANDLE) ||
9096 (m_pMetadata->GetSize() == 0))
9101 return m_pMetadata->Validate();
9104 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
9106 void* pData =
nullptr;
9107 VkResult res = Map(hAllocator, 1, &pData);
9108 if(res != VK_SUCCESS)
9113 res = m_pMetadata->CheckCorruption(pData);
9115 Unmap(hAllocator, 1);
9120 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
9127 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9130 m_MapCount += count;
9131 VMA_ASSERT(m_pMappedData != VMA_NULL);
9132 if(ppData != VMA_NULL)
9134 *ppData = m_pMappedData;
9140 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9141 hAllocator->m_hDevice,
9147 if(result == VK_SUCCESS)
9149 if(ppData != VMA_NULL)
9151 *ppData = m_pMappedData;
9159 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
9166 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9167 if(m_MapCount >= count)
9169 m_MapCount -= count;
9172 m_pMappedData = VMA_NULL;
9173 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
9178 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
9182 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
9184 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
9185 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
9188 VkResult res = Map(hAllocator, 1, &pData);
9189 if(res != VK_SUCCESS)
9194 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
9195 VmaWriteMagicValue(pData, allocOffset + allocSize);
9197 Unmap(hAllocator, 1);
9202 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
9204 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
9205 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
9208 VkResult res = Map(hAllocator, 1, &pData);
9209 if(res != VK_SUCCESS)
9214 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
9216 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
9218 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
9220 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
9223 Unmap(hAllocator, 1);
9228 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
9233 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
9234 hAllocation->GetBlock() ==
this);
9236 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9237 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
9238 hAllocator->m_hDevice,
9241 hAllocation->GetOffset());
9244 VkResult VmaDeviceMemoryBlock::BindImageMemory(
9249 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
9250 hAllocation->GetBlock() ==
this);
9252 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9253 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
9254 hAllocator->m_hDevice,
9257 hAllocation->GetOffset());
9262 memset(&outInfo, 0,
sizeof(outInfo));
9281 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
9289 VmaPool_T::VmaPool_T(
9294 createInfo.memoryTypeIndex,
9295 createInfo.blockSize,
9296 createInfo.minBlockCount,
9297 createInfo.maxBlockCount,
9299 createInfo.frameInUseCount,
9306 VmaPool_T::~VmaPool_T()
9310 #if VMA_STATS_STRING_ENABLED 9312 #endif // #if VMA_STATS_STRING_ENABLED 9314 VmaBlockVector::VmaBlockVector(
9316 uint32_t memoryTypeIndex,
9317 VkDeviceSize preferredBlockSize,
9318 size_t minBlockCount,
9319 size_t maxBlockCount,
9320 VkDeviceSize bufferImageGranularity,
9321 uint32_t frameInUseCount,
9323 bool linearAlgorithm) :
9324 m_hAllocator(hAllocator),
9325 m_MemoryTypeIndex(memoryTypeIndex),
9326 m_PreferredBlockSize(preferredBlockSize),
9327 m_MinBlockCount(minBlockCount),
9328 m_MaxBlockCount(maxBlockCount),
9329 m_BufferImageGranularity(bufferImageGranularity),
9330 m_FrameInUseCount(frameInUseCount),
9331 m_IsCustomPool(isCustomPool),
9332 m_LinearAlgorithm(linearAlgorithm),
9333 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
9334 m_HasEmptyBlock(false),
9335 m_pDefragmentator(VMA_NULL),
9340 VmaBlockVector::~VmaBlockVector()
9342 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
9344 for(
size_t i = m_Blocks.size(); i--; )
9346 m_Blocks[i]->Destroy(m_hAllocator);
9347 vma_delete(m_hAllocator, m_Blocks[i]);
9351 VkResult VmaBlockVector::CreateMinBlocks()
9353 for(
size_t i = 0; i < m_MinBlockCount; ++i)
9355 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
9356 if(res != VK_SUCCESS)
9364 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
9372 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
9374 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
9376 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
9378 VMA_HEAVY_ASSERT(pBlock->Validate());
9379 pBlock->m_pMetadata->AddPoolStats(*pStats);
9383 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 9385 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
9386 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
9387 (VMA_DEBUG_MARGIN > 0) &&
9388 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retry attempts in VmaBlockVector::Allocate when allocating
// with canMakeOtherLost (each try may make more existing allocations lost).
9391 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Main suballocation routine for this block vector. Strategy:
// 1) try to place the request in an existing block;
// 2) otherwise create a new block (shrinking its size up to 3 times for
//    default pools if device memory is exhausted);
// 3) otherwise, if allowed, make older allocations "lost" to free room,
//    retrying up to VMA_ALLOCATION_TRY_COUNT times.
// NOTE(review): several signature and argument lines are elided in this
// extraction (pool handle, size, createInfo, pAllocation out-param, and the
// flag booleans derived from createInfo.flags) - confirm against the header.
9393 VkResult VmaBlockVector::Allocate(
9395 uint32_t currentFrameIndex,
9397 VkDeviceSize alignment,
9399 VmaSuballocationType suballocType,
// A new block may be created only if allowed by flags and the block count cap.
9406 const bool canCreateNewBlock =
9408 (m_Blocks.size() < m_MaxBlockCount);
// Upper-address allocation is a linear-algorithm-only feature.
9411 if(isUpperAddress && !m_LinearAlgorithm)
9413 return VK_ERROR_FEATURE_NOT_PRESENT;
// A request that cannot fit in even a whole preferred-size block (including
// debug margins) can never succeed.
9417 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
9419 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
9422 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
9429 if(!canMakeOtherLost || canCreateNewBlock)
// Pass 1: search existing blocks for free space (no allocations made lost).
9433 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
9435 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
9436 VMA_ASSERT(pCurrBlock);
9437 VmaAllocationRequest currRequest = {};
9438 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
9441 m_BufferImageGranularity,
// In this pass no other allocations may be sacrificed.
9450 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations keep the block mapped for their lifetime.
9454 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
9455 if(res != VK_SUCCESS)
9462 if(pCurrBlock->m_pMetadata->IsEmpty())
// The block is about to receive an allocation, so it is no longer empty.
9464 m_HasEmptyBlock =
false;
9467 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
9468 pCurrBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
9469 (*pAllocation)->InitBlockAllocation(
9478 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
9479 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
9480 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
9481 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
9483 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
9485 if(IsCorruptionDetectionEnabled())
9487 VkResult res = pCurrBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
9488 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Pass 2: no existing block had room - create a new one.
9495 if(canCreateNewBlock)
9498 VkDeviceSize newBlockSize = m_PreferredBlockSize;
9499 uint32_t newBlockSizeShift = 0;
9500 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default pools start small: halve the candidate size while it still
// exceeds every existing block and comfortably fits the request.
9504 if(m_IsCustomPool ==
false)
9507 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
9508 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
9510 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
9511 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
9513 newBlockSize = smallerNewBlockSize;
9514 ++newBlockSizeShift;
9523 size_t newBlockIndex = 0;
9524 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On OOM, default pools retry with progressively halved block sizes.
9526 if(m_IsCustomPool ==
false)
9528 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
9530 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
9531 if(smallerNewBlockSize >= size)
9533 newBlockSize = smallerNewBlockSize;
9534 ++newBlockSizeShift;
9535 res = CreateBlock(newBlockSize, &newBlockIndex);
9544 if(res == VK_SUCCESS)
9546 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
9547 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
9551 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
9552 if(res != VK_SUCCESS)
// Allocating from a brand-new empty block must succeed.
9559 VmaAllocationRequest allocRequest;
9560 if(pBlock->m_pMetadata->CreateAllocationRequest(
9563 m_BufferImageGranularity,
9571 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
9572 pBlock->m_pMetadata->Alloc(allocRequest, suballocType, size, isUpperAddress, *pAllocation);
9573 (*pAllocation)->InitBlockAllocation(
9576 allocRequest.offset,
9582 VMA_HEAVY_ASSERT(pBlock->Validate());
9583 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
9584 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
9585 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
9587 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
9589 if(IsCorruptionDetectionEnabled())
9591 res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, allocRequest.offset, size);
9592 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
9599 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pass 3: sacrifice ("make lost") older allocations to create room.
9606 if(canMakeOtherLost)
9608 uint32_t tryIndex = 0;
9609 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
9611 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
9612 VmaAllocationRequest bestRequest = {};
9613 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Pick the block whose request sacrifices the least (lowest cost).
9617 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
9619 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
9620 VMA_ASSERT(pCurrBlock);
9621 VmaAllocationRequest currRequest = {};
9622 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
9625 m_BufferImageGranularity,
9633 const VkDeviceSize currRequestCost = currRequest.CalcCost();
9634 if(pBestRequestBlock == VMA_NULL ||
9635 currRequestCost < bestRequestCost)
9637 pBestRequestBlock = pCurrBlock;
9638 bestRequest = currRequest;
9639 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be lost - cannot do better, stop searching.
9641 if(bestRequestCost == 0)
9649 if(pBestRequestBlock != VMA_NULL)
9653 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
9654 if(res != VK_SUCCESS)
// The request may fail if another thread made the victims lost in between;
// in that case the outer loop retries.
9660 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
9666 if(pBestRequestBlock->m_pMetadata->IsEmpty())
9668 m_HasEmptyBlock =
false;
9671 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
9672 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
9673 (*pAllocation)->InitBlockAllocation(
9682 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
9683 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
9684 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
9685 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
9687 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
9689 if(IsCorruptionDetectionEnabled())
9691 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
9692 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Exhausted all retries - give up with a distinct error code.
9707 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
9709 return VK_ERROR_TOO_MANY_OBJECTS;
9713 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a block suballocation: validates debug margins, drops the persistent
// mapping reference if any, returns the range to the block's metadata, and
// lazily retires at most one empty block (keeping one empty block cached).
// The actual Vulkan memory release happens outside the mutex.
9716 void VmaBlockVector::Free(
9719 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock; deletion of the block is deferred until after unlock.
9723 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
9725 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
9727 if(IsCorruptionDetectionEnabled())
9729 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
9730 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.")
9733 if(hAllocation->IsPersistentMap())
// Release the map reference taken when the allocation was created mapped.
9735 pBlock->Unmap(m_hAllocator, 1);
9738 pBlock->m_pMetadata->Free(hAllocation);
9739 VMA_HEAVY_ASSERT(pBlock->Validate());
9741 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// The block became empty: delete it only if another empty block is already
// cached and we are above the minimum block count.
9744 if(pBlock->m_pMetadata->IsEmpty())
9747 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
9749 pBlockToDelete = pBlock;
9755 m_HasEmptyBlock =
true;
// The block is not empty but an empty one exists: retire the last block if
// it is the empty one (blocks are kept sorted, empty blocks gravitate last).
9760 else if(m_HasEmptyBlock)
9762 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
9763 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
9765 pBlockToDelete = pLastBlock;
9766 m_Blocks.pop_back();
9767 m_HasEmptyBlock =
false;
9771 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is deliberately done outside the mutex.
9776 if(pBlockToDelete != VMA_NULL)
9778 VMA_DEBUG_LOG(
" Deleted empty allocation");
9779 pBlockToDelete->Destroy(m_hAllocator);
9780 vma_delete(m_hAllocator, pBlockToDelete);
9784 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 9786 VkDeviceSize result = 0;
9787 for(
size_t i = m_Blocks.size(); i--; )
9789 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
9790 if(result >= m_PreferredBlockSize)
9798 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
9800 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
9802 if(m_Blocks[blockIndex] == pBlock)
9804 VmaVectorRemove(m_Blocks, blockIndex);
9811 void VmaBlockVector::IncrementallySortBlocks()
9814 for(
size_t i = 1; i < m_Blocks.size(); ++i)
9816 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
9818 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
9824 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
9826 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
9827 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
9828 allocInfo.allocationSize = blockSize;
9829 VkDeviceMemory mem = VK_NULL_HANDLE;
9830 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
9839 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
9844 allocInfo.allocationSize,
9848 m_Blocks.push_back(pBlock);
9849 if(pNewBlockIndex != VMA_NULL)
9851 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector into the JSON statistics string.
// Custom pools emit MemoryTypeIndex/BlockSize/BlockCount/FrameInUseCount/
// LinearAlgorithm; default pools emit only PreferredBlockSize. Then each
// block is written keyed by its id. (Branch structure partially elided in
// this extraction - the if/else between custom-pool and default-pool output
// is around lines 9863/9901.)
9857 #if VMA_STATS_STRING_ENABLED 9859 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
9861 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool branch: full parameter dump.
9867 json.WriteString(
"MemoryTypeIndex");
9868 json.WriteNumber(m_MemoryTypeIndex);
9870 json.WriteString(
"BlockSize");
9871 json.WriteNumber(m_PreferredBlockSize);
9873 json.WriteString(
"BlockCount");
9874 json.BeginObject(
true);
9875 if(m_MinBlockCount > 0)
9877 json.WriteString(
"Min");
9878 json.WriteNumber((uint64_t)m_MinBlockCount);
9880 if(m_MaxBlockCount < SIZE_MAX)
9882 json.WriteString(
"Max");
9883 json.WriteNumber((uint64_t)m_MaxBlockCount);
9885 json.WriteString(
"Cur");
9886 json.WriteNumber((uint64_t)m_Blocks.size());
9889 if(m_FrameInUseCount > 0)
9891 json.WriteString(
"FrameInUseCount");
9892 json.WriteNumber(m_FrameInUseCount);
9895 if(m_LinearAlgorithm)
9897 json.WriteString(
"LinearAlgorithm");
9898 json.WriteBool(
true);
// Default-pool branch: only the preferred block size.
9903 json.WriteString(
"PreferredBlockSize");
9904 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps, keyed by block id.
9907 json.WriteString(
"Blocks");
9909 for(
size_t i = 0; i < m_Blocks.size(); ++i)
9912 json.ContinueString(m_Blocks[i]->GetId());
9915 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
9922 #endif // #if VMA_STATS_STRING_ENABLED 9924 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
9926 uint32_t currentFrameIndex)
9928 if(m_pDefragmentator == VMA_NULL)
9930 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
9936 return m_pDefragmentator;
// Runs the previously prepared defragmentator within the given move budget,
// accumulates the results into *pDefragmentationStats, decrements the budget
// by the amount consumed, and frees any blocks that became empty (down to
// m_MinBlockCount), keeping at most a note of one remaining empty block.
9939 VkResult VmaBlockVector::Defragment(
9941 VkDeviceSize& maxBytesToMove,
9942 uint32_t& maxAllocationsToMove)
9944 if(m_pDefragmentator == VMA_NULL)
9949 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Perform the actual moves.
9952 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Report how much was moved and shrink the caller's remaining budget.
9955 if(pDefragmentationStats != VMA_NULL)
9957 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
9958 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
9961 VMA_ASSERT(bytesMoved <= maxBytesToMove);
9962 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free emptied blocks from the back, respecting the minimum block count.
9968 m_HasEmptyBlock =
false;
9969 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
9971 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
9972 if(pBlock->m_pMetadata->IsEmpty())
9974 if(m_Blocks.size() > m_MinBlockCount)
9976 if(pDefragmentationStats != VMA_NULL)
9979 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
9982 VmaVectorRemove(m_Blocks, blockIndex);
9983 pBlock->Destroy(m_hAllocator);
9984 vma_delete(m_hAllocator, pBlock);
// Could not delete (at minimum count) - remember an empty block remains.
9988 m_HasEmptyBlock =
true;
9996 void VmaBlockVector::DestroyDefragmentator()
9998 if(m_pDefragmentator != VMA_NULL)
10000 vma_delete(m_hAllocator, m_pDefragmentator);
10001 m_pDefragmentator = VMA_NULL;
10005 void VmaBlockVector::MakePoolAllocationsLost(
10006 uint32_t currentFrameIndex,
10007 size_t* pLostAllocationCount)
10009 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10010 size_t lostAllocationCount = 0;
10011 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10013 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10014 VMA_ASSERT(pBlock);
10015 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
10017 if(pLostAllocationCount != VMA_NULL)
10019 *pLostAllocationCount = lostAllocationCount;
10023 VkResult VmaBlockVector::CheckCorruption()
10025 if(!IsCorruptionDetectionEnabled())
10027 return VK_ERROR_FEATURE_NOT_PRESENT;
10030 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10031 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10033 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10034 VMA_ASSERT(pBlock);
10035 VkResult res = pBlock->CheckCorruption(m_hAllocator);
10036 if(res != VK_SUCCESS)
10044 void VmaBlockVector::AddStats(
VmaStats* pStats)
10046 const uint32_t memTypeIndex = m_MemoryTypeIndex;
10047 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
10049 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10051 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10053 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10054 VMA_ASSERT(pBlock);
10055 VMA_HEAVY_ASSERT(pBlock->Validate());
10057 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
10058 VmaAddStatInfo(pStats->
total, allocationStatInfo);
10059 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
10060 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
10067 VmaDefragmentator::VmaDefragmentator(
10069 VmaBlockVector* pBlockVector,
10070 uint32_t currentFrameIndex) :
10071 m_hAllocator(hAllocator),
10072 m_pBlockVector(pBlockVector),
10073 m_CurrentFrameIndex(currentFrameIndex),
10075 m_AllocationsMoved(0),
10076 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
10077 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
10079 VMA_ASSERT(!pBlockVector->UsesLinearAlgorithm());
10082 VmaDefragmentator::~VmaDefragmentator()
10084 for(
size_t i = m_Blocks.size(); i--; )
10086 vma_delete(m_hAllocator, m_Blocks[i]);
10090 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
10092 AllocationInfo allocInfo;
10093 allocInfo.m_hAllocation = hAlloc;
10094 allocInfo.m_pChanged = pChanged;
10095 m_Allocations.push_back(allocInfo);
10098 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
10101 if(m_pMappedDataForDefragmentation)
10103 *ppMappedData = m_pMappedDataForDefragmentation;
10108 if(m_pBlock->GetMappedData())
10110 *ppMappedData = m_pBlock->GetMappedData();
10115 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
10116 *ppMappedData = m_pMappedDataForDefragmentation;
10120 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
10122 if(m_pMappedDataForDefragmentation != VMA_NULL)
10124 m_pBlock->Unmap(hAllocator, 1);
// One pass of the move loop: repeatedly takes the next source allocation
// (iterating blocks from last to first, allocations from largest down) and
// tries to re-place it in an earlier block / earlier offset, memcpy-ing the
// data through host mappings and updating metadata on both sides. Stops with
// VK_INCOMPLETE when the byte/allocation budget would be exceeded.
// NOTE(review): many control-flow lines (braces, loop advance, returns) are
// elided in this extraction; index manipulation is intricate - do not edit
// without the original file.
10128 VkResult VmaDefragmentator::DefragmentRound(
10129 VkDeviceSize maxBytesToMove,
10130 uint32_t maxAllocationsToMove)
10132 if(m_Blocks.empty())
// Cursor over (block, allocation); starts past-the-end of the last block.
10137 size_t srcBlockIndex = m_Blocks.size() - 1;
10138 size_t srcAllocIndex = SIZE_MAX;
// Advance the cursor to the next existing source allocation, walking
// backwards through blocks; done when block 0 is exhausted.
10144 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
10146 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
10149 if(srcBlockIndex == 0)
10156 srcAllocIndex = SIZE_MAX;
10161 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
10165 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
10166 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
10168 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
10169 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
10170 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
10171 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every destination block up to and including the source block.
10174 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
10176 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
10177 VmaAllocationRequest dstAllocRequest;
10178 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
10179 m_CurrentFrameIndex,
10180 m_pBlockVector->GetFrameInUseCount(),
10181 m_pBlockVector->GetBufferImageGranularity(),
10187 &dstAllocRequest) &&
// Only move if the new location is strictly "earlier" (see MoveMakesSense).
10189 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
10191 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect the caller's move budget.
10194 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
10195 (m_BytesMoved + size > maxBytesToMove))
10197 return VK_INCOMPLETE;
// Map both blocks (cached per BlockInfo) and copy the allocation bytes.
10200 void* pDstMappedData = VMA_NULL;
10201 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
10202 if(res != VK_SUCCESS)
10207 void* pSrcMappedData = VMA_NULL;
10208 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
10209 if(res != VK_SUCCESS)
10216 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
10217 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
10218 static_cast<size_t>(size));
// Refresh the debug margins around the allocation's new location.
10220 if(VMA_DEBUG_MARGIN > 0)
10222 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
10223 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Commit: allocate at destination, free at source, repoint the allocation.
10226 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
10231 allocInfo.m_hAllocation);
10232 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
10234 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
10236 if(allocInfo.m_pChanged != VMA_NULL)
10238 *allocInfo.m_pChanged = VK_TRUE;
10241 ++m_AllocationsMoved;
10242 m_BytesMoved += size;
10244 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// No destination found for this allocation - advance the cursor.
10252 if(srcAllocIndex > 0)
10258 if(srcBlockIndex > 0)
10261 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver. Builds a BlockInfo per block, distributes
// the registered (non-lost) candidate allocations to their blocks via binary
// search, sorts allocations and blocks into move order, then runs up to two
// DefragmentRound passes within the budget, finally unmapping any mappings
// created along the way.
10271 VkResult VmaDefragmentator::Defragment(
10272 VkDeviceSize maxBytesToMove,
10273 uint32_t maxAllocationsToMove)
10275 if(m_Allocations.empty())
// Create one BlockInfo per block of the vector.
10281 const size_t blockCount = m_pBlockVector->m_Blocks.size();
10282 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10284 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
10285 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
10286 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so candidates can be matched by binary search.
10290 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move each registered candidate into its owning block's list, skipping
// allocations that have become lost.
10293 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
10295 AllocationInfo& allocInfo = m_Allocations[blockIndex];
10297 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
10299 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
10300 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
10301 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
10303 (*it)->m_Allocations.push_back(allocInfo);
10311 m_Allocations.clear();
// Precompute per-block move ordering data.
10313 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10315 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
10316 pBlockInfo->CalcHasNonMovableAllocations();
10317 pBlockInfo->SortAllocationsBySizeDescecnding();
// Order blocks so the best move destinations come first.
10321 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Up to two rounds; a round returning VK_INCOMPLETE or an error stops early.
10324 VkResult result = VK_SUCCESS;
10325 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
10327 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any mappings EnsureMapping() created.
10331 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10333 m_Blocks[blockIndex]->Unmap(m_hAllocator);
10339 bool VmaDefragmentator::MoveMakesSense(
10340 size_t dstBlockIndex, VkDeviceSize dstOffset,
10341 size_t srcBlockIndex, VkDeviceSize srcOffset)
10343 if(dstBlockIndex < srcBlockIndex)
10347 if(dstBlockIndex > srcBlockIndex)
10351 if(dstOffset < srcOffset)
10361 #if VMA_RECORDING_ENABLED 10363 VmaRecorder::VmaRecorder() :
10368 m_StartCounter(INT64_MAX)
10374 m_UseMutex = useMutex;
10375 m_Flags = settings.
flags;
10377 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
10378 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
10381 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
10384 return VK_ERROR_INITIALIZATION_FAILED;
10388 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
10389 fprintf(m_File,
"%s\n",
"1,3");
10394 VmaRecorder::~VmaRecorder()
10396 if(m_File != VMA_NULL)
10402 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
10404 CallParams callParams;
10405 GetBasicParams(callParams);
10407 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10408 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
10412 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
10414 CallParams callParams;
10415 GetBasicParams(callParams);
10417 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10418 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
10424 CallParams callParams;
10425 GetBasicParams(callParams);
10427 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10428 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
10439 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
10441 CallParams callParams;
10442 GetBasicParams(callParams);
10444 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10445 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
10450 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
10451 const VkMemoryRequirements& vkMemReq,
10455 CallParams callParams;
10456 GetBasicParams(callParams);
10458 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10459 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
10460 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10462 vkMemReq.alignment,
10463 vkMemReq.memoryTypeBits,
10471 userDataStr.GetString());
10475 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
10476 const VkMemoryRequirements& vkMemReq,
10477 bool requiresDedicatedAllocation,
10478 bool prefersDedicatedAllocation,
10482 CallParams callParams;
10483 GetBasicParams(callParams);
10485 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10486 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
10487 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10489 vkMemReq.alignment,
10490 vkMemReq.memoryTypeBits,
10491 requiresDedicatedAllocation ? 1 : 0,
10492 prefersDedicatedAllocation ? 1 : 0,
10500 userDataStr.GetString());
10504 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
10505 const VkMemoryRequirements& vkMemReq,
10506 bool requiresDedicatedAllocation,
10507 bool prefersDedicatedAllocation,
10511 CallParams callParams;
10512 GetBasicParams(callParams);
10514 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10515 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
10516 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10518 vkMemReq.alignment,
10519 vkMemReq.memoryTypeBits,
10520 requiresDedicatedAllocation ? 1 : 0,
10521 prefersDedicatedAllocation ? 1 : 0,
10529 userDataStr.GetString());
10533 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
10536 CallParams callParams;
10537 GetBasicParams(callParams);
10539 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10540 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
10545 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
10547 const void* pUserData)
10549 CallParams callParams;
10550 GetBasicParams(callParams);
10552 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10553 UserDataString userDataStr(
10556 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10558 userDataStr.GetString());
10562 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
10565 CallParams callParams;
10566 GetBasicParams(callParams);
10568 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10569 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
10574 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
10577 CallParams callParams;
10578 GetBasicParams(callParams);
10580 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10581 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
10586 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
10589 CallParams callParams;
10590 GetBasicParams(callParams);
10592 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10593 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
10598 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
10599 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
10601 CallParams callParams;
10602 GetBasicParams(callParams);
10604 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10605 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
10612 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
10613 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
10615 CallParams callParams;
10616 GetBasicParams(callParams);
10618 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10619 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
10626 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
10627 const VkBufferCreateInfo& bufCreateInfo,
10631 CallParams callParams;
10632 GetBasicParams(callParams);
10634 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10635 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
10636 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10637 bufCreateInfo.flags,
10638 bufCreateInfo.size,
10639 bufCreateInfo.usage,
10640 bufCreateInfo.sharingMode,
10641 allocCreateInfo.
flags,
10642 allocCreateInfo.
usage,
10646 allocCreateInfo.
pool,
10648 userDataStr.GetString());
10652 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
10653 const VkImageCreateInfo& imageCreateInfo,
10657 CallParams callParams;
10658 GetBasicParams(callParams);
10660 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10661 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
10662 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10663 imageCreateInfo.flags,
10664 imageCreateInfo.imageType,
10665 imageCreateInfo.format,
10666 imageCreateInfo.extent.width,
10667 imageCreateInfo.extent.height,
10668 imageCreateInfo.extent.depth,
10669 imageCreateInfo.mipLevels,
10670 imageCreateInfo.arrayLayers,
10671 imageCreateInfo.samples,
10672 imageCreateInfo.tiling,
10673 imageCreateInfo.usage,
10674 imageCreateInfo.sharingMode,
10675 imageCreateInfo.initialLayout,
10676 allocCreateInfo.
flags,
10677 allocCreateInfo.
usage,
10681 allocCreateInfo.
pool,
10683 userDataStr.GetString());
10687 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
10690 CallParams callParams;
10691 GetBasicParams(callParams);
10693 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10694 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
10699 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
10702 CallParams callParams;
10703 GetBasicParams(callParams);
10705 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10706 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
10711 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
10714 CallParams callParams;
10715 GetBasicParams(callParams);
10717 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10718 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
10723 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
10726 CallParams callParams;
10727 GetBasicParams(callParams);
10729 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10730 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
10735 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
10738 CallParams callParams;
10739 GetBasicParams(callParams);
10741 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10742 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of VmaRecorder::UserDataString's constructor (the signature and
// branch structure are missing from this extract). For non-null pUserData it
// either keeps the pointer as a C string in m_Str, or formats the raw pointer
// value into m_PtrStr — presumably selected by an isUserDataString flag
// declared on the missing lines; confirm against the full source.
// sprintf_s is MSVC-specific; the full header normally guards this per-platform.
10749 if(pUserData != VMA_NULL)
10753 m_Str = (
const char*)pUserData;
10757 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" ... "Config,End" section of the recording file:
// physical-device identity and limits, every memory heap (size, flags) and
// memory type (heapIndex, propertyFlags), whether VK_KHR_dedicated_allocation
// is enabled, and the values of the VMA_DEBUG_* / size-tuning macros the
// allocator was compiled with. This lets a replay tool reproduce the exact
// allocator configuration.
10767 void VmaRecorder::WriteConfiguration(
10768 const VkPhysicalDeviceProperties& devProps,
10769 const VkPhysicalDeviceMemoryProperties& memProps,
10770 bool dedicatedAllocationExtensionEnabled)
10772 fprintf(m_File,
"Config,Begin\n");
// Device identity.
10774 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
10775 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
10776 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
10777 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
10778 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
10779 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits that affect allocator behavior.
10781 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
10782 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
10783 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps.
10785 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
10786 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
10788 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
10789 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory types.
10791 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
10792 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
10794 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
10795 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
// Extension availability and compile-time configuration.
10798 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
10800 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
10801 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
10802 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
10803 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
10804 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
10805 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
10806 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
10807 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
10808 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
10810 fprintf(m_File,
"Config,End\n");
// Fills the per-call parameters written at the start of every CSV record:
// the calling thread's id and a timestamp in seconds computed from
// QueryPerformanceCounter relative to m_StartCounter, scaled by m_Freq.
// Uses Win32 APIs (GetCurrentThreadId, QueryPerformanceCounter); the full
// header presumably guards this with a platform #if — not visible here.
10813 void VmaRecorder::GetBasicParams(CallParams& outParams)
10815 outParams.threadId = GetCurrentThreadId();
10817 LARGE_INTEGER counter;
10818 QueryPerformanceCounter(&counter);
10819 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Flushes the recording file; the body is missing from this extract
// (presumably conditionally calls fflush(m_File) based on recorder flags —
// confirm against the full source).
10822 void VmaRecorder::Flush()
// VmaAllocator_T constructor (signature line is missing from this extract).
// Member-init list copies handles/callbacks from VmaAllocatorCreateInfo, then
// the body: validates compile-time config, zero-initializes all per-type
// state, queries device properties and memory properties, applies optional
// per-heap size limits, creates one VmaBlockVector and one dedicated-
// allocation list per memory type, and (when VMA_RECORDING_ENABLED) creates
// and initializes the recorder.
10830 #endif // #if VMA_RECORDING_ENABLED 10838 m_hDevice(pCreateInfo->device),
10839 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
10840 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
10841 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
10842 m_PreferredLargeHeapBlockSize(0),
10843 m_PhysicalDevice(pCreateInfo->physicalDevice),
10844 m_CurrentFrameIndex(0),
10845 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
10848 ,m_pRecorder(VMA_NULL)
// Corruption detection requires the debug margin to hold whole uint32_t marks.
10851 if(VMA_DEBUG_DETECT_CORRUPTION)
10854 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Dedicated-allocation flag requested but extension support compiled out.
10859 #if !(VMA_DEDICATED_ALLOCATION) 10862 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
10866 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
10867 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
10868 memset(&m_MemProps, 0,
sizeof(m_MemProps));
10870 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
10871 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no artificial heap size limits.
10873 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
10875 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
10886 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
10887 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply user-supplied per-heap limits, also clamping the reported heap size.
10894 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
10896 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
10897 if(limit != VK_WHOLE_SIZE)
10899 m_HeapSizeLimit[heapIndex] = limit;
10900 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
10902 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Per-memory-type default block vector and dedicated-allocation registry.
10908 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
10910 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
10912 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
10915 preferredBlockSize,
10918 GetBufferImageGranularity(),
10924 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
10931 VkResult res = VK_SUCCESS;
// Optional call recording (pCreateInfo->pRecordSettings).
10936 #if VMA_RECORDING_ENABLED 10937 m_pRecorder = vma_new(
this, VmaRecorder)();
10939 if(res != VK_SUCCESS)
10943 m_pRecorder->WriteConfiguration(
10944 m_PhysicalDeviceProperties,
10946 m_UseKhrDedicatedAllocation);
10947 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
10949 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
10950 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records and destroys the recorder (if recording was enabled),
// asserts that all custom pools were destroyed by the user, then deletes the
// per-memory-type dedicated-allocation lists and block vectors in reverse
// order of creation.
10957 VmaAllocator_T::~VmaAllocator_T()
10959 #if VMA_RECORDING_ENABLED 10960 if(m_pRecorder != VMA_NULL)
10962 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
10963 vma_delete(
this, m_pRecorder);
// All VmaPool objects must have been destroyed via vmaDestroyPool by now.
10967 VMA_ASSERT(m_Pools.empty());
10969 for(
size_t i = GetMemoryTypeCount(); i--; )
10971 vma_delete(
this, m_pDedicatedAllocations[i]);
10972 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions. Three stages:
//  1. When statically linked (VMA_STATIC_VULKAN_FUNCTIONS == 1), take the
//     addresses of the global Vulkan entry points; the two *2KHR functions
//     are fetched via vkGetDeviceProcAddr when dedicated allocation is in use.
//  2. Any non-null pointer in the caller-supplied VmaVulkanFunctions
//     overrides the static default (VMA_COPY_IF_NOT_NULL macro).
//  3. Assert that every function pointer the allocator will call is non-null.
10976 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
10978 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 10979 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
10980 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
10981 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
10982 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
10983 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
10984 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
10985 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
10986 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
10987 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
10988 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
10989 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
10990 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
10991 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
10992 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
10993 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
10994 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// Extension entry points have no static prototypes — resolve at runtime.
10995 #if VMA_DEDICATED_ALLOCATION 10996 if(m_UseKhrDedicatedAllocation)
10998 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
10999 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
11000 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
11001 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
11003 #endif // #if VMA_DEDICATED_ALLOCATION 11004 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11006 #define VMA_COPY_IF_NOT_NULL(funcName) \ 11007 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 11009 if(pVulkanFunctions != VMA_NULL)
11011 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
11012 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
11013 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
11014 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
11015 VMA_COPY_IF_NOT_NULL(vkMapMemory);
11016 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
11017 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
11018 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
11019 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
11020 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
11021 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
11022 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
11023 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
11024 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
11025 VMA_COPY_IF_NOT_NULL(vkCreateImage);
11026 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
11027 #if VMA_DEDICATED_ALLOCATION 11028 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
11029 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every pointer used later must be resolved by now.
11033 #undef VMA_COPY_IF_NOT_NULL 11037 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
11038 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
11039 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
11040 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
11041 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
11042 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
11043 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
11044 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
11045 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
11046 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
11047 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
11048 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
11049 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
11050 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
11051 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
11052 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
11053 #if VMA_DEDICATED_ALLOCATION 11054 if(m_UseKhrDedicatedAllocation)
11056 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
11057 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
11062 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
11064 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
11065 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
11066 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
11067 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory from a specific memory type. Strategy visible here:
//  - If the request prefers dedicated memory (debug macro, caller hint, or
//    size > half the preferred block size) and no custom pool is used, go
//    straight to AllocateDedicatedMemory.
//  - Otherwise try suballocation from the type's default block vector; on
//    failure, fall back to a dedicated allocation (unless forbidden — those
//    branch conditions are on lines missing from this extract).
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when all strategies fail.
11070 VkResult VmaAllocator_T::AllocateMemoryOfType(
11072 VkDeviceSize alignment,
11073 bool dedicatedAllocation,
11074 VkBuffer dedicatedBuffer,
11075 VkImage dedicatedImage,
11077 uint32_t memTypeIndex,
11078 VmaSuballocationType suballocType,
11081 VMA_ASSERT(pAllocation != VMA_NULL);
11082 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Part of a condition dropping mapping flags for non-HOST_VISIBLE types.
11088 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
11093 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
11094 VMA_ASSERT(blockVector);
11096 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
11097 bool preferDedicatedMemory =
11098 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
11099 dedicatedAllocation ||
11101 size > preferredBlockSize / 2;
11103 if(preferDedicatedMemory &&
11105 finalCreateInfo.
pool == VK_NULL_HANDLE)
11114 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11118 return AllocateDedicatedMemory(
// Primary path: suballocate from the block vector.
11132 VkResult res = blockVector->Allocate(
11134 m_CurrentFrameIndex.load(),
11140 if(res == VK_SUCCESS)
11148 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: dedicated allocation.
11152 res = AllocateDedicatedMemory(
11158 finalCreateInfo.pUserData,
11162 if(res == VK_SUCCESS)
11165 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
11171 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Makes one dedicated vkAllocateMemory call for a single resource.
// When VK_KHR_dedicated_allocation is in use, chains a
// VkMemoryDedicatedAllocateInfoKHR naming the buffer OR image (mutually
// exclusive). Optionally maps the memory persistently (that flag check is on
// a line missing from this extract), then creates the VmaAllocation_T object,
// optionally fills it with the debug pattern, and registers it in the
// per-memory-type dedicated-allocations list under its mutex.
11178 VkResult VmaAllocator_T::AllocateDedicatedMemory(
11180 VmaSuballocationType suballocType,
11181 uint32_t memTypeIndex,
11183 bool isUserDataString,
11185 VkBuffer dedicatedBuffer,
11186 VkImage dedicatedImage,
11189 VMA_ASSERT(pAllocation);
11191 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
11192 allocInfo.memoryTypeIndex = memTypeIndex;
11193 allocInfo.allocationSize = size;
11195 #if VMA_DEDICATED_ALLOCATION 11196 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
11197 if(m_UseKhrDedicatedAllocation)
11199 if(dedicatedBuffer != VK_NULL_HANDLE)
11201 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
11202 dedicatedAllocInfo.buffer = dedicatedBuffer;
11203 allocInfo.pNext = &dedicatedAllocInfo;
11205 else if(dedicatedImage != VK_NULL_HANDLE)
11207 dedicatedAllocInfo.image = dedicatedImage;
11208 allocInfo.pNext = &dedicatedAllocInfo;
11211 #endif // #if VMA_DEDICATED_ALLOCATION 11214 VkDeviceMemory hMemory = VK_NULL_HANDLE;
11215 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
11218 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
11222 void* pMappedData = VMA_NULL;
// Persistent mapping path; on map failure the fresh memory is released.
11225 res = (*m_VulkanFunctions.vkMapMemory)(
11234 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
11235 FreeVulkanMemory(memTypeIndex, size, hMemory);
11240 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
11241 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
11242 (*pAllocation)->SetUserData(
this, pUserData);
11243 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11245 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register in the dedicated-allocations list (sorted by pointer).
11250 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
11251 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
11252 VMA_ASSERT(pDedicatedAllocations);
11253 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
11256 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR to also learn whether the driver
// requires/prefers a dedicated allocation; otherwise it falls back to the
// core vkGetBufferMemoryRequirements and reports both flags as false.
11261 void VmaAllocator_T::GetBufferMemoryRequirements(
11263 VkMemoryRequirements& memReq,
11264 bool& requiresDedicatedAllocation,
11265 bool& prefersDedicatedAllocation)
const 11267 #if VMA_DEDICATED_ALLOCATION 11268 if(m_UseKhrDedicatedAllocation)
11270 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
11271 memReqInfo.buffer = hBuffer;
11273 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
11275 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
11276 memReq2.pNext = &memDedicatedReq;
11278 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
11280 memReq = memReq2.memoryRequirements;
11281 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
11282 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
11285 #endif // #if VMA_DEDICATED_ALLOCATION 11287 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
11288 requiresDedicatedAllocation =
false;
11289 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR plus VkMemoryDedicatedRequirementsKHR when
// the dedicated-allocation extension is active, else the core query with both
// dedicated-allocation flags reported as false.
11293 void VmaAllocator_T::GetImageMemoryRequirements(
11295 VkMemoryRequirements& memReq,
11296 bool& requiresDedicatedAllocation,
11297 bool& prefersDedicatedAllocation)
const 11299 #if VMA_DEDICATED_ALLOCATION 11300 if(m_UseKhrDedicatedAllocation)
11302 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
11303 memReqInfo.image = hImage;
11305 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
11307 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
11308 memReq2.pNext = &memDedicatedReq;
11310 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
11312 memReq = memReq2.memoryRequirements;
11313 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
11314 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
11317 #endif // #if VMA_DEDICATED_ALLOCATION 11319 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
11320 requiresDedicatedAllocation =
false;
11321 prefersDedicatedAllocation =
false;
// Top-level allocation entry point. Visible structure:
//  1. Reject incompatible flag combinations and pool-vs-dedicated conflicts
//     (asserts + VK_ERROR_OUT_OF_DEVICE_MEMORY). Several of the if-conditions
//     guarding these asserts are on lines missing from this extract.
//  2. Custom pool path: allocate from the pool's block vector, using the
//     stricter of the request alignment and the type's min alignment.
//  3. Default path: pick a memory type via vmaFindMemoryTypeIndex-style
//     search, call AllocateMemoryOfType, and on failure remove that type from
//     memoryTypeBits and retry with the next candidate type.
11325 VkResult VmaAllocator_T::AllocateMemory(
11326 const VkMemoryRequirements& vkMemReq,
11327 bool requiresDedicatedAllocation,
11328 bool prefersDedicatedAllocation,
11329 VkBuffer dedicatedBuffer,
11330 VkImage dedicatedImage,
11332 VmaSuballocationType suballocType,
11338 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
11339 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11344 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
11345 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11347 if(requiresDedicatedAllocation)
11351 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
11352 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11354 if(createInfo.
pool != VK_NULL_HANDLE)
11356 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
11357 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11360 if((createInfo.
pool != VK_NULL_HANDLE) &&
11363 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
11364 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool path.
11367 if(createInfo.
pool != VK_NULL_HANDLE)
11369 const VkDeviceSize alignmentForPool = VMA_MAX(
11370 vkMemReq.alignment,
11371 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
11372 return createInfo.
pool->m_BlockVector.Allocate(
11374 m_CurrentFrameIndex.load(),
// Default pools: iterate candidate memory types.
11384 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
11385 uint32_t memTypeIndex = UINT32_MAX;
11387 if(res == VK_SUCCESS)
11389 VkDeviceSize alignmentForMemType = VMA_MAX(
11390 vkMemReq.alignment,
11391 GetMemoryTypeMinAlignment(memTypeIndex));
11393 res = AllocateMemoryOfType(
11395 alignmentForMemType,
11396 requiresDedicatedAllocation || prefersDedicatedAllocation,
11404 if(res == VK_SUCCESS)
// This type failed — exclude it and search for the next acceptable one.
11414 memoryTypeBits &= ~(1u << memTypeIndex);
11417 if(res == VK_SUCCESS)
11419 alignmentForMemType = VMA_MAX(
11420 vkMemReq.alignment,
11421 GetMemoryTypeMinAlignment(memTypeIndex));
11423 res = AllocateMemoryOfType(
11425 alignmentForMemType,
11426 requiresDedicatedAllocation || prefersDedicatedAllocation,
11434 if(res == VK_SUCCESS)
11444 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation. If it is not already lost: optionally overwrite it
// with the "destroyed" debug fill pattern, then return it to its owner —
// either the custom pool's block vector, the default block vector of its
// memory type, or the dedicated-memory path. Finally clears user data and
// deletes the VmaAllocation_T object itself.
11455 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
11457 VMA_ASSERT(allocation);
11459 if(allocation->CanBecomeLost() ==
false ||
11460 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11462 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11464 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
11467 switch(allocation->GetType())
11469 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
11471 VmaBlockVector* pBlockVector = VMA_NULL;
11472 VmaPool hPool = allocation->GetPool();
11473 if(hPool != VK_NULL_HANDLE)
11475 pBlockVector = &hPool->m_BlockVector;
11479 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
11480 pBlockVector = m_pBlockVectors[memTypeIndex];
11482 pBlockVector->Free(allocation);
11485 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
11486 FreeDedicatedMemory(allocation);
// The handle object is destroyed even for lost allocations.
11493 allocation->SetUserData(
this, VMA_NULL);
11494 vma_delete(
this, allocation);
// Aggregates allocator-wide statistics into *pStats: initializes the total /
// per-type / per-heap VmaStatInfo entries, accumulates stats from every
// default block vector, every custom pool (under m_PoolsMutex), and every
// dedicated allocation (under the per-type mutex), then post-processes each
// entry (averages etc.).
11497 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
11500 InitStatInfo(pStats->
total);
11501 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
11503 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default pools.
11507 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11509 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
11510 VMA_ASSERT(pBlockVector);
11511 pBlockVector->AddStats(pStats);
// Custom pools.
11516 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
11517 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
11519 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations.
11524 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11526 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
11527 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
11528 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
11529 VMA_ASSERT(pDedicatedAllocVector);
11530 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
11533 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
11534 VmaAddStatInfo(pStats->
total, allocationStatInfo);
11535 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
11536 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages / final fields.
11541 VmaPostprocessCalcStatInfo(pStats->
total);
11542 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
11543 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
11544 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
11545 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, AMD's PCI vendor ID (compared against
// VkPhysicalDeviceProperties::vendorID).
11548 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Defragments a caller-provided set of allocations. Phases visible here:
//  1. Zero the pAllocationsChanged array and pDefragmentationStats.
//     NOTE(review): the visible memset covers sizeof(*pAllocationsChanged)
//     (one element); in the upstream source it is scaled by allocationCount —
//     line content here appears truncated by the extraction; confirm.
//  2. Under m_PoolsMutex, register each eligible allocation (block-type,
//     HOST_VISIBLE|HOST_COHERENT memory, not lost, not in a linear-algorithm
//     pool) with a per-block-vector VmaDefragmentator.
//  3. Run Defragment() on every HOST_VISIBLE default block vector and every
//     custom pool, bounded by maxBytesToMove / maxAllocationsToMove from
//     pDefragmentationInfo (defaults: unlimited).
//  4. Destroy all defragmentators in reverse order.
11550 VkResult VmaAllocator_T::Defragment(
11552 size_t allocationCount,
11553 VkBool32* pAllocationsChanged,
11557 if(pAllocationsChanged != VMA_NULL)
11559 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
11561 if(pDefragmentationStats != VMA_NULL)
11563 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
11566 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
11568 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
11570 const size_t poolCount = m_Pools.size();
// Register candidate allocations with their block vectors' defragmentators.
11573 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11576 VMA_ASSERT(hAlloc);
11577 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
11579 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11580 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
11582 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
11584 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
11586 VmaBlockVector* pAllocBlockVector = VMA_NULL;
11588 const VmaPool hAllocPool = hAlloc->GetPool();
11590 if(hAllocPool != VK_NULL_HANDLE)
// Linear-algorithm pools cannot be defragmented.
11593 if(!hAllocPool->m_BlockVector.UsesLinearAlgorithm())
11595 pAllocBlockVector = &hAllocPool->m_BlockVector;
11601 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
11604 if(pAllocBlockVector != VMA_NULL)
11606 VmaDefragmentator*
const pDefragmentator =
11607 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
11608 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
11609 &pAllocationsChanged[allocIndex] : VMA_NULL;
11610 pDefragmentator->AddAllocation(hAlloc, pChanged);
11615 VkResult result = VK_SUCCESS;
// Movement budget (defaults: unlimited).
11619 VkDeviceSize maxBytesToMove = SIZE_MAX;
11620 uint32_t maxAllocationsToMove = UINT32_MAX;
11621 if(pDefragmentationInfo != VMA_NULL)
// Process default pools, then custom pools, stopping on first failure.
11628 for(uint32_t memTypeIndex = 0;
11629 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
11633 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
11635 result = m_pBlockVectors[memTypeIndex]->Defragment(
11636 pDefragmentationStats,
11638 maxAllocationsToMove);
11643 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
11645 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
11646 pDefragmentationStats,
11648 maxAllocationsToMove);
// Cleanup: destroy defragmentators in reverse order.
11654 for(
size_t poolIndex = poolCount; poolIndex--; )
11656 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
11660 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
11662 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
11664 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Body of VmaAllocator_T::GetAllocationInfo (signature line is missing from
// this extract). Fills *pAllocationInfo from the allocation. For allocations
// that can become lost, it runs a compare-exchange loop on the last-use frame
// index: a lost allocation reports memoryType/deviceMemory zeroed (those
// assignments are on missing lines) with offset 0 but its original size; a
// live one reports real values and is "touched" to the current frame.
// Non-lost allocations just copy the fields directly.
11673 if(hAllocation->CanBecomeLost())
11679 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
11680 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
11683 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
11687 pAllocationInfo->
offset = 0;
11688 pAllocationInfo->
size = hAllocation->GetSize();
11690 pAllocationInfo->
pUserData = hAllocation->GetUserData();
11693 else if(localLastUseFrameIndex == localCurrFrameIndex)
11695 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
11696 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
11697 pAllocationInfo->
offset = hAllocation->GetOffset();
11698 pAllocationInfo->
size = hAllocation->GetSize();
11700 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Retry loop: publish the current frame as last-use, then re-read.
11705 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
11707 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path (stats-string builds still touch the frame index).
11714 #if VMA_STATS_STRING_ENABLED 11715 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
11716 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
11719 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
11720 if(localLastUseFrameIndex == localCurrFrameIndex)
11726 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
11728 localLastUseFrameIndex = localCurrFrameIndex;
11734 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
11735 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
11736 pAllocationInfo->
offset = hAllocation->GetOffset();
11737 pAllocationInfo->
size = hAllocation->GetSize();
11738 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
11739 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Returns whether the allocation is still valid (not lost) and, for
// can-become-lost allocations, atomically bumps its last-use frame index to
// the current frame via a compare-exchange loop. The return statements are on
// lines missing from this extract: lost → false, otherwise → true.
11743 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
11746 if(hAllocation->CanBecomeLost())
11748 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
11749 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
11752 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
11756 else if(localLastUseFrameIndex == localCurrFrameIndex)
11762 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
11764 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: stats-string builds still record the touch.
11771 #if VMA_STATS_STRING_ENABLED 11772 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
11773 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
11776 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
11777 if(localLastUseFrameIndex == localCurrFrameIndex)
11783 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
11785 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature and parameter validation are
// on lines missing from this extract). Normalizes the create info (linear-
// algorithm pools are limited to a single block), creates the VmaPool_T,
// pre-creates its minimum number of blocks, and on success assigns a pool id
// and inserts the pool into the sorted m_Pools list under m_PoolsMutex.
11797 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
11805 newCreateInfo.
maxBlockCount = isLinearAlgorithm ? 1 : SIZE_MAX;
11810 return VK_ERROR_INITIALIZATION_FAILED;
11817 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Roll back pool creation if the minimum blocks cannot be allocated.
11819 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
11820 if(res != VK_SUCCESS)
11822 vma_delete(
this, *pPool);
11829 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
11830 (*pPool)->SetId(m_NextPoolId++);
11831 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Removes the pool from the sorted m_Pools list (under m_PoolsMutex,
// asserting it was actually registered) and deletes it.
11837 void VmaAllocator_T::DestroyPool(
VmaPool pool)
11841 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
11842 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
11843 VMA_ASSERT(success &&
"Pool not found in Allocator.");
11846 vma_delete(
this, pool);
// Body line of VmaAllocator_T::GetPoolStats (signature is missing from this
// extract) — delegates to the pool's block vector.
11851 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation machinery to decide which allocations may become lost.
11854 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
11856 m_CurrentFrameIndex.store(frameIndex);
// Marks eligible allocations in the given pool as lost as of the current
// frame; delegates to the pool's block vector, which reports the count via
// pLostAllocationCount.
11859 void VmaAllocator_T::MakePoolAllocationsLost(
11861 size_t* pLostAllocationCount)
11863 hPool->m_BlockVector.MakePoolAllocationsLost(
11864 m_CurrentFrameIndex.load(),
11865 pLostAllocationCount);
// Validates the debug margins of all blocks in one custom pool; forwards to
// the pool's block vector corruption check.
11868 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
11870 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption checks over every default block vector and every custom
// pool whose memory type is selected in memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT (corruption detection disabled everywhere) and
// upgrades to VK_SUCCESS once any vector actually performed the check; other
// switch cases (error propagation) are on lines missing from this extract.
11873 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
11875 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default pools.
11878 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11880 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
11882 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
11883 VMA_ASSERT(pBlockVector);
11884 VkResult localRes = pBlockVector->CheckCorruption();
11887 case VK_ERROR_FEATURE_NOT_PRESENT:
11890 finalRes = VK_SUCCESS;
// Custom pools.
11900 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
11901 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
11903 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
11905 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
11908 case VK_ERROR_FEATURE_NOT_PRESENT:
11911 finalRes = VK_SUCCESS;
11923 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
11925 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
11926 (*pAllocation)->InitLost();
// Central wrapper around vkAllocateMemory. If the target heap has an
// artificial size limit, the allocation is admitted and the budget debited
// under m_HeapSizeLimitMutex, otherwise it fails with
// VK_ERROR_OUT_OF_DEVICE_MEMORY without calling the driver. On success,
// notifies the user's pfnAllocate device-memory callback.
// NOTE(review): the declaration of `res` (and an allocation-count limit check
// in some versions) is on lines missing from this extract.
11929 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
11931 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
11934 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
11936 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
11937 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
11939 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
11940 if(res == VK_SUCCESS)
11942 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
11947 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// No limit on this heap — call the driver directly.
11952 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
11955 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
11957 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
11963 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
11965 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
11967 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
11970 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
11972 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
11973 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
11975 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
11976 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation into host address space. Can-become-lost allocations are
// not mappable (VK_ERROR_MEMORY_MAP_FAILED). Block suballocations map the
// whole owning block (reference-counted) and offset the returned pointer;
// dedicated allocations map their own VkDeviceMemory.
11980 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
11982 if(hAllocation->CanBecomeLost())
11984 return VK_ERROR_MEMORY_MAP_FAILED;
11987 switch(hAllocation->GetType())
11989 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
11991 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
11992 char *pBytes = VMA_NULL;
11993 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
11994 if(res == VK_SUCCESS)
// Return a pointer inside the block, at this suballocation's offset.
11996 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
11997 hAllocation->BlockAllocMap();
12001 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12002 return hAllocation->DedicatedAllocMap(
this, ppData);
12005 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (signature line is missing from this
// extract). Mirror of Map: drops the allocation's map reference and unmaps
// the owning block (reference-counted) or the dedicated memory.
12011 switch(hAllocation->GetType())
12013 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12015 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12016 hAllocation->BlockAllocUnmap();
12017 pBlock->Unmap(
this, 1);
12020 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12021 hAllocation->DedicatedAllocUnmap(
this);
12028 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
12030 VkResult res = VK_SUCCESS;
12031 switch(hAllocation->GetType())
12033 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12034 res = GetVulkanFunctions().vkBindBufferMemory(
12037 hAllocation->GetMemory(),
12040 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12042 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12043 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
12044 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
12053 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
12055 VkResult res = VK_SUCCESS;
12056 switch(hAllocation->GetType())
12058 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12059 res = GetVulkanFunctions().vkBindImageMemory(
12062 hAllocation->GetMemory(),
12065 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12067 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12068 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
12069 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
12078 void VmaAllocator_T::FlushOrInvalidateAllocation(
12080 VkDeviceSize offset, VkDeviceSize size,
12081 VMA_CACHE_OPERATION op)
12083 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
12084 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
12086 const VkDeviceSize allocationSize = hAllocation->GetSize();
12087 VMA_ASSERT(offset <= allocationSize);
12089 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12091 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12092 memRange.memory = hAllocation->GetMemory();
12094 switch(hAllocation->GetType())
12096 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12097 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
12098 if(size == VK_WHOLE_SIZE)
12100 memRange.size = allocationSize - memRange.offset;
12104 VMA_ASSERT(offset + size <= allocationSize);
12105 memRange.size = VMA_MIN(
12106 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
12107 allocationSize - memRange.offset);
12111 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12114 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
12115 if(size == VK_WHOLE_SIZE)
12117 size = allocationSize - offset;
12121 VMA_ASSERT(offset + size <= allocationSize);
12123 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
12126 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
12127 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
12128 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
12129 memRange.offset += allocationOffset;
12130 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
12141 case VMA_CACHE_FLUSH:
12142 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
12144 case VMA_CACHE_INVALIDATE:
12145 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
12154 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
12156 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
12158 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12160 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12161 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12162 VMA_ASSERT(pDedicatedAllocations);
12163 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
12164 VMA_ASSERT(success);
12167 VkDeviceMemory hMemory = allocation->GetMemory();
12169 if(allocation->GetMappedData() != VMA_NULL)
12171 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
12174 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
12176 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
12179 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
12181 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
12182 !hAllocation->CanBecomeLost() &&
12183 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12185 void* pData = VMA_NULL;
12186 VkResult res = Map(hAllocation, &pData);
12187 if(res == VK_SUCCESS)
12189 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
12190 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
12191 Unmap(hAllocation);
12195 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
12200 #if VMA_STATS_STRING_ENABLED 12202 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
12204 bool dedicatedAllocationsStarted =
false;
12205 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12207 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12208 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12209 VMA_ASSERT(pDedicatedAllocVector);
12210 if(pDedicatedAllocVector->empty() ==
false)
12212 if(dedicatedAllocationsStarted ==
false)
12214 dedicatedAllocationsStarted =
true;
12215 json.WriteString(
"DedicatedAllocations");
12216 json.BeginObject();
12219 json.BeginString(
"Type ");
12220 json.ContinueString(memTypeIndex);
12225 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
12227 json.BeginObject(
true);
12229 hAlloc->PrintParameters(json);
12236 if(dedicatedAllocationsStarted)
12242 bool allocationsStarted =
false;
12243 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12245 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
12247 if(allocationsStarted ==
false)
12249 allocationsStarted =
true;
12250 json.WriteString(
"DefaultPools");
12251 json.BeginObject();
12254 json.BeginString(
"Type ");
12255 json.ContinueString(memTypeIndex);
12258 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
12261 if(allocationsStarted)
12269 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12270 const size_t poolCount = m_Pools.size();
12273 json.WriteString(
"Pools");
12274 json.BeginObject();
12275 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
12277 json.BeginString();
12278 json.ContinueString(m_Pools[poolIndex]->GetId());
12281 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
12288 #endif // #if VMA_STATS_STRING_ENABLED 12297 VMA_ASSERT(pCreateInfo && pAllocator);
12298 VMA_DEBUG_LOG(
"vmaCreateAllocator");
12300 return (*pAllocator)->Init(pCreateInfo);
12306 if(allocator != VK_NULL_HANDLE)
12308 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
12309 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
12310 vma_delete(&allocationCallbacks, allocator);
12316 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
12318 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
12319 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
12324 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
12326 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
12327 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
12332 uint32_t memoryTypeIndex,
12333 VkMemoryPropertyFlags* pFlags)
12335 VMA_ASSERT(allocator && pFlags);
12336 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
12337 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
12342 uint32_t frameIndex)
12344 VMA_ASSERT(allocator);
12345 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
12347 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12349 allocator->SetCurrentFrameIndex(frameIndex);
12356 VMA_ASSERT(allocator && pStats);
12357 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12358 allocator->CalculateStats(pStats);
12361 #if VMA_STATS_STRING_ENABLED 12365 char** ppStatsString,
12366 VkBool32 detailedMap)
12368 VMA_ASSERT(allocator && ppStatsString);
12369 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12371 VmaStringBuilder sb(allocator);
12373 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
12374 json.BeginObject();
12377 allocator->CalculateStats(&stats);
12379 json.WriteString(
"Total");
12380 VmaPrintStatInfo(json, stats.
total);
12382 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
12384 json.BeginString(
"Heap ");
12385 json.ContinueString(heapIndex);
12387 json.BeginObject();
12389 json.WriteString(
"Size");
12390 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
12392 json.WriteString(
"Flags");
12393 json.BeginArray(
true);
12394 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
12396 json.WriteString(
"DEVICE_LOCAL");
12402 json.WriteString(
"Stats");
12403 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
12406 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
12408 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
12410 json.BeginString(
"Type ");
12411 json.ContinueString(typeIndex);
12414 json.BeginObject();
12416 json.WriteString(
"Flags");
12417 json.BeginArray(
true);
12418 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
12419 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
12421 json.WriteString(
"DEVICE_LOCAL");
12423 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12425 json.WriteString(
"HOST_VISIBLE");
12427 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
12429 json.WriteString(
"HOST_COHERENT");
12431 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
12433 json.WriteString(
"HOST_CACHED");
12435 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
12437 json.WriteString(
"LAZILY_ALLOCATED");
12443 json.WriteString(
"Stats");
12444 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
12453 if(detailedMap == VK_TRUE)
12455 allocator->PrintDetailedMap(json);
12461 const size_t len = sb.GetLength();
12462 char*
const pChars = vma_new_array(allocator,
char, len + 1);
12465 memcpy(pChars, sb.GetData(), len);
12467 pChars[len] =
'\0';
12468 *ppStatsString = pChars;
12473 char* pStatsString)
12475 if(pStatsString != VMA_NULL)
12477 VMA_ASSERT(allocator);
12478 size_t len = strlen(pStatsString);
12479 vma_delete_array(allocator, pStatsString, len + 1);
12483 #endif // #if VMA_STATS_STRING_ENABLED 12490 uint32_t memoryTypeBits,
12492 uint32_t* pMemoryTypeIndex)
12494 VMA_ASSERT(allocator != VK_NULL_HANDLE);
12495 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
12496 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
12503 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
12504 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
12509 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
12513 switch(pAllocationCreateInfo->
usage)
12518 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12520 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
12524 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12527 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
12528 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12530 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
12534 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
12535 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
12541 *pMemoryTypeIndex = UINT32_MAX;
12542 uint32_t minCost = UINT32_MAX;
12543 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
12544 memTypeIndex < allocator->GetMemoryTypeCount();
12545 ++memTypeIndex, memTypeBit <<= 1)
12548 if((memTypeBit & memoryTypeBits) != 0)
12550 const VkMemoryPropertyFlags currFlags =
12551 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
12553 if((requiredFlags & ~currFlags) == 0)
12556 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
12558 if(currCost < minCost)
12560 *pMemoryTypeIndex = memTypeIndex;
12565 minCost = currCost;
12570 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
12575 const VkBufferCreateInfo* pBufferCreateInfo,
12577 uint32_t* pMemoryTypeIndex)
12579 VMA_ASSERT(allocator != VK_NULL_HANDLE);
12580 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
12581 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
12582 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
12584 const VkDevice hDev = allocator->m_hDevice;
12585 VkBuffer hBuffer = VK_NULL_HANDLE;
12586 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
12587 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
12588 if(res == VK_SUCCESS)
12590 VkMemoryRequirements memReq = {};
12591 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
12592 hDev, hBuffer, &memReq);
12596 memReq.memoryTypeBits,
12597 pAllocationCreateInfo,
12600 allocator->GetVulkanFunctions().vkDestroyBuffer(
12601 hDev, hBuffer, allocator->GetAllocationCallbacks());
12608 const VkImageCreateInfo* pImageCreateInfo,
12610 uint32_t* pMemoryTypeIndex)
12612 VMA_ASSERT(allocator != VK_NULL_HANDLE);
12613 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
12614 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
12615 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
12617 const VkDevice hDev = allocator->m_hDevice;
12618 VkImage hImage = VK_NULL_HANDLE;
12619 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
12620 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
12621 if(res == VK_SUCCESS)
12623 VkMemoryRequirements memReq = {};
12624 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
12625 hDev, hImage, &memReq);
12629 memReq.memoryTypeBits,
12630 pAllocationCreateInfo,
12633 allocator->GetVulkanFunctions().vkDestroyImage(
12634 hDev, hImage, allocator->GetAllocationCallbacks());
12644 VMA_ASSERT(allocator && pCreateInfo && pPool);
12646 VMA_DEBUG_LOG(
"vmaCreatePool");
12648 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12650 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
12652 #if VMA_RECORDING_ENABLED 12653 if(allocator->GetRecorder() != VMA_NULL)
12655 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
12666 VMA_ASSERT(allocator);
12668 if(pool == VK_NULL_HANDLE)
12673 VMA_DEBUG_LOG(
"vmaDestroyPool");
12675 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12677 #if VMA_RECORDING_ENABLED 12678 if(allocator->GetRecorder() != VMA_NULL)
12680 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
12684 allocator->DestroyPool(pool);
12692 VMA_ASSERT(allocator && pool && pPoolStats);
12694 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12696 allocator->GetPoolStats(pool, pPoolStats);
12702 size_t* pLostAllocationCount)
12704 VMA_ASSERT(allocator && pool);
12706 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12708 #if VMA_RECORDING_ENABLED 12709 if(allocator->GetRecorder() != VMA_NULL)
12711 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
12715 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
12720 VMA_ASSERT(allocator && pool);
12722 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12724 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
12726 return allocator->CheckPoolCorruption(pool);
12731 const VkMemoryRequirements* pVkMemoryRequirements,
12736 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
12738 VMA_DEBUG_LOG(
"vmaAllocateMemory");
12740 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12742 VkResult result = allocator->AllocateMemory(
12743 *pVkMemoryRequirements,
12749 VMA_SUBALLOCATION_TYPE_UNKNOWN,
12752 #if VMA_RECORDING_ENABLED 12753 if(allocator->GetRecorder() != VMA_NULL)
12755 allocator->GetRecorder()->RecordAllocateMemory(
12756 allocator->GetCurrentFrameIndex(),
12757 *pVkMemoryRequirements,
12763 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
12765 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
12778 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
12780 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
12782 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12784 VkMemoryRequirements vkMemReq = {};
12785 bool requiresDedicatedAllocation =
false;
12786 bool prefersDedicatedAllocation =
false;
12787 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
12788 requiresDedicatedAllocation,
12789 prefersDedicatedAllocation);
12791 VkResult result = allocator->AllocateMemory(
12793 requiresDedicatedAllocation,
12794 prefersDedicatedAllocation,
12798 VMA_SUBALLOCATION_TYPE_BUFFER,
12801 #if VMA_RECORDING_ENABLED 12802 if(allocator->GetRecorder() != VMA_NULL)
12804 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
12805 allocator->GetCurrentFrameIndex(),
12807 requiresDedicatedAllocation,
12808 prefersDedicatedAllocation,
12814 if(pAllocationInfo && result == VK_SUCCESS)
12816 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
12829 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
12831 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
12833 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12835 VkMemoryRequirements vkMemReq = {};
12836 bool requiresDedicatedAllocation =
false;
12837 bool prefersDedicatedAllocation =
false;
12838 allocator->GetImageMemoryRequirements(image, vkMemReq,
12839 requiresDedicatedAllocation, prefersDedicatedAllocation);
12841 VkResult result = allocator->AllocateMemory(
12843 requiresDedicatedAllocation,
12844 prefersDedicatedAllocation,
12848 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
12851 #if VMA_RECORDING_ENABLED 12852 if(allocator->GetRecorder() != VMA_NULL)
12854 allocator->GetRecorder()->RecordAllocateMemoryForImage(
12855 allocator->GetCurrentFrameIndex(),
12857 requiresDedicatedAllocation,
12858 prefersDedicatedAllocation,
12864 if(pAllocationInfo && result == VK_SUCCESS)
12866 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
12876 VMA_ASSERT(allocator);
12878 if(allocation == VK_NULL_HANDLE)
12883 VMA_DEBUG_LOG(
"vmaFreeMemory");
12885 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12887 #if VMA_RECORDING_ENABLED 12888 if(allocator->GetRecorder() != VMA_NULL)
12890 allocator->GetRecorder()->RecordFreeMemory(
12891 allocator->GetCurrentFrameIndex(),
12896 allocator->FreeMemory(allocation);
12904 VMA_ASSERT(allocator && allocation && pAllocationInfo);
12906 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12908 #if VMA_RECORDING_ENABLED 12909 if(allocator->GetRecorder() != VMA_NULL)
12911 allocator->GetRecorder()->RecordGetAllocationInfo(
12912 allocator->GetCurrentFrameIndex(),
12917 allocator->GetAllocationInfo(allocation, pAllocationInfo);
12924 VMA_ASSERT(allocator && allocation);
12926 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12928 #if VMA_RECORDING_ENABLED 12929 if(allocator->GetRecorder() != VMA_NULL)
12931 allocator->GetRecorder()->RecordTouchAllocation(
12932 allocator->GetCurrentFrameIndex(),
12937 return allocator->TouchAllocation(allocation);
12945 VMA_ASSERT(allocator && allocation);
12947 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12949 allocation->SetUserData(allocator, pUserData);
12951 #if VMA_RECORDING_ENABLED 12952 if(allocator->GetRecorder() != VMA_NULL)
12954 allocator->GetRecorder()->RecordSetAllocationUserData(
12955 allocator->GetCurrentFrameIndex(),
12966 VMA_ASSERT(allocator && pAllocation);
12968 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
12970 allocator->CreateLostAllocation(pAllocation);
12972 #if VMA_RECORDING_ENABLED 12973 if(allocator->GetRecorder() != VMA_NULL)
12975 allocator->GetRecorder()->RecordCreateLostAllocation(
12976 allocator->GetCurrentFrameIndex(),
12987 VMA_ASSERT(allocator && allocation && ppData);
12989 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12991 VkResult res = allocator->Map(allocation, ppData);
12993 #if VMA_RECORDING_ENABLED 12994 if(allocator->GetRecorder() != VMA_NULL)
12996 allocator->GetRecorder()->RecordMapMemory(
12997 allocator->GetCurrentFrameIndex(),
13009 VMA_ASSERT(allocator && allocation);
13011 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13013 #if VMA_RECORDING_ENABLED 13014 if(allocator->GetRecorder() != VMA_NULL)
13016 allocator->GetRecorder()->RecordUnmapMemory(
13017 allocator->GetCurrentFrameIndex(),
13022 allocator->Unmap(allocation);
13027 VMA_ASSERT(allocator && allocation);
13029 VMA_DEBUG_LOG(
"vmaFlushAllocation");
13031 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13033 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
13035 #if VMA_RECORDING_ENABLED 13036 if(allocator->GetRecorder() != VMA_NULL)
13038 allocator->GetRecorder()->RecordFlushAllocation(
13039 allocator->GetCurrentFrameIndex(),
13040 allocation, offset, size);
13047 VMA_ASSERT(allocator && allocation);
13049 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
13051 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13053 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
13055 #if VMA_RECORDING_ENABLED 13056 if(allocator->GetRecorder() != VMA_NULL)
13058 allocator->GetRecorder()->RecordInvalidateAllocation(
13059 allocator->GetCurrentFrameIndex(),
13060 allocation, offset, size);
13067 VMA_ASSERT(allocator);
13069 VMA_DEBUG_LOG(
"vmaCheckCorruption");
13071 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13073 return allocator->CheckCorruption(memoryTypeBits);
13079 size_t allocationCount,
13080 VkBool32* pAllocationsChanged,
13084 VMA_ASSERT(allocator && pAllocations);
13086 VMA_DEBUG_LOG(
"vmaDefragment");
13088 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13090 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
13098 VMA_ASSERT(allocator && allocation && buffer);
13100 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
13102 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13104 return allocator->BindBufferMemory(allocation, buffer);
13112 VMA_ASSERT(allocator && allocation && image);
13114 VMA_DEBUG_LOG(
"vmaBindImageMemory");
13116 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13118 return allocator->BindImageMemory(allocation, image);
13123 const VkBufferCreateInfo* pBufferCreateInfo,
13129 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
13131 VMA_DEBUG_LOG(
"vmaCreateBuffer");
13133 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13135 *pBuffer = VK_NULL_HANDLE;
13136 *pAllocation = VK_NULL_HANDLE;
13139 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
13140 allocator->m_hDevice,
13142 allocator->GetAllocationCallbacks(),
13147 VkMemoryRequirements vkMemReq = {};
13148 bool requiresDedicatedAllocation =
false;
13149 bool prefersDedicatedAllocation =
false;
13150 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
13151 requiresDedicatedAllocation, prefersDedicatedAllocation);
13155 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
13157 VMA_ASSERT(vkMemReq.alignment %
13158 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
13160 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
13162 VMA_ASSERT(vkMemReq.alignment %
13163 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
13165 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
13167 VMA_ASSERT(vkMemReq.alignment %
13168 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
13172 res = allocator->AllocateMemory(
13174 requiresDedicatedAllocation,
13175 prefersDedicatedAllocation,
13178 *pAllocationCreateInfo,
13179 VMA_SUBALLOCATION_TYPE_BUFFER,
13182 #if VMA_RECORDING_ENABLED 13183 if(allocator->GetRecorder() != VMA_NULL)
13185 allocator->GetRecorder()->RecordCreateBuffer(
13186 allocator->GetCurrentFrameIndex(),
13187 *pBufferCreateInfo,
13188 *pAllocationCreateInfo,
13196 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
13200 #if VMA_STATS_STRING_ENABLED 13201 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
13203 if(pAllocationInfo != VMA_NULL)
13205 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13210 allocator->FreeMemory(*pAllocation);
13211 *pAllocation = VK_NULL_HANDLE;
13212 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
13213 *pBuffer = VK_NULL_HANDLE;
13216 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
13217 *pBuffer = VK_NULL_HANDLE;
13228 VMA_ASSERT(allocator);
13230 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
13235 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
13237 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13239 #if VMA_RECORDING_ENABLED 13240 if(allocator->GetRecorder() != VMA_NULL)
13242 allocator->GetRecorder()->RecordDestroyBuffer(
13243 allocator->GetCurrentFrameIndex(),
13248 if(buffer != VK_NULL_HANDLE)
13250 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
13253 if(allocation != VK_NULL_HANDLE)
13255 allocator->FreeMemory(allocation);
13261 const VkImageCreateInfo* pImageCreateInfo,
13267 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
13269 VMA_DEBUG_LOG(
"vmaCreateImage");
13271 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13273 *pImage = VK_NULL_HANDLE;
13274 *pAllocation = VK_NULL_HANDLE;
13277 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
13278 allocator->m_hDevice,
13280 allocator->GetAllocationCallbacks(),
13284 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
13285 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
13286 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
13289 VkMemoryRequirements vkMemReq = {};
13290 bool requiresDedicatedAllocation =
false;
13291 bool prefersDedicatedAllocation =
false;
13292 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
13293 requiresDedicatedAllocation, prefersDedicatedAllocation);
13295 res = allocator->AllocateMemory(
13297 requiresDedicatedAllocation,
13298 prefersDedicatedAllocation,
13301 *pAllocationCreateInfo,
13305 #if VMA_RECORDING_ENABLED 13306 if(allocator->GetRecorder() != VMA_NULL)
13308 allocator->GetRecorder()->RecordCreateImage(
13309 allocator->GetCurrentFrameIndex(),
13311 *pAllocationCreateInfo,
13319 res = allocator->BindImageMemory(*pAllocation, *pImage);
13323 #if VMA_STATS_STRING_ENABLED 13324 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
13326 if(pAllocationInfo != VMA_NULL)
13328 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13333 allocator->FreeMemory(*pAllocation);
13334 *pAllocation = VK_NULL_HANDLE;
13335 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
13336 *pImage = VK_NULL_HANDLE;
13339 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
13340 *pImage = VK_NULL_HANDLE;
13351 VMA_ASSERT(allocator);
13353 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
13358 VMA_DEBUG_LOG(
"vmaDestroyImage");
13360 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13362 #if VMA_RECORDING_ENABLED 13363 if(allocator->GetRecorder() != VMA_NULL)
13365 allocator->GetRecorder()->RecordDestroyImage(
13366 allocator->GetCurrentFrameIndex(),
13371 if(image != VK_NULL_HANDLE)
13373 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
13375 if(allocation != VK_NULL_HANDLE)
13377 allocator->FreeMemory(allocation);
13381 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1430
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1743
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1498
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1499
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
Definition: vk_mem_alloc.h:1460
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1441
+
Definition: vk_mem_alloc.h:1461
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1442
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:1699
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1433
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2109
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1495
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2354
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1923
-
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1472
+
Definition: vk_mem_alloc.h:1700
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1434
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2110
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1496
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2355
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1924
+
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1473
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1993
-
Definition: vk_mem_alloc.h:1779
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1422
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1822
-
Definition: vk_mem_alloc.h:1726
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1507
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1994
+
Definition: vk_mem_alloc.h:1780
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1423
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1823
+
Definition: vk_mem_alloc.h:1727
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1508
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1560
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1492
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1561
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1493
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1730
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1731
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1632
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1438
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1631
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2358
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1633
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1439
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1632
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2359
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1524
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1641
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2366
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1806
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2349
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1439
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1364
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1525
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1642
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2367
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1807
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2350
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1440
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1365
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1501
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1502
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1946
-
Definition: vk_mem_alloc.h:1940
-
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1567
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2119
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1947
+
Definition: vk_mem_alloc.h:1941
+
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1568
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2120
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1434
-
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1458
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1843
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1962
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1999
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1435
+
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1459
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1844
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1963
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2000
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1420
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1949
+
Definition: vk_mem_alloc.h:1421
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1950
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:1677
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:1678
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2344
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2345
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2362
-
Definition: vk_mem_alloc.h:1716
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1830
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1437
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2363
+
Definition: vk_mem_alloc.h:1717
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1831
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1438
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1637
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1370
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1638
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1371
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1391
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1392
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1462
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1396
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2364
+
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1463
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1397
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2365
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1817
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:2009
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1818
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:2010
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1430
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1620
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1957
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1383
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1431
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1621
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1958
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1384
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:1786
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1633
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1387
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1952
-
Definition: vk_mem_alloc.h:1725
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1436
+
Definition: vk_mem_alloc.h:1787
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1634
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1388
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1953
+
Definition: vk_mem_alloc.h:1726
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1437
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1812
-
Definition: vk_mem_alloc.h:1803
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1813
+
Definition: vk_mem_alloc.h:1804
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1623
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1432
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1971
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1510
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2002
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1801
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1836
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1624
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1433
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1972
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1511
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2003
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1802
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1837
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1548
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1639
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1766
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1632
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1549
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1640
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1767
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1633
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1443
-
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1480
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1385
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1442
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1444
+
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1481
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1386
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1443
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1985
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1435
-
Definition: vk_mem_alloc.h:1797
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1986
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1436
+
Definition: vk_mem_alloc.h:1798
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1488
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2133
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1504
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1632
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1629
+
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1489
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2134
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1505
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1633
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1630
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1990
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1991
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2114
-
Definition: vk_mem_alloc.h:1799
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2360
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1428
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2115
+
Definition: vk_mem_alloc.h:1800
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2361
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1429
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1627
-
Definition: vk_mem_alloc.h:1682
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1942
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1628
+
Definition: vk_mem_alloc.h:1683
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1943
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1477
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1625
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1440
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1444
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1753
-
Definition: vk_mem_alloc.h:1709
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2128
+
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1478
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1626
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1441
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1445
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1754
+
Definition: vk_mem_alloc.h:1710
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2129
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1418
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1419
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1431
-
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:1938
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2095
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1432
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:1939
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2096
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1905
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1633
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1906
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1634
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
-
Definition: vk_mem_alloc.h:1792
-
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1452
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1640
+
Definition: vk_mem_alloc.h:1793
+
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1453
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1641
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1996
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1633
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1997
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1634
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2100
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2101