23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1468 #ifndef VMA_RECORDING_ENABLED 1470 #define VMA_RECORDING_ENABLED 1 1472 #define VMA_RECORDING_ENABLED 0 1477 #define NOMINMAX // For windows.h 1480 #include <vulkan/vulkan.h> 1482 #if VMA_RECORDING_ENABLED 1483 #include <windows.h> 1486 #if !defined(VMA_DEDICATED_ALLOCATION) 1487 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1488 #define VMA_DEDICATED_ALLOCATION 1 1490 #define VMA_DEDICATED_ALLOCATION 0 1508 uint32_t memoryType,
1509 VkDeviceMemory memory,
1514 uint32_t memoryType,
1515 VkDeviceMemory memory,
1587 #if VMA_DEDICATED_ALLOCATION 1588 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1589 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1715 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1723 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1733 uint32_t memoryTypeIndex,
1734 VkMemoryPropertyFlags* pFlags);
1746 uint32_t frameIndex);
1779 #define VMA_STATS_STRING_ENABLED 1 1781 #if VMA_STATS_STRING_ENABLED 1788 char** ppStatsString,
1789 VkBool32 detailedMap);
1793 char* pStatsString);
1795 #endif // #if VMA_STATS_STRING_ENABLED 2024 uint32_t memoryTypeBits,
2026 uint32_t* pMemoryTypeIndex);
2042 const VkBufferCreateInfo* pBufferCreateInfo,
2044 uint32_t* pMemoryTypeIndex);
2060 const VkImageCreateInfo* pImageCreateInfo,
2062 uint32_t* pMemoryTypeIndex);
2234 size_t* pLostAllocationCount);
2333 const VkMemoryRequirements* pVkMemoryRequirements,
2595 size_t allocationCount,
2596 VkBool32* pAllocationsChanged,
2662 const VkBufferCreateInfo* pBufferCreateInfo,
2687 const VkImageCreateInfo* pImageCreateInfo,
2713 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2716 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2717 #define VMA_IMPLEMENTATION 2720 #ifdef VMA_IMPLEMENTATION 2721 #undef VMA_IMPLEMENTATION 2743 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2744 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2756 #if VMA_USE_STL_CONTAINERS 2757 #define VMA_USE_STL_VECTOR 1 2758 #define VMA_USE_STL_UNORDERED_MAP 1 2759 #define VMA_USE_STL_LIST 1 2762 #if VMA_USE_STL_VECTOR 2766 #if VMA_USE_STL_UNORDERED_MAP 2767 #include <unordered_map> 2770 #if VMA_USE_STL_LIST 2779 #include <algorithm> 2785 #define VMA_NULL nullptr 2788 #if defined(__APPLE__) || defined(__ANDROID__) 2790 void *aligned_alloc(
size_t alignment,
size_t size)
2793 if(alignment <
sizeof(
void*))
2795 alignment =
sizeof(
void*);
2799 if(posix_memalign(&pointer, alignment, size) == 0)
2813 #define VMA_ASSERT(expr) assert(expr) 2815 #define VMA_ASSERT(expr) 2821 #ifndef VMA_HEAVY_ASSERT 2823 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2825 #define VMA_HEAVY_ASSERT(expr) 2829 #ifndef VMA_ALIGN_OF 2830 #define VMA_ALIGN_OF(type) (__alignof(type)) 2833 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2835 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2837 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2841 #ifndef VMA_SYSTEM_FREE 2843 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2845 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2850 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2854 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2858 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2862 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2865 #ifndef VMA_DEBUG_LOG 2866 #define VMA_DEBUG_LOG(format, ...) 2876 #if VMA_STATS_STRING_ENABLED 2877 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2879 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
2881 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
2883 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    // Render a pointer in the platform's "%p" format (exact text is
    // implementation-defined).
    snprintf(outStr, strLen, "%p", ptr);
}
// Default mutex wrapper used when the user does not override VMA_MUTEX.
// Assumes the underlying primitive is std::mutex -- TODO confirm, the member
// declaration is not visible in this chunk.
class VmaMutex
{
public:
    void Lock() { m_Mutex.lock(); }
    void Unlock() { m_Mutex.unlock(); }
private:
    std::mutex m_Mutex;
};
2902 #define VMA_MUTEX VmaMutex 2913 #ifndef VMA_ATOMIC_UINT32 2914 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2917 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2922 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2925 #ifndef VMA_DEBUG_ALIGNMENT 2930 #define VMA_DEBUG_ALIGNMENT (1) 2933 #ifndef VMA_DEBUG_MARGIN 2938 #define VMA_DEBUG_MARGIN (0) 2941 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2946 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2949 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2955 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2958 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2963 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2966 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2971 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2974 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2975 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2979 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2980 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2984 #ifndef VMA_CLASS_NO_COPY 2985 #define VMA_CLASS_NO_COPY(className) \ 2987 className(const className&) = delete; \ 2988 className& operator=(const className&) = delete; 2991 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2994 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
2996 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
2997 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3003 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3004 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    // Branch-free parallel popcount (SWAR): accumulate bit counts in
    // progressively wider fields until the full 32-bit total remains.
    uint32_t count = v - ((v >> 1) & 0x55555555);               // 2-bit partial sums
    count = ((count >> 2) & 0x33333333) + (count & 0x33333333); // 4-bit sums
    count = ((count >> 4) + count) & 0x0F0F0F0F;                // 8-bit sums
    count = ((count >> 8) + count) & 0x00FF00FF;                // 16-bit sums
    count = ((count >> 16) + count) & 0x0000FFFF;               // final count
    return count;
}
// Rounds val up to the nearest multiple of align. align must be non-zero;
// it does not have to be a power of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    // (a/b)*b == a - a%b for integers, so subtracting the remainder of the
    // biased value is exactly the original (val + align - 1) / align * align.
    const T biased = val + align - 1;
    return biased - biased % align;
}
// Rounds val down to the nearest multiple of align. align must be non-zero.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    // Equivalent to val / align * align: drop the remainder.
    return val - val % align;
}
// Division with rounding half (and above) away from zero for non-negative
// integers: adds half of the divisor before truncating.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / static_cast<T>(2);
    return (x + halfDivisor) / y;
}
// True when x is a power of two. Note: like the original expression, this
// also returns true for x == 0 -- callers rely on that convention.
template <typename T>
inline bool VmaIsPow2(T x)
{
    // Clearing the lowest set bit leaves zero only for 0 and powers of two.
    const T lowestBitCleared = static_cast<T>(x & (x - 1));
    return lowestBitCleared == 0;
}
3051 static inline uint32_t VmaNextPow2(uint32_t v)
3062 static inline uint64_t VmaNextPow2(uint64_t v)
3076 static inline uint32_t VmaPrevPow2(uint32_t v)
3086 static inline uint64_t VmaPrevPow2(uint64_t v)
// Treats both a null pointer and a zero-length string as "empty".
static inline bool VmaStrIsEmpty(const char* pStr)
{
    if(pStr == nullptr) // VMA_NULL is defined as nullptr above
    {
        return true;
    }
    return *pStr == '\0';
}
3103 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto partition: the last element of [beg, end) is the pivot. Elements
// that compare less than the pivot are compacted to the front, the pivot is
// swapped into its final slot, and its position is returned.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator dst = beg;
    for(Iterator src = beg; src < pivot; ++src)
    {
        if(cmp(*src, *pivot))
        {
            if(dst != src)
            {
                VMA_SWAP(*src, *dst);
            }
            ++dst;
        }
    }
    if(dst != pivot)
    {
        VMA_SWAP(*dst, *pivot);
    }
    return dst;
}
3144 template<
typename Iterator,
typename Compare>
3145 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3149 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3150 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3151 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3155 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3157 #endif // #ifndef VMA_SORT 3166 static inline bool VmaBlocksOnSamePage(
3167 VkDeviceSize resourceAOffset,
3168 VkDeviceSize resourceASize,
3169 VkDeviceSize resourceBOffset,
3170 VkDeviceSize pageSize)
3172 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3173 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3174 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3175 VkDeviceSize resourceBStart = resourceBOffset;
3176 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3177 return resourceAEndPage == resourceBStartPage;
// Kind of content stored in a suballocation. VmaIsBufferImageGranularityConflict
// uses these to decide whether two neighbors may share a granularity page.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // contents unknown
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image, tiling unknown
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3197 static inline bool VmaIsBufferImageGranularityConflict(
3198 VmaSuballocationType suballocType1,
3199 VmaSuballocationType suballocType2)
3201 if(suballocType1 > suballocType2)
3203 VMA_SWAP(suballocType1, suballocType2);
3206 switch(suballocType1)
3208 case VMA_SUBALLOCATION_TYPE_FREE:
3210 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3212 case VMA_SUBALLOCATION_TYPE_BUFFER:
3214 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3215 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3216 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3218 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3219 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3220 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3221 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3223 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3224 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3232 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3234 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3235 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3236 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3238 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3242 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3244 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3245 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3246 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3248 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3259 VMA_CLASS_NO_COPY(VmaMutexLock)
3261 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3262 m_pMutex(useMutex ? &mutex : VMA_NULL)
3279 VMA_MUTEX* m_pMutex;
3282 #if VMA_DEBUG_GLOBAL_MUTEX 3283 static VMA_MUTEX gDebugGlobalMutex;
3284 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3286 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3290 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search over the sorted range [beg, end) and returns an
iterator to the first element for which cmp(element, key) is false
(lower_bound semantics), or end when every element compares less than key.
cmp must be a strict weak ordering consistent with the range's sort order.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        // Overflow-safe midpoint: (down + up) / 2 could wrap size_t for
        // extremely large ranges; down + (up - down) / 2 never can.
        const size_t mid = down + (up - down) / 2;
        if(cmp(*(beg + mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3323 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3325 if((pAllocationCallbacks != VMA_NULL) &&
3326 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3328 return (*pAllocationCallbacks->pfnAllocation)(
3329 pAllocationCallbacks->pUserData,
3332 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3336 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3340 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3342 if((pAllocationCallbacks != VMA_NULL) &&
3343 (pAllocationCallbacks->pfnFree != VMA_NULL))
3345 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3349 VMA_SYSTEM_FREE(ptr);
3353 template<
typename T>
3354 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3356 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3359 template<
typename T>
3360 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3362 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3365 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3367 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3369 template<
typename T>
3370 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3373 VmaFree(pAllocationCallbacks, ptr);
3376 template<
typename T>
3377 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3381 for(
size_t i = count; i--; )
3385 VmaFree(pAllocationCallbacks, ptr);
3390 template<
typename T>
3391 class VmaStlAllocator
3394 const VkAllocationCallbacks*
const m_pCallbacks;
3395 typedef T value_type;
3397 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3398 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3400 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3401 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3403 template<
typename U>
3404 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3406 return m_pCallbacks == rhs.m_pCallbacks;
3408 template<
typename U>
3409 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3411 return m_pCallbacks != rhs.m_pCallbacks;
3414 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3417 #if VMA_USE_STL_VECTOR 3419 #define VmaVector std::vector 3421 template<
// Index-based insert helper so generic code can treat std::vector and
// VmaVector uniformly.
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}
// Index-based erase helper, counterpart of VmaVectorInsert above.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
3433 #else // #if VMA_USE_STL_VECTOR 3438 template<
typename T,
typename AllocatorT>
3442 typedef T value_type;
3444 VmaVector(
const AllocatorT& allocator) :
3445 m_Allocator(allocator),
3452 VmaVector(
size_t count,
const AllocatorT& allocator) :
3453 m_Allocator(allocator),
3454 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3460 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3461 m_Allocator(src.m_Allocator),
3462 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3463 m_Count(src.m_Count),
3464 m_Capacity(src.m_Count)
3468 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3474 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3477 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3481 resize(rhs.m_Count);
3484 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3490 bool empty()
const {
return m_Count == 0; }
3491 size_t size()
const {
return m_Count; }
3492 T* data() {
return m_pArray; }
3493 const T* data()
const {
return m_pArray; }
3495 T& operator[](
size_t index)
3497 VMA_HEAVY_ASSERT(index < m_Count);
3498 return m_pArray[index];
3500 const T& operator[](
size_t index)
const 3502 VMA_HEAVY_ASSERT(index < m_Count);
3503 return m_pArray[index];
3508 VMA_HEAVY_ASSERT(m_Count > 0);
3511 const T& front()
const 3513 VMA_HEAVY_ASSERT(m_Count > 0);
3518 VMA_HEAVY_ASSERT(m_Count > 0);
3519 return m_pArray[m_Count - 1];
3521 const T& back()
const 3523 VMA_HEAVY_ASSERT(m_Count > 0);
3524 return m_pArray[m_Count - 1];
3527 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3529 newCapacity = VMA_MAX(newCapacity, m_Count);
3531 if((newCapacity < m_Capacity) && !freeMemory)
3533 newCapacity = m_Capacity;
3536 if(newCapacity != m_Capacity)
3538 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3541 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3543 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3544 m_Capacity = newCapacity;
3545 m_pArray = newArray;
3549 void resize(
size_t newCount,
bool freeMemory =
false)
3551 size_t newCapacity = m_Capacity;
3552 if(newCount > m_Capacity)
3554 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3558 newCapacity = newCount;
3561 if(newCapacity != m_Capacity)
3563 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3564 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3565 if(elementsToCopy != 0)
3567 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3569 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3570 m_Capacity = newCapacity;
3571 m_pArray = newArray;
3577 void clear(
bool freeMemory =
false)
3579 resize(0, freeMemory);
3582 void insert(
size_t index,
const T& src)
3584 VMA_HEAVY_ASSERT(index <= m_Count);
3585 const size_t oldCount = size();
3586 resize(oldCount + 1);
3587 if(index < oldCount)
3589 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3591 m_pArray[index] = src;
3594 void remove(
size_t index)
3596 VMA_HEAVY_ASSERT(index < m_Count);
3597 const size_t oldCount = size();
3598 if(index < oldCount - 1)
3600 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3602 resize(oldCount - 1);
3605 void push_back(
const T& src)
3607 const size_t newIndex = size();
3608 resize(newIndex + 1);
3609 m_pArray[newIndex] = src;
3614 VMA_HEAVY_ASSERT(m_Count > 0);
3618 void push_front(
const T& src)
3625 VMA_HEAVY_ASSERT(m_Count > 0);
3629 typedef T* iterator;
3631 iterator begin() {
return m_pArray; }
3632 iterator end() {
return m_pArray + m_Count; }
3635 AllocatorT m_Allocator;
3641 template<
typename T,
typename allocatorT>
3642 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3644 vec.insert(index, item);
3647 template<
typename T,
typename allocatorT>
3648 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3653 #endif // #if VMA_USE_STL_VECTOR 3655 template<
typename CmpLess,
typename VectorT>
3656 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3658 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3660 vector.data() + vector.size(),
3662 CmpLess()) - vector.data();
3663 VmaVectorInsert(vector, indexToInsert, value);
3664 return indexToInsert;
// Removes the first element equivalent to value from a sorted vector.
// Returns true when an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalence under a strict weak ordering: neither compares less.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
3685 template<
typename CmpLess,
typename IterT,
typename KeyT>
3686 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3689 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3690 beg, end, value, comparator);
3692 (!comparator(*it, value) && !comparator(value, *it)))
3707 template<
typename T>
3708 class VmaPoolAllocator
3710 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3712 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3713 ~VmaPoolAllocator();
3721 uint32_t NextFreeIndex;
3728 uint32_t FirstFreeIndex;
3731 const VkAllocationCallbacks* m_pAllocationCallbacks;
3732 size_t m_ItemsPerBlock;
3733 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3735 ItemBlock& CreateNewBlock();
3738 template<
typename T>
3739 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3740 m_pAllocationCallbacks(pAllocationCallbacks),
3741 m_ItemsPerBlock(itemsPerBlock),
3742 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3744 VMA_ASSERT(itemsPerBlock > 0);
3747 template<
typename T>
3748 VmaPoolAllocator<T>::~VmaPoolAllocator()
3753 template<
typename T>
3754 void VmaPoolAllocator<T>::Clear()
3756 for(
size_t i = m_ItemBlocks.size(); i--; )
3757 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3758 m_ItemBlocks.clear();
3761 template<
typename T>
3762 T* VmaPoolAllocator<T>::Alloc()
3764 for(
size_t i = m_ItemBlocks.size(); i--; )
3766 ItemBlock& block = m_ItemBlocks[i];
3768 if(block.FirstFreeIndex != UINT32_MAX)
3770 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3771 block.FirstFreeIndex = pItem->NextFreeIndex;
3772 return &pItem->Value;
3777 ItemBlock& newBlock = CreateNewBlock();
3778 Item*
const pItem = &newBlock.pItems[0];
3779 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3780 return &pItem->Value;
3783 template<
typename T>
3784 void VmaPoolAllocator<T>::Free(T* ptr)
3787 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3789 ItemBlock& block = m_ItemBlocks[i];
3793 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3796 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3798 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3799 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3800 block.FirstFreeIndex = index;
3804 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3807 template<
typename T>
3808 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3810 ItemBlock newBlock = {
3811 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3813 m_ItemBlocks.push_back(newBlock);
3816 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3817 newBlock.pItems[i].NextFreeIndex = i + 1;
3818 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3819 return m_ItemBlocks.back();
3825 #if VMA_USE_STL_LIST 3827 #define VmaList std::list 3829 #else // #if VMA_USE_STL_LIST 3831 template<
typename T>
3840 template<
typename T>
3843 VMA_CLASS_NO_COPY(VmaRawList)
3845 typedef VmaListItem<T> ItemType;
3847 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3851 size_t GetCount()
const {
return m_Count; }
3852 bool IsEmpty()
const {
return m_Count == 0; }
3854 ItemType* Front() {
return m_pFront; }
3855 const ItemType* Front()
const {
return m_pFront; }
3856 ItemType* Back() {
return m_pBack; }
3857 const ItemType* Back()
const {
return m_pBack; }
3859 ItemType* PushBack();
3860 ItemType* PushFront();
3861 ItemType* PushBack(
const T& value);
3862 ItemType* PushFront(
const T& value);
3867 ItemType* InsertBefore(ItemType* pItem);
3869 ItemType* InsertAfter(ItemType* pItem);
3871 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3872 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3874 void Remove(ItemType* pItem);
3877 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3878 VmaPoolAllocator<ItemType> m_ItemAllocator;
3884 template<
typename T>
3885 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3886 m_pAllocationCallbacks(pAllocationCallbacks),
3887 m_ItemAllocator(pAllocationCallbacks, 128),
3894 template<
typename T>
3895 VmaRawList<T>::~VmaRawList()
3901 template<
typename T>
3902 void VmaRawList<T>::Clear()
3904 if(IsEmpty() ==
false)
3906 ItemType* pItem = m_pBack;
3907 while(pItem != VMA_NULL)
3909 ItemType*
const pPrevItem = pItem->pPrev;
3910 m_ItemAllocator.Free(pItem);
3913 m_pFront = VMA_NULL;
3919 template<
typename T>
3920 VmaListItem<T>* VmaRawList<T>::PushBack()
3922 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3923 pNewItem->pNext = VMA_NULL;
3926 pNewItem->pPrev = VMA_NULL;
3927 m_pFront = pNewItem;
3933 pNewItem->pPrev = m_pBack;
3934 m_pBack->pNext = pNewItem;
3941 template<
typename T>
3942 VmaListItem<T>* VmaRawList<T>::PushFront()
3944 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3945 pNewItem->pPrev = VMA_NULL;
3948 pNewItem->pNext = VMA_NULL;
3949 m_pFront = pNewItem;
3955 pNewItem->pNext = m_pFront;
3956 m_pFront->pPrev = pNewItem;
3957 m_pFront = pNewItem;
3963 template<
typename T>
3964 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3966 ItemType*
const pNewItem = PushBack();
3967 pNewItem->Value = value;
3971 template<
typename T>
3972 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3974 ItemType*
const pNewItem = PushFront();
3975 pNewItem->Value = value;
3979 template<
typename T>
3980 void VmaRawList<T>::PopBack()
3982 VMA_HEAVY_ASSERT(m_Count > 0);
3983 ItemType*
const pBackItem = m_pBack;
3984 ItemType*
const pPrevItem = pBackItem->pPrev;
3985 if(pPrevItem != VMA_NULL)
3987 pPrevItem->pNext = VMA_NULL;
3989 m_pBack = pPrevItem;
3990 m_ItemAllocator.Free(pBackItem);
3994 template<
typename T>
3995 void VmaRawList<T>::PopFront()
3997 VMA_HEAVY_ASSERT(m_Count > 0);
3998 ItemType*
const pFrontItem = m_pFront;
3999 ItemType*
const pNextItem = pFrontItem->pNext;
4000 if(pNextItem != VMA_NULL)
4002 pNextItem->pPrev = VMA_NULL;
4004 m_pFront = pNextItem;
4005 m_ItemAllocator.Free(pFrontItem);
4009 template<
typename T>
4010 void VmaRawList<T>::Remove(ItemType* pItem)
4012 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4013 VMA_HEAVY_ASSERT(m_Count > 0);
4015 if(pItem->pPrev != VMA_NULL)
4017 pItem->pPrev->pNext = pItem->pNext;
4021 VMA_HEAVY_ASSERT(m_pFront == pItem);
4022 m_pFront = pItem->pNext;
4025 if(pItem->pNext != VMA_NULL)
4027 pItem->pNext->pPrev = pItem->pPrev;
4031 VMA_HEAVY_ASSERT(m_pBack == pItem);
4032 m_pBack = pItem->pPrev;
4035 m_ItemAllocator.Free(pItem);
4039 template<
typename T>
4040 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4042 if(pItem != VMA_NULL)
4044 ItemType*
const prevItem = pItem->pPrev;
4045 ItemType*
const newItem = m_ItemAllocator.Alloc();
4046 newItem->pPrev = prevItem;
4047 newItem->pNext = pItem;
4048 pItem->pPrev = newItem;
4049 if(prevItem != VMA_NULL)
4051 prevItem->pNext = newItem;
4055 VMA_HEAVY_ASSERT(m_pFront == pItem);
4065 template<
typename T>
4066 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4068 if(pItem != VMA_NULL)
4070 ItemType*
const nextItem = pItem->pNext;
4071 ItemType*
const newItem = m_ItemAllocator.Alloc();
4072 newItem->pNext = nextItem;
4073 newItem->pPrev = pItem;
4074 pItem->pNext = newItem;
4075 if(nextItem != VMA_NULL)
4077 nextItem->pPrev = newItem;
4081 VMA_HEAVY_ASSERT(m_pBack == pItem);
4091 template<
typename T>
4092 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4094 ItemType*
const newItem = InsertBefore(pItem);
4095 newItem->Value = value;
4099 template<
typename T>
4100 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4102 ItemType*
const newItem = InsertAfter(pItem);
4103 newItem->Value = value;
4107 template<
typename T,
typename AllocatorT>
4110 VMA_CLASS_NO_COPY(VmaList)
4121 T& operator*()
const 4123 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4124 return m_pItem->Value;
4126 T* operator->()
const 4128 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4129 return &m_pItem->Value;
4132 iterator& operator++()
4134 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4135 m_pItem = m_pItem->pNext;
4138 iterator& operator--()
4140 if(m_pItem != VMA_NULL)
4142 m_pItem = m_pItem->pPrev;
4146 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4147 m_pItem = m_pList->Back();
4152 iterator operator++(
int)
4154 iterator result = *
this;
4158 iterator operator--(
int)
4160 iterator result = *
this;
4165 bool operator==(
const iterator& rhs)
const 4167 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4168 return m_pItem == rhs.m_pItem;
4170 bool operator!=(
const iterator& rhs)
const 4172 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4173 return m_pItem != rhs.m_pItem;
4177 VmaRawList<T>* m_pList;
4178 VmaListItem<T>* m_pItem;
4180 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4186 friend class VmaList<T, AllocatorT>;
4189 class const_iterator
4198 const_iterator(
const iterator& src) :
4199 m_pList(src.m_pList),
4200 m_pItem(src.m_pItem)
4204 const T& operator*()
const 4206 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4207 return m_pItem->Value;
4209 const T* operator->()
const 4211 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4212 return &m_pItem->Value;
4215 const_iterator& operator++()
4217 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4218 m_pItem = m_pItem->pNext;
4221 const_iterator& operator--()
4223 if(m_pItem != VMA_NULL)
4225 m_pItem = m_pItem->pPrev;
4229 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4230 m_pItem = m_pList->Back();
4235 const_iterator operator++(
int)
4237 const_iterator result = *
this;
4241 const_iterator operator--(
int)
4243 const_iterator result = *
this;
4248 bool operator==(
const const_iterator& rhs)
const 4250 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4251 return m_pItem == rhs.m_pItem;
4253 bool operator!=(
const const_iterator& rhs)
const 4255 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4256 return m_pItem != rhs.m_pItem;
4260 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4266 const VmaRawList<T>* m_pList;
4267 const VmaListItem<T>* m_pItem;
4269 friend class VmaList<T, AllocatorT>;
4272 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4274 bool empty()
const {
return m_RawList.IsEmpty(); }
4275 size_t size()
const {
return m_RawList.GetCount(); }
4277 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4278 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4280 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4281 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4283 void clear() { m_RawList.Clear(); }
4284 void push_back(
const T& value) { m_RawList.PushBack(value); }
4285 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4286 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4289 VmaRawList<T> m_RawList;
4292 #endif // #if VMA_USE_STL_LIST 4300 #if VMA_USE_STL_UNORDERED_MAP 4302 #define VmaPair std::pair 4304 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4305 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4307 #else // #if VMA_USE_STL_UNORDERED_MAP 4309 template<
// Minimal std::pair replacement used when VMA_USE_STL_UNORDERED_MAP is off.
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
4322 template<
typename KeyT,
typename ValueT>
4326 typedef VmaPair<KeyT, ValueT> PairType;
4327 typedef PairType* iterator;
4329 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4331 iterator begin() {
return m_Vector.begin(); }
4332 iterator end() {
return m_Vector.end(); }
4334 void insert(
const PairType& pair);
4335 iterator find(
const KeyT& key);
4336 void erase(iterator it);
4339 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4342 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4344 template<
typename FirstT,
typename SecondT>
4345 struct VmaPairFirstLess
4347 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4349 return lhs.first < rhs.first;
4351 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4353 return lhs.first < rhsFirst;
4357 template<
typename KeyT,
typename ValueT>
4358 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4360 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4362 m_Vector.data() + m_Vector.size(),
4364 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4365 VmaVectorInsert(m_Vector, indexToInsert, pair);
4368 template<
typename KeyT,
typename ValueT>
4369 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4371 PairType* it = VmaBinaryFindFirstNotLess(
4373 m_Vector.data() + m_Vector.size(),
4375 VmaPairFirstLess<KeyT, ValueT>());
4376 if((it != m_Vector.end()) && (it->first == key))
4382 return m_Vector.end();
4386 template<
typename KeyT,
typename ValueT>
4387 void VmaMap<KeyT, ValueT>::erase(iterator it)
4389 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4392 #endif // #if VMA_USE_STL_UNORDERED_MAP 4398 class VmaDeviceMemoryBlock;
4400 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4402 struct VmaAllocation_T
4404 VMA_CLASS_NO_COPY(VmaAllocation_T)
4406 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4410 FLAG_USER_DATA_STRING = 0x01,
4414 enum ALLOCATION_TYPE
4416 ALLOCATION_TYPE_NONE,
4417 ALLOCATION_TYPE_BLOCK,
4418 ALLOCATION_TYPE_DEDICATED,
4421 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4424 m_pUserData(VMA_NULL),
4425 m_LastUseFrameIndex(currentFrameIndex),
4426 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4427 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4429 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4431 #if VMA_STATS_STRING_ENABLED 4432 m_CreationFrameIndex = currentFrameIndex;
4433 m_BufferImageUsage = 0;
4439 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4442 VMA_ASSERT(m_pUserData == VMA_NULL);
4445 void InitBlockAllocation(
4447 VmaDeviceMemoryBlock* block,
4448 VkDeviceSize offset,
4449 VkDeviceSize alignment,
4451 VmaSuballocationType suballocationType,
4455 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4456 VMA_ASSERT(block != VMA_NULL);
4457 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4458 m_Alignment = alignment;
4460 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4461 m_SuballocationType = (uint8_t)suballocationType;
4462 m_BlockAllocation.m_hPool = hPool;
4463 m_BlockAllocation.m_Block = block;
4464 m_BlockAllocation.m_Offset = offset;
4465 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4470 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4471 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4472 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4473 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4474 m_BlockAllocation.m_Block = VMA_NULL;
4475 m_BlockAllocation.m_Offset = 0;
4476 m_BlockAllocation.m_CanBecomeLost =
true;
4479 void ChangeBlockAllocation(
4481 VmaDeviceMemoryBlock* block,
4482 VkDeviceSize offset);
4485 void InitDedicatedAllocation(
4486 uint32_t memoryTypeIndex,
4487 VkDeviceMemory hMemory,
4488 VmaSuballocationType suballocationType,
4492 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4493 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4494 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4497 m_SuballocationType = (uint8_t)suballocationType;
4498 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4499 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4500 m_DedicatedAllocation.m_hMemory = hMemory;
4501 m_DedicatedAllocation.m_pMappedData = pMappedData;
4504 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4505 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4506 VkDeviceSize GetSize()
const {
return m_Size; }
4507 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4508 void* GetUserData()
const {
return m_pUserData; }
4509 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4510 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4512 VmaDeviceMemoryBlock* GetBlock()
const 4514 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4515 return m_BlockAllocation.m_Block;
4517 VkDeviceSize GetOffset()
const;
4518 VkDeviceMemory GetMemory()
const;
4519 uint32_t GetMemoryTypeIndex()
const;
4520 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4521 void* GetMappedData()
const;
4522 bool CanBecomeLost()
const;
4525 uint32_t GetLastUseFrameIndex()
const 4527 return m_LastUseFrameIndex.load();
4529 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4531 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4541 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4543 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4545 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4556 void BlockAllocMap();
4557 void BlockAllocUnmap();
4558 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4561 #if VMA_STATS_STRING_ENABLED 4562 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4563 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4565 void InitBufferImageUsage(uint32_t bufferImageUsage)
4567 VMA_ASSERT(m_BufferImageUsage == 0);
4568 m_BufferImageUsage = bufferImageUsage;
4571 void PrintParameters(
class VmaJsonWriter& json)
const;
4575 VkDeviceSize m_Alignment;
4576 VkDeviceSize m_Size;
4578 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4580 uint8_t m_SuballocationType;
4587 struct BlockAllocation
4590 VmaDeviceMemoryBlock* m_Block;
4591 VkDeviceSize m_Offset;
4592 bool m_CanBecomeLost;
4596 struct DedicatedAllocation
4598 uint32_t m_MemoryTypeIndex;
4599 VkDeviceMemory m_hMemory;
4600 void* m_pMappedData;
4606 BlockAllocation m_BlockAllocation;
4608 DedicatedAllocation m_DedicatedAllocation;
4611 #if VMA_STATS_STRING_ENABLED 4612 uint32_t m_CreationFrameIndex;
4613 uint32_t m_BufferImageUsage;
4623 struct VmaSuballocation
4625 VkDeviceSize offset;
4628 VmaSuballocationType type;
4632 struct VmaSuballocationOffsetLess
4634 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4636 return lhs.offset < rhs.offset;
4639 struct VmaSuballocationOffsetGreater
4641 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4643 return lhs.offset > rhs.offset;
4647 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4650 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4665 struct VmaAllocationRequest
4667 VkDeviceSize offset;
4668 VkDeviceSize sumFreeSize;
4669 VkDeviceSize sumItemSize;
4670 VmaSuballocationList::iterator item;
4671 size_t itemsToMakeLostCount;
4674 VkDeviceSize CalcCost()
const 4676 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4684 class VmaBlockMetadata
4688 virtual ~VmaBlockMetadata() { }
4689 virtual void Init(VkDeviceSize size) { m_Size = size; }
4692 virtual bool Validate()
const = 0;
4693 VkDeviceSize GetSize()
const {
return m_Size; }
4694 virtual size_t GetAllocationCount()
const = 0;
4695 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4696 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4698 virtual bool IsEmpty()
const = 0;
4700 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4702 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4704 #if VMA_STATS_STRING_ENABLED 4705 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
4711 virtual bool CreateAllocationRequest(
4712 uint32_t currentFrameIndex,
4713 uint32_t frameInUseCount,
4714 VkDeviceSize bufferImageGranularity,
4715 VkDeviceSize allocSize,
4716 VkDeviceSize allocAlignment,
4718 VmaSuballocationType allocType,
4719 bool canMakeOtherLost,
4721 VmaAllocationRequest* pAllocationRequest) = 0;
4723 virtual bool MakeRequestedAllocationsLost(
4724 uint32_t currentFrameIndex,
4725 uint32_t frameInUseCount,
4726 VmaAllocationRequest* pAllocationRequest) = 0;
4728 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4730 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
4734 const VmaAllocationRequest& request,
4735 VmaSuballocationType type,
4736 VkDeviceSize allocSize,
4742 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
4745 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
4747 #if VMA_STATS_STRING_ENABLED 4748 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4749 VkDeviceSize unusedBytes,
4750 size_t allocationCount,
4751 size_t unusedRangeCount)
const;
4752 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4753 VkDeviceSize offset,
4755 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4756 VkDeviceSize offset,
4757 VkDeviceSize size)
const;
4758 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4762 VkDeviceSize m_Size;
4763 const VkAllocationCallbacks* m_pAllocationCallbacks;
4766 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 4767 VMA_ASSERT(0 && "Validation failed: " #cond); \ 4771 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4773 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4776 virtual ~VmaBlockMetadata_Generic();
4777 virtual void Init(VkDeviceSize size);
4779 virtual bool Validate()
const;
4780 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4781 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4782 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4783 virtual bool IsEmpty()
const;
4785 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4786 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4788 #if VMA_STATS_STRING_ENABLED 4789 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4792 virtual bool CreateAllocationRequest(
4793 uint32_t currentFrameIndex,
4794 uint32_t frameInUseCount,
4795 VkDeviceSize bufferImageGranularity,
4796 VkDeviceSize allocSize,
4797 VkDeviceSize allocAlignment,
4799 VmaSuballocationType allocType,
4800 bool canMakeOtherLost,
4802 VmaAllocationRequest* pAllocationRequest);
4804 virtual bool MakeRequestedAllocationsLost(
4805 uint32_t currentFrameIndex,
4806 uint32_t frameInUseCount,
4807 VmaAllocationRequest* pAllocationRequest);
4809 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4811 virtual VkResult CheckCorruption(
const void* pBlockData);
4814 const VmaAllocationRequest& request,
4815 VmaSuballocationType type,
4816 VkDeviceSize allocSize,
4821 virtual void FreeAtOffset(VkDeviceSize offset);
4824 uint32_t m_FreeCount;
4825 VkDeviceSize m_SumFreeSize;
4826 VmaSuballocationList m_Suballocations;
4829 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4831 bool ValidateFreeSuballocationList()
const;
4835 bool CheckAllocation(
4836 uint32_t currentFrameIndex,
4837 uint32_t frameInUseCount,
4838 VkDeviceSize bufferImageGranularity,
4839 VkDeviceSize allocSize,
4840 VkDeviceSize allocAlignment,
4841 VmaSuballocationType allocType,
4842 VmaSuballocationList::const_iterator suballocItem,
4843 bool canMakeOtherLost,
4844 VkDeviceSize* pOffset,
4845 size_t* itemsToMakeLostCount,
4846 VkDeviceSize* pSumFreeSize,
4847 VkDeviceSize* pSumItemSize)
const;
4849 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4853 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4856 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4859 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4940 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
4942 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
4945 virtual ~VmaBlockMetadata_Linear();
4946 virtual void Init(VkDeviceSize size);
4948 virtual bool Validate()
const;
4949 virtual size_t GetAllocationCount()
const;
4950 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4951 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4952 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
4954 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4955 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4957 #if VMA_STATS_STRING_ENABLED 4958 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4961 virtual bool CreateAllocationRequest(
4962 uint32_t currentFrameIndex,
4963 uint32_t frameInUseCount,
4964 VkDeviceSize bufferImageGranularity,
4965 VkDeviceSize allocSize,
4966 VkDeviceSize allocAlignment,
4968 VmaSuballocationType allocType,
4969 bool canMakeOtherLost,
4971 VmaAllocationRequest* pAllocationRequest);
4973 virtual bool MakeRequestedAllocationsLost(
4974 uint32_t currentFrameIndex,
4975 uint32_t frameInUseCount,
4976 VmaAllocationRequest* pAllocationRequest);
4978 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4980 virtual VkResult CheckCorruption(
const void* pBlockData);
4983 const VmaAllocationRequest& request,
4984 VmaSuballocationType type,
4985 VkDeviceSize allocSize,
4990 virtual void FreeAtOffset(VkDeviceSize offset);
5000 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5002 enum SECOND_VECTOR_MODE
5004 SECOND_VECTOR_EMPTY,
5009 SECOND_VECTOR_RING_BUFFER,
5015 SECOND_VECTOR_DOUBLE_STACK,
5018 VkDeviceSize m_SumFreeSize;
5019 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5020 uint32_t m_1stVectorIndex;
5021 SECOND_VECTOR_MODE m_2ndVectorMode;
5023 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5024 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5025 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5026 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5029 size_t m_1stNullItemsBeginCount;
5031 size_t m_1stNullItemsMiddleCount;
5033 size_t m_2ndNullItemsCount;
5035 bool ShouldCompact1st()
const;
5036 void CleanupAfterFree();
5050 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5052 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5055 virtual ~VmaBlockMetadata_Buddy();
5056 virtual void Init(VkDeviceSize size);
5058 virtual bool Validate()
const;
5059 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5060 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5061 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5062 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5064 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5065 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5067 #if VMA_STATS_STRING_ENABLED 5068 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5071 virtual bool CreateAllocationRequest(
5072 uint32_t currentFrameIndex,
5073 uint32_t frameInUseCount,
5074 VkDeviceSize bufferImageGranularity,
5075 VkDeviceSize allocSize,
5076 VkDeviceSize allocAlignment,
5078 VmaSuballocationType allocType,
5079 bool canMakeOtherLost,
5081 VmaAllocationRequest* pAllocationRequest);
5083 virtual bool MakeRequestedAllocationsLost(
5084 uint32_t currentFrameIndex,
5085 uint32_t frameInUseCount,
5086 VmaAllocationRequest* pAllocationRequest);
5088 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5090 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5093 const VmaAllocationRequest& request,
5094 VmaSuballocationType type,
5095 VkDeviceSize allocSize,
5099 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5100 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5103 static const VkDeviceSize MIN_NODE_SIZE = 32;
5104 static const size_t MAX_LEVELS = 30;
5106 struct ValidationContext
5108 size_t calculatedAllocationCount;
5109 size_t calculatedFreeCount;
5110 VkDeviceSize calculatedSumFreeSize;
5112 ValidationContext() :
5113 calculatedAllocationCount(0),
5114 calculatedFreeCount(0),
5115 calculatedSumFreeSize(0) { }
5120 VkDeviceSize offset;
5150 VkDeviceSize m_UsableSize;
5151 uint32_t m_LevelCount;
5157 } m_FreeList[MAX_LEVELS];
5159 size_t m_AllocationCount;
5163 VkDeviceSize m_SumFreeSize;
5165 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5166 void DeleteNode(Node* node);
5167 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5168 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5169 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5171 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5172 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5176 void AddToFreeListFront(uint32_t level, Node* node);
5180 void RemoveFromFreeList(uint32_t level, Node* node);
5182 #if VMA_STATS_STRING_ENABLED 5183 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5193 class VmaDeviceMemoryBlock
5195 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5197 VmaBlockMetadata* m_pMetadata;
5201 ~VmaDeviceMemoryBlock()
5203 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5204 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5210 uint32_t newMemoryTypeIndex,
5211 VkDeviceMemory newMemory,
5212 VkDeviceSize newSize,
5214 uint32_t algorithm);
5218 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5219 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5220 uint32_t GetId()
const {
return m_Id; }
5221 void* GetMappedData()
const {
return m_pMappedData; }
5224 bool Validate()
const;
5229 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5232 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5233 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5235 VkResult BindBufferMemory(
5239 VkResult BindImageMemory(
5245 uint32_t m_MemoryTypeIndex;
5247 VkDeviceMemory m_hMemory;
5252 uint32_t m_MapCount;
5253 void* m_pMappedData;
5256 struct VmaPointerLess
5258 bool operator()(
const void* lhs,
const void* rhs)
const 5264 class VmaDefragmentator;
5272 struct VmaBlockVector
5274 VMA_CLASS_NO_COPY(VmaBlockVector)
5278 uint32_t memoryTypeIndex,
5279 VkDeviceSize preferredBlockSize,
5280 size_t minBlockCount,
5281 size_t maxBlockCount,
5282 VkDeviceSize bufferImageGranularity,
5283 uint32_t frameInUseCount,
5285 bool explicitBlockSize,
5286 uint32_t algorithm);
5289 VkResult CreateMinBlocks();
5291 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5292 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5293 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5294 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5295 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5299 bool IsEmpty()
const {
return m_Blocks.empty(); }
5300 bool IsCorruptionDetectionEnabled()
const;
5304 uint32_t currentFrameIndex,
5306 VkDeviceSize alignment,
5308 VmaSuballocationType suballocType,
5317 #if VMA_STATS_STRING_ENABLED 5318 void PrintDetailedMap(
class VmaJsonWriter& json);
5321 void MakePoolAllocationsLost(
5322 uint32_t currentFrameIndex,
5323 size_t* pLostAllocationCount);
5324 VkResult CheckCorruption();
5326 VmaDefragmentator* EnsureDefragmentator(
5328 uint32_t currentFrameIndex);
5330 VkResult Defragment(
5332 VkDeviceSize& maxBytesToMove,
5333 uint32_t& maxAllocationsToMove);
5335 void DestroyDefragmentator();
5338 friend class VmaDefragmentator;
5341 const uint32_t m_MemoryTypeIndex;
5342 const VkDeviceSize m_PreferredBlockSize;
5343 const size_t m_MinBlockCount;
5344 const size_t m_MaxBlockCount;
5345 const VkDeviceSize m_BufferImageGranularity;
5346 const uint32_t m_FrameInUseCount;
5347 const bool m_IsCustomPool;
5348 const bool m_ExplicitBlockSize;
5349 const uint32_t m_Algorithm;
5350 bool m_HasEmptyBlock;
5353 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5357 VmaDefragmentator* m_pDefragmentator;
5358 uint32_t m_NextBlockId;
5360 VkDeviceSize CalcMaxBlockSize()
const;
5363 void Remove(VmaDeviceMemoryBlock* pBlock);
5367 void IncrementallySortBlocks();
5370 VkResult AllocateFromBlock(
5371 VmaDeviceMemoryBlock* pBlock,
5373 uint32_t currentFrameIndex,
5375 VkDeviceSize alignment,
5378 VmaSuballocationType suballocType,
5382 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5387 VMA_CLASS_NO_COPY(VmaPool_T)
5389 VmaBlockVector m_BlockVector;
5394 VkDeviceSize preferredBlockSize);
5397 uint32_t GetId()
const {
return m_Id; }
5398 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5400 #if VMA_STATS_STRING_ENABLED 5408 class VmaDefragmentator
5410 VMA_CLASS_NO_COPY(VmaDefragmentator)
5413 VmaBlockVector*
const m_pBlockVector;
5414 uint32_t m_CurrentFrameIndex;
5415 VkDeviceSize m_BytesMoved;
5416 uint32_t m_AllocationsMoved;
5418 struct AllocationInfo
5421 VkBool32* m_pChanged;
5424 m_hAllocation(VK_NULL_HANDLE),
5425 m_pChanged(VMA_NULL)
5430 struct AllocationInfoSizeGreater
5432 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5434 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5439 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5443 VmaDeviceMemoryBlock* m_pBlock;
5444 bool m_HasNonMovableAllocations;
5445 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5447 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5449 m_HasNonMovableAllocations(true),
5450 m_Allocations(pAllocationCallbacks),
5451 m_pMappedDataForDefragmentation(VMA_NULL)
5455 void CalcHasNonMovableAllocations()
5457 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5458 const size_t defragmentAllocCount = m_Allocations.size();
5459 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5462 void SortAllocationsBySizeDescecnding()
5464 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5467 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5472 void* m_pMappedDataForDefragmentation;
5475 struct BlockPointerLess
5477 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5479 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5481 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5483 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
5489 struct BlockInfoCompareMoveDestination
5491 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5493 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5497 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5501 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5509 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5510 BlockInfoVector m_Blocks;
5512 VkResult DefragmentRound(
5513 VkDeviceSize maxBytesToMove,
5514 uint32_t maxAllocationsToMove);
5516 static bool MoveMakesSense(
5517 size_t dstBlockIndex, VkDeviceSize dstOffset,
5518 size_t srcBlockIndex, VkDeviceSize srcOffset);
5523 VmaBlockVector* pBlockVector,
5524 uint32_t currentFrameIndex);
5526 ~VmaDefragmentator();
5528 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5529 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5531 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5533 VkResult Defragment(
5534 VkDeviceSize maxBytesToMove,
5535 uint32_t maxAllocationsToMove);
5538 #if VMA_RECORDING_ENABLED 5545 void WriteConfiguration(
5546 const VkPhysicalDeviceProperties& devProps,
5547 const VkPhysicalDeviceMemoryProperties& memProps,
5548 bool dedicatedAllocationExtensionEnabled);
5551 void RecordCreateAllocator(uint32_t frameIndex);
5552 void RecordDestroyAllocator(uint32_t frameIndex);
5553 void RecordCreatePool(uint32_t frameIndex,
5556 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5557 void RecordAllocateMemory(uint32_t frameIndex,
5558 const VkMemoryRequirements& vkMemReq,
5561 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5562 const VkMemoryRequirements& vkMemReq,
5563 bool requiresDedicatedAllocation,
5564 bool prefersDedicatedAllocation,
5567 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5568 const VkMemoryRequirements& vkMemReq,
5569 bool requiresDedicatedAllocation,
5570 bool prefersDedicatedAllocation,
5573 void RecordFreeMemory(uint32_t frameIndex,
5575 void RecordSetAllocationUserData(uint32_t frameIndex,
5577 const void* pUserData);
5578 void RecordCreateLostAllocation(uint32_t frameIndex,
5580 void RecordMapMemory(uint32_t frameIndex,
5582 void RecordUnmapMemory(uint32_t frameIndex,
5584 void RecordFlushAllocation(uint32_t frameIndex,
5585 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5586 void RecordInvalidateAllocation(uint32_t frameIndex,
5587 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5588 void RecordCreateBuffer(uint32_t frameIndex,
5589 const VkBufferCreateInfo& bufCreateInfo,
5592 void RecordCreateImage(uint32_t frameIndex,
5593 const VkImageCreateInfo& imageCreateInfo,
5596 void RecordDestroyBuffer(uint32_t frameIndex,
5598 void RecordDestroyImage(uint32_t frameIndex,
5600 void RecordTouchAllocation(uint32_t frameIndex,
5602 void RecordGetAllocationInfo(uint32_t frameIndex,
5604 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
5614 class UserDataString
5618 const char* GetString()
const {
return m_Str; }
5628 VMA_MUTEX m_FileMutex;
5630 int64_t m_StartCounter;
5632 void GetBasicParams(CallParams& outParams);
5636 #endif // #if VMA_RECORDING_ENABLED 5639 struct VmaAllocator_T
5641 VMA_CLASS_NO_COPY(VmaAllocator_T)
5644 bool m_UseKhrDedicatedAllocation;
5646 bool m_AllocationCallbacksSpecified;
5647 VkAllocationCallbacks m_AllocationCallbacks;
5651 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5652 VMA_MUTEX m_HeapSizeLimitMutex;
5654 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5655 VkPhysicalDeviceMemoryProperties m_MemProps;
5658 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
5661 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5662 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5663 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
5669 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5671 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5675 return m_VulkanFunctions;
5678 VkDeviceSize GetBufferImageGranularity()
const 5681 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5682 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5685 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5686 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5688 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5690 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5691 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
5694 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5696 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5697 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5700 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5702 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5703 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5704 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5707 bool IsIntegratedGpu()
const 5709 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5712 #if VMA_RECORDING_ENABLED 5713 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
5716 void GetBufferMemoryRequirements(
5718 VkMemoryRequirements& memReq,
5719 bool& requiresDedicatedAllocation,
5720 bool& prefersDedicatedAllocation)
const;
5721 void GetImageMemoryRequirements(
5723 VkMemoryRequirements& memReq,
5724 bool& requiresDedicatedAllocation,
5725 bool& prefersDedicatedAllocation)
const;
5728 VkResult AllocateMemory(
5729 const VkMemoryRequirements& vkMemReq,
5730 bool requiresDedicatedAllocation,
5731 bool prefersDedicatedAllocation,
5732 VkBuffer dedicatedBuffer,
5733 VkImage dedicatedImage,
5735 VmaSuballocationType suballocType,
5741 void CalculateStats(
VmaStats* pStats);
5743 #if VMA_STATS_STRING_ENABLED 5744 void PrintDetailedMap(
class VmaJsonWriter& json);
5747 VkResult Defragment(
5749 size_t allocationCount,
5750 VkBool32* pAllocationsChanged,
5758 void DestroyPool(
VmaPool pool);
5761 void SetCurrentFrameIndex(uint32_t frameIndex);
5762 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5764 void MakePoolAllocationsLost(
5766 size_t* pLostAllocationCount);
5767 VkResult CheckPoolCorruption(
VmaPool hPool);
5768 VkResult CheckCorruption(uint32_t memoryTypeBits);
5772 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5773 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5778 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5779 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5781 void FlushOrInvalidateAllocation(
5783 VkDeviceSize offset, VkDeviceSize size,
5784 VMA_CACHE_OPERATION op);
5786 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5789 VkDeviceSize m_PreferredLargeHeapBlockSize;
5791 VkPhysicalDevice m_PhysicalDevice;
5792 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
5794 VMA_MUTEX m_PoolsMutex;
5796 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5797 uint32_t m_NextPoolId;
5801 #if VMA_RECORDING_ENABLED 5802 VmaRecorder* m_pRecorder;
5807 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5809 VkResult AllocateMemoryOfType(
5811 VkDeviceSize alignment,
5812 bool dedicatedAllocation,
5813 VkBuffer dedicatedBuffer,
5814 VkImage dedicatedImage,
5816 uint32_t memTypeIndex,
5817 VmaSuballocationType suballocType,
5821 VkResult AllocateDedicatedMemory(
5823 VmaSuballocationType suballocType,
5824 uint32_t memTypeIndex,
5826 bool isUserDataString,
5828 VkBuffer dedicatedBuffer,
5829 VkImage dedicatedImage,
5839 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5841 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5844 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5846 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
5849 template<
typename T>
5852 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
5855 template<
typename T>
5856 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5858 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
5861 template<
typename T>
5862 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5867 VmaFree(hAllocator, ptr);
5871 template<
typename T>
5872 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5876 for(
size_t i = count; i--; )
5878 VmaFree(hAllocator, ptr);
5885 #if VMA_STATS_STRING_ENABLED 5887 class VmaStringBuilder
5890 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5891 size_t GetLength()
const {
return m_Data.size(); }
5892 const char* GetData()
const {
return m_Data.data(); }
5894 void Add(
char ch) { m_Data.push_back(ch); }
5895 void Add(
const char* pStr);
5896 void AddNewLine() { Add(
'\n'); }
5897 void AddNumber(uint32_t num);
5898 void AddNumber(uint64_t num);
5899 void AddPointer(
const void* ptr);
5902 VmaVector< char, VmaStlAllocator<char> > m_Data;
5905 void VmaStringBuilder::Add(
const char* pStr)
5907 const size_t strLen = strlen(pStr);
5910 const size_t oldCount = m_Data.size();
5911 m_Data.resize(oldCount + strLen);
5912 memcpy(m_Data.data() + oldCount, pStr, strLen);
5916 void VmaStringBuilder::AddNumber(uint32_t num)
5919 VmaUint32ToStr(buf,
sizeof(buf), num);
5923 void VmaStringBuilder::AddNumber(uint64_t num)
5926 VmaUint64ToStr(buf,
sizeof(buf), num);
5930 void VmaStringBuilder::AddPointer(
const void* ptr)
5933 VmaPtrToStr(buf,
sizeof(buf), ptr);
// VmaJsonWriter: streaming JSON emitter over a VmaStringBuilder, used to build the
// detailed statistics dump. Maintains a stack of open collections (object/array)
// so it can insert commas, colons and indentation correctly, and a flag for being
// inside an unterminated string (Begin/Continue/EndString).
// Non-copyable (VMA_CLASS_NO_COPY).
// NOTE(review): extraction-mangled; struct StackItem's own header line is missing.
5937 #endif // #if VMA_STATS_STRING_ENABLED 5942 #if VMA_STATS_STRING_ENABLED 5946 VMA_CLASS_NO_COPY(VmaJsonWriter)
5948 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// Open a JSON object; singleLine suppresses newlines/indent inside it.
5951 void BeginObject(
bool singleLine =
false);
// Open a JSON array; same singleLine semantics.
5954 void BeginArray(
bool singleLine =
false);
// Emit a complete quoted string value (Begin + End in one call).
5957 void WriteString(
const char* pStr);
// Open a quoted string that may be continued piecewise before EndString.
5958 void BeginString(
const char* pStr = VMA_NULL);
5959 void ContinueString(
const char* pStr);
5960 void ContinueString(uint32_t n);
5961 void ContinueString(uint64_t n);
5962 void ContinueString_Pointer(
const void* ptr);
5963 void EndString(
const char* pStr = VMA_NULL);
// Emit unquoted numeric / boolean / null values.
5965 void WriteNumber(uint32_t n);
5966 void WriteNumber(uint64_t n);
5967 void WriteBool(
bool b);
// Indentation unit used by WriteIndent (defined out-of-line below).
5971 static const char*
const INDENT;
// Kind of the currently open collection, tracked per stack level.
5973 enum COLLECTION_TYPE
5975 COLLECTION_TYPE_OBJECT,
5976 COLLECTION_TYPE_ARRAY,
// Per-level state: collection kind, number of values written so far (objects
// alternate key/value, hence the %2 checks in BeginValue), and single-line mode.
5980 COLLECTION_TYPE type;
5981 uint32_t valueCount;
5982 bool singleLineMode;
// Output sink and the stack of open collections.
5985 VmaStringBuilder& m_SB;
5986 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
5987 bool m_InsideString;
// Shared preamble for every value: separators, indent, key/value bookkeeping.
5989 void BeginValue(
bool isString);
// Write newline + one INDENT per open level; oneLess backs off one level (for closers).
5990 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit.
5993 const char*
const VmaJsonWriter::INDENT =
"  ";
// VmaJsonWriter out-of-line definitions.
// NOTE(review): extraction-mangled — several statements (StackItem declarations,
// m_SB.Add calls for braces/quotes, escape-switch cases) are missing from view;
// comments describe only the visible fragments.
5995 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
5997 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
5998 m_InsideString(false)
// Destructor sanity: no dangling open string and no unclosed object/array.
6002 VmaJsonWriter::~VmaJsonWriter()
6004 VMA_ASSERT(!m_InsideString);
6005 VMA_ASSERT(m_Stack.empty());
// Push an OBJECT level; the '{' emission is not visible in this extraction.
6008 void VmaJsonWriter::BeginObject(
bool singleLine)
6010 VMA_ASSERT(!m_InsideString);
6016 item.type = COLLECTION_TYPE_OBJECT;
6017 item.valueCount = 0;
6018 item.singleLineMode = singleLine;
6019 m_Stack.push_back(item);
// Close the current level; must match a BeginObject.
6022 void VmaJsonWriter::EndObject()
6024 VMA_ASSERT(!m_InsideString);
6029 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Push an ARRAY level (mirror of BeginObject).
6033 void VmaJsonWriter::BeginArray(
bool singleLine)
6035 VMA_ASSERT(!m_InsideString);
6041 item.type = COLLECTION_TYPE_ARRAY;
6042 item.valueCount = 0;
6043 item.singleLineMode = singleLine;
6044 m_Stack.push_back(item);
// Close the current level; must match a BeginArray.
6047 void VmaJsonWriter::EndArray()
6049 VMA_ASSERT(!m_InsideString);
6054 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: whole quoted string in one call (body not visible here).
6058 void VmaJsonWriter::WriteString(
const char* pStr)
// Start a quoted string value; optional initial content.
6064 void VmaJsonWriter::BeginString(
const char* pStr)
6066 VMA_ASSERT(!m_InsideString);
6070 m_InsideString =
true;
6071 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6073 ContinueString(pStr);
// Append raw characters to the open string, escaping per JSON as needed.
// The per-character escape switch is elided by the extraction; only the
// unsupported-character assert survives.
6077 void VmaJsonWriter::ContinueString(
const char* pStr)
6079 VMA_ASSERT(m_InsideString);
6081 const size_t strLen = strlen(pStr);
6082 for(
size_t i = 0; i < strLen; ++i)
6115 VMA_ASSERT(0 &&
"Character not currently supported.");
// Append decimal digits to the open string (must be inside a string).
6121 void VmaJsonWriter::ContinueString(uint32_t n)
6123 VMA_ASSERT(m_InsideString);
6127 void VmaJsonWriter::ContinueString(uint64_t n)
6129 VMA_ASSERT(m_InsideString);
// Append a pointer's textual form to the open string.
6133 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6135 VMA_ASSERT(m_InsideString);
6136 m_SB.AddPointer(ptr);
// Append optional final content, then close the quoted string.
6139 void VmaJsonWriter::EndString(
const char* pStr)
6141 VMA_ASSERT(m_InsideString);
6142 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6144 ContinueString(pStr);
6147 m_InsideString =
false;
// Unquoted numeric values (only valid outside a string).
6150 void VmaJsonWriter::WriteNumber(uint32_t n)
6152 VMA_ASSERT(!m_InsideString);
6157 void VmaJsonWriter::WriteNumber(uint64_t n)
6159 VMA_ASSERT(!m_InsideString);
// Literal true/false.
6164 void VmaJsonWriter::WriteBool(
bool b)
6166 VMA_ASSERT(!m_InsideString);
6168 m_SB.Add(b ?
"true" :
"false");
// Literal null.
6171 void VmaJsonWriter::WriteNull()
6173 VMA_ASSERT(!m_InsideString);
// Separator/bookkeeping before any value: inside an object, even counts are keys
// (must be strings, hence the assert) and odd counts get the ':' separator;
// later array/object elements get a ',' — the Add() calls themselves are elided here.
6178 void VmaJsonWriter::BeginValue(
bool isString)
6180 if(!m_Stack.empty())
6182 StackItem& currItem = m_Stack.back();
6183 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6184 currItem.valueCount % 2 == 0)
6186 VMA_ASSERT(isString);
6189 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6190 currItem.valueCount % 2 != 0)
6194 else if(currItem.valueCount > 0)
6203 ++currItem.valueCount;
// Newline + INDENT per open level, skipped in single-line mode; oneLess is used
// when writing a closing brace/bracket at the outer level's depth.
6207 void VmaJsonWriter::WriteIndent(
bool oneLess)
6209 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6213 size_t count = m_Stack.size();
6214 if(count > 0 && oneLess)
6218 for(
size_t i = 0; i < count; ++i)
// VmaAllocation_T method definitions: per-allocation metadata and map/unmap logic.
// An allocation is either a sub-range of a VmaDeviceMemoryBlock (ALLOCATION_TYPE_BLOCK)
// or its own VkDeviceMemory (ALLOCATION_TYPE_DEDICATED). m_MapCount packs a 7-bit
// map refcount with the MAP_COUNT_FLAG_PERSISTENT_MAP bit.
// NOTE(review): extraction-mangled — switch headers, default branches and some
// statements are missing; comments cover only the visible fragments.
6225 #endif // #if VMA_STATS_STRING_ENABLED 6229 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
// When the allocation was created with the user-data-as-string flag, pUserData is
// a C string: free the previously stored copy and duplicate the new one into
// allocator-owned memory. Otherwise the raw pointer is stored as-is.
6231 if(IsUserDataString())
6233 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
6235 FreeUserDataString(hAllocator);
6237 if(pUserData != VMA_NULL)
6239 const char*
const newStrSrc = (
char*)pUserData;
6240 const size_t newStrLen = strlen(newStrSrc);
6241 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
// +1 copies the terminating NUL too.
6242 memcpy(newStrDst, newStrSrc, newStrLen + 1);
6243 m_pUserData = newStrDst;
6248 m_pUserData = pUserData;
// Move this block-type allocation to a different block/offset (defragmentation).
// If persistently mapped, the map refcount is transferred: unmap old block,
// map new block the same number of times.
6252 void VmaAllocation_T::ChangeBlockAllocation(
6254 VmaDeviceMemoryBlock* block,
6255 VkDeviceSize offset)
6257 VMA_ASSERT(block != VMA_NULL);
6258 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6261 if(block != m_BlockAllocation.m_Block)
6263 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
6264 if(IsPersistentMap())
6266 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
6267 block->Map(hAllocator, mapRefCount, VMA_NULL);
6270 m_BlockAllocation.m_Block = block;
6271 m_BlockAllocation.m_Offset = offset;
// Offset within the VkDeviceMemory: block suballocations have a real offset,
// dedicated allocations start at 0 (default branch elided by extraction).
6274 VkDeviceSize VmaAllocation_T::GetOffset()
const 6278 case ALLOCATION_TYPE_BLOCK:
6279 return m_BlockAllocation.m_Offset;
6280 case ALLOCATION_TYPE_DEDICATED:
// The VkDeviceMemory handle backing this allocation.
6288 VkDeviceMemory VmaAllocation_T::GetMemory()
const 6292 case ALLOCATION_TYPE_BLOCK:
6293 return m_BlockAllocation.m_Block->GetDeviceMemory();
6294 case ALLOCATION_TYPE_DEDICATED:
6295 return m_DedicatedAllocation.m_hMemory;
6298 return VK_NULL_HANDLE;
// Vulkan memory type index, sourced from the owning block or stored directly.
6302 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 6306 case ALLOCATION_TYPE_BLOCK:
6307 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
6308 case ALLOCATION_TYPE_DEDICATED:
6309 return m_DedicatedAllocation.m_MemoryTypeIndex;
// CPU pointer when mapped: block allocations offset into the block's mapping;
// dedicated allocations return their own mapped pointer (consistent with m_MapCount).
6316 void* VmaAllocation_T::GetMappedData()
const 6320 case ALLOCATION_TYPE_BLOCK:
6323 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
6324 VMA_ASSERT(pBlockData != VMA_NULL);
6325 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
6332 case ALLOCATION_TYPE_DEDICATED:
6333 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
6334 return m_DedicatedAllocation.m_pMappedData;
// Only block allocations created with CAN_BECOME_LOST can be reclaimed.
6341 bool VmaAllocation_T::CanBecomeLost()
const 6345 case ALLOCATION_TYPE_BLOCK:
6346 return m_BlockAllocation.m_CanBecomeLost;
6347 case ALLOCATION_TYPE_DEDICATED:
// Owning custom pool handle (block allocations only).
6355 VmaPool VmaAllocation_T::GetPool()
const 6357 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6358 return m_BlockAllocation.m_hPool;
// Try to mark this allocation lost via CAS on its last-use frame index.
// Fails if already lost or still within frameInUseCount of the current frame;
// retries on CAS contention (loop structure elided by extraction).
6361 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6363 VMA_ASSERT(CanBecomeLost());
6369 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6372 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6377 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6383 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names indexed by VmaSuballocationType (initializer list elided).
6393 #if VMA_STATS_STRING_ENABLED 6396 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Emit this allocation's fields as JSON key/value pairs into an already-open object.
6405 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6407 json.WriteString(
"Type");
6408 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6410 json.WriteString(
"Size");
6411 json.WriteNumber(m_Size);
6413 if(m_pUserData != VMA_NULL)
6415 json.WriteString(
"UserData");
// String user data is emitted as a JSON string, raw pointers as formatted text.
6416 if(IsUserDataString())
6418 json.WriteString((
const char*)m_pUserData);
6423 json.ContinueString_Pointer(m_pUserData);
6428 json.WriteString(
"CreationFrameIndex");
6429 json.WriteNumber(m_CreationFrameIndex);
6431 json.WriteString(
"LastUseFrameIndex");
6432 json.WriteNumber(GetLastUseFrameIndex());
6434 if(m_BufferImageUsage != 0)
6436 json.WriteString(
"Usage");
6437 json.WriteNumber(m_BufferImageUsage);
// Free the allocator-owned duplicated user-data string (see SetUserData).
6443 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6445 VMA_ASSERT(IsUserDataString());
6446 if(m_pUserData != VMA_NULL)
6448 char*
const oldStr = (
char*)m_pUserData;
6449 const size_t oldStrLen = strlen(oldStr);
// length + 1 matches the vma_new_array size used when duplicating.
6450 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6451 m_pUserData = VMA_NULL;
// Increment the 7-bit map refcount for a block allocation; 0x7F is the ceiling.
6455 void VmaAllocation_T::BlockAllocMap()
6457 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6459 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6465 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrement the map refcount; unbalanced Unmap is a programming error.
6469 void VmaAllocation_T::BlockAllocUnmap()
6471 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6473 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6479 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Map a dedicated allocation: reuse the cached pointer if already mapped
// (refcount bump), else call vkMapMemory and cache the result.
6483 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6485 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6489 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6491 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6492 *ppData = m_DedicatedAllocation.m_pMappedData;
6498 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6499 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: go through the (possibly user-supplied) vkMapMemory function pointer.
6504 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6505 hAllocator->m_hDevice,
6506 m_DedicatedAllocation.m_hMemory,
6511 if(result == VK_SUCCESS)
6513 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmap a dedicated allocation; actually calls vkUnmapMemory only when the
// refcount drops to zero (decrement itself elided by extraction).
6520 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6522 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6524 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6529 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6530 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6531 hAllocator->m_hDevice,
6532 m_DedicatedAllocation.m_hMemory);
6537 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serialize one VmaStatInfo as a JSON object: block/allocation/unused-range counts,
// used/unused byte totals, and min/avg/max sub-objects for allocation and
// unused-range sizes. The WriteNumber calls carrying the actual stat fields are
// elided by the extraction; only the key writes and object structure survive.
6541 #if VMA_STATS_STRING_ENABLED 6543 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6547 json.WriteString(
"Blocks");
6550 json.WriteString(
"Allocations");
6553 json.WriteString(
"UnusedRanges");
6556 json.WriteString(
"UsedBytes");
6559 json.WriteString(
"UnusedBytes");
// "AllocationSize": single-line {Min, Avg, Max} sub-object.
6564 json.WriteString(
"AllocationSize");
6565 json.BeginObject(
true);
6566 json.WriteString(
"Min");
6568 json.WriteString(
"Avg");
6570 json.WriteString(
"Max");
// "UnusedRangeSize": single-line {Min, Avg, Max} sub-object.
6577 json.WriteString(
"UnusedRangeSize");
6578 json.BeginObject(
true);
6579 json.WriteString(
"Min");
6581 json.WriteString(
"Avg");
6583 json.WriteString(
"Max");
// Comparator ordering suballocation-list iterators by the size of the suballocation
// they point to. Used to keep m_FreeSuballocationsBySize sorted and to binary-search
// it by a plain VkDeviceSize (second overload), C++ heterogeneous-lookup style.
6591 #endif // #if VMA_STATS_STRING_ENABLED 6593 struct VmaSuballocationItemSizeLess
6596 const VmaSuballocationList::iterator lhs,
6597 const VmaSuballocationList::iterator rhs)
const 6599 return lhs->size < rhs->size;
6602 const VmaSuballocationList::iterator lhs,
6603 VkDeviceSize rhsSize)
const 6605 return lhs->size < rhsSize;
// VmaBlockMetadata: abstract base for per-block bookkeeping strategies.
// Holds the allocation callbacks and provides shared JSON helpers used by the
// derived classes' PrintDetailedMap implementations.
6613 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
6615 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
// Emit the common header of a detailed-map dump: totals and the opening of the
// "Suballocations" array (BeginArray call elided by extraction).
6619 #if VMA_STATS_STRING_ENABLED 6621 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6622 VkDeviceSize unusedBytes,
6623 size_t allocationCount,
6624 size_t unusedRangeCount)
const 6628 json.WriteString(
"TotalBytes");
6629 json.WriteNumber(GetSize());
6631 json.WriteString(
"UnusedBytes");
6632 json.WriteNumber(unusedBytes);
6634 json.WriteString(
"Allocations");
6635 json.WriteNumber((uint64_t)allocationCount);
6637 json.WriteString(
"UnusedRanges");
6638 json.WriteNumber((uint64_t)unusedRangeCount);
6640 json.WriteString(
"Suballocations");
// One used suballocation entry: single-line object with Offset plus the
// allocation's own parameters (Type/Size/UserData/... via PrintParameters).
6644 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6645 VkDeviceSize offset,
6648 json.BeginObject(
true);
6650 json.WriteString(
"Offset");
6651 json.WriteNumber(offset);
6653 hAllocation->PrintParameters(json);
// One free-range entry: Offset, Type=FREE, Size.
6658 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6659 VkDeviceSize offset,
6660 VkDeviceSize size)
const 6662 json.BeginObject(
true);
6664 json.WriteString(
"Offset");
6665 json.WriteNumber(offset);
6667 json.WriteString(
"Type");
6668 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6670 json.WriteString(
"Size");
6671 json.WriteNumber(size);
// Close the array/object opened by PrintDetailedMap_Begin (calls elided).
6676 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
// VmaBlockMetadata_Generic: general-purpose free-list strategy. Keeps a linked
// list of suballocations (used + free interleaved, sorted by offset) plus a
// size-sorted vector of iterators to the larger free ranges for fast best-fit search.
const 6682 #endif // #if VMA_STATS_STRING_ENABLED 6687 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
6688 VmaBlockMetadata(hAllocator),
6691 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6692 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6696 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initialize for a freshly created block of `size` bytes: the whole block is a
// single FREE suballocation, registered in the by-size vector.
6700 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6702 VmaBlockMetadata::Init(size);
6705 m_SumFreeSize = size;
6707 VmaSuballocation suballoc = {};
6708 suballoc.offset = 0;
6709 suballoc.size = size;
6710 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6711 suballoc.hAllocation = VK_NULL_HANDLE;
// Blocks are always large enough to be worth registering by size.
6713 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
6714 m_Suballocations.push_back(suballoc);
6715 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6717 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check of the metadata (debug builds / VMA_HEAVY_ASSERT).
// Walks the suballocation list verifying contiguity, no two adjacent free ranges,
// handle/type agreement, and recomputes free counts/sizes to compare against the
// cached members; then verifies the by-size vector is sorted and all-free.
6720 bool VmaBlockMetadata_Generic::Validate()
const 6722 VMA_VALIDATE(!m_Suballocations.empty());
// Running expected offset of the next suballocation.
6725 VkDeviceSize calculatedOffset = 0;
6727 uint32_t calculatedFreeCount = 0;
6729 VkDeviceSize calculatedSumFreeSize = 0;
// Free ranges large enough to appear in m_FreeSuballocationsBySize.
6732 size_t freeSuballocationsToRegister = 0;
6734 bool prevFree =
false;
6736 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6737 suballocItem != m_Suballocations.cend();
6740 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must tile the block with no gaps or overlaps.
6743 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
6745 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Adjacent free ranges must have been merged.
6747 VMA_VALIDATE(!prevFree || !currFree);
6749 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
6753 calculatedSumFreeSize += subAlloc.size;
6754 ++calculatedFreeCount;
6755 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6757 ++freeSuballocationsToRegister;
// Free ranges must be able to hold the debug margin.
6761 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
// Used entries must agree with their allocation object's view.
6765 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
6766 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With a debug margin every used range must be preceded by a free one.
6769 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
6772 calculatedOffset += subAlloc.size;
6773 prevFree = currFree;
6778 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
// The by-size vector must be sorted ascending and contain only FREE entries.
6780 VkDeviceSize lastSize = 0;
6781 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6783 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6786 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6788 VMA_VALIDATE(suballocItem->size >= lastSize);
6790 lastSize = suballocItem->size;
// Cross-check cached aggregates against the recomputed values.
6794 VMA_VALIDATE(ValidateFreeSuballocationList());
6795 VMA_VALIDATE(calculatedOffset == GetSize());
6796 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
6797 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range is the last element of the size-sorted vector (0 if none).
6802 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6804 if(!m_FreeSuballocationsBySize.empty())
6806 return m_FreeSuballocationsBySize.back()->size;
// Empty block == exactly one suballocation and it is free.
6814 bool VmaBlockMetadata_Generic::IsEmpty()
const 6816 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Statistics and JSON dump for the generic strategy.
// NOTE(review): several accumulation statements are elided by the extraction.
// Fill a VmaStatInfo from the suballocation list (per-range size accumulation elided).
6819 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6823 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6835 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6836 suballocItem != m_Suballocations.cend();
6839 const VmaSuballocation& suballoc = *suballocItem;
6840 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulate this block's contribution into pool-level statistics.
6853 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6855 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6857 inoutStats.
size += GetSize();
// Detailed JSON map: header with totals, then one entry per suballocation
// (used ranges via PrintDetailedMap_Allocation, free via _UnusedRange).
6864 #if VMA_STATS_STRING_ENABLED 6866 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6868 PrintDetailedMap_Begin(json,
6870 m_Suballocations.size() - (size_t)m_FreeCount,
6874 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6875 suballocItem != m_Suballocations.cend();
6876 ++suballocItem, ++i)
6878 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6880 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6884 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6888 PrintDetailedMap_End(json);
// Find space for a new allocation of allocSize/allocAlignment.
// Strategy: (1) quick reject if total free space can't fit it; (2) search the
// size-sorted free list — best-fit via binary search (forward scan) or worst-fit
// (reverse scan; the strategy branch itself is elided by the extraction) — calling
// CheckAllocation per candidate; (3) if canMakeOtherLost, additionally scan every
// suballocation considering evicting lost-able allocations, keeping the cheapest
// request by CalcCost. Returns true and fills *pAllocationRequest on success.
6891 #endif // #if VMA_STATS_STRING_ENABLED 6893 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6894 uint32_t currentFrameIndex,
6895 uint32_t frameInUseCount,
6896 VkDeviceSize bufferImageGranularity,
6897 VkDeviceSize allocSize,
6898 VkDeviceSize allocAlignment,
6900 VmaSuballocationType allocType,
6901 bool canMakeOtherLost,
6903 VmaAllocationRequest* pAllocationRequest)
6905 VMA_ASSERT(allocSize > 0);
6906 VMA_ASSERT(!upperAddress);
6907 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6908 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6909 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, total free bytes (plus both debug margins)
// bound what can possibly fit.
6912 if(canMakeOtherLost ==
false &&
6913 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6919 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6920 if(freeSuballocCount > 0)
// Best-fit: binary-search the smallest free range that can hold size + margins,
// then scan upward until CheckAllocation accepts one.
6925 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6926 m_FreeSuballocationsBySize.data(),
6927 m_FreeSuballocationsBySize.data() + freeSuballocCount,
6928 allocSize + 2 * VMA_DEBUG_MARGIN,
6929 VmaSuballocationItemSizeLess());
6930 size_t index = it - m_FreeSuballocationsBySize.data();
6931 for(; index < freeSuballocCount; ++index)
6936 bufferImageGranularity,
6940 m_FreeSuballocationsBySize[index],
6942 &pAllocationRequest->offset,
6943 &pAllocationRequest->itemsToMakeLostCount,
6944 &pAllocationRequest->sumFreeSize,
6945 &pAllocationRequest->sumItemSize))
6947 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternate (worst-fit) path: walk the size-sorted vector from largest down.
6955 for(
size_t index = freeSuballocCount; index--; )
6960 bufferImageGranularity,
6964 m_FreeSuballocationsBySize[index],
6966 &pAllocationRequest->offset,
6967 &pAllocationRequest->itemsToMakeLostCount,
6968 &pAllocationRequest->sumFreeSize,
6969 &pAllocationRequest->sumItemSize))
6971 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: consider starting at every free or lost-able suballocation;
// track the cheapest viable request (cost = bytes of live data sacrificed).
6978 if(canMakeOtherLost)
6982 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
6983 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
6985 VmaAllocationRequest tmpAllocRequest = {};
6986 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
6987 suballocIt != m_Suballocations.end();
6990 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
6991 suballocIt->hAllocation->CanBecomeLost())
6996 bufferImageGranularity,
7002 &tmpAllocRequest.offset,
7003 &tmpAllocRequest.itemsToMakeLostCount,
7004 &tmpAllocRequest.sumFreeSize,
7005 &tmpAllocRequest.sumItemSize))
7007 tmpAllocRequest.item = suballocIt;
7009 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7012 *pAllocationRequest = tmpAllocRequest;
// Found at least one viable request iff sumItemSize was overwritten.
7018 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Carry out the evictions a CreateAllocationRequest promised: walk forward from
// the request's item, skipping free ranges, marking lost-able allocations lost and
// merging the freed space back. Fails if an allocation can no longer be made lost
// (e.g. it was used again this frame).
7027 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7028 uint32_t currentFrameIndex,
7029 uint32_t frameInUseCount,
7030 VmaAllocationRequest* pAllocationRequest)
7032 while(pAllocationRequest->itemsToMakeLostCount > 0)
7034 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7036 ++pAllocationRequest->item;
7038 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7039 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7040 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7041 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the surviving iterator.
7043 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7044 --pAllocationRequest->itemsToMakeLostCount;
// Postcondition: the request now points at a single free range.
7052 VMA_HEAVY_ASSERT(Validate());
7053 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7054 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Aggressively make every lost-able, sufficiently-old allocation lost;
// returns how many were reclaimed.
7059 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7061 uint32_t lostAllocationCount = 0;
7062 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7063 it != m_Suballocations.end();
7066 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7067 it->hAllocation->CanBecomeLost() &&
7068 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7070 it = FreeSuballocation(it);
7071 ++lostAllocationCount;
7074 return lostAllocationCount;
// Corruption detection: verify the magic values written into the VMA_DEBUG_MARGIN
// bytes immediately before and after every used suballocation are intact.
7077 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7079 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7080 it != m_Suballocations.end();
7083 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7085 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7087 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7088 return VK_ERROR_VALIDATION_FAILED_EXT;
7090 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7092 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7093 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commit a previously validated allocation request: carve allocSize bytes out of
// the chosen free suballocation, converting it to the allocation's type, and turn
// any leading/trailing leftover space into new registered free suballocations.
7101 void VmaBlockMetadata_Generic::Alloc(
7102 const VmaAllocationRequest& request,
7103 VmaSuballocationType type,
7104 VkDeviceSize allocSize,
7108 VMA_ASSERT(!upperAddress);
7109 VMA_ASSERT(request.item != m_Suballocations.end());
7110 VmaSuballocation& suballoc = *request.item;
7112 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7114 VMA_ASSERT(request.offset >= suballoc.offset);
// Space before the aligned offset / after the allocation within the free range.
7115 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7116 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7117 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the by-size vector before mutating size (it keys on size).
7121 UnregisterFreeSuballocation(request.item);
7123 suballoc.offset = request.offset;
7124 suballoc.size = allocSize;
7125 suballoc.type = type;
7126 suballoc.hAllocation = hAllocation;
// Trailing remainder becomes a new free suballocation inserted after this one.
7131 VmaSuballocation paddingSuballoc = {};
7132 paddingSuballoc.offset = request.offset + allocSize;
7133 paddingSuballoc.size = paddingEnd;
7134 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7135 VmaSuballocationList::iterator next = request.item;
7137 const VmaSuballocationList::iterator paddingEndItem =
7138 m_Suballocations.insert(next, paddingSuballoc);
7139 RegisterFreeSuballocation(paddingEndItem);
// Leading remainder becomes a new free suballocation inserted before this one.
7145 VmaSuballocation paddingSuballoc = {};
7146 paddingSuballoc.offset = request.offset - paddingBegin;
7147 paddingSuballoc.size = paddingBegin;
7148 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7149 const VmaSuballocationList::iterator paddingBeginItem =
7150 m_Suballocations.insert(request.item, paddingSuballoc);
7151 RegisterFreeSuballocation(paddingBeginItem);
// Update aggregates: one free range consumed; paddings may add back
// (increments elided by extraction); free bytes shrink by the allocation.
7155 m_FreeCount = m_FreeCount - 1;
7156 if(paddingBegin > 0)
7164 m_SumFreeSize -= allocSize;
// Free by allocation handle: linear scan for the matching suballocation.
// NOTE(review): O(n) in the suballocation count — presumably acceptable upstream; confirm.
7167 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7169 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7170 suballocItem != m_Suballocations.end();
7173 VmaSuballocation& suballoc = *suballocItem;
7174 if(suballoc.hAllocation == allocation)
7176 FreeSuballocation(suballocItem);
7177 VMA_HEAVY_ASSERT(Validate());
7181 VMA_ASSERT(0 &&
"Not found!");
// Free by byte offset: same linear scan keyed on offset.
7184 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7186 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7187 suballocItem != m_Suballocations.end();
7190 VmaSuballocation& suballoc = *suballocItem;
7191 if(suballoc.offset == offset)
7193 FreeSuballocation(suballocItem);
7197 VMA_ASSERT(0 &&
"Not found!");
// Check the by-size vector invariants: every entry FREE, at least the registration
// threshold in size, and sizes non-decreasing.
7200 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 7202 VkDeviceSize lastSize = 0;
7203 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7205 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7207 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
7208 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7209 VMA_VALIDATE(it->size >= lastSize);
7210 lastSize = it->size;
// Core fit test: can an allocation of allocSize/allocAlignment/allocType start in
// the range beginning at suballocItem? Computes the aligned *pOffset (debug margin,
// alignment, then bufferImageGranularity bump when a conflicting neighbor shares a
// granularity page), and either checks it fits in this single free range
// (canMakeOtherLost == false path, second half) or walks forward accumulating free
// bytes and lost-able allocations to sacrifice (*itemsToMakeLostCount,
// *pSumItemSize) until the total need is covered (first half).
// NOTE(review): extraction-mangled — many early-return `false` branches and loop
// advance statements are elided; comments follow the visible structure only.
7215 bool VmaBlockMetadata_Generic::CheckAllocation(
7216 uint32_t currentFrameIndex,
7217 uint32_t frameInUseCount,
7218 VkDeviceSize bufferImageGranularity,
7219 VkDeviceSize allocSize,
7220 VkDeviceSize allocAlignment,
7221 VmaSuballocationType allocType,
7222 VmaSuballocationList::const_iterator suballocItem,
7223 bool canMakeOtherLost,
7224 VkDeviceSize* pOffset,
7225 size_t* itemsToMakeLostCount,
7226 VkDeviceSize* pSumFreeSize,
7227 VkDeviceSize* pSumItemSize)
const 7229 VMA_ASSERT(allocSize > 0);
7230 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7231 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7232 VMA_ASSERT(pOffset != VMA_NULL);
7234 *itemsToMakeLostCount = 0;
// --- Eviction-aware path ---
7238 if(canMakeOtherLost)
7240 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7242 *pSumFreeSize = suballocItem->size;
// Starting on a used range: it must itself be lost-able and stale enough.
7246 if(suballocItem->hAllocation->CanBecomeLost() &&
7247 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7249 ++*itemsToMakeLostCount;
7250 *pSumItemSize = suballocItem->size;
// Reject if the block can't hold allocSize from this offset at all.
7259 if(GetSize() - suballocItem->offset < allocSize)
// Candidate offset: range start + debug margin, aligned up.
7265 *pOffset = suballocItem->offset;
7268 if(VMA_DEBUG_MARGIN > 0)
7270 *pOffset += VMA_DEBUG_MARGIN;
7274 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// If a previous suballocation of a conflicting type (buffer vs image) shares the
// same granularity page, bump the offset up to the granularity boundary.
7278 if(bufferImageGranularity > 1)
7280 bool bufferImageGranularityConflict =
false;
7281 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7282 while(prevSuballocItem != m_Suballocations.cbegin())
7285 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7286 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7288 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7290 bufferImageGranularityConflict =
true;
7298 if(bufferImageGranularityConflict)
7300 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
;
// Alignment pushed us past the whole starting range: fail.
7306 if(*pOffset >= suballocItem->offset + suballocItem->size)
// Total bytes needed from the starting range onward.
7312 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7315 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7317 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
7319 if(suballocItem->offset + totalSize > GetSize())
// Walk forward over subsequent suballocations until remainingSize is covered,
// summing free bytes and marking lost-able used entries for eviction.
7326 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7327 if(totalSize > suballocItem->size)
7329 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7330 while(remainingSize > 0)
7333 if(lastSuballocItem == m_Suballocations.cend())
7337 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7339 *pSumFreeSize += lastSuballocItem->size;
7343 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7344 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7345 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7347 ++*itemsToMakeLostCount;
7348 *pSumItemSize += lastSuballocItem->size;
7355 remainingSize = (lastSuballocItem->size < remainingSize) ?
7356 remainingSize - lastSuballocItem->size : 0;
// Check following allocations on the same granularity page after the new one;
// conflicting ones must also be evictable or the placement fails.
7362 if(bufferImageGranularity > 1)
7364 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7366 while(nextSuballocItem != m_Suballocations.cend())
7368 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7369 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7371 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7373 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7374 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7375 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7377 ++*itemsToMakeLostCount;
// --- Simple path: must fit entirely inside this one free range ---
7396 const VmaSuballocation& suballoc = *suballocItem;
7397 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7399 *pSumFreeSize = suballoc.size;
7402 if(suballoc.size < allocSize)
// Same offset computation as above: margin, alignment, granularity bump.
7408 *pOffset = suballoc.offset;
7411 if(VMA_DEBUG_MARGIN > 0)
7413 *pOffset += VMA_DEBUG_MARGIN;
7417 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7421 if(bufferImageGranularity > 1)
7423 bool bufferImageGranularityConflict =
false;
7424 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7425 while(prevSuballocItem != m_Suballocations.cbegin())
7428 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7429 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7431 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7433 bufferImageGranularityConflict =
true;
7441 if(bufferImageGranularityConflict)
7443 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Fit check: padding + size + end margin must stay inside the free range.
7448 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7451 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7454 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A later allocation on the same granularity page with a conflicting type
// makes this placement invalid (failure branch elided).
7461 if(bufferImageGranularity > 1)
7463 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7465 while(nextSuballocItem != m_Suballocations.cend())
7467 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7468 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7470 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Free-list maintenance: merging adjacent free ranges and keeping the size-sorted
// registration vector in sync with the suballocation list.
// Merge a free suballocation with its (also free) successor: grow this item,
// erase the next. Caller must have unregistered the successor first.
7489 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7491 VMA_ASSERT(item != m_Suballocations.end());
7492 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7494 VmaSuballocationList::iterator nextItem = item;
7496 VMA_ASSERT(nextItem != m_Suballocations.end());
7497 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7499 item->size += nextItem->size;
7501 m_Suballocations.erase(nextItem);
// Turn a used suballocation free, merge with free neighbors on either side, and
// register the resulting range by size. Returns the iterator of the surviving
// merged range.
7504 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7507 VmaSuballocation& suballoc = *suballocItem;
7508 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7509 suballoc.hAllocation = VK_NULL_HANDLE;
7513 m_SumFreeSize += suballoc.size;
// Detect free neighbors (iterator advance/retreat statements elided).
7516 bool mergeWithNext =
false;
7517 bool mergeWithPrev =
false;
7519 VmaSuballocationList::iterator nextItem = suballocItem;
7521 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7523 mergeWithNext =
true;
7526 VmaSuballocationList::iterator prevItem = suballocItem;
7527 if(suballocItem != m_Suballocations.begin())
7530 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7532 mergeWithPrev =
true;
// Neighbors being absorbed must leave the by-size vector before their
// iterators are invalidated/resized.
7538 UnregisterFreeSuballocation(nextItem);
7539 MergeFreeWithNext(suballocItem);
7544 UnregisterFreeSuballocation(prevItem);
7545 MergeFreeWithNext(prevItem);
7546 RegisterFreeSuballocation(prevItem);
7551 RegisterFreeSuballocation(suballocItem);
7552 return suballocItem;
// Insert a free range into the size-sorted vector, but only if it meets the
// registration threshold (small fragments are tracked only in the list).
7556 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7558 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7559 VMA_ASSERT(item->size > 0);
7563 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7565 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7567 if(m_FreeSuballocationsBySize.empty())
7569 m_FreeSuballocationsBySize.push_back(item);
7573 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Remove a free range from the size-sorted vector: binary-search to the first
// entry of equal size, then linear-scan entries of that size for the exact iterator.
7581 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7583 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7584 VMA_ASSERT(item->size > 0);
7588 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7590 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7592 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7593 m_FreeSuballocationsBySize.data(),
7594 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7596 VmaSuballocationItemSizeLess());
7597 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7598 index < m_FreeSuballocationsBySize.size();
7601 if(m_FreeSuballocationsBySize[index] == item)
7603 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still scanning equal-size entries — anything else means the item was never registered.
7606 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7608 VMA_ASSERT(0 &&
"Not found.");
// VmaBlockMetadata_Linear: linear/ring-buffer strategy. Two suballocation vectors
// are swapped via m_1stVectorIndex; m_2ndVectorMode tracks whether the second
// vector is empty, a ring-buffer tail, or (per null-item counters) holds
// freed-but-unremoved entries.
7617 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7618 VmaBlockMetadata(hAllocator),
7620 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7621 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7622 m_1stVectorIndex(0),
7623 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7624 m_1stNullItemsBeginCount(0),
7625 m_1stNullItemsMiddleCount(0),
7626 m_2ndNullItemsCount(0)
7630 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Fresh block: whole size is free; no suballocation entries needed up front.
7634 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7636 VmaBlockMetadata::Init(size);
7637 m_SumFreeSize = size;
// Consistency check of the linear metadata. Verifies that: the 2nd vector's
// emptiness matches m_2ndVectorMode; the boundary items of each vector are
// live; null-item counters are within bounds; offsets are increasing with
// VMA_DEBUG_MARGIN between items; a FREE type always pairs with a null
// hAllocation; each live allocation's cached offset/size agree with its
// suballocation; and m_SumFreeSize == GetSize() - sum of used sizes.
// NOTE(review): garbled extraction — original source line numbers are fused
// into the text and brace/else lines are elided. Code left byte-identical.
7640 bool VmaBlockMetadata_Linear::Validate()
const 7642 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7643 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7645 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
7646 VMA_VALIDATE(!suballocations1st.empty() ||
7647 suballocations2nd.empty() ||
7648 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
// The first non-null item of 1st, the last item of 1st, and the last item of
// 2nd must all be live allocations.
7650 if(!suballocations1st.empty())
7653 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
7655 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
7657 if(!suballocations2nd.empty())
7660 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
7663 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7664 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7666 VkDeviceSize sumUsedSize = 0;
7667 const size_t suballoc1stCount = suballocations1st.size();
7668 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: the 2nd vector occupies the region before the 1st, so it
// is walked first in address order.
7670 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7672 const size_t suballoc2ndCount = suballocations2nd.size();
7673 size_t nullItem2ndCount = 0;
7674 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7676 const VmaSuballocation& suballoc = suballocations2nd[i];
7677 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7679 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7680 VMA_VALIDATE(suballoc.offset >= offset);
7684 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7685 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7686 sumUsedSize += suballoc.size;
7693 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7696 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// The leading run of the 1st vector must consist of null (freed) items only.
7699 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7701 const VmaSuballocation& suballoc = suballocations1st[i];
7702 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7703 suballoc.hAllocation == VK_NULL_HANDLE);
7706 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the live part of the 1st vector.
7708 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7710 const VmaSuballocation& suballoc = suballocations1st[i];
7711 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7713 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7714 VMA_VALIDATE(suballoc.offset >= offset);
7715 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7719 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7720 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7721 sumUsedSize += suballoc.size;
7728 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7730 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: the 2nd vector grows downward from the block end, so it
// is iterated in reverse to keep offsets increasing.
7732 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7734 const size_t suballoc2ndCount = suballocations2nd.size();
7735 size_t nullItem2ndCount = 0;
7736 for(
size_t i = suballoc2ndCount; i--; )
7738 const VmaSuballocation& suballoc = suballocations2nd[i];
7739 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7741 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7742 VMA_VALIDATE(suballoc.offset >= offset);
7746 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7747 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7748 sumUsedSize += suballoc.size;
7755 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7758 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Final totals must be self-consistent.
7761 VMA_VALIDATE(offset <= GetSize());
7762 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
7767 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7769 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7770 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Largest contiguous region available for a NEW allocation. Gaps left by freed
// items inside the vectors are not counted — the linear allocator cannot reuse
// them; only the space ahead of the write heads is usable.
// NOTE(review): garbled extraction — the gap at original lines 7776-7786
// (between the two statements below) elided code not visible here, and case
// braces / trailing return are also missing. Code left byte-identical.
7773 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7775 const VkDeviceSize size = GetSize();
7787 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7789 switch(m_2ndVectorMode)
// EMPTY: free space is before the first live item of 1st and after its last item.
7791 case SECOND_VECTOR_EMPTY:
7797 const size_t suballocations1stCount = suballocations1st.size();
7798 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
7799 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
7800 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
7802 firstSuballoc.offset,
7803 size - (lastSuballoc.offset + lastSuballoc.size));
// RING_BUFFER: free space is the gap between the end of 2nd and the start of 1st.
7807 case SECOND_VECTOR_RING_BUFFER:
7812 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7813 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
7814 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
7815 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// DOUBLE_STACK: free space is between the end of 1st (bottom stack) and the
// top of 2nd (which grows downward from the block end).
7819 case SECOND_VECTOR_DOUBLE_STACK:
7824 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7825 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
7826 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
7827 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills `outInfo` by walking the whole block in address order:
// 1) the 2nd vector when used as a ring buffer (region before 1st),
// 2) the live part of the 1st vector,
// 3) the 2nd vector when used as the upper stack (walked in reverse).
// Each pass skips null items, then accounts the gap before the next live
// allocation and the allocation itself.
// NOTE(review): garbled extraction — the statements that actually accumulate
// into outInfo (in the elided line-number gaps) are not visible here, and
// braces are missing. Code left byte-identical.
7837 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7839 const VkDeviceSize size = GetSize();
7840 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7841 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7842 const size_t suballoc1stCount = suballocations1st.size();
7843 const size_t suballoc2ndCount = suballocations2nd.size();
7854 VkDeviceSize lastOffset = 0;
// Pass 1: ring-buffer part of 2nd, which ends where the 1st vector begins.
7856 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7858 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7859 size_t nextAlloc2ndIndex = 0;
7860 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items.
7863 while(nextAlloc2ndIndex < suballoc2ndCount &&
7864 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7866 ++nextAlloc2ndIndex;
7870 if(nextAlloc2ndIndex < suballoc2ndCount)
7872 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7875 if(lastOffset < suballoc.offset)
7878 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7892 lastOffset = suballoc.offset + suballoc.size;
7893 ++nextAlloc2ndIndex;
// No more live items: account the trailing free range of this region.
7899 if(lastOffset < freeSpace2ndTo1stEnd)
7901 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7909 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: the 1st vector, up to the top of 2nd (double stack) or block end.
7914 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7915 const VkDeviceSize freeSpace1stTo2ndEnd =
7916 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7917 while(lastOffset < freeSpace1stTo2ndEnd)
7920 while(nextAlloc1stIndex < suballoc1stCount &&
7921 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
7923 ++nextAlloc1stIndex;
7927 if(nextAlloc1stIndex < suballoc1stCount)
7929 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7932 if(lastOffset < suballoc.offset)
7935 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7949 lastOffset = suballoc.offset + suballoc.size;
7950 ++nextAlloc1stIndex;
7956 if(lastOffset < freeSpace1stTo2ndEnd)
7958 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7966 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: upper stack of 2nd, iterated from the lowest-offset item
// (back of the vector, hence reverse index) up to the block end.
7970 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7972 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7973 while(lastOffset < size)
7976 while(nextAlloc2ndIndex != SIZE_MAX &&
7977 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7979 --nextAlloc2ndIndex;
7983 if(nextAlloc2ndIndex != SIZE_MAX)
7985 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7988 if(lastOffset < suballoc.offset)
7991 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8005 lastOffset = suballoc.offset + suballoc.size;
8006 --nextAlloc2ndIndex;
8012 if(lastOffset < size)
8014 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's statistics into `inoutStats`. Same three-pass,
// address-ordered walk as CalcAllocationStatInfo: ring-buffer part of 2nd,
// then 1st, then the upper stack of 2nd. The actual accumulation statements
// live in the elided line-number gaps.
// NOTE(review): garbled extraction — braces and accumulation lines missing;
// code left byte-identical. Also note: nextAlloc2ndIndex below starts at
// m_1stNullItemsBeginCount, while the analogous 2nd-vector loops in
// CalcAllocationStatInfo and PrintDetailedMap start at 0 — this asymmetry
// looks suspicious; verify against upstream VMA.
8030 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8032 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8033 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8034 const VkDeviceSize size = GetSize();
8035 const size_t suballoc1stCount = suballocations1st.size();
8036 const size_t suballoc2ndCount = suballocations2nd.size();
8038 inoutStats.
size += size;
8040 VkDeviceSize lastOffset = 0;
// Pass 1: ring-buffer part of 2nd, ending where the 1st vector begins.
8042 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8044 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8045 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8046 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items.
8049 while(nextAlloc2ndIndex < suballoc2ndCount &&
8050 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8052 ++nextAlloc2ndIndex;
8056 if(nextAlloc2ndIndex < suballoc2ndCount)
8058 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8061 if(lastOffset < suballoc.offset)
8064 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8075 lastOffset = suballoc.offset + suballoc.size;
8076 ++nextAlloc2ndIndex;
8081 if(lastOffset < freeSpace2ndTo1stEnd)
8084 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8091 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: the 1st vector, up to the top of 2nd (double stack) or block end.
8096 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8097 const VkDeviceSize freeSpace1stTo2ndEnd =
8098 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8099 while(lastOffset < freeSpace1stTo2ndEnd)
8102 while(nextAlloc1stIndex < suballoc1stCount &&
8103 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8105 ++nextAlloc1stIndex;
8109 if(nextAlloc1stIndex < suballoc1stCount)
8111 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8114 if(lastOffset < suballoc.offset)
8117 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8128 lastOffset = suballoc.offset + suballoc.size;
8129 ++nextAlloc1stIndex;
8134 if(lastOffset < freeSpace1stTo2ndEnd)
8137 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8144 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: upper stack of 2nd, reverse index order so offsets increase.
8148 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8150 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8151 while(lastOffset < size)
8154 while(nextAlloc2ndIndex != SIZE_MAX &&
8155 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8157 --nextAlloc2ndIndex;
8161 if(nextAlloc2ndIndex != SIZE_MAX)
8163 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8166 if(lastOffset < suballoc.offset)
8169 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8180 lastOffset = suballoc.offset + suballoc.size;
8181 --nextAlloc2ndIndex;
8186 if(lastOffset < size)
8189 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Emits a JSON description of this block via VmaJsonWriter. Two passes over
// the same three regions (2nd-as-ring-buffer, 1st, 2nd-as-upper-stack):
// pass A counts allocations, unused ranges, and used bytes (needed by
// PrintDetailedMap_Begin), pass B actually writes each allocation and unused
// range via PrintDetailedMap_Allocation / PrintDetailedMap_UnusedRange.
// NOTE(review): garbled extraction — braces and the counting statements in
// the line-number gaps are missing; code left byte-identical. Also note the
// counting pass for 1st checks `lastOffset < size` where the writing pass
// checks `lastOffset < freeSpace1stTo2ndEnd` — verify against upstream.
8202 #if VMA_STATS_STRING_ENABLED 8203 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 8205 const VkDeviceSize size = GetSize();
8206 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8207 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8208 const size_t suballoc1stCount = suballocations1st.size();
8209 const size_t suballoc2ndCount = suballocations2nd.size();
// Pass A: count allocations / unused ranges and sum used bytes.
8213 size_t unusedRangeCount = 0;
8214 VkDeviceSize usedBytes = 0;
8216 VkDeviceSize lastOffset = 0;
8218 size_t alloc2ndCount = 0;
8219 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8221 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8222 size_t nextAlloc2ndIndex = 0;
8223 while(lastOffset < freeSpace2ndTo1stEnd)
8226 while(nextAlloc2ndIndex < suballoc2ndCount &&
8227 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8229 ++nextAlloc2ndIndex;
8233 if(nextAlloc2ndIndex < suballoc2ndCount)
8235 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8238 if(lastOffset < suballoc.offset)
8247 usedBytes += suballoc.size;
8250 lastOffset = suballoc.offset + suballoc.size;
8251 ++nextAlloc2ndIndex;
8256 if(lastOffset < freeSpace2ndTo1stEnd)
8263 lastOffset = freeSpace2ndTo1stEnd;
8268 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8269 size_t alloc1stCount = 0;
8270 const VkDeviceSize freeSpace1stTo2ndEnd =
8271 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8272 while(lastOffset < freeSpace1stTo2ndEnd)
8275 while(nextAlloc1stIndex < suballoc1stCount &&
8276 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8278 ++nextAlloc1stIndex;
8282 if(nextAlloc1stIndex < suballoc1stCount)
8284 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8287 if(lastOffset < suballoc.offset)
8296 usedBytes += suballoc.size;
8299 lastOffset = suballoc.offset + suballoc.size;
8300 ++nextAlloc1stIndex;
8305 if(lastOffset < size)
8312 lastOffset = freeSpace1stTo2ndEnd;
8316 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8318 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8319 while(lastOffset < size)
8322 while(nextAlloc2ndIndex != SIZE_MAX &&
8323 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8325 --nextAlloc2ndIndex;
8329 if(nextAlloc2ndIndex != SIZE_MAX)
8331 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8334 if(lastOffset < suballoc.offset)
8343 usedBytes += suballoc.size;
8346 lastOffset = suballoc.offset + suballoc.size;
8347 --nextAlloc2ndIndex;
8352 if(lastOffset < size)
8364 const VkDeviceSize unusedBytes = size - usedBytes;
8365 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// Pass B: write every allocation and unused range, in address order.
8370 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8372 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8373 size_t nextAlloc2ndIndex = 0;
8374 while(lastOffset < freeSpace2ndTo1stEnd)
8377 while(nextAlloc2ndIndex < suballoc2ndCount &&
8378 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8380 ++nextAlloc2ndIndex;
8384 if(nextAlloc2ndIndex < suballoc2ndCount)
8386 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8389 if(lastOffset < suballoc.offset)
8392 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8393 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8398 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8401 lastOffset = suballoc.offset + suballoc.size;
8402 ++nextAlloc2ndIndex;
8407 if(lastOffset < freeSpace2ndTo1stEnd)
8410 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8411 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8415 lastOffset = freeSpace2ndTo1stEnd;
8420 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8421 while(lastOffset < freeSpace1stTo2ndEnd)
8424 while(nextAlloc1stIndex < suballoc1stCount &&
8425 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8427 ++nextAlloc1stIndex;
8431 if(nextAlloc1stIndex < suballoc1stCount)
8433 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8436 if(lastOffset < suballoc.offset)
8439 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8440 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8445 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8448 lastOffset = suballoc.offset + suballoc.size;
8449 ++nextAlloc1stIndex;
8454 if(lastOffset < freeSpace1stTo2ndEnd)
8457 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8458 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8462 lastOffset = freeSpace1stTo2ndEnd;
8466 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8468 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8469 while(lastOffset < size)
8472 while(nextAlloc2ndIndex != SIZE_MAX &&
8473 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8475 --nextAlloc2ndIndex;
8479 if(nextAlloc2ndIndex != SIZE_MAX)
8481 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8484 if(lastOffset < suballoc.offset)
8487 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8488 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8493 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8496 lastOffset = suballoc.offset + suballoc.size;
8497 --nextAlloc2ndIndex;
8502 if(lastOffset < size)
8505 const VkDeviceSize unusedRangeSize = size - lastOffset;
8506 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8515 PrintDetailedMap_End(json);
// Tries to find a place for a new allocation of allocSize/allocAlignment and
// fills *pAllocationRequest. Returns true on success (the elided `return`
// lines). Three placement strategies:
//   1) upper address (double stack): place below the current top of 2nd,
//      aligning DOWNWARD and honoring VMA_DEBUG_MARGIN and bufferImageGranularity;
//   2) lower address, at the end of 1st (2nd empty or double stack);
//   3) lower address wrapping around to the start (ring buffer, after end of
//      2nd), optionally making overlapping old allocations lost
//      (canMakeOtherLost) when they are older than frameInUseCount frames.
// NOTE(review): garbled extraction — two parameters in the gaps at original
// lines 8525/8528 (presumably `bool upperAddress` and `uint32_t strategy` per
// upstream VMA — confirm) plus braces/return statements are missing; code
// left byte-identical.
8517 #endif // #if VMA_STATS_STRING_ENABLED 8519 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8520 uint32_t currentFrameIndex,
8521 uint32_t frameInUseCount,
8522 VkDeviceSize bufferImageGranularity,
8523 VkDeviceSize allocSize,
8524 VkDeviceSize allocAlignment,
8526 VmaSuballocationType allocType,
8527 bool canMakeOtherLost,
8529 VmaAllocationRequest* pAllocationRequest)
8531 VMA_ASSERT(allocSize > 0);
8532 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8533 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8534 VMA_HEAVY_ASSERT(Validate());
8536 const VkDeviceSize size = GetSize();
8537 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8538 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Strategy 1: upper-address allocation (2nd vector as upper stack).
8542 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8544 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8549 if(allocSize > size)
// Start from the current top of the upper stack and go downward.
8553 VkDeviceSize resultBaseOffset = size - allocSize;
8554 if(!suballocations2nd.empty())
8556 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8557 resultBaseOffset = lastSuballoc.offset - allocSize;
8558 if(allocSize > lastSuballoc.offset)
8565 VkDeviceSize resultOffset = resultBaseOffset;
// Leave VMA_DEBUG_MARGIN below, then align DOWN (upper-address placement).
8568 if(VMA_DEBUG_MARGIN > 0)
8570 if(resultOffset < VMA_DEBUG_MARGIN)
8574 resultOffset -= VMA_DEBUG_MARGIN;
8578 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Resolve bufferImageGranularity conflicts against the neighbor above in 2nd.
8582 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8584 bool bufferImageGranularityConflict =
false;
8585 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8587 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8588 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8590 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8592 bufferImageGranularityConflict =
true;
8600 if(bufferImageGranularityConflict)
8602 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// There must be enough room between the end of 1st and the chosen offset.
8607 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8608 suballocations1st.back().offset + suballocations1st.back().size :
8610 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
8614 if(bufferImageGranularity > 1)
8616 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8618 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8619 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8621 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
8635 pAllocationRequest->offset = resultOffset;
8636 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8637 pAllocationRequest->sumItemSize = 0;
8639 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 2: lower address at the end of the 1st vector.
8645 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8649 VkDeviceSize resultBaseOffset = 0;
8650 if(!suballocations1st.empty())
8652 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8653 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8657 VkDeviceSize resultOffset = resultBaseOffset;
8660 if(VMA_DEBUG_MARGIN > 0)
8662 resultOffset += VMA_DEBUG_MARGIN;
8666 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Resolve granularity conflicts against the previous allocation in 1st.
8670 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8672 bool bufferImageGranularityConflict =
false;
8673 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8675 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8676 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8678 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8680 bufferImageGranularityConflict =
true;
8688 if(bufferImageGranularityConflict)
8690 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the top of the upper stack (double stack) or block end.
8694 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8695 suballocations2nd.back().offset : size;
8698 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
8702 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8704 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8706 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8707 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8709 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8723 pAllocationRequest->offset = resultOffset;
8724 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8725 pAllocationRequest->sumItemSize = 0;
8727 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 3: wrap around — allocate after the end of 2nd (ring buffer),
// possibly making lost the 1st-vector allocations we would overlap.
8734 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8736 VMA_ASSERT(!suballocations1st.empty());
8738 VkDeviceSize resultBaseOffset = 0;
8739 if(!suballocations2nd.empty())
8741 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8742 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8746 VkDeviceSize resultOffset = resultBaseOffset;
8749 if(VMA_DEBUG_MARGIN > 0)
8751 resultOffset += VMA_DEBUG_MARGIN;
8755 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8759 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8761 bool bufferImageGranularityConflict =
false;
8762 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8764 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8765 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8767 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8769 bufferImageGranularityConflict =
true;
8777 if(bufferImageGranularityConflict)
8779 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8783 pAllocationRequest->itemsToMakeLostCount = 0;
8784 pAllocationRequest->sumItemSize = 0;
8785 size_t index1st = m_1stNullItemsBeginCount;
// Count 1st-vector allocations that overlap the new region and can be lost.
8787 if(canMakeOtherLost)
8789 while(index1st < suballocations1st.size() &&
8790 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8793 const VmaSuballocation& suballoc = suballocations1st[index1st];
8794 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
8800 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8801 if(suballoc.hAllocation->CanBecomeLost() &&
8802 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8804 ++pAllocationRequest->itemsToMakeLostCount;
8805 pAllocationRequest->sumItemSize += suballoc.size;
// Granularity may require losing additional items on the same page.
8817 if(bufferImageGranularity > 1)
8819 while(index1st < suballocations1st.size())
8821 const VmaSuballocation& suballoc = suballocations1st[index1st];
8822 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
8824 if(suballoc.hAllocation != VK_NULL_HANDLE)
8827 if(suballoc.hAllocation->CanBecomeLost() &&
8828 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8830 ++pAllocationRequest->itemsToMakeLostCount;
8831 pAllocationRequest->sumItemSize += suballoc.size;
// Success if the region fits before the next surviving 1st-vector item
// (or before the block end when we consumed the whole 1st vector).
8850 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
8851 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
8855 if(bufferImageGranularity > 1)
8857 for(
size_t nextSuballocIndex = index1st;
8858 nextSuballocIndex < suballocations1st.size();
8859 nextSuballocIndex++)
8861 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
8862 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8864 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8878 pAllocationRequest->offset = resultOffset;
8879 pAllocationRequest->sumFreeSize =
8880 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
8882 - pAllocationRequest->sumItemSize;
// Makes lost the allocations counted by a previous CreateAllocationRequest
// (pAllocationRequest->itemsToMakeLostCount), scanning the 1st vector from
// its first live item. Lost items become null items (type FREE, null handle),
// m_SumFreeSize and m_1stNullItemsMiddleCount are updated accordingly.
// NOTE(review): garbled extraction — braces, the failure path when MakeLost()
// returns false, and the tail of the function (after the loop) are elided;
// code left byte-identical.
8892 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
8893 uint32_t currentFrameIndex,
8894 uint32_t frameInUseCount,
8895 VmaAllocationRequest* pAllocationRequest)
// Fast path: nothing was requested to be made lost.
8897 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Only the lower-address strategies can request losing items.
8902 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
8904 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8905 size_t index1st = m_1stNullItemsBeginCount;
8906 size_t madeLostCount = 0;
8907 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
8909 VMA_ASSERT(index1st < suballocations1st.size());
8910 VmaSuballocation& suballoc = suballocations1st[index1st];
8911 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8913 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8914 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
8915 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Turn the lost allocation into a null item and account its size as free.
8917 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8918 suballoc.hAllocation = VK_NULL_HANDLE;
8919 m_SumFreeSize += suballoc.size;
8920 ++m_1stNullItemsMiddleCount;
// Makes lost every allocation in this block that can become lost
// (CanBecomeLost() and last used more than frameInUseCount frames before
// currentFrameIndex). Lost items become null items; returns the number of
// allocations made lost.
// NOTE(review): garbled extraction — braces and the body of the trailing
// `if(lostAllocationCount)` (presumably a cleanup call, per upstream VMA —
// confirm) are elided. Also note the 2nd-vector loop below shows no
// m_SumFreeSize update, unlike the 1st-vector loop — possibly elided; verify
// against upstream. Code left byte-identical.
8937 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8939 uint32_t lostAllocationCount = 0;
// Pass over the live part of the 1st vector.
8941 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8942 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8944 VmaSuballocation& suballoc = suballocations1st[i];
8945 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8946 suballoc.hAllocation->CanBecomeLost() &&
8947 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8949 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8950 suballoc.hAllocation = VK_NULL_HANDLE;
8951 ++m_1stNullItemsMiddleCount;
8952 m_SumFreeSize += suballoc.size;
8953 ++lostAllocationCount;
// Pass over the whole 2nd vector.
8957 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8958 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8960 VmaSuballocation& suballoc = suballocations2nd[i];
8961 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8962 suballoc.hAllocation->CanBecomeLost() &&
8963 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8965 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8966 suballoc.hAllocation = VK_NULL_HANDLE;
8967 ++m_2ndNullItemsCount;
8968 ++lostAllocationCount;
8972 if(lostAllocationCount)
8977 return lostAllocationCount;
8980 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
8982 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8983 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8985 const VmaSuballocation& suballoc = suballocations1st[i];
8986 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8988 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
8990 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8991 return VK_ERROR_VALIDATION_FAILED_EXT;
8993 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8995 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8996 return VK_ERROR_VALIDATION_FAILED_EXT;
9001 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9002 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9004 const VmaSuballocation& suballoc = suballocations2nd[i];
9005 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9007 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9009 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9010 return VK_ERROR_VALIDATION_FAILED_EXT;
9012 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9014 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9015 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits an allocation previously found by CreateAllocationRequest:
// builds the new VmaSuballocation at request.offset and appends it to the
// appropriate vector — the 2nd vector as upper stack (upper-address branch),
// the end of the 1st vector, or the 2nd vector as ring buffer when the offset
// wraps below the start of 1st. Finally deducts the size from m_SumFreeSize.
// NOTE(review): garbled extraction — two parameters in the gap at original
// lines 9027-9028 (presumably `bool upperAddress` and `VmaAllocation
// hAllocation` per upstream VMA — confirm), plus the branch on the upper-
// address flag and braces/breaks, are elided. Code left byte-identical.
9023 void VmaBlockMetadata_Linear::Alloc(
9024 const VmaAllocationRequest& request,
9025 VmaSuballocationType type,
9026 VkDeviceSize allocSize,
9030 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address branch: push onto the 2nd vector and switch to double stack.
9034 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
9035 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9036 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9037 suballocations2nd.push_back(newSuballoc);
9038 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
9042 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9045 if(suballocations1st.empty())
9047 suballocations1st.push_back(newSuballoc);
// New allocation lies at the end of the 1st vector.
9052 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
9055 VMA_ASSERT(request.offset + allocSize <= GetSize());
9056 suballocations1st.push_back(newSuballoc);
// New allocation wrapped around below the start of 1st: use 2nd as ring buffer.
9059 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
9061 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9063 switch(m_2ndVectorMode)
9065 case SECOND_VECTOR_EMPTY:
// First wrapped allocation turns the 2nd vector into a ring buffer.
9067 VMA_ASSERT(suballocations2nd.empty());
9068 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
9070 case SECOND_VECTOR_RING_BUFFER:
9072 VMA_ASSERT(!suballocations2nd.empty());
9074 case SECOND_VECTOR_DOUBLE_STACK:
9075 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
9081 suballocations2nd.push_back(newSuballoc);
// Offset fits neither at the end nor wrapped before 1st: internal error.
9085 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
9090 m_SumFreeSize -= newSuballoc.size;
9093 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
9095 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at `offset`. Fast paths first: the first live item
// of 1st (advance the leading-null counter), the last item of 2nd (pop, ring
// buffer or double stack), or the last item of 1st (pop, 2nd empty). Otherwise
// binary-search the sorted middle of 1st, then 2nd (sorted ascending in ring-
// buffer mode, descending in double-stack mode), and convert the found item to
// a null item. Asserts if the offset matches nothing.
// NOTE(review): garbled extraction — braces, `return;` statements, and the
// post-free cleanup calls in the line-number gaps are elided; code left
// byte-identical.
9098 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
9100 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9101 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9103 if(!suballocations1st.empty())
// Fast path: freeing the first live item of 1st grows the leading null run.
9106 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9107 if(firstSuballoc.offset == offset)
9109 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9110 firstSuballoc.hAllocation = VK_NULL_HANDLE;
9111 m_SumFreeSize += firstSuballoc.size;
9112 ++m_1stNullItemsBeginCount;
// Fast path: freeing the newest item of 2nd (ring buffer or double stack).
9119 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
9120 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9122 VmaSuballocation& lastSuballoc = suballocations2nd.back();
9123 if(lastSuballoc.offset == offset)
9125 m_SumFreeSize += lastSuballoc.size;
9126 suballocations2nd.pop_back();
// Fast path: freeing the last item of 1st while 2nd is unused.
9132 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
9134 VmaSuballocation& lastSuballoc = suballocations1st.back();
9135 if(lastSuballoc.offset == offset)
9137 m_SumFreeSize += lastSuballoc.size;
9138 suballocations1st.pop_back();
// Slow path: binary-search the live middle of 1st by offset.
9146 VmaSuballocation refSuballoc;
9147 refSuballoc.offset = offset;
9149 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
9150 suballocations1st.begin() + m_1stNullItemsBeginCount,
9151 suballocations1st.end(),
9153 if(it != suballocations1st.end())
9155 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9156 it->hAllocation = VK_NULL_HANDLE;
9157 ++m_1stNullItemsMiddleCount;
9158 m_SumFreeSize += it->size;
// Slow path: search 2nd — ascending order for ring buffer, descending for
// the upper stack (it grows downward from the block end).
9164 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
9167 VmaSuballocation refSuballoc;
9168 refSuballoc.offset = offset;
9170 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
9171 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
9172 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
9173 if(it != suballocations2nd.end())
9175 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9176 it->hAllocation = VK_NULL_HANDLE;
9177 ++m_2ndNullItemsCount;
9178 m_SumFreeSize += it->size;
9184 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
9187 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 9189 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9190 const size_t suballocCount = AccessSuballocations1st().size();
9191 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
9194 void VmaBlockMetadata_Linear::CleanupAfterFree()
9196 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9197 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9201 suballocations1st.clear();
9202 suballocations2nd.clear();
9203 m_1stNullItemsBeginCount = 0;
9204 m_1stNullItemsMiddleCount = 0;
9205 m_2ndNullItemsCount = 0;
9206 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9210 const size_t suballoc1stCount = suballocations1st.size();
9211 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9212 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
9215 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9216 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9218 ++m_1stNullItemsBeginCount;
9219 --m_1stNullItemsMiddleCount;
9223 while(m_1stNullItemsMiddleCount > 0 &&
9224 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9226 --m_1stNullItemsMiddleCount;
9227 suballocations1st.pop_back();
9231 while(m_2ndNullItemsCount > 0 &&
9232 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9234 --m_2ndNullItemsCount;
9235 suballocations2nd.pop_back();
9238 if(ShouldCompact1st())
9240 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9241 size_t srcIndex = m_1stNullItemsBeginCount;
9242 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
9244 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9248 if(dstIndex != srcIndex)
9250 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9254 suballocations1st.resize(nonNullItemCount);
9255 m_1stNullItemsBeginCount = 0;
9256 m_1stNullItemsMiddleCount = 0;
9260 if(suballocations2nd.empty())
9262 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9266 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9268 suballocations1st.clear();
9269 m_1stNullItemsBeginCount = 0;
9271 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9274 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9275 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9276 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9277 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9279 ++m_1stNullItemsBeginCount;
9280 --m_1stNullItemsMiddleCount;
9282 m_2ndNullItemsCount = 0;
9283 m_1stVectorIndex ^= 1;
9288 VMA_HEAVY_ASSERT(Validate());
9295 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
9296 VmaBlockMetadata(hAllocator),
9298 m_AllocationCount(0),
9302 memset(m_FreeList, 0,
sizeof(m_FreeList));
9305 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
9310 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
9312 VmaBlockMetadata::Init(size);
9314 m_UsableSize = VmaPrevPow2(size);
9315 m_SumFreeSize = m_UsableSize;
9319 while(m_LevelCount < MAX_LEVELS &&
9320 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
9325 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
9326 rootNode->offset = 0;
9327 rootNode->type = Node::TYPE_FREE;
9328 rootNode->parent = VMA_NULL;
9329 rootNode->buddy = VMA_NULL;
9332 AddToFreeListFront(0, rootNode);
9335 bool VmaBlockMetadata_Buddy::Validate()
const 9338 ValidationContext ctx;
9339 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
9341 VMA_VALIDATE(
false &&
"ValidateNode failed.");
9343 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9344 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
9347 for(uint32_t level = 0; level < m_LevelCount; ++level)
9349 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9350 m_FreeList[level].front->free.prev == VMA_NULL);
9352 for(Node* node = m_FreeList[level].front;
9354 node = node->free.next)
9356 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9358 if(node->free.next == VMA_NULL)
9360 VMA_VALIDATE(m_FreeList[level].back == node);
9364 VMA_VALIDATE(node->free.next->free.prev == node);
9370 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9372 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
9378 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 9380 for(uint32_t level = 0; level < m_LevelCount; ++level)
9382 if(m_FreeList[level].front != VMA_NULL)
9384 return LevelToNodeSize(level);
// Fills outInfo with allocation statistics for this buddy block by walking the
// tree (CalcAllocationStatInfoNode) and then accounting the unusable tail
// (the non-power-of-2 remainder beyond m_UsableSize) as an unused range.
// NOTE(review): the extraction dropped the statistic-field initialization
// lines (between 9392 and 9403) and the body of the unusableSize branch
// (after 9405) — restore them from upstream before compiling; not safe to
// reconstruct here.
9390 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9392 const VkDeviceSize unusableSize = GetUnusableSize();
9403 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
9405 if(unusableSize > 0)
9414 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 9416 const VkDeviceSize unusableSize = GetUnusableSize();
9418 inoutStats.
size += GetSize();
9419 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
9424 if(unusableSize > 0)
9431 #if VMA_STATS_STRING_ENABLED 9433 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 9437 CalcAllocationStatInfo(stat);
9439 PrintDetailedMap_Begin(
9445 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9447 const VkDeviceSize unusableSize = GetUnusableSize();
9448 if(unusableSize > 0)
9450 PrintDetailedMap_UnusedRange(json,
9455 PrintDetailedMap_End(json);
9458 #endif // #if VMA_STATS_STRING_ENABLED 9460 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
9461 uint32_t currentFrameIndex,
9462 uint32_t frameInUseCount,
9463 VkDeviceSize bufferImageGranularity,
9464 VkDeviceSize allocSize,
9465 VkDeviceSize allocAlignment,
9467 VmaSuballocationType allocType,
9468 bool canMakeOtherLost,
9470 VmaAllocationRequest* pAllocationRequest)
9472 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
9476 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9477 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9478 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9480 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
9481 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
9484 if(allocSize > m_UsableSize)
9489 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9490 for(uint32_t level = targetLevel + 1; level--; )
9492 for(Node* freeNode = m_FreeList[level].front;
9493 freeNode != VMA_NULL;
9494 freeNode = freeNode->free.next)
9496 if(freeNode->offset % allocAlignment == 0)
9498 pAllocationRequest->offset = freeNode->offset;
9499 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
9500 pAllocationRequest->sumItemSize = 0;
9501 pAllocationRequest->itemsToMakeLostCount = 0;
9502 pAllocationRequest->customData = (
void*)(uintptr_t)level;
9511 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
9512 uint32_t currentFrameIndex,
9513 uint32_t frameInUseCount,
9514 VmaAllocationRequest* pAllocationRequest)
9520 return pAllocationRequest->itemsToMakeLostCount == 0;
9523 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9532 void VmaBlockMetadata_Buddy::Alloc(
9533 const VmaAllocationRequest& request,
9534 VmaSuballocationType type,
9535 VkDeviceSize allocSize,
9539 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9540 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
9542 Node* currNode = m_FreeList[currLevel].front;
9543 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9544 while(currNode->offset != request.offset)
9546 currNode = currNode->free.next;
9547 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9551 while(currLevel < targetLevel)
9555 RemoveFromFreeList(currLevel, currNode);
9557 const uint32_t childrenLevel = currLevel + 1;
9560 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
9561 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
9563 leftChild->offset = currNode->offset;
9564 leftChild->type = Node::TYPE_FREE;
9565 leftChild->parent = currNode;
9566 leftChild->buddy = rightChild;
9568 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9569 rightChild->type = Node::TYPE_FREE;
9570 rightChild->parent = currNode;
9571 rightChild->buddy = leftChild;
9574 currNode->type = Node::TYPE_SPLIT;
9575 currNode->split.leftChild = leftChild;
9578 AddToFreeListFront(childrenLevel, rightChild);
9579 AddToFreeListFront(childrenLevel, leftChild);
9584 currNode = m_FreeList[currLevel].front;
9593 VMA_ASSERT(currLevel == targetLevel &&
9594 currNode != VMA_NULL &&
9595 currNode->type == Node::TYPE_FREE);
9596 RemoveFromFreeList(currLevel, currNode);
9599 currNode->type = Node::TYPE_ALLOCATION;
9600 currNode->allocation.alloc = hAllocation;
9602 ++m_AllocationCount;
9604 m_SumFreeSize -= allocSize;
9607 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
9609 if(node->type == Node::TYPE_SPLIT)
9611 DeleteNode(node->split.leftChild->buddy);
9612 DeleteNode(node->split.leftChild);
9615 vma_delete(GetAllocationCallbacks(), node);
9618 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 9620 VMA_VALIDATE(level < m_LevelCount);
9621 VMA_VALIDATE(curr->parent == parent);
9622 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9623 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9626 case Node::TYPE_FREE:
9628 ctx.calculatedSumFreeSize += levelNodeSize;
9629 ++ctx.calculatedFreeCount;
9631 case Node::TYPE_ALLOCATION:
9632 ++ctx.calculatedAllocationCount;
9633 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
9634 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
9636 case Node::TYPE_SPLIT:
9638 const uint32_t childrenLevel = level + 1;
9639 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
9640 const Node*
const leftChild = curr->split.leftChild;
9641 VMA_VALIDATE(leftChild != VMA_NULL);
9642 VMA_VALIDATE(leftChild->offset == curr->offset);
9643 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
9645 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
9647 const Node*
const rightChild = leftChild->buddy;
9648 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9649 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
9651 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
9662 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 9666 VkDeviceSize currLevelNodeSize = m_UsableSize;
9667 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
9668 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
9671 currLevelNodeSize = nextLevelNodeSize;
9672 nextLevelNodeSize = currLevelNodeSize >> 1;
9677 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
9680 Node* node = m_Root;
9681 VkDeviceSize nodeOffset = 0;
9683 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
9684 while(node->type == Node::TYPE_SPLIT)
9686 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
9687 if(offset < nodeOffset + nextLevelSize)
9689 node = node->split.leftChild;
9693 node = node->split.leftChild->buddy;
9694 nodeOffset += nextLevelSize;
9697 levelNodeSize = nextLevelSize;
9700 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
9701 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
9704 --m_AllocationCount;
9705 m_SumFreeSize += alloc->GetSize();
9707 node->type = Node::TYPE_FREE;
9710 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
9712 RemoveFromFreeList(level, node->buddy);
9713 Node*
const parent = node->parent;
9715 vma_delete(GetAllocationCallbacks(), node->buddy);
9716 vma_delete(GetAllocationCallbacks(), node);
9717 parent->type = Node::TYPE_FREE;
9725 AddToFreeListFront(level, node);
// Recursively accumulates statistics for one buddy-tree node into outInfo:
// free nodes count as unused ranges, allocation nodes contribute their size
// (plus any internal fragmentation as an unused range), split nodes recurse
// into both children with half the node size.
// NOTE(review): the extraction dropped the switch header, the stat-field
// update statements inside each case (lines 9733-9737, 9741-9745, 9748-9755)
// and the break statements — restore from upstream before compiling; not safe
// to reconstruct the exact min/max bookkeeping here.
9728 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 9732 case Node::TYPE_FREE:
9738 case Node::TYPE_ALLOCATION:
9740 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
9746 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
9747 if(unusedRangeSize > 0)
9756 case Node::TYPE_SPLIT:
9758 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9759 const Node*
const leftChild = node->split.leftChild;
9760 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
9761 const Node*
const rightChild = leftChild->buddy;
9762 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
9770 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
9772 VMA_ASSERT(node->type == Node::TYPE_FREE);
9775 Node*
const frontNode = m_FreeList[level].front;
9776 if(frontNode == VMA_NULL)
9778 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9779 node->free.prev = node->free.next = VMA_NULL;
9780 m_FreeList[level].front = m_FreeList[level].back = node;
9784 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9785 node->free.prev = VMA_NULL;
9786 node->free.next = frontNode;
9787 frontNode->free.prev = node;
9788 m_FreeList[level].front = node;
9792 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
9794 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
9797 if(node->free.prev == VMA_NULL)
9799 VMA_ASSERT(m_FreeList[level].front == node);
9800 m_FreeList[level].front = node->free.next;
9804 Node*
const prevFreeNode = node->free.prev;
9805 VMA_ASSERT(prevFreeNode->free.next == node);
9806 prevFreeNode->free.next = node->free.next;
9810 if(node->free.next == VMA_NULL)
9812 VMA_ASSERT(m_FreeList[level].back == node);
9813 m_FreeList[level].back = node->free.prev;
9817 Node*
const nextFreeNode = node->free.next;
9818 VMA_ASSERT(nextFreeNode->free.prev == node);
9819 nextFreeNode->free.prev = node->free.prev;
9823 #if VMA_STATS_STRING_ENABLED 9824 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 9828 case Node::TYPE_FREE:
9829 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
9831 case Node::TYPE_ALLOCATION:
9833 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
9834 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
9835 if(allocSize < levelNodeSize)
9837 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
9841 case Node::TYPE_SPLIT:
9843 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9844 const Node*
const leftChild = node->split.leftChild;
9845 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
9846 const Node*
const rightChild = leftChild->buddy;
9847 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
9854 #endif // #if VMA_STATS_STRING_ENABLED 9860 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
9861 m_pMetadata(VMA_NULL),
9862 m_MemoryTypeIndex(UINT32_MAX),
9864 m_hMemory(VK_NULL_HANDLE),
9866 m_pMappedData(VMA_NULL)
9870 void VmaDeviceMemoryBlock::Init(
9872 uint32_t newMemoryTypeIndex,
9873 VkDeviceMemory newMemory,
9874 VkDeviceSize newSize,
9878 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
9880 m_MemoryTypeIndex = newMemoryTypeIndex;
9882 m_hMemory = newMemory;
9887 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
9890 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
9896 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
9898 m_pMetadata->Init(newSize);
9901 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
9905 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
9907 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
9908 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
9909 m_hMemory = VK_NULL_HANDLE;
9911 vma_delete(allocator, m_pMetadata);
9912 m_pMetadata = VMA_NULL;
9915 bool VmaDeviceMemoryBlock::Validate()
const 9917 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
9918 (m_pMetadata->GetSize() != 0));
9920 return m_pMetadata->Validate();
9923 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
9925 void* pData =
nullptr;
9926 VkResult res = Map(hAllocator, 1, &pData);
9927 if(res != VK_SUCCESS)
9932 res = m_pMetadata->CheckCorruption(pData);
9934 Unmap(hAllocator, 1);
9939 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
9946 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9949 m_MapCount += count;
9950 VMA_ASSERT(m_pMappedData != VMA_NULL);
9951 if(ppData != VMA_NULL)
9953 *ppData = m_pMappedData;
9959 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9960 hAllocator->m_hDevice,
9966 if(result == VK_SUCCESS)
9968 if(ppData != VMA_NULL)
9970 *ppData = m_pMappedData;
9978 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
9985 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9986 if(m_MapCount >= count)
9988 m_MapCount -= count;
9991 m_pMappedData = VMA_NULL;
9992 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
9997 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
10001 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10003 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10004 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10007 VkResult res = Map(hAllocator, 1, &pData);
10008 if(res != VK_SUCCESS)
10013 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
10014 VmaWriteMagicValue(pData, allocOffset + allocSize);
10016 Unmap(hAllocator, 1);
10021 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10023 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10024 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10027 VkResult res = Map(hAllocator, 1, &pData);
10028 if(res != VK_SUCCESS)
10033 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
10035 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
10037 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
10039 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
10042 Unmap(hAllocator, 1);
10047 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
10052 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10053 hAllocation->GetBlock() ==
this);
10055 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10056 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
10057 hAllocator->m_hDevice,
10060 hAllocation->GetOffset());
10063 VkResult VmaDeviceMemoryBlock::BindImageMemory(
10068 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10069 hAllocation->GetBlock() ==
this);
10071 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10072 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
10073 hAllocator->m_hDevice,
10076 hAllocation->GetOffset());
// NOTE(review): fragments of the static statistics helpers. Line 10081 is the
// body of a stat-info zeroing helper (presumably VmaInitStatInfo — its
// signature, dropped by extraction, precedes it; confirm against upstream).
// VmaPostprocessCalcStatInfo's body (average computations, lines 10102+) was
// also dropped — restore from upstream before compiling.
10081 memset(&outInfo, 0,
sizeof(outInfo));
10100 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
10108 VmaPool_T::VmaPool_T(
10111 VkDeviceSize preferredBlockSize) :
10114 createInfo.memoryTypeIndex,
10115 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
10116 createInfo.minBlockCount,
10117 createInfo.maxBlockCount,
10119 createInfo.frameInUseCount,
10121 createInfo.blockSize != 0,
10127 VmaPool_T::~VmaPool_T()
10131 #if VMA_STATS_STRING_ENABLED 10133 #endif // #if VMA_STATS_STRING_ENABLED 10135 VmaBlockVector::VmaBlockVector(
10137 uint32_t memoryTypeIndex,
10138 VkDeviceSize preferredBlockSize,
10139 size_t minBlockCount,
10140 size_t maxBlockCount,
10141 VkDeviceSize bufferImageGranularity,
10142 uint32_t frameInUseCount,
10144 bool explicitBlockSize,
10145 uint32_t algorithm) :
10146 m_hAllocator(hAllocator),
10147 m_MemoryTypeIndex(memoryTypeIndex),
10148 m_PreferredBlockSize(preferredBlockSize),
10149 m_MinBlockCount(minBlockCount),
10150 m_MaxBlockCount(maxBlockCount),
10151 m_BufferImageGranularity(bufferImageGranularity),
10152 m_FrameInUseCount(frameInUseCount),
10153 m_IsCustomPool(isCustomPool),
10154 m_ExplicitBlockSize(explicitBlockSize),
10155 m_Algorithm(algorithm),
10156 m_HasEmptyBlock(false),
10157 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
10158 m_pDefragmentator(VMA_NULL),
10163 VmaBlockVector::~VmaBlockVector()
10165 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
10167 for(
size_t i = m_Blocks.size(); i--; )
10169 m_Blocks[i]->Destroy(m_hAllocator);
10170 vma_delete(m_hAllocator, m_Blocks[i]);
10174 VkResult VmaBlockVector::CreateMinBlocks()
10176 for(
size_t i = 0; i < m_MinBlockCount; ++i)
10178 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
10179 if(res != VK_SUCCESS)
10187 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
10189 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10191 const size_t blockCount = m_Blocks.size();
10200 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10202 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10203 VMA_ASSERT(pBlock);
10204 VMA_HEAVY_ASSERT(pBlock->Validate());
10205 pBlock->m_pMetadata->AddPoolStats(*pStats);
10209 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 10211 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
10212 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
10213 (VMA_DEBUG_MARGIN > 0) &&
10214 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Number of attempts the make-other-lost path below retries before giving up
// with VK_ERROR_TOO_MANY_OBJECTS.
10217 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Main suballocation entry point for this block vector. Strategy:
// 1) try existing blocks (last block first, then forward or backward scan),
// 2) create a new block (with progressively halved size when the preferred
//    size fails and the block size is not explicit),
// 3) optionally make other allocations lost and retry, up to
//    VMA_ALLOCATION_TRY_COUNT times.
// NOTE(review): this function is heavily truncated by extraction — the
// parameter list, allocation-strategy conditionals, AllocateFromBlock
// argument lists, brace structure and several returns are missing. Left
// byte-identical; restore the missing lines from upstream before compiling.
10219 VkResult VmaBlockVector::Allocate(
10221 uint32_t currentFrameIndex,
10223 VkDeviceSize alignment,
10225 VmaSuballocationType suballocType,
10232 const bool canCreateNewBlock =
10234 (m_Blocks.size() < m_MaxBlockCount);
10241 canMakeOtherLost =
false;
// Upper-address allocation is only valid with the linear algorithm.
10245 if(isUpperAddress &&
10248 return VK_ERROR_FEATURE_NOT_PRESENT;
10262 return VK_ERROR_FEATURE_NOT_PRESENT;
// Early reject: request (plus debug margins) cannot fit in any block.
10266 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
10268 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10271 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10278 if(!canMakeOtherLost || canCreateNewBlock)
// 1a. Try the last (usually fullest-sorted) block first.
10287 if(!m_Blocks.empty())
10289 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
10290 VMA_ASSERT(pCurrBlock);
10291 VkResult res = AllocateFromBlock(
10302 if(res == VK_SUCCESS)
10304 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 1b. Forward scan over existing blocks.
10314 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10316 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10317 VMA_ASSERT(pCurrBlock);
10318 VkResult res = AllocateFromBlock(
10329 if(res == VK_SUCCESS)
10331 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 1c. Backward scan (alternative strategy branch).
10339 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10341 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10342 VMA_ASSERT(pCurrBlock);
10343 VkResult res = AllocateFromBlock(
10354 if(res == VK_SUCCESS)
10356 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. Create a new block, preferring smaller sizes when possible.
10364 if(canCreateNewBlock)
10367 VkDeviceSize newBlockSize = m_PreferredBlockSize;
10368 uint32_t newBlockSizeShift = 0;
10369 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
10371 if(!m_ExplicitBlockSize)
// Start small: halve while still larger than every existing block and
// at least twice the requested size.
10374 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
10375 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
10377 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10378 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
10380 newBlockSize = smallerNewBlockSize;
10381 ++newBlockSizeShift;
10390 size_t newBlockIndex = 0;
10391 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On failure, retry with progressively halved sizes.
10393 if(!m_ExplicitBlockSize)
10395 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
10397 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10398 if(smallerNewBlockSize >= size)
10400 newBlockSize = smallerNewBlockSize;
10401 ++newBlockSizeShift;
10402 res = CreateBlock(newBlockSize, &newBlockIndex);
10411 if(res == VK_SUCCESS)
10413 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
10414 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
10416 res = AllocateFromBlock(
10427 if(res == VK_SUCCESS)
10429 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
10435 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Make-other-lost path: find the cheapest block to steal from, retrying.
10442 if(canMakeOtherLost)
10444 uint32_t tryIndex = 0;
10445 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
10447 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
10448 VmaAllocationRequest bestRequest = {};
10449 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// 3a. Forward scan for the cheapest request.
10455 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10457 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10458 VMA_ASSERT(pCurrBlock);
10459 VmaAllocationRequest currRequest = {};
10460 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10463 m_BufferImageGranularity,
10472 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10473 if(pBestRequestBlock == VMA_NULL ||
10474 currRequestCost < bestRequestCost)
10476 pBestRequestBlock = pCurrBlock;
10477 bestRequest = currRequest;
10478 bestRequestCost = currRequestCost;
10480 if(bestRequestCost == 0)
// 3b. Backward scan (alternative strategy branch).
10491 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10493 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10494 VMA_ASSERT(pCurrBlock);
10495 VmaAllocationRequest currRequest = {};
10496 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10499 m_BufferImageGranularity,
10508 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10509 if(pBestRequestBlock == VMA_NULL ||
10510 currRequestCost < bestRequestCost ||
10513 pBestRequestBlock = pCurrBlock;
10514 bestRequest = currRequest;
10515 bestRequestCost = currRequestCost;
10517 if(bestRequestCost == 0 ||
// 3c. Commit: make the requested allocations lost and allocate.
10527 if(pBestRequestBlock != VMA_NULL)
10531 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
10532 if(res != VK_SUCCESS)
10538 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
10544 if(pBestRequestBlock->m_pMetadata->IsEmpty())
10546 m_HasEmptyBlock =
false;
10549 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10550 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
10551 (*pAllocation)->InitBlockAllocation(
10554 bestRequest.offset,
10560 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
10561 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
10562 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
10563 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10565 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10567 if(IsCorruptionDetectionEnabled())
10569 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
10570 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Exhausted all retries.
10585 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
10587 return VK_ERROR_TOO_MANY_OBJECTS;
10591 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10594 void VmaBlockVector::Free(
10597 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
10601 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10603 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
10605 if(IsCorruptionDetectionEnabled())
10607 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
10608 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
10611 if(hAllocation->IsPersistentMap())
10613 pBlock->Unmap(m_hAllocator, 1);
10616 pBlock->m_pMetadata->Free(hAllocation);
10617 VMA_HEAVY_ASSERT(pBlock->Validate());
10619 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
10622 if(pBlock->m_pMetadata->IsEmpty())
10625 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
10627 pBlockToDelete = pBlock;
10633 m_HasEmptyBlock =
true;
10638 else if(m_HasEmptyBlock)
10640 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
10641 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
10643 pBlockToDelete = pLastBlock;
10644 m_Blocks.pop_back();
10645 m_HasEmptyBlock =
false;
10649 IncrementallySortBlocks();
10654 if(pBlockToDelete != VMA_NULL)
10656 VMA_DEBUG_LOG(
" Deleted empty allocation");
10657 pBlockToDelete->Destroy(m_hAllocator);
10658 vma_delete(m_hAllocator, pBlockToDelete);
10662 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 10664 VkDeviceSize result = 0;
10665 for(
size_t i = m_Blocks.size(); i--; )
10667 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
10668 if(result >= m_PreferredBlockSize)
10676 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
10678 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10680 if(m_Blocks[blockIndex] == pBlock)
10682 VmaVectorRemove(m_Blocks, blockIndex);
10689 void VmaBlockVector::IncrementallySortBlocks()
10694 for(
size_t i = 1; i < m_Blocks.size(); ++i)
10696 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
10698 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
10705 VkResult VmaBlockVector::AllocateFromBlock(
10706 VmaDeviceMemoryBlock* pBlock,
10708 uint32_t currentFrameIndex,
10710 VkDeviceSize alignment,
10713 VmaSuballocationType suballocType,
10722 VmaAllocationRequest currRequest = {};
10723 if(pBlock->m_pMetadata->CreateAllocationRequest(
10726 m_BufferImageGranularity,
10736 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
10740 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
10741 if(res != VK_SUCCESS)
10748 if(pBlock->m_pMetadata->IsEmpty())
10750 m_HasEmptyBlock =
false;
10753 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10754 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
10755 (*pAllocation)->InitBlockAllocation(
10758 currRequest.offset,
10764 VMA_HEAVY_ASSERT(pBlock->Validate());
10765 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
10766 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10768 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10770 if(IsCorruptionDetectionEnabled())
10772 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
10773 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
10777 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10780 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
10782 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
10783 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
10784 allocInfo.allocationSize = blockSize;
10785 VkDeviceMemory mem = VK_NULL_HANDLE;
10786 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
10795 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
10800 allocInfo.allocationSize,
10804 m_Blocks.push_back(pBlock);
10805 if(pNewBlockIndex != VMA_NULL)
10807 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector's configuration and per-block metadata as JSON
// (only compiled when VMA_STATS_STRING_ENABLED). Takes the vector's mutex for
// the duration of the dump. The branch writing MemoryTypeIndex/BlockSize/
// BlockCount vs. PreferredBlockSize presumably distinguishes custom pools from
// default vectors — the branching condition itself is lost to extraction gaps.
10813 #if VMA_STATS_STRING_ENABLED 10815 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
10817 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10819 json.BeginObject();
10823 json.WriteString(
"MemoryTypeIndex");
10824 json.WriteNumber(m_MemoryTypeIndex);
10826 json.WriteString(
"BlockSize");
10827 json.WriteNumber(m_PreferredBlockSize);
// Min/Max are emitted only when they constrain the pool; Cur is always written.
10829 json.WriteString(
"BlockCount");
10830 json.BeginObject(
true);
10831 if(m_MinBlockCount > 0)
10833 json.WriteString(
"Min");
10834 json.WriteNumber((uint64_t)m_MinBlockCount);
10836 if(m_MaxBlockCount < SIZE_MAX)
10838 json.WriteString(
"Max");
10839 json.WriteNumber((uint64_t)m_MaxBlockCount);
10841 json.WriteString(
"Cur");
10842 json.WriteNumber((uint64_t)m_Blocks.size());
10845 if(m_FrameInUseCount > 0)
10847 json.WriteString(
"FrameInUseCount");
10848 json.WriteNumber(m_FrameInUseCount);
10851 if(m_Algorithm != 0)
10853 json.WriteString(
"Algorithm");
10854 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
10859 json.WriteString(
"PreferredBlockSize");
10860 json.WriteNumber(m_PreferredBlockSize);
// One entry per block, keyed by the block's numeric id.
10863 json.WriteString(
"Blocks");
10864 json.BeginObject();
10865 for(
size_t i = 0; i < m_Blocks.size(); ++i)
10867 json.BeginString();
10868 json.ContinueString(m_Blocks[i]->GetId());
10871 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Lazily creates the per-vector VmaDefragmentator on first use and returns it.
10878 #endif // #if VMA_STATS_STRING_ENABLED 10880 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
10882 uint32_t currentFrameIndex)
10884 if(m_pDefragmentator == VMA_NULL)
10886 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
10889 currentFrameIndex)
10892 return m_pDefragmentator;
// Runs the defragmentator under the vector's mutex, updates the caller's
// remaining budgets (maxBytesToMove / maxAllocationsToMove are in-out), fills
// optional pDefragmentationStats, then frees blocks that became empty —
// keeping at most one empty block (m_HasEmptyBlock) and never dropping below
// m_MinBlockCount.
10895 VkResult VmaBlockVector::Defragment(
10897 VkDeviceSize& maxBytesToMove,
10898 uint32_t& maxAllocationsToMove)
10900 if(m_pDefragmentator == VMA_NULL)
10905 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10908 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Accumulate stats and shrink the budgets by what this vector consumed.
10911 if(pDefragmentationStats != VMA_NULL)
10913 const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
10914 const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
10915 pDefragmentationStats->
bytesMoved += bytesMoved;
10917 VMA_ASSERT(bytesMoved <= maxBytesToMove);
10918 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
10919 maxBytesToMove -= bytesMoved;
10920 maxAllocationsToMove -= allocationsMoved;
// Iterate backwards so VmaVectorRemove does not disturb indices yet to visit.
10924 m_HasEmptyBlock =
false;
10925 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10927 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
10928 if(pBlock->m_pMetadata->IsEmpty())
10930 if(m_Blocks.size() > m_MinBlockCount)
10932 if(pDefragmentationStats != VMA_NULL)
10935 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
10938 VmaVectorRemove(m_Blocks, blockIndex);
10939 pBlock->Destroy(m_hAllocator);
10940 vma_delete(m_hAllocator, pBlock);
// Could not free this empty block (min count reached) — remember we have one.
10944 m_HasEmptyBlock =
true;
// Destroys the lazily-created defragmentator, if any, and resets the pointer.
10952 void VmaBlockVector::DestroyDefragmentator()
10954 if(m_pDefragmentator != VMA_NULL)
10956 vma_delete(m_hAllocator, m_pDefragmentator);
10957 m_pDefragmentator = VMA_NULL;
// Marks allocations in all blocks as lost (per currentFrameIndex and
// m_FrameInUseCount), under the vector's mutex. Optionally reports the total
// number of allocations made lost via pLostAllocationCount.
10961 void VmaBlockVector::MakePoolAllocationsLost(
10962 uint32_t currentFrameIndex,
10963 size_t* pLostAllocationCount)
10965 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10966 size_t lostAllocationCount = 0;
10967 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10969 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10970 VMA_ASSERT(pBlock);
10971 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
10973 if(pLostAllocationCount != VMA_NULL)
10975 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection magic values in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is disabled;
// otherwise propagates the first per-block failure (the early return for
// res != VK_SUCCESS is cut off by extraction gaps).
10979 VkResult VmaBlockVector::CheckCorruption()
10981 if(!IsCorruptionDetectionEnabled())
10983 return VK_ERROR_FEATURE_NOT_PRESENT;
10986 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10987 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10989 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10990 VMA_ASSERT(pBlock);
10991 VkResult res = pBlock->CheckCorruption(m_hAllocator);
10992 if(res != VK_SUCCESS)
// Accumulates this vector's per-block statistics into pStats: once into the
// grand total, once into the bucket for this memory type, and once into the
// bucket for its heap.
11000 void VmaBlockVector::AddStats(
VmaStats* pStats)
11002 const uint32_t memTypeIndex = m_MemoryTypeIndex;
11003 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
11005 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11007 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11009 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11010 VMA_ASSERT(pBlock);
11011 VMA_HEAVY_ASSERT(pBlock->Validate());
11013 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
11014 VmaAddStatInfo(pStats->
total, allocationStatInfo);
11015 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
11016 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one block vector; defragmentation
// is only supported for the default (linear-less) algorithm, hence the assert.
11023 VmaDefragmentator::VmaDefragmentator(
11025 VmaBlockVector* pBlockVector,
11026 uint32_t currentFrameIndex) :
11027 m_hAllocator(hAllocator),
11028 m_pBlockVector(pBlockVector),
11029 m_CurrentFrameIndex(currentFrameIndex),
11031 m_AllocationsMoved(0),
11032 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
11033 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
11035 VMA_ASSERT(pBlockVector->GetAlgorithm() == 0);
// Destructor: releases the per-block bookkeeping objects created in
// Defragment(); reverse order to match typical teardown convention.
11038 VmaDefragmentator::~VmaDefragmentator()
11040 for(
size_t i = m_Blocks.size(); i--; )
11042 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a candidate for moving. pChanged (may be null)
// is later set to VK_TRUE if the allocation actually gets relocated.
11046 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
11048 AllocationInfo allocInfo;
11049 allocInfo.m_hAllocation = hAlloc;
11050 allocInfo.m_pChanged = pChanged;
11051 m_Allocations.push_back(allocInfo);
// EnsureMapping: returns a CPU pointer to the block's memory, in priority
// order: (1) a mapping this defragmentator already created, (2) the block's
// existing persistent mapping, (3) a fresh Map() whose pointer is cached in
// m_pMappedDataForDefragmentation so Unmap() below can balance it.
11054 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
11057 if(m_pMappedDataForDefragmentation)
11059 *ppMappedData = m_pMappedDataForDefragmentation;
11064 if(m_pBlock->GetMappedData())
11066 *ppMappedData = m_pBlock->GetMappedData();
11071 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
11072 *ppMappedData = m_pMappedDataForDefragmentation;
// Unmap: undoes exactly one Map() reference, only if EnsureMapping created it.
11076 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
11078 if(m_pMappedDataForDefragmentation != VMA_NULL)
11080 m_pBlock->Unmap(hAllocator, 1);
// One round of defragmentation: walks allocations from the last block
// backwards (src) and tries to re-place each into an earlier block (dst) via
// memmove within mapped memory, respecting the maxBytesToMove /
// maxAllocationsToMove budgets (returns VK_INCOMPLETE when a move would
// exceed them). NOTE(review): extraction dropped many lines here (loop
// boundaries, early returns, several CreateAllocationRequest arguments) —
// treat the visible control flow as partial.
11084 VkResult VmaDefragmentator::DefragmentRound(
11085 VkDeviceSize maxBytesToMove,
11086 uint32_t maxAllocationsToMove)
11088 if(m_Blocks.empty())
// Start from the last allocation of the last block.
11093 size_t srcBlockIndex = m_Blocks.size() - 1;
11094 size_t srcAllocIndex = SIZE_MAX;
// Advance to the previous non-empty block when the current one is exhausted.
11100 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
11102 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
11105 if(srcBlockIndex == 0)
11112 srcAllocIndex = SIZE_MAX;
11117 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
11121 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
11122 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
11124 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
11125 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
11126 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
11127 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every block up to and including the source block as a destination.
11130 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
11132 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
11133 VmaAllocationRequest dstAllocRequest;
11134 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
11135 m_CurrentFrameIndex,
11136 m_pBlockVector->GetFrameInUseCount(),
11137 m_pBlockVector->GetBufferImageGranularity(),
11144 &dstAllocRequest) &&
11146 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
11148 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect the caller-imposed budgets before committing the move.
11151 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
11152 (m_BytesMoved + size > maxBytesToMove))
11154 return VK_INCOMPLETE;
11157 void* pDstMappedData = VMA_NULL;
11158 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
11159 if(res != VK_SUCCESS)
11164 void* pSrcMappedData = VMA_NULL;
11165 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
11166 if(res != VK_SUCCESS)
// Copy the allocation's bytes to its new home (both blocks are mapped).
11173 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
11174 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
11175 static_cast<size_t>(size));
// Re-write corruption-detection margins around the new location.
11177 if(VMA_DEBUG_MARGIN > 0)
11179 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
11180 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Commit: register at destination, free at source, repoint the allocation.
11183 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
11188 allocInfo.m_hAllocation);
11189 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
11191 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
11193 if(allocInfo.m_pChanged != VMA_NULL)
11195 *allocInfo.m_pChanged = VK_TRUE;
11198 ++m_AllocationsMoved;
11199 m_BytesMoved += size;
11201 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Step to the previous allocation / previous block for the next iteration.
11209 if(srcAllocIndex > 0)
11215 if(srcBlockIndex > 0)
11218 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block bookkeeping, distributes
// registered (non-lost) allocations to their owning blocks via binary search,
// sorts blocks/allocations into move order, runs up to two DefragmentRound
// passes, then unmaps everything that was mapped for the copies.
11228 VkResult VmaDefragmentator::Defragment(
11229 VkDeviceSize maxBytesToMove,
11230 uint32_t maxAllocationsToMove)
11232 if(m_Allocations.empty())
// One BlockInfo per block in the vector, then sorted by block pointer so the
// per-allocation lookup below can binary-search.
11238 const size_t blockCount = m_pBlockVector->m_Blocks.size();
11239 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11241 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
11242 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
11243 m_Blocks.push_back(pBlockInfo);
11247 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move each registered allocation into its owning block's list; lost
// allocations are skipped.
11250 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
11252 AllocationInfo& allocInfo = m_Allocations[blockIndex];
11254 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11256 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
11257 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
11258 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
11260 (*it)->m_Allocations.push_back(allocInfo);
11268 m_Allocations.clear();
// Precompute per-block move heuristics. (Note: "Descecnding" typo is in the
// upstream method name — do not "fix" it here without renaming the method.)
11270 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11272 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
11273 pBlockInfo->CalcHasNonMovableAllocations();
11274 pBlockInfo->SortAllocationsBySizeDescecnding();
11278 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most 2 rounds; stop early on VK_INCOMPLETE or failure.
11281 VkResult result = VK_SUCCESS;
11282 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
11284 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
11288 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11290 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic ordering test: a move "makes sense" when it relocates an
// allocation strictly towards the front of the pool — to an earlier block, or
// to a lower offset within the same block. The return statements for each
// branch are missing from this extraction; only the comparisons survive.
11296 bool VmaDefragmentator::MoveMakesSense(
11297 size_t dstBlockIndex, VkDeviceSize dstOffset,
11298 size_t srcBlockIndex, VkDeviceSize srcOffset)
11300 if(dstBlockIndex < srcBlockIndex)
11304 if(dstBlockIndex > srcBlockIndex)
11308 if(dstOffset < srcOffset)
// VmaRecorder (Windows-only call recorder, compiled under
// VMA_RECORDING_ENABLED): the constructor leaves the object inert; the Init
// fragment below captures the QPC frequency/start counter for timestamps,
// opens the output CSV file, and writes the format header ("1,3" is the
// recording file format version).
11318 #if VMA_RECORDING_ENABLED 11320 VmaRecorder::VmaRecorder() :
11325 m_StartCounter(INT64_MAX)
11331 m_UseMutex = useMutex;
11332 m_Flags = settings.
flags;
11334 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
11335 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
// fopen_s failure aborts initialization — no recording without a file.
11338 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
11341 return VK_ERROR_INITIALIZATION_FAILED;
11345 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
11346 fprintf(m_File,
"%s\n",
"1,3");
// Destructor: closes the recording file if one was opened (fclose call is in
// an extraction gap).
11351 VmaRecorder::~VmaRecorder()
11353 if(m_File != VMA_NULL)
// Recorder entries for allocator/pool lifetime events. All Record* methods
// share the same shape: capture thread id + timestamp, lock the file mutex,
// append one CSV line "threadId,time,frameIndex,functionName,args...".
11359 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
11361 CallParams callParams;
11362 GetBasicParams(callParams);
11364 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11365 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
11369 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
11371 CallParams callParams;
11372 GetBasicParams(callParams);
11374 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11375 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// RecordCreatePool: its signature line is lost to an extraction gap; the %p
// at the end of the format string records the resulting pool handle.
11381 CallParams callParams;
11382 GetBasicParams(callParams);
11384 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11385 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
11396 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
11398 CallParams callParams;
11399 GetBasicParams(callParams);
11401 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11402 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Recorder entries for the three memory-allocation entry points. Each logs
// the VkMemoryRequirements, the VmaAllocationCreateInfo fields, and the
// user-data string (UserDataString stringifies pUserData per the create
// flags).
11407 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
11408 const VkMemoryRequirements& vkMemReq,
11412 CallParams callParams;
11413 GetBasicParams(callParams);
11415 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11416 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11417 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11419 vkMemReq.alignment,
11420 vkMemReq.memoryTypeBits,
11428 userDataStr.GetString());
// Buffer variant additionally records the dedicated-allocation hints.
11432 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
11433 const VkMemoryRequirements& vkMemReq,
11434 bool requiresDedicatedAllocation,
11435 bool prefersDedicatedAllocation,
11439 CallParams callParams;
11440 GetBasicParams(callParams);
11442 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11443 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11444 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11446 vkMemReq.alignment,
11447 vkMemReq.memoryTypeBits,
11448 requiresDedicatedAllocation ? 1 : 0,
11449 prefersDedicatedAllocation ? 1 : 0,
11457 userDataStr.GetString());
// Image variant mirrors the buffer variant.
11461 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
11462 const VkMemoryRequirements& vkMemReq,
11463 bool requiresDedicatedAllocation,
11464 bool prefersDedicatedAllocation,
11468 CallParams callParams;
11469 GetBasicParams(callParams);
11471 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11472 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11473 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11475 vkMemReq.alignment,
11476 vkMemReq.memoryTypeBits,
11477 requiresDedicatedAllocation ? 1 : 0,
11478 prefersDedicatedAllocation ? 1 : 0,
11486 userDataStr.GetString());
// Recorder entries for free / user-data / lost-allocation / map / unmap /
// flush / invalidate. Same pattern throughout: timestamp, file-mutex lock,
// one CSV line (the trailing handle arguments sit in extraction gaps).
11490 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
11493 CallParams callParams;
11494 GetBasicParams(callParams);
11496 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11497 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11502 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
11504 const void* pUserData)
11506 CallParams callParams;
11507 GetBasicParams(callParams);
11509 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11510 UserDataString userDataStr(
11513 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11515 userDataStr.GetString());
11519 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
11522 CallParams callParams;
11523 GetBasicParams(callParams);
11525 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11526 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11531 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
11534 CallParams callParams;
11535 GetBasicParams(callParams);
11537 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11538 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11543 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
11546 CallParams callParams;
11547 GetBasicParams(callParams);
11549 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11550 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11555 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
11556 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11558 CallParams callParams;
11559 GetBasicParams(callParams);
11561 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11562 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
11569 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
11570 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11572 CallParams callParams;
11573 GetBasicParams(callParams);
11575 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11576 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Recorder entries for vmaCreateBuffer/vmaCreateImage: log every field of the
// Vk*CreateInfo plus the VmaAllocationCreateInfo, the pool handle, and the
// user-data string, as one CSV line each.
11583 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
11584 const VkBufferCreateInfo& bufCreateInfo,
11588 CallParams callParams;
11589 GetBasicParams(callParams);
11591 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11592 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11593 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11594 bufCreateInfo.flags,
11595 bufCreateInfo.size,
11596 bufCreateInfo.usage,
11597 bufCreateInfo.sharingMode,
11598 allocCreateInfo.
flags,
11599 allocCreateInfo.
usage,
11603 allocCreateInfo.
pool,
11605 userDataStr.GetString());
11609 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
11610 const VkImageCreateInfo& imageCreateInfo,
11614 CallParams callParams;
11615 GetBasicParams(callParams);
11617 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11618 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11619 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11620 imageCreateInfo.flags,
11621 imageCreateInfo.imageType,
11622 imageCreateInfo.format,
11623 imageCreateInfo.extent.width,
11624 imageCreateInfo.extent.height,
11625 imageCreateInfo.extent.depth,
11626 imageCreateInfo.mipLevels,
11627 imageCreateInfo.arrayLayers,
11628 imageCreateInfo.samples,
11629 imageCreateInfo.tiling,
11630 imageCreateInfo.usage,
11631 imageCreateInfo.sharingMode,
11632 imageCreateInfo.initialLayout,
11633 allocCreateInfo.
flags,
11634 allocCreateInfo.
usage,
11638 allocCreateInfo.
pool,
11640 userDataStr.GetString());
// Recorder entries for destroy/touch/get-info/make-lost, plus the tail of the
// UserDataString constructor: when the user data is a string (per the create
// flags, condition lost to extraction) it is used verbatim, otherwise the raw
// pointer value is formatted with sprintf_s.
11644 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
11647 CallParams callParams;
11648 GetBasicParams(callParams);
11650 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11651 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
11656 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
11659 CallParams callParams;
11660 GetBasicParams(callParams);
11662 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11663 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
11668 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
11671 CallParams callParams;
11672 GetBasicParams(callParams);
11674 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11675 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11680 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
11683 CallParams callParams;
11684 GetBasicParams(callParams);
11686 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11687 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
11692 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
11695 CallParams callParams;
11696 GetBasicParams(callParams);
11698 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11699 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// UserDataString fragment: string user data is referenced directly (not
// copied); pointer user data is rendered as "%p" into m_PtrStr.
11706 if(pUserData != VMA_NULL)
11710 m_Str = (
const char*)pUserData;
11714 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes a "Config,Begin" ... "Config,End" section to the recording file:
// physical-device identity and limits, the full memory heap/type topology,
// whether VK_KHR_dedicated_allocation is enabled, and the values of the
// compile-time VMA_DEBUG_* / size macros. This lets a replay tool reproduce
// the environment the recording was made in.
11724 void VmaRecorder::WriteConfiguration(
11725 const VkPhysicalDeviceProperties& devProps,
11726 const VkPhysicalDeviceMemoryProperties& memProps,
11727 bool dedicatedAllocationExtensionEnabled)
11729 fprintf(m_File,
"Config,Begin\n");
11731 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
11732 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
11733 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
11734 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
11735 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
11736 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
11738 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
11739 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
11740 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Heap topology: count, then size and flags per heap.
11742 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
11743 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
11745 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
11746 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory types: count, then heap index and property flags per type.
11748 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
11749 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
11751 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
11752 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
11755 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time configuration the replayer must match for identical behavior.
11757 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
11758 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
11759 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
11760 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
11761 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
11762 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
11763 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
11764 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
11765 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
11767 fprintf(m_File,
"Config,End\n");
// GetBasicParams: fills the per-call thread id and a seconds-since-start
// timestamp derived from QueryPerformanceCounter relative to m_StartCounter.
11770 void VmaRecorder::GetBasicParams(CallParams& outParams)
11772 outParams.threadId = GetCurrentThreadId();
11774 LARGE_INTEGER counter;
11775 QueryPerformanceCounter(&counter);
11776 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Flush: body (fflush of m_File, presumably gated on a flush-after-call flag)
// sits in an extraction gap.
11779 void VmaRecorder::Flush()
// VmaAllocator_T constructor (signature line lost to extraction; only the
// member-initializer list onward is visible). Zeroes all aggregate state,
// imports Vulkan function pointers, queries device/memory properties, applies
// per-heap size limits from pCreateInfo, creates one VmaBlockVector and one
// dedicated-allocation list per memory type, and optionally initializes the
// call recorder.
11787 #endif // #if VMA_RECORDING_ENABLED 11795 m_hDevice(pCreateInfo->device),
11796 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
11797 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
11798 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
11799 m_PreferredLargeHeapBlockSize(0),
11800 m_PhysicalDevice(pCreateInfo->physicalDevice),
11801 m_CurrentFrameIndex(0),
11802 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
11805 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32 magic values, so the margin must be a
// multiple of 4 bytes.
11808 if(VMA_DEBUG_DETECT_CORRUPTION)
11811 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Requesting the KHR-dedicated-allocation flag while the extension support
// was compiled out is a usage error.
11816 #if !(VMA_DEDICATED_ALLOCATION) 11819 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
11823 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
11824 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
11825 memset(&m_MemProps, 0,
sizeof(m_MemProps));
11827 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
11828 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
11830 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
11832 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
11843 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
11844 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Sanity-check that all granularity/alignment values are powers of two, as
// the suballocation math assumes.
11846 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
11847 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
11848 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
11849 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply optional user-provided per-heap size limits; also clamp the reported
// heap size so block-size heuristics respect the limit.
11856 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
11858 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
11859 if(limit != VK_WHOLE_SIZE)
11861 m_HeapSizeLimit[heapIndex] = limit;
11862 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
11864 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + one dedicated-allocation list per memory type.
11870 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11872 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
11874 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
11877 preferredBlockSize,
11880 GetBufferImageGranularity(),
11887 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
11894 VkResult res = VK_SUCCESS;
// Recording requested: create and initialize the recorder, dump the config,
// and log the creation. Without VMA_RECORDING_ENABLED this is a hard error.
11899 #if VMA_RECORDING_ENABLED 11900 m_pRecorder = vma_new(
this, VmaRecorder)();
11902 if(res != VK_SUCCESS)
11906 m_pRecorder->WriteConfiguration(
11907 m_PhysicalDeviceProperties,
11909 m_UseKhrDedicatedAllocation);
11910 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
11912 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
11913 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: logs and destroys the recorder (if enabled), asserts all custom
// pools were destroyed by the user, then tears down the per-memory-type
// dedicated-allocation lists and block vectors in reverse order.
11920 VmaAllocator_T::~VmaAllocator_T()
11922 #if VMA_RECORDING_ENABLED 11923 if(m_pRecorder != VMA_NULL)
11925 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
11926 vma_delete(
this, m_pRecorder);
11930 VMA_ASSERT(m_Pools.empty());
11932 for(
size_t i = GetMemoryTypeCount(); i--; )
11934 vma_delete(
this, m_pDedicatedAllocations[i]);
11935 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions in three layers:
//  1. statically linked entry points (when VMA_STATIC_VULKAN_FUNCTIONS == 1),
//     with the *2KHR functions fetched via vkGetDeviceProcAddr when the
//     dedicated-allocation extension is in use;
//  2. user-supplied overrides from pVulkanFunctions (null entries are kept);
//  3. asserts that every required pointer ended up non-null.
11939 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
11941 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11942 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
11943 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
11944 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
11945 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
11946 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
11947 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
11948 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
11949 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
11950 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
11951 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
11952 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
11953 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
11954 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
11955 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
11956 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
11957 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// Extension entry points have no static prototypes — resolve them at runtime.
11958 #if VMA_DEDICATED_ALLOCATION 11959 if(m_UseKhrDedicatedAllocation)
11961 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
11962 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
11963 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
11964 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Layer 2: copy each user-supplied pointer only when it is non-null, so user
// values override statics without erasing them.
11966 #endif // #if VMA_DEDICATED_ALLOCATION 11967 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11969 #define VMA_COPY_IF_NOT_NULL(funcName) \ 11970 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 11972 if(pVulkanFunctions != VMA_NULL)
11974 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
11975 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
11976 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
11977 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
11978 VMA_COPY_IF_NOT_NULL(vkMapMemory);
11979 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
11980 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
11981 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
11982 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
11983 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
11984 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
11985 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
11986 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
11987 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
11988 VMA_COPY_IF_NOT_NULL(vkCreateImage);
11989 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
11990 #if VMA_DEDICATED_ALLOCATION 11991 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
11992 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Layer 3: after statics + overrides, every required pointer must be valid.
11996 #undef VMA_COPY_IF_NOT_NULL 12000 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
12001 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
12002 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
12003 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
12004 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
12005 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
12006 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
12007 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
12008 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
12009 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
12010 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
12011 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
12012 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
12013 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
12014 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
12015 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
12016 #if VMA_DEDICATED_ALLOCATION 12017 if(m_UseKhrDedicatedAllocation)
12019 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
12020 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
12025 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
12027 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12028 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
12029 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
12030 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of a specific memory type: decides between a dedicated
// VkDeviceMemory and a suballocation from the type's block vector. Dedicated
// is preferred when forced by debug macro, explicitly requested, or the size
// exceeds half the preferred block size; the block-vector path is tried first
// otherwise, with dedicated as the fallback. NOTE(review): several arguments
// and the finalCreateInfo setup are lost to extraction gaps.
12033 VkResult VmaAllocator_T::AllocateMemoryOfType(
12035 VkDeviceSize alignment,
12036 bool dedicatedAllocation,
12037 VkBuffer dedicatedBuffer,
12038 VkImage dedicatedImage,
12040 uint32_t memTypeIndex,
12041 VmaSuballocationType suballocType,
12044 VMA_ASSERT(pAllocation != VMA_NULL);
12045 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapping-related flags are meaningless on non-HOST_VISIBLE memory — the
// (partially visible) condition strips/handles them here.
12051 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12056 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
12057 VMA_ASSERT(blockVector);
12059 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
12060 bool preferDedicatedMemory =
12061 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
12062 dedicatedAllocation ||
// Heuristic: very large requests go to their own VkDeviceMemory.
12064 size > preferredBlockSize / 2;
12066 if(preferDedicatedMemory &&
12068 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE combined with a dedicated requirement cannot be satisfied.
12077 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12081 return AllocateDedicatedMemory(
// Main path: suballocate from the block vector.
12095 VkResult res = blockVector->Allocate(
12097 m_CurrentFrameIndex.load(),
12103 if(res == VK_SUCCESS)
// NEVER_ALLOCATE forbids creating new blocks, so failure is final here.
12111 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12115 res = AllocateDedicatedMemory(
12121 finalCreateInfo.pUserData,
12125 if(res == VK_SUCCESS)
12128 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
12134 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Performs a dedicated (one VkDeviceMemory per allocation) allocation:
// fills VkMemoryAllocateInfo, optionally chains
// VkMemoryDedicatedAllocateInfoKHR for the given buffer/image, allocates,
// optionally maps persistently, then registers the allocation in the
// per-memory-type sorted dedicated-allocations vector.
// NOTE(review): lossy extraction — some lines missing; code kept byte-identical.
12141 VkResult VmaAllocator_T::AllocateDedicatedMemory(
12143 VmaSuballocationType suballocType,
12144 uint32_t memTypeIndex,
12146 bool isUserDataString,
12148 VkBuffer dedicatedBuffer,
12149 VkImage dedicatedImage,
12152 VMA_ASSERT(pAllocation);
12154 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12155 allocInfo.memoryTypeIndex = memTypeIndex;
12156 allocInfo.allocationSize = size;
// Chain the KHR dedicated-allocation struct when the extension is in use.
12158 #if VMA_DEDICATED_ALLOCATION 12159 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
12160 if(m_UseKhrDedicatedAllocation)
12162 if(dedicatedBuffer != VK_NULL_HANDLE)
12164 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
12165 dedicatedAllocInfo.buffer = dedicatedBuffer;
12166 allocInfo.pNext = &dedicatedAllocInfo;
12168 else if(dedicatedImage != VK_NULL_HANDLE)
12170 dedicatedAllocInfo.image = dedicatedImage;
12171 allocInfo.pNext = &dedicatedAllocInfo;
12174 #endif // #if VMA_DEDICATED_ALLOCATION 12177 VkDeviceMemory hMemory = VK_NULL_HANDLE;
12178 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
12181 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistent mapping path (presumably gated on the MAPPED flag — the
// condition line is missing from this extraction).
12185 void* pMappedData = VMA_NULL;
12188 res = (*m_VulkanFunctions.vkMapMemory)(
12197 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Mapping failed: release the freshly allocated memory before returning.
12198 FreeVulkanMemory(memTypeIndex, size, hMemory);
12203 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
12204 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
12205 (*pAllocation)->SetUserData(
this, pUserData);
12206 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12208 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register in the per-type dedicated-allocation list under its mutex.
12213 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12214 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12215 VMA_ASSERT(pDedicatedAllocations);
12216 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
12219 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. When VK_KHR_dedicated_allocation
// is enabled it uses vkGetBufferMemoryRequirements2KHR and reports the
// driver's requires/prefers-dedicated hints; otherwise it falls back to the
// core function and reports false for both.
// NOTE(review): lossy extraction — parameter and else/brace lines missing;
// code kept byte-identical.
12224 void VmaAllocator_T::GetBufferMemoryRequirements(
12226 VkMemoryRequirements& memReq,
12227 bool& requiresDedicatedAllocation,
12228 bool& prefersDedicatedAllocation)
const 12230 #if VMA_DEDICATED_ALLOCATION 12231 if(m_UseKhrDedicatedAllocation)
12233 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
12234 memReqInfo.buffer = hBuffer;
// Chain VkMemoryDedicatedRequirementsKHR to receive the dedicated hints.
12236 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12238 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12239 memReq2.pNext = &memDedicatedReq;
12241 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12243 memReq = memReq2.memoryRequirements;
12244 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12245 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query, no dedicated-allocation hints available.
12248 #endif // #if VMA_DEDICATED_ALLOCATION 12250 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
12251 requiresDedicatedAllocation =
false;
12252 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR + VkMemoryDedicatedRequirementsKHR when
// the KHR extension is enabled, else the core query with both dedicated
// flags reported as false.
// NOTE(review): lossy extraction — parameter and else/brace lines missing;
// code kept byte-identical.
12256 void VmaAllocator_T::GetImageMemoryRequirements(
12258 VkMemoryRequirements& memReq,
12259 bool& requiresDedicatedAllocation,
12260 bool& prefersDedicatedAllocation)
const 12262 #if VMA_DEDICATED_ALLOCATION 12263 if(m_UseKhrDedicatedAllocation)
12265 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
12266 memReqInfo.image = hImage;
12268 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12270 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12271 memReq2.pNext = &memDedicatedReq;
12273 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12275 memReq = memReq2.memoryRequirements;
12276 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12277 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query.
12280 #endif // #if VMA_DEDICATED_ALLOCATION 12282 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
12283 requiresDedicatedAllocation =
false;
12284 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates mutually exclusive create flags,
// routes pool allocations to the pool's block vector, otherwise iterates
// candidate memory types (best first), retrying with the next-best type when
// one fails.
// NOTE(review): lossy extraction — many lines (flag checks, find-type calls,
// braces) missing; code kept byte-identical.
12288 VkResult VmaAllocator_T::AllocateMemory(
12289 const VkMemoryRequirements& vkMemReq,
12290 bool requiresDedicatedAllocation,
12291 bool prefersDedicatedAllocation,
12292 VkBuffer dedicatedBuffer,
12293 VkImage dedicatedImage,
12295 VmaSuballocationType suballocType,
12298 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
// Invalid flag combinations are programmer errors: assert and bail out.
12303 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
12304 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12309 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
12310 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12312 if(requiresDedicatedAllocation)
12316 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
12317 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12319 if(createInfo.
pool != VK_NULL_HANDLE)
12321 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
12322 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12325 if((createInfo.
pool != VK_NULL_HANDLE) &&
12328 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
12329 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool path: delegate directly to the pool's block vector, honoring the
// pool memory type's minimum alignment.
12332 if(createInfo.
pool != VK_NULL_HANDLE)
12334 const VkDeviceSize alignmentForPool = VMA_MAX(
12335 vkMemReq.alignment,
12336 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
12337 return createInfo.
pool->m_BlockVector.Allocate(
12339 m_CurrentFrameIndex.load(),
// Default path: pick the best memory type, then retry with progressively
// worse types (clearing each failed bit) until one succeeds or none remain.
12349 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
12350 uint32_t memTypeIndex = UINT32_MAX;
12352 if(res == VK_SUCCESS)
12354 VkDeviceSize alignmentForMemType = VMA_MAX(
12355 vkMemReq.alignment,
12356 GetMemoryTypeMinAlignment(memTypeIndex));
12358 res = AllocateMemoryOfType(
12360 alignmentForMemType,
12361 requiresDedicatedAllocation || prefersDedicatedAllocation,
12369 if(res == VK_SUCCESS)
// Remove the failed memory type from the candidate mask and try again.
12379 memoryTypeBits &= ~(1u << memTypeIndex);
12382 if(res == VK_SUCCESS)
12384 alignmentForMemType = VMA_MAX(
12385 vkMemReq.alignment,
12386 GetMemoryTypeMinAlignment(memTypeIndex));
12388 res = AllocateMemoryOfType(
12390 alignmentForMemType,
12391 requiresDedicatedAllocation || prefersDedicatedAllocation,
12399 if(res == VK_SUCCESS)
12409 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: optionally overwrites its contents with the
// "destroyed" debug pattern, returns it to the owning block vector (pool or
// default) or frees the dedicated VkDeviceMemory, then clears user data and
// destroys the allocation object itself.
// NOTE(review): lossy extraction — braces and some lines missing; code kept
// byte-identical.
12420 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
12422 VMA_ASSERT(allocation);
// TouchAllocation also tells us the allocation is not lost; only then is
// it safe to touch its memory.
12424 if(TouchAllocation(allocation))
12426 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12428 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
12431 switch(allocation->GetType())
12433 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12435 VmaBlockVector* pBlockVector = VMA_NULL;
12436 VmaPool hPool = allocation->GetPool();
12437 if(hPool != VK_NULL_HANDLE)
12439 pBlockVector = &hPool->m_BlockVector;
12443 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12444 pBlockVector = m_pBlockVectors[memTypeIndex];
12446 pBlockVector->Free(allocation);
12449 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12450 FreeDedicatedMemory(allocation);
12457 allocation->SetUserData(
this, VMA_NULL);
12458 vma_delete(
this, allocation);
// Aggregates statistics over all memory: zero-initializes the output, adds
// stats from the default block vectors, all custom pools, and every
// dedicated allocation, then post-processes totals and per-type/per-heap
// entries (averages etc.).
// NOTE(review): lossy extraction — braces/init lines missing; code kept
// byte-identical.
12461 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
12464 InitStatInfo(pStats->
total);
12465 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
12467 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default (non-pool) block vectors, one per memory type.
12471 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12473 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12474 VMA_ASSERT(pBlockVector);
12475 pBlockVector->AddStats(pStats);
// Custom pools, guarded by the pools mutex.
12480 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12481 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12483 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type, each under its own mutex.
12488 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12490 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12491 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12492 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12493 VMA_ASSERT(pDedicatedAllocVector);
12494 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
12497 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
12498 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12499 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12500 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Post-process: compute derived values (e.g. averages) for every entry.
12505 VmaPostprocessCalcStatInfo(pStats->
total);
12506 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
12507 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
12508 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
12509 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
12512 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Defragments the given allocations: collects eligible ones (block-type,
// HOST_VISIBLE|HOST_COHERENT, not lost) into per-block-vector defragmentators,
// runs defragmentation over default vectors and pools under move limits from
// pDefragmentationInfo, then destroys the defragmentators in reverse order.
// NOTE(review): lossy extraction — braces and some statements missing; code
// kept byte-identical.
12514 VkResult VmaAllocator_T::Defragment(
12516 size_t allocationCount,
12517 VkBool32* pAllocationsChanged,
// Zero the optional output arrays up front.
12521 if(pAllocationsChanged != VMA_NULL)
12523 memset(pAllocationsChanged, 0, allocationCount *
sizeof(VkBool32));
12525 if(pDefragmentationStats != VMA_NULL)
12527 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
12530 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
12532 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
12534 const size_t poolCount = m_Pools.size();
// Phase 1: register each eligible allocation with a defragmentator.
12537 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12540 VMA_ASSERT(hAlloc);
12541 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only HOST_VISIBLE + HOST_COHERENT memory can be moved by CPU memcpy.
12543 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12544 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
12546 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
12548 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
12550 VmaBlockVector* pAllocBlockVector = VMA_NULL;
12552 const VmaPool hAllocPool = hAlloc->GetPool();
12554 if(hAllocPool != VK_NULL_HANDLE)
// Only pools with the default (non-linear/non-buddy) algorithm qualify.
12557 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
12559 pAllocBlockVector = &hAllocPool->m_BlockVector;
12565 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
12568 if(pAllocBlockVector != VMA_NULL)
12570 VmaDefragmentator*
const pDefragmentator =
12571 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
12572 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
12573 &pAllocationsChanged[allocIndex] : VMA_NULL;
12574 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Phase 2: run defragmentation within the configured move budgets.
12579 VkResult result = VK_SUCCESS;
12583 VkDeviceSize maxBytesToMove = SIZE_MAX;
12584 uint32_t maxAllocationsToMove = UINT32_MAX;
12585 if(pDefragmentationInfo != VMA_NULL)
12592 for(uint32_t memTypeIndex = 0;
12593 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
12597 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12599 result = m_pBlockVectors[memTypeIndex]->Defragment(
12600 pDefragmentationStats,
12602 maxAllocationsToMove);
12607 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
12609 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
12610 pDefragmentationStats,
12612 maxAllocationsToMove);
// Phase 3: tear down defragmentators (reverse order of creation).
12618 for(
size_t poolIndex = poolCount; poolIndex--; )
12620 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
12624 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
12626 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12628 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Appears to be the body of VmaAllocator_T::GetAllocationInfo — the function
// header line is missing from this extraction (TODO confirm). Fills
// VmaAllocationInfo; for can-become-lost allocations it spins on a
// compare-exchange of the last-use frame index so the allocation is marked
// used this frame, and reports zeroed memory fields if it is already lost.
// NOTE(review): lossy extraction; code kept byte-identical.
12637 if(hAllocation->CanBecomeLost())
12643 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12644 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: report size/user data but no usable memory/offset.
12647 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12651 pAllocationInfo->
offset = 0;
12652 pAllocationInfo->
size = hAllocation->GetSize();
12654 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report the live parameters.
12657 else if(localLastUseFrameIndex == localCurrFrameIndex)
12659 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12660 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12661 pAllocationInfo->
offset = hAllocation->GetOffset();
12662 pAllocationInfo->
size = hAllocation->GetSize();
12664 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise CAS the frame index forward and retry the classification.
12669 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12671 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocation: in stats builds, still bump the last-use frame
// index so usage shows up in statistics.
12678 #if VMA_STATS_STRING_ENABLED 12679 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12680 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12683 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12684 if(localLastUseFrameIndex == localCurrFrameIndex)
12690 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12692 localLastUseFrameIndex = localCurrFrameIndex;
12698 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12699 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12700 pAllocationInfo->
offset = hAllocation->GetOffset();
12701 pAllocationInfo->
size = hAllocation->GetSize();
12702 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
12703 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. For can-become-lost
// allocations, returns false if it is already lost, true once the last-use
// frame index has been advanced via compare-exchange; otherwise (cannot be
// lost) the stats build still bumps the frame index for bookkeeping.
// NOTE(review): lossy extraction — return statements/braces missing; code
// kept byte-identical.
12707 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
12710 if(hAllocation->CanBecomeLost())
12712 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12713 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12716 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12720 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS loop: advance the frame index; on failure reread and retry.
12726 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12728 localLastUseFrameIndex = localCurrFrameIndex;
12735 #if VMA_STATS_STRING_ENABLED 12736 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12737 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12740 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12741 if(localLastUseFrameIndex == localCurrFrameIndex)
12747 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12749 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool — the signature line is missing from
// this extraction (TODO confirm). Creates a custom pool: validates the
// create info, computes the preferred block size, constructs the pool,
// creates its minimum blocks, and registers it (with a fresh id) in the
// sorted pool list under the pools mutex.
// NOTE(review): lossy extraction; code kept byte-identical.
12761 VMA_DEBUG_LOG(
"CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
12771 return VK_ERROR_INITIALIZATION_FAILED;
12774 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
12776 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
12778 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
12779 if(res != VK_SUCCESS)
// Roll back pool construction on failure.
12781 vma_delete(
this, *pPool);
12788 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12789 (*pPool)->SetId(m_NextPoolId++);
12790 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Destroys a custom pool: removes it from the sorted pool list (under the
// pools mutex) and deletes the pool object.
// NOTE(review): lossy extraction — scope braces missing; code kept
// byte-identical.
12796 void VmaAllocator_T::DestroyPool(
VmaPool pool)
12800 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12801 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
12802 VMA_ASSERT(success &&
"Pool not found in Allocator.");
12805 vma_delete(
this, pool);
12810 pool->m_BlockVector.GetPoolStats(pPoolStats);
12813 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
12815 m_CurrentFrameIndex.store(frameIndex);
// Forces eligible allocations in the given pool into the lost state,
// delegating to the pool's block vector with the current frame index.
// NOTE(review): lossy extraction — the pool parameter line is missing;
// code kept byte-identical.
12818 void VmaAllocator_T::MakePoolAllocationsLost(
12820 size_t* pLostAllocationCount)
12822 hPool->m_BlockVector.MakePoolAllocationsLost(
12823 m_CurrentFrameIndex.load(),
12824 pLostAllocationCount);
12827 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
12829 return hPool->m_BlockVector.CheckCorruption();
// Checks corruption across all default block vectors whose memory type is in
// memoryTypeBits, and all custom pools. Starts from FEATURE_NOT_PRESENT and
// upgrades to VK_SUCCESS once any vector supports the check.
// NOTE(review): lossy extraction — switch bodies partly missing; code kept
// byte-identical.
12832 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
12834 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default (per-memory-type) block vectors.
12837 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12839 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
12841 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12842 VMA_ASSERT(pBlockVector);
12843 VkResult localRes = pBlockVector->CheckCorruption();
12846 case VK_ERROR_FEATURE_NOT_PRESENT:
12849 finalRes = VK_SUCCESS;
// Custom pools, filtered by their memory type bit, under the pools mutex.
12859 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12860 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12862 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
12864 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
12867 case VK_ERROR_FEATURE_NOT_PRESENT:
12870 finalRes = VK_SUCCESS;
12882 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
12884 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
12885 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory: when the heap has an artificial size limit, the
// allocation is charged against the remaining budget under the limit mutex
// (failing with OUT_OF_DEVICE_MEMORY when it would exceed it); on success the
// user's informative pfnAllocate callback is invoked.
// NOTE(review): lossy extraction — result declaration/return and else
// branches missing; code kept byte-identical.
12888 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
12890 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Limited heap: check and update the remaining budget atomically.
12893 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12895 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12896 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
12898 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
12899 if(res == VK_SUCCESS)
12901 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
12906 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited heap: plain Vulkan call.
12911 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
12914 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
12916 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
12922 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
12924 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
12926 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
12929 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
12931 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
12932 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12934 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12935 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Lost-capable allocations cannot be
// mapped. Block allocations map the whole block (ref-counted) and offset the
// returned pointer; dedicated allocations map their own VkDeviceMemory.
// NOTE(review): lossy extraction — default case/braces missing; code kept
// byte-identical.
12939 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
12941 if(hAllocation->CanBecomeLost())
12943 return VK_ERROR_MEMORY_MAP_FAILED;
12946 switch(hAllocation->GetType())
12948 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12950 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12951 char *pBytes = VMA_NULL;
12952 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
12953 if(res == VK_SUCCESS)
// Block mapping returns the block base; add this allocation's offset
// and bump its per-allocation map counter.
12955 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
12956 hAllocation->BlockAllocMap();
12960 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12961 return hAllocation->DedicatedAllocMap(
this, ppData);
12964 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap — the signature line is missing from this
// extraction (TODO confirm). Reverses Map(): decrements the allocation's map
// counter and unmaps the owning block or the dedicated memory.
// NOTE(review): lossy extraction; code kept byte-identical.
12970 switch(hAllocation->GetType())
12972 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12974 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12975 hAllocation->BlockAllocUnmap();
12976 pBlock->Unmap(
this, 1);
12979 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12980 hAllocation->DedicatedAllocUnmap(
this);
// Binds a buffer to the allocation's memory: dedicated allocations call
// vkBindBufferMemory directly; block allocations delegate to the block so it
// can apply the allocation's offset (and synchronize).
// NOTE(review): lossy extraction — some call arguments/default case missing;
// code kept byte-identical.
12987 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
12989 VkResult res = VK_SUCCESS;
12990 switch(hAllocation->GetType())
12992 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12993 res = GetVulkanFunctions().vkBindBufferMemory(
12996 hAllocation->GetMemory(),
12999 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13001 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13002 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
13003 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: vkBindImageMemory for dedicated
// allocations, block-level bind (with offset) for block allocations.
// NOTE(review): lossy extraction — some call arguments/default case missing;
// code kept byte-identical.
13012 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
13014 VkResult res = VK_SUCCESS;
13015 switch(hAllocation->GetType())
13017 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13018 res = GetVulkanFunctions().vkBindImageMemory(
13021 hAllocation->GetMemory(),
13024 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13026 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13027 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
13028 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a sub-range of an allocation on non-coherent
// memory. The range is expanded to nonCoherentAtomSize boundaries as the
// Vulkan spec requires, and for block allocations shifted by the
// allocation's offset within the block and clamped to the block size.
// No-op for coherent memory types or size == 0.
// NOTE(review): lossy extraction — braces/parameter line/default cases
// missing; code kept byte-identical.
13037 void VmaAllocator_T::FlushOrInvalidateAllocation(
13039 VkDeviceSize offset, VkDeviceSize size,
13040 VMA_CACHE_OPERATION op)
13042 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
13043 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
13045 const VkDeviceSize allocationSize = hAllocation->GetSize();
13046 VMA_ASSERT(offset <= allocationSize);
13048 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13050 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13051 memRange.memory = hAllocation->GetMemory();
13053 switch(hAllocation->GetType())
13055 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Align the range outward to atom boundaries, clamped to the allocation.
13056 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13057 if(size == VK_WHOLE_SIZE)
13059 memRange.size = allocationSize - memRange.offset;
13063 VMA_ASSERT(offset + size <= allocationSize);
13064 memRange.size = VMA_MIN(
13065 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
13066 allocationSize - memRange.offset);
13070 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// 1. Compute the aligned range relative to this allocation.
13073 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13074 if(size == VK_WHOLE_SIZE)
13076 size = allocationSize - offset;
13080 VMA_ASSERT(offset + size <= allocationSize);
13082 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// 2. Shift by the allocation's offset inside the block and clamp to
// the block's total size.
13085 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
13086 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
13087 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
13088 memRange.offset += allocationOffset;
13089 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch the actual Vulkan cache operation.
13100 case VMA_CACHE_FLUSH:
13101 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
13103 case VMA_CACHE_INVALIDATE:
13104 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Releases a dedicated allocation: removes it from the per-type sorted
// dedicated-allocations vector (under that type's mutex) and frees its
// VkDeviceMemory via FreeVulkanMemory.
// NOTE(review): lossy extraction — lines between 13126 and 13138 (presumably
// unmapping of persistently mapped memory — TODO confirm) are missing; code
// kept byte-identical.
13113 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
13115 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
13117 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
13119 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13120 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
13121 VMA_ASSERT(pDedicatedAllocations);
13122 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
13123 VMA_ASSERT(success);
13126 VkDeviceMemory hMemory = allocation->GetMemory();
13138 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
13140 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills a (non-lost, HOST_VISIBLE) allocation with a byte
// pattern by mapping it, memset-ing, flushing, and unmapping. Asserts when
// VMA_DEBUG_INITIALIZE_ALLOCATIONS is on but the memory cannot be mapped.
// NOTE(review): lossy extraction — braces/else line missing; code kept
// byte-identical.
13143 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
13145 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
13146 !hAllocation->CanBecomeLost() &&
13147 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13149 void* pData = VMA_NULL;
13150 VkResult res = Map(hAllocation, &pData);
13151 if(res == VK_SUCCESS)
13153 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush so the pattern is visible even on non-coherent memory.
13154 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
13155 Unmap(hAllocation);
13159 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Writes a detailed JSON map of all memory: dedicated allocations grouped by
// memory type, default block vectors ("DefaultPools"), and custom pools
// keyed by pool id. Section objects are only opened lazily when there is
// content to print.
// NOTE(review): lossy extraction — braces/EndObject lines missing; code kept
// byte-identical.
13164 #if VMA_STATS_STRING_ENABLED 13166 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
13168 bool dedicatedAllocationsStarted =
false;
13169 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13171 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13172 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
13173 VMA_ASSERT(pDedicatedAllocVector);
13174 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object the first time we find content.
13176 if(dedicatedAllocationsStarted ==
false)
13178 dedicatedAllocationsStarted =
true;
13179 json.WriteString(
"DedicatedAllocations");
13180 json.BeginObject();
13183 json.BeginString(
"Type ");
13184 json.ContinueString(memTypeIndex);
13189 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
13191 json.BeginObject(
true);
13193 hAlloc->PrintParameters(json);
13200 if(dedicatedAllocationsStarted)
// Default (non-pool) block vectors, one JSON entry per memory type.
13206 bool allocationsStarted =
false;
13207 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13209 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
13211 if(allocationsStarted ==
false)
13213 allocationsStarted =
true;
13214 json.WriteString(
"DefaultPools");
13215 json.BeginObject();
13218 json.BeginString(
"Type ");
13219 json.ContinueString(memTypeIndex);
13222 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
13225 if(allocationsStarted)
// Custom pools, keyed by pool id, under the pools mutex.
13233 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13234 const size_t poolCount = m_Pools.size();
13237 json.WriteString(
"Pools");
13238 json.BeginObject();
13239 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13241 json.BeginString();
13242 json.ContinueString(m_Pools[poolIndex]->GetId());
13245 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Body of vmaCreateAllocator — the public signature lines are missing from
// this extraction (TODO confirm). Presumably constructs the VmaAllocator_T
// (construction line dropped) and runs its Init().
13252 #endif // #if VMA_STATS_STRING_ENABLED 13261 VMA_ASSERT(pCreateInfo && pAllocator);
13262 VMA_DEBUG_LOG(
"vmaCreateAllocator");
13264 return (*pAllocator)->Init(pCreateInfo);
// Body of vmaDestroyAllocator — signature line missing from this extraction
// (TODO confirm). Copies the allocation callbacks out of the allocator
// before deleting it, since vma_delete needs them after the object is gone.
13270 if(allocator != VK_NULL_HANDLE)
13272 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
13273 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
13274 vma_delete(&allocationCallbacks, allocator);
// vmaGetPhysicalDeviceProperties (leading signature lines missing from this
// extraction): returns a pointer to the allocator's cached
// VkPhysicalDeviceProperties.
13280 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
13282 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
13283 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties (leading signature lines missing from this
// extraction): returns a pointer to the allocator's cached
// VkPhysicalDeviceMemoryProperties.
13288 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
13290 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
13291 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties (leading signature lines missing from this
// extraction): validates the index and returns the memory type's property
// flags from the cached memory properties.
13296 uint32_t memoryTypeIndex,
13297 VkMemoryPropertyFlags* pFlags)
13299 VMA_ASSERT(allocator && pFlags);
13300 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
13301 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex (leading signature line missing from this
// extraction): forwards the frame index to the allocator under the global
// debug mutex. VMA_FRAME_INDEX_LOST is reserved and rejected.
13306 uint32_t frameIndex)
13308 VMA_ASSERT(allocator);
13309 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
13311 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13313 allocator->SetCurrentFrameIndex(frameIndex);
// vmaCalculateStats (signature lines missing from this extraction): thin
// wrapper delegating to VmaAllocator_T::CalculateStats under the global
// debug mutex.
13320 VMA_ASSERT(allocator && pStats);
13321 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13322 allocator->CalculateStats(pStats);
// vmaBuildStatsString: serializes global statistics to a JSON string —
// totals, then per-heap size/flags/stats with nested per-type flags/stats,
// plus the detailed map when detailedMap is VK_TRUE. The string is
// vma_new_array-allocated and must be released with vmaFreeStatsString.
// NOTE(review): lossy extraction — braces/End* lines missing; code kept
// byte-identical.
13325 #if VMA_STATS_STRING_ENABLED 13329 char** ppStatsString,
13330 VkBool32 detailedMap)
13332 VMA_ASSERT(allocator && ppStatsString);
13333 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13335 VmaStringBuilder sb(allocator);
13337 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
13338 json.BeginObject();
13341 allocator->CalculateStats(&stats);
13343 json.WriteString(
"Total");
13344 VmaPrintStatInfo(json, stats.
total);
// Per-heap section, with per-type subsections.
13346 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
13348 json.BeginString(
"Heap ");
13349 json.ContinueString(heapIndex);
13351 json.BeginObject();
13353 json.WriteString(
"Size");
13354 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
13356 json.WriteString(
"Flags");
13357 json.BeginArray(
true);
13358 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
13360 json.WriteString(
"DEVICE_LOCAL");
13366 json.WriteString(
"Stats");
13367 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Memory types belonging to this heap.
13370 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
13372 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
13374 json.BeginString(
"Type ");
13375 json.ContinueString(typeIndex);
13378 json.BeginObject();
13380 json.WriteString(
"Flags");
13381 json.BeginArray(
true);
13382 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
13383 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
13385 json.WriteString(
"DEVICE_LOCAL");
13387 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13389 json.WriteString(
"HOST_VISIBLE");
13391 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
13393 json.WriteString(
"HOST_COHERENT");
13395 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
13397 json.WriteString(
"HOST_CACHED");
13399 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
13401 json.WriteString(
"LAZILY_ALLOCATED");
13407 json.WriteString(
"Stats");
13408 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
13417 if(detailedMap == VK_TRUE)
13419 allocator->PrintDetailedMap(json);
// Copy the built JSON into a NUL-terminated, caller-owned buffer.
13425 const size_t len = sb.GetLength();
13426 char*
const pChars = vma_new_array(allocator,
char, len + 1);
13429 memcpy(pChars, sb.GetData(), len);
13431 pChars[len] =
'\0';
13432 *ppStatsString = pChars;
13437 char* pStatsString)
13439 if(pStatsString != VMA_NULL)
13441 VMA_ASSERT(allocator);
13442 size_t len = strlen(pStatsString);
13443 vma_delete_array(allocator, pStatsString, len + 1);
13447 #endif // #if VMA_STATS_STRING_ENABLED 13454 uint32_t memoryTypeBits,
13456 uint32_t* pMemoryTypeIndex)
13458 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13459 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13460 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13467 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
13468 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
13473 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13477 switch(pAllocationCreateInfo->
usage)
13482 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13484 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13488 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13491 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13492 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13494 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13498 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13499 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
13505 *pMemoryTypeIndex = UINT32_MAX;
13506 uint32_t minCost = UINT32_MAX;
13507 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
13508 memTypeIndex < allocator->GetMemoryTypeCount();
13509 ++memTypeIndex, memTypeBit <<= 1)
13512 if((memTypeBit & memoryTypeBits) != 0)
13514 const VkMemoryPropertyFlags currFlags =
13515 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
13517 if((requiredFlags & ~currFlags) == 0)
13520 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
13522 if(currCost < minCost)
13524 *pMemoryTypeIndex = memTypeIndex;
13529 minCost = currCost;
13534 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
13539 const VkBufferCreateInfo* pBufferCreateInfo,
13541 uint32_t* pMemoryTypeIndex)
13543 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13544 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
13545 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13546 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13548 const VkDevice hDev = allocator->m_hDevice;
13549 VkBuffer hBuffer = VK_NULL_HANDLE;
13550 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
13551 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
13552 if(res == VK_SUCCESS)
13554 VkMemoryRequirements memReq = {};
13555 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
13556 hDev, hBuffer, &memReq);
13560 memReq.memoryTypeBits,
13561 pAllocationCreateInfo,
13564 allocator->GetVulkanFunctions().vkDestroyBuffer(
13565 hDev, hBuffer, allocator->GetAllocationCallbacks());
13572 const VkImageCreateInfo* pImageCreateInfo,
13574 uint32_t* pMemoryTypeIndex)
13576 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13577 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
13578 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13579 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13581 const VkDevice hDev = allocator->m_hDevice;
13582 VkImage hImage = VK_NULL_HANDLE;
13583 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
13584 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
13585 if(res == VK_SUCCESS)
13587 VkMemoryRequirements memReq = {};
13588 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
13589 hDev, hImage, &memReq);
13593 memReq.memoryTypeBits,
13594 pAllocationCreateInfo,
13597 allocator->GetVulkanFunctions().vkDestroyImage(
13598 hDev, hImage, allocator->GetAllocationCallbacks());
13608 VMA_ASSERT(allocator && pCreateInfo && pPool);
13610 VMA_DEBUG_LOG(
"vmaCreatePool");
13612 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13614 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
13616 #if VMA_RECORDING_ENABLED 13617 if(allocator->GetRecorder() != VMA_NULL)
13619 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
13630 VMA_ASSERT(allocator);
13632 if(pool == VK_NULL_HANDLE)
13637 VMA_DEBUG_LOG(
"vmaDestroyPool");
13639 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13641 #if VMA_RECORDING_ENABLED 13642 if(allocator->GetRecorder() != VMA_NULL)
13644 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
13648 allocator->DestroyPool(pool);
13656 VMA_ASSERT(allocator && pool && pPoolStats);
13658 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13660 allocator->GetPoolStats(pool, pPoolStats);
13666 size_t* pLostAllocationCount)
13668 VMA_ASSERT(allocator && pool);
13670 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13672 #if VMA_RECORDING_ENABLED 13673 if(allocator->GetRecorder() != VMA_NULL)
13675 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
13679 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
13684 VMA_ASSERT(allocator && pool);
13686 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13688 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
13690 return allocator->CheckPoolCorruption(pool);
13695 const VkMemoryRequirements* pVkMemoryRequirements,
13700 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
13702 VMA_DEBUG_LOG(
"vmaAllocateMemory");
13704 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13706 VkResult result = allocator->AllocateMemory(
13707 *pVkMemoryRequirements,
13713 VMA_SUBALLOCATION_TYPE_UNKNOWN,
13716 #if VMA_RECORDING_ENABLED 13717 if(allocator->GetRecorder() != VMA_NULL)
13719 allocator->GetRecorder()->RecordAllocateMemory(
13720 allocator->GetCurrentFrameIndex(),
13721 *pVkMemoryRequirements,
13727 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
13729 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13742 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13744 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
13746 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13748 VkMemoryRequirements vkMemReq = {};
13749 bool requiresDedicatedAllocation =
false;
13750 bool prefersDedicatedAllocation =
false;
13751 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
13752 requiresDedicatedAllocation,
13753 prefersDedicatedAllocation);
13755 VkResult result = allocator->AllocateMemory(
13757 requiresDedicatedAllocation,
13758 prefersDedicatedAllocation,
13762 VMA_SUBALLOCATION_TYPE_BUFFER,
13765 #if VMA_RECORDING_ENABLED 13766 if(allocator->GetRecorder() != VMA_NULL)
13768 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
13769 allocator->GetCurrentFrameIndex(),
13771 requiresDedicatedAllocation,
13772 prefersDedicatedAllocation,
13778 if(pAllocationInfo && result == VK_SUCCESS)
13780 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13793 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13795 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
13797 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13799 VkMemoryRequirements vkMemReq = {};
13800 bool requiresDedicatedAllocation =
false;
13801 bool prefersDedicatedAllocation =
false;
13802 allocator->GetImageMemoryRequirements(image, vkMemReq,
13803 requiresDedicatedAllocation, prefersDedicatedAllocation);
13805 VkResult result = allocator->AllocateMemory(
13807 requiresDedicatedAllocation,
13808 prefersDedicatedAllocation,
13812 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
13815 #if VMA_RECORDING_ENABLED 13816 if(allocator->GetRecorder() != VMA_NULL)
13818 allocator->GetRecorder()->RecordAllocateMemoryForImage(
13819 allocator->GetCurrentFrameIndex(),
13821 requiresDedicatedAllocation,
13822 prefersDedicatedAllocation,
13828 if(pAllocationInfo && result == VK_SUCCESS)
13830 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13840 VMA_ASSERT(allocator);
13842 if(allocation == VK_NULL_HANDLE)
13847 VMA_DEBUG_LOG(
"vmaFreeMemory");
13849 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13851 #if VMA_RECORDING_ENABLED 13852 if(allocator->GetRecorder() != VMA_NULL)
13854 allocator->GetRecorder()->RecordFreeMemory(
13855 allocator->GetCurrentFrameIndex(),
13860 allocator->FreeMemory(allocation);
13868 VMA_ASSERT(allocator && allocation && pAllocationInfo);
13870 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13872 #if VMA_RECORDING_ENABLED 13873 if(allocator->GetRecorder() != VMA_NULL)
13875 allocator->GetRecorder()->RecordGetAllocationInfo(
13876 allocator->GetCurrentFrameIndex(),
13881 allocator->GetAllocationInfo(allocation, pAllocationInfo);
13888 VMA_ASSERT(allocator && allocation);
13890 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13892 #if VMA_RECORDING_ENABLED 13893 if(allocator->GetRecorder() != VMA_NULL)
13895 allocator->GetRecorder()->RecordTouchAllocation(
13896 allocator->GetCurrentFrameIndex(),
13901 return allocator->TouchAllocation(allocation);
13909 VMA_ASSERT(allocator && allocation);
13911 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13913 allocation->SetUserData(allocator, pUserData);
13915 #if VMA_RECORDING_ENABLED 13916 if(allocator->GetRecorder() != VMA_NULL)
13918 allocator->GetRecorder()->RecordSetAllocationUserData(
13919 allocator->GetCurrentFrameIndex(),
13930 VMA_ASSERT(allocator && pAllocation);
13932 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
13934 allocator->CreateLostAllocation(pAllocation);
13936 #if VMA_RECORDING_ENABLED 13937 if(allocator->GetRecorder() != VMA_NULL)
13939 allocator->GetRecorder()->RecordCreateLostAllocation(
13940 allocator->GetCurrentFrameIndex(),
13951 VMA_ASSERT(allocator && allocation && ppData);
13953 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13955 VkResult res = allocator->Map(allocation, ppData);
13957 #if VMA_RECORDING_ENABLED 13958 if(allocator->GetRecorder() != VMA_NULL)
13960 allocator->GetRecorder()->RecordMapMemory(
13961 allocator->GetCurrentFrameIndex(),
13973 VMA_ASSERT(allocator && allocation);
13975 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13977 #if VMA_RECORDING_ENABLED 13978 if(allocator->GetRecorder() != VMA_NULL)
13980 allocator->GetRecorder()->RecordUnmapMemory(
13981 allocator->GetCurrentFrameIndex(),
13986 allocator->Unmap(allocation);
13991 VMA_ASSERT(allocator && allocation);
13993 VMA_DEBUG_LOG(
"vmaFlushAllocation");
13995 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13997 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
13999 #if VMA_RECORDING_ENABLED 14000 if(allocator->GetRecorder() != VMA_NULL)
14002 allocator->GetRecorder()->RecordFlushAllocation(
14003 allocator->GetCurrentFrameIndex(),
14004 allocation, offset, size);
14011 VMA_ASSERT(allocator && allocation);
14013 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
14015 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14017 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
14019 #if VMA_RECORDING_ENABLED 14020 if(allocator->GetRecorder() != VMA_NULL)
14022 allocator->GetRecorder()->RecordInvalidateAllocation(
14023 allocator->GetCurrentFrameIndex(),
14024 allocation, offset, size);
14031 VMA_ASSERT(allocator);
14033 VMA_DEBUG_LOG(
"vmaCheckCorruption");
14035 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14037 return allocator->CheckCorruption(memoryTypeBits);
14043 size_t allocationCount,
14044 VkBool32* pAllocationsChanged,
14048 VMA_ASSERT(allocator && pAllocations);
14050 VMA_DEBUG_LOG(
"vmaDefragment");
14052 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14054 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
14062 VMA_ASSERT(allocator && allocation && buffer);
14064 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
14066 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14068 return allocator->BindBufferMemory(allocation, buffer);
14076 VMA_ASSERT(allocator && allocation && image);
14078 VMA_DEBUG_LOG(
"vmaBindImageMemory");
14080 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14082 return allocator->BindImageMemory(allocation, image);
14087 const VkBufferCreateInfo* pBufferCreateInfo,
14093 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
14095 VMA_DEBUG_LOG(
"vmaCreateBuffer");
14097 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14099 *pBuffer = VK_NULL_HANDLE;
14100 *pAllocation = VK_NULL_HANDLE;
14103 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
14104 allocator->m_hDevice,
14106 allocator->GetAllocationCallbacks(),
14111 VkMemoryRequirements vkMemReq = {};
14112 bool requiresDedicatedAllocation =
false;
14113 bool prefersDedicatedAllocation =
false;
14114 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
14115 requiresDedicatedAllocation, prefersDedicatedAllocation);
14119 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
14121 VMA_ASSERT(vkMemReq.alignment %
14122 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
14124 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
14126 VMA_ASSERT(vkMemReq.alignment %
14127 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
14129 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
14131 VMA_ASSERT(vkMemReq.alignment %
14132 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
14136 res = allocator->AllocateMemory(
14138 requiresDedicatedAllocation,
14139 prefersDedicatedAllocation,
14142 *pAllocationCreateInfo,
14143 VMA_SUBALLOCATION_TYPE_BUFFER,
14146 #if VMA_RECORDING_ENABLED 14147 if(allocator->GetRecorder() != VMA_NULL)
14149 allocator->GetRecorder()->RecordCreateBuffer(
14150 allocator->GetCurrentFrameIndex(),
14151 *pBufferCreateInfo,
14152 *pAllocationCreateInfo,
14160 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
14164 #if VMA_STATS_STRING_ENABLED 14165 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
14167 if(pAllocationInfo != VMA_NULL)
14169 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14174 allocator->FreeMemory(*pAllocation);
14175 *pAllocation = VK_NULL_HANDLE;
14176 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14177 *pBuffer = VK_NULL_HANDLE;
14180 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14181 *pBuffer = VK_NULL_HANDLE;
14192 VMA_ASSERT(allocator);
14194 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14199 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
14201 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14203 #if VMA_RECORDING_ENABLED 14204 if(allocator->GetRecorder() != VMA_NULL)
14206 allocator->GetRecorder()->RecordDestroyBuffer(
14207 allocator->GetCurrentFrameIndex(),
14212 if(buffer != VK_NULL_HANDLE)
14214 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
14217 if(allocation != VK_NULL_HANDLE)
14219 allocator->FreeMemory(allocation);
14225 const VkImageCreateInfo* pImageCreateInfo,
14231 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
14233 VMA_DEBUG_LOG(
"vmaCreateImage");
14235 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14237 *pImage = VK_NULL_HANDLE;
14238 *pAllocation = VK_NULL_HANDLE;
14241 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
14242 allocator->m_hDevice,
14244 allocator->GetAllocationCallbacks(),
14248 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
14249 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
14250 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
14253 VkMemoryRequirements vkMemReq = {};
14254 bool requiresDedicatedAllocation =
false;
14255 bool prefersDedicatedAllocation =
false;
14256 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
14257 requiresDedicatedAllocation, prefersDedicatedAllocation);
14259 res = allocator->AllocateMemory(
14261 requiresDedicatedAllocation,
14262 prefersDedicatedAllocation,
14265 *pAllocationCreateInfo,
14269 #if VMA_RECORDING_ENABLED 14270 if(allocator->GetRecorder() != VMA_NULL)
14272 allocator->GetRecorder()->RecordCreateImage(
14273 allocator->GetCurrentFrameIndex(),
14275 *pAllocationCreateInfo,
14283 res = allocator->BindImageMemory(*pAllocation, *pImage);
14287 #if VMA_STATS_STRING_ENABLED 14288 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
14290 if(pAllocationInfo != VMA_NULL)
14292 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14297 allocator->FreeMemory(*pAllocation);
14298 *pAllocation = VK_NULL_HANDLE;
14299 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14300 *pImage = VK_NULL_HANDLE;
14303 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14304 *pImage = VK_NULL_HANDLE;
14315 VMA_ASSERT(allocator);
14317 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14322 VMA_DEBUG_LOG(
"vmaDestroyImage");
14324 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14326 #if VMA_RECORDING_ENABLED 14327 if(allocator->GetRecorder() != VMA_NULL)
14329 allocator->GetRecorder()->RecordDestroyImage(
14330 allocator->GetCurrentFrameIndex(),
14335 if(image != VK_NULL_HANDLE)
14337 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
14339 if(allocation != VK_NULL_HANDLE)
14341 allocator->FreeMemory(allocation);
14345 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1571
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1872
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1624
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1628
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
Definition: vk_mem_alloc.h:1598
-
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2190
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1579
+
Definition: vk_mem_alloc.h:1602
+
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2194
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1583
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:1825
-
Definition: vk_mem_alloc.h:1928
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1571
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2290
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1621
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2535
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2079
-
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1468
+
Definition: vk_mem_alloc.h:1829
+
Definition: vk_mem_alloc.h:1932
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1575
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2294
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1625
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2539
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2083
+
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1472
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2171
-
Definition: vk_mem_alloc.h:1905
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1560
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1978
-
Definition: vk_mem_alloc.h:1852
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1633
-
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2107
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2175
+
Definition: vk_mem_alloc.h:1909
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1564
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1982
+
Definition: vk_mem_alloc.h:1856
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1637
+
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2111
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1686
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1618
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1690
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1622
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1856
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1860
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1758
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1576
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1757
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2539
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1762
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1580
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1761
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2543
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1650
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1767
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2547
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1962
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2530
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1577
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1502
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1654
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1771
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2551
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1966
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2534
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1581
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1506
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1627
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1631
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2121
-
Definition: vk_mem_alloc.h:2115
-
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1693
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2300
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2125
+
Definition: vk_mem_alloc.h:2119
+
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1697
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2304
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1572
-
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1596
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1999
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2141
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2177
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1576
+
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1600
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2003
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2145
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2181
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1558
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2124
+
Definition: vk_mem_alloc.h:1562
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2128
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:1803
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:1807
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2525
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2529
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2543
-
Definition: vk_mem_alloc.h:1842
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1986
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1575
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2547
+
Definition: vk_mem_alloc.h:1846
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1990
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1579
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1763
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1508
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1767
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1512
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
Definition: vk_mem_alloc.h:1946
+
Definition: vk_mem_alloc.h:1950
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1529
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1533
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1600
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1534
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2545
+
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1604
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1538
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2549
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1973
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2187
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1977
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2191
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1568
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1746
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2136
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1521
-
Definition: vk_mem_alloc.h:2111
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1572
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1750
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2140
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1525
+
Definition: vk_mem_alloc.h:2115
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:1912
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1759
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1525
-
Definition: vk_mem_alloc.h:1936
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2127
-
Definition: vk_mem_alloc.h:1851
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1574
+
Definition: vk_mem_alloc.h:1916
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1763
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1529
+
Definition: vk_mem_alloc.h:1940
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2131
+
Definition: vk_mem_alloc.h:1855
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1578
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1968
-
Definition: vk_mem_alloc.h:1959
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1972
+
Definition: vk_mem_alloc.h:1963
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1749
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1570
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2149
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1636
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2180
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1957
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1992
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1753
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1574
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2153
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1640
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2184
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1961
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1996
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1674
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1765
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1892
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1758
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1678
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1769
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1896
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1762
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1581
-
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1606
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1523
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1580
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1585
+
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1610
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1527
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1584
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2163
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1573
-
Definition: vk_mem_alloc.h:1923
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2167
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1577
+
Definition: vk_mem_alloc.h:1927
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1614
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2314
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1630
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1758
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1755
+
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1618
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2318
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1634
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1762
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1759
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2168
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2172
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
-
Definition: vk_mem_alloc.h:1932
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2295
-
Definition: vk_mem_alloc.h:1943
-
Definition: vk_mem_alloc.h:1955
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2541
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1566
+
Definition: vk_mem_alloc.h:1936
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2299
+
Definition: vk_mem_alloc.h:1947
+
Definition: vk_mem_alloc.h:1959
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2545
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1570
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1753
-
Definition: vk_mem_alloc.h:1808
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2117
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1757
+
Definition: vk_mem_alloc.h:1812
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2121
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1603
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1751
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1578
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1582
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1879
-
Definition: vk_mem_alloc.h:1950
-
Definition: vk_mem_alloc.h:1835
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2309
+
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1607
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1755
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1582
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1586
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1883
+
Definition: vk_mem_alloc.h:1954
+
Definition: vk_mem_alloc.h:1839
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2313
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1556
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1560
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1569
-
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2096
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2276
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1573
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2100
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2280
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:1940
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2061
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1759
+
Definition: vk_mem_alloc.h:1944
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2065
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1763
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
-
Definition: vk_mem_alloc.h:1918
-
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1590
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1766
+
Definition: vk_mem_alloc.h:1922
+
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1594
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1770
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2174
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1759
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2178
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1763
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2281
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2285