23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1481 #ifndef VMA_RECORDING_ENABLED 1483 #define VMA_RECORDING_ENABLED 1 1485 #define VMA_RECORDING_ENABLED 0 1490 #define NOMINMAX // For windows.h 1493 #include <vulkan/vulkan.h> 1495 #if VMA_RECORDING_ENABLED 1496 #include <windows.h> 1499 #if !defined(VMA_DEDICATED_ALLOCATION) 1500 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1501 #define VMA_DEDICATED_ALLOCATION 1 1503 #define VMA_DEDICATED_ALLOCATION 0 1521 uint32_t memoryType,
1522 VkDeviceMemory memory,
1527 uint32_t memoryType,
1528 VkDeviceMemory memory,
1600 #if VMA_DEDICATED_ALLOCATION 1601 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1602 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1728 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1736 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1746 uint32_t memoryTypeIndex,
1747 VkMemoryPropertyFlags* pFlags);
1759 uint32_t frameIndex);
1792 #define VMA_STATS_STRING_ENABLED 1 1794 #if VMA_STATS_STRING_ENABLED 1801 char** ppStatsString,
1802 VkBool32 detailedMap);
1806 char* pStatsString);
1808 #endif // #if VMA_STATS_STRING_ENABLED 2037 uint32_t memoryTypeBits,
2039 uint32_t* pMemoryTypeIndex);
2055 const VkBufferCreateInfo* pBufferCreateInfo,
2057 uint32_t* pMemoryTypeIndex);
2073 const VkImageCreateInfo* pImageCreateInfo,
2075 uint32_t* pMemoryTypeIndex);
2247 size_t* pLostAllocationCount);
2346 const VkMemoryRequirements* pVkMemoryRequirements,
2400 VkDeviceSize newSize);
2633 size_t allocationCount,
2634 VkBool32* pAllocationsChanged,
2700 const VkBufferCreateInfo* pBufferCreateInfo,
2725 const VkImageCreateInfo* pImageCreateInfo,
2751 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2754 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2755 #define VMA_IMPLEMENTATION 2758 #ifdef VMA_IMPLEMENTATION 2759 #undef VMA_IMPLEMENTATION 2781 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2782 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2794 #if VMA_USE_STL_CONTAINERS 2795 #define VMA_USE_STL_VECTOR 1 2796 #define VMA_USE_STL_UNORDERED_MAP 1 2797 #define VMA_USE_STL_LIST 1 2800 #if VMA_USE_STL_VECTOR 2804 #if VMA_USE_STL_UNORDERED_MAP 2805 #include <unordered_map> 2808 #if VMA_USE_STL_LIST 2817 #include <algorithm> 2823 #define VMA_NULL nullptr 2826 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 2828 void *aligned_alloc(
size_t alignment, size_t size)
{
    // memalign() on old Android requires the alignment to be at least the
    // size of a pointer; bump it up silently when the caller asks for less.
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
2838 #elif defined(__APPLE__) || defined(__ANDROID__) 2840 void *aligned_alloc(
size_t alignment, size_t size)
{
    // posix_memalign() requires the alignment to be a multiple of
    // sizeof(void*); bump it up silently when the caller asks for less.
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
    {
        return pointer;
    }
    return VMA_NULL;
}
2863 #define VMA_ASSERT(expr) assert(expr) 2865 #define VMA_ASSERT(expr) 2871 #ifndef VMA_HEAVY_ASSERT 2873 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2875 #define VMA_HEAVY_ASSERT(expr) 2879 #ifndef VMA_ALIGN_OF 2880 #define VMA_ALIGN_OF(type) (__alignof(type)) 2883 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2885 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2887 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2891 #ifndef VMA_SYSTEM_FREE 2893 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2895 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2900 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2904 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2908 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2912 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2915 #ifndef VMA_DEBUG_LOG 2916 #define VMA_DEBUG_LOG(format, ...) 2926 #if VMA_STATS_STRING_ENABLED 2927 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2929 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
2931 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
2933 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
2935 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
2937 snprintf(outStr, strLen,
"%p", ptr);
2947 void Lock() { m_Mutex.lock(); }
2948 void Unlock() { m_Mutex.unlock(); }
2952 #define VMA_MUTEX VmaMutex 2963 #ifndef VMA_ATOMIC_UINT32 2964 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2967 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2972 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2975 #ifndef VMA_DEBUG_ALIGNMENT 2980 #define VMA_DEBUG_ALIGNMENT (1) 2983 #ifndef VMA_DEBUG_MARGIN 2988 #define VMA_DEBUG_MARGIN (0) 2991 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2996 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2999 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3005 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3008 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3013 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3016 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3021 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3024 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3025 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3029 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3030 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3034 #ifndef VMA_CLASS_NO_COPY 3035 #define VMA_CLASS_NO_COPY(className) \ 3037 className(const className&) = delete; \ 3038 className& operator=(const className&) = delete; 3041 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3044 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3046 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3047 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3053 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3054 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in a 32-bit value (population count),
// using the classic branch-free parallel bit-summing (SWAR) technique.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t bits = v - ((v >> 1) & 0x55555555);             // per-2-bit sums
    bits = (bits & 0x33333333) + ((bits >> 2) & 0x33333333); // per-4-bit sums
    bits = (bits + (bits >> 4)) & 0x0F0F0F0F;                // per-8-bit sums
    bits = (bits + (bits >> 8)) & 0x00FF00FF;                // per-16-bit sums
    bits = (bits + (bits >> 16)) & 0x0000FFFF;               // final sum
    return bits;
}
// Aligns the given value up to the nearest multiple of align.
// align must be nonzero. Equivalent to ((val + align - 1) / align) * align.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T misalignment = val % align;
    return misalignment != (T)0 ? val + (align - misalignment) : val;
}
// Aligns the given value down to the nearest multiple of align.
// align must be nonzero. Equivalent to (val / align) * align.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val - (val % align);
}
// Division with rounding to the nearest integer: (x + y/2) / y.
// Intended for unsigned / nonnegative integer values.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Returns true if x is a power of two.
// Note: also returns true for x == 0, matching the historical behavior;
// callers are expected to pass nonzero values.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x - (T)1)) == (T)0;
}
3101 static inline uint32_t VmaNextPow2(uint32_t v)
3112 static inline uint64_t VmaNextPow2(uint64_t v)
3126 static inline uint32_t VmaPrevPow2(uint32_t v)
3136 static inline uint64_t VmaPrevPow2(uint64_t v)
// Returns true if the given string is null or has zero length.
static inline bool VmaStrIsEmpty(const char* pStr)
{
    // A null pointer is treated the same as an empty string.
    // (VMA_NULL is defined as nullptr above.)
    if(pStr == nullptr)
    {
        return true;
    }
    return *pStr == '\0';
}
3153 static const char* VmaAlgorithmToStr(uint32_t algorithm)
/*
Lomuto-style partition used by VmaQuickSort: the last element of [beg, end)
is the pivot. Every element for which cmp(elem, *pivot) holds is moved in
front of the pivot's final position, which is returned.
*/
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator insertPos = beg;
    for(Iterator cur = beg; cur < pivot; ++cur)
    {
        if(cmp(*cur, *pivot))
        {
            if(insertPos != cur)
            {
                VMA_SWAP(*cur, *insertPos);
            }
            ++insertPos;
        }
    }
    if(insertPos != pivot)
    {
        VMA_SWAP(*insertPos, *pivot);
    }
    return insertPos;
}

// Recursive quicksort over [beg, end) using VmaQuickSortPartition.
// Used as the default implementation of VMA_SORT.
template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator pivotPos = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, pivotPos, cmp);
        VmaQuickSort<Iterator, Compare>(pivotPos + 1, end, cmp);
    }
}
3205 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3207 #endif // #ifndef VMA_SORT 3216 static inline bool VmaBlocksOnSamePage(
3217 VkDeviceSize resourceAOffset,
3218 VkDeviceSize resourceASize,
3219 VkDeviceSize resourceBOffset,
3220 VkDeviceSize pageSize)
3222 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3223 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3224 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3225 VkDeviceSize resourceBStart = resourceBOffset;
3226 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3227 return resourceAEndPage == resourceBStartPage;
3230 enum VmaSuballocationType
3232 VMA_SUBALLOCATION_TYPE_FREE = 0,
3233 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
3234 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
3235 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
3236 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
3237 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
3238 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
3247 static inline bool VmaIsBufferImageGranularityConflict(
3248 VmaSuballocationType suballocType1,
3249 VmaSuballocationType suballocType2)
3251 if(suballocType1 > suballocType2)
3253 VMA_SWAP(suballocType1, suballocType2);
3256 switch(suballocType1)
3258 case VMA_SUBALLOCATION_TYPE_FREE:
3260 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3262 case VMA_SUBALLOCATION_TYPE_BUFFER:
3264 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3265 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3266 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3268 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3269 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3270 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3271 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3273 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3274 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3282 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3284 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3285 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3286 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3288 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3292 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3294 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3295 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3296 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3298 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3309 VMA_CLASS_NO_COPY(VmaMutexLock)
3311 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3312 m_pMutex(useMutex ? &mutex : VMA_NULL)
3329 VMA_MUTEX* m_pMutex;
3332 #if VMA_DEBUG_GLOBAL_MUTEX 3333 static VMA_MUTEX gDebugGlobalMutex;
3334 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3336 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3340 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over the sorted range [beg, end): returns an iterator to the
first element that is not less than key (std::lower_bound semantics), or
end if every element is less than key. cmp is a strict-weak "less"
comparator; it must accept (element, key).
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t lo = 0, hi = (end - beg);
    while(lo < hi)
    {
        const size_t mid = (lo + hi) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1; // Element is less than key: answer lies to the right.
        }
        else
        {
            hi = mid; // Element is a candidate: keep it in the range.
        }
    }
    return beg + lo;
}
3373 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3375 if((pAllocationCallbacks != VMA_NULL) &&
3376 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3378 return (*pAllocationCallbacks->pfnAllocation)(
3379 pAllocationCallbacks->pUserData,
3382 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3386 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3390 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3392 if((pAllocationCallbacks != VMA_NULL) &&
3393 (pAllocationCallbacks->pfnFree != VMA_NULL))
3395 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3399 VMA_SYSTEM_FREE(ptr);
3403 template<
typename T>
3404 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3406 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3409 template<
typename T>
3410 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3412 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3415 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3417 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3419 template<
typename T>
3420 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3423 VmaFree(pAllocationCallbacks, ptr);
3426 template<
typename T>
3427 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3431 for(
size_t i = count; i--; )
3435 VmaFree(pAllocationCallbacks, ptr);
3440 template<
typename T>
3441 class VmaStlAllocator
3444 const VkAllocationCallbacks*
const m_pCallbacks;
3445 typedef T value_type;
3447 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3448 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3450 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3451 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3453 template<
typename U>
3454 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3456 return m_pCallbacks == rhs.m_pCallbacks;
3458 template<
typename U>
3459 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3461 return m_pCallbacks != rhs.m_pCallbacks;
3464 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3467 #if VMA_USE_STL_VECTOR 3469 #define VmaVector std::vector 3471 template<
typename T,
typename allocatorT>
3472 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3474 vec.insert(vec.begin() + index, item);
3477 template<
typename T,
typename allocatorT>
3478 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3480 vec.erase(vec.begin() + index);
3483 #else // #if VMA_USE_STL_VECTOR 3488 template<
typename T,
typename AllocatorT>
3492 typedef T value_type;
3494 VmaVector(
const AllocatorT& allocator) :
3495 m_Allocator(allocator),
3502 VmaVector(
size_t count,
const AllocatorT& allocator) :
3503 m_Allocator(allocator),
3504 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3510 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3511 m_Allocator(src.m_Allocator),
3512 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3513 m_Count(src.m_Count),
3514 m_Capacity(src.m_Count)
3518 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3524 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3527 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3531 resize(rhs.m_Count);
3534 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3540 bool empty()
const {
return m_Count == 0; }
3541 size_t size()
const {
return m_Count; }
3542 T* data() {
return m_pArray; }
3543 const T* data()
const {
return m_pArray; }
3545 T& operator[](
size_t index)
3547 VMA_HEAVY_ASSERT(index < m_Count);
3548 return m_pArray[index];
3550 const T& operator[](
size_t index)
const 3552 VMA_HEAVY_ASSERT(index < m_Count);
3553 return m_pArray[index];
3558 VMA_HEAVY_ASSERT(m_Count > 0);
3561 const T& front()
const 3563 VMA_HEAVY_ASSERT(m_Count > 0);
3568 VMA_HEAVY_ASSERT(m_Count > 0);
3569 return m_pArray[m_Count - 1];
3571 const T& back()
const 3573 VMA_HEAVY_ASSERT(m_Count > 0);
3574 return m_pArray[m_Count - 1];
3577 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3579 newCapacity = VMA_MAX(newCapacity, m_Count);
3581 if((newCapacity < m_Capacity) && !freeMemory)
3583 newCapacity = m_Capacity;
3586 if(newCapacity != m_Capacity)
3588 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3591 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3593 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3594 m_Capacity = newCapacity;
3595 m_pArray = newArray;
3599 void resize(
size_t newCount,
bool freeMemory =
false)
3601 size_t newCapacity = m_Capacity;
3602 if(newCount > m_Capacity)
3604 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3608 newCapacity = newCount;
3611 if(newCapacity != m_Capacity)
3613 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3614 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3615 if(elementsToCopy != 0)
3617 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3619 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3620 m_Capacity = newCapacity;
3621 m_pArray = newArray;
3627 void clear(
bool freeMemory =
false)
3629 resize(0, freeMemory);
3632 void insert(
size_t index,
const T& src)
3634 VMA_HEAVY_ASSERT(index <= m_Count);
3635 const size_t oldCount = size();
3636 resize(oldCount + 1);
3637 if(index < oldCount)
3639 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3641 m_pArray[index] = src;
3644 void remove(
size_t index)
3646 VMA_HEAVY_ASSERT(index < m_Count);
3647 const size_t oldCount = size();
3648 if(index < oldCount - 1)
3650 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3652 resize(oldCount - 1);
3655 void push_back(
const T& src)
3657 const size_t newIndex = size();
3658 resize(newIndex + 1);
3659 m_pArray[newIndex] = src;
3664 VMA_HEAVY_ASSERT(m_Count > 0);
3668 void push_front(
const T& src)
3675 VMA_HEAVY_ASSERT(m_Count > 0);
3679 typedef T* iterator;
3681 iterator begin() {
return m_pArray; }
3682 iterator end() {
return m_pArray + m_Count; }
3685 AllocatorT m_Allocator;
3691 template<
typename T,
typename allocatorT>
3692 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3694 vec.insert(index, item);
3697 template<
typename T,
typename allocatorT>
3698 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3703 #endif // #if VMA_USE_STL_VECTOR 3705 template<
typename CmpLess,
typename VectorT>
3706 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3708 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3710 vector.data() + vector.size(),
3712 CmpLess()) - vector.data();
3713 VmaVectorInsert(vector, indexToInsert, value);
3714 return indexToInsert;
3717 template<
typename CmpLess,
typename VectorT>
3718 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3721 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3726 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3728 size_t indexToRemove = it - vector.begin();
3729 VmaVectorRemove(vector, indexToRemove);
3735 template<
typename CmpLess,
typename IterT,
typename KeyT>
3736 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3739 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3740 beg, end, value, comparator);
3742 (!comparator(*it, value) && !comparator(value, *it)))
3757 template<
typename T>
3758 class VmaPoolAllocator
3760 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3762 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3763 ~VmaPoolAllocator();
3771 uint32_t NextFreeIndex;
3778 uint32_t FirstFreeIndex;
3781 const VkAllocationCallbacks* m_pAllocationCallbacks;
3782 size_t m_ItemsPerBlock;
3783 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3785 ItemBlock& CreateNewBlock();
3788 template<
typename T>
3789 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3790 m_pAllocationCallbacks(pAllocationCallbacks),
3791 m_ItemsPerBlock(itemsPerBlock),
3792 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3794 VMA_ASSERT(itemsPerBlock > 0);
3797 template<
typename T>
3798 VmaPoolAllocator<T>::~VmaPoolAllocator()
3803 template<
typename T>
3804 void VmaPoolAllocator<T>::Clear()
3806 for(
size_t i = m_ItemBlocks.size(); i--; )
3807 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3808 m_ItemBlocks.clear();
3811 template<
typename T>
3812 T* VmaPoolAllocator<T>::Alloc()
3814 for(
size_t i = m_ItemBlocks.size(); i--; )
3816 ItemBlock& block = m_ItemBlocks[i];
3818 if(block.FirstFreeIndex != UINT32_MAX)
3820 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3821 block.FirstFreeIndex = pItem->NextFreeIndex;
3822 return &pItem->Value;
3827 ItemBlock& newBlock = CreateNewBlock();
3828 Item*
const pItem = &newBlock.pItems[0];
3829 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3830 return &pItem->Value;
3833 template<
typename T>
3834 void VmaPoolAllocator<T>::Free(T* ptr)
3837 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3839 ItemBlock& block = m_ItemBlocks[i];
3843 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3846 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3848 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3849 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3850 block.FirstFreeIndex = index;
3854 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3857 template<
typename T>
3858 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3860 ItemBlock newBlock = {
3861 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3863 m_ItemBlocks.push_back(newBlock);
3866 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3867 newBlock.pItems[i].NextFreeIndex = i + 1;
3868 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3869 return m_ItemBlocks.back();
3875 #if VMA_USE_STL_LIST 3877 #define VmaList std::list 3879 #else // #if VMA_USE_STL_LIST 3881 template<
typename T>
3890 template<
typename T>
3893 VMA_CLASS_NO_COPY(VmaRawList)
3895 typedef VmaListItem<T> ItemType;
3897 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3901 size_t GetCount()
const {
return m_Count; }
3902 bool IsEmpty()
const {
return m_Count == 0; }
3904 ItemType* Front() {
return m_pFront; }
3905 const ItemType* Front()
const {
return m_pFront; }
3906 ItemType* Back() {
return m_pBack; }
3907 const ItemType* Back()
const {
return m_pBack; }
3909 ItemType* PushBack();
3910 ItemType* PushFront();
3911 ItemType* PushBack(
const T& value);
3912 ItemType* PushFront(
const T& value);
3917 ItemType* InsertBefore(ItemType* pItem);
3919 ItemType* InsertAfter(ItemType* pItem);
3921 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3922 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3924 void Remove(ItemType* pItem);
3927 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3928 VmaPoolAllocator<ItemType> m_ItemAllocator;
3934 template<
typename T>
3935 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3936 m_pAllocationCallbacks(pAllocationCallbacks),
3937 m_ItemAllocator(pAllocationCallbacks, 128),
3944 template<
typename T>
3945 VmaRawList<T>::~VmaRawList()
3951 template<
typename T>
3952 void VmaRawList<T>::Clear()
3954 if(IsEmpty() ==
false)
3956 ItemType* pItem = m_pBack;
3957 while(pItem != VMA_NULL)
3959 ItemType*
const pPrevItem = pItem->pPrev;
3960 m_ItemAllocator.Free(pItem);
3963 m_pFront = VMA_NULL;
3969 template<
typename T>
3970 VmaListItem<T>* VmaRawList<T>::PushBack()
3972 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3973 pNewItem->pNext = VMA_NULL;
3976 pNewItem->pPrev = VMA_NULL;
3977 m_pFront = pNewItem;
3983 pNewItem->pPrev = m_pBack;
3984 m_pBack->pNext = pNewItem;
3991 template<
typename T>
3992 VmaListItem<T>* VmaRawList<T>::PushFront()
3994 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3995 pNewItem->pPrev = VMA_NULL;
3998 pNewItem->pNext = VMA_NULL;
3999 m_pFront = pNewItem;
4005 pNewItem->pNext = m_pFront;
4006 m_pFront->pPrev = pNewItem;
4007 m_pFront = pNewItem;
4013 template<
typename T>
4014 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4016 ItemType*
const pNewItem = PushBack();
4017 pNewItem->Value = value;
4021 template<
typename T>
4022 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4024 ItemType*
const pNewItem = PushFront();
4025 pNewItem->Value = value;
4029 template<
typename T>
4030 void VmaRawList<T>::PopBack()
4032 VMA_HEAVY_ASSERT(m_Count > 0);
4033 ItemType*
const pBackItem = m_pBack;
4034 ItemType*
const pPrevItem = pBackItem->pPrev;
4035 if(pPrevItem != VMA_NULL)
4037 pPrevItem->pNext = VMA_NULL;
4039 m_pBack = pPrevItem;
4040 m_ItemAllocator.Free(pBackItem);
4044 template<
typename T>
4045 void VmaRawList<T>::PopFront()
4047 VMA_HEAVY_ASSERT(m_Count > 0);
4048 ItemType*
const pFrontItem = m_pFront;
4049 ItemType*
const pNextItem = pFrontItem->pNext;
4050 if(pNextItem != VMA_NULL)
4052 pNextItem->pPrev = VMA_NULL;
4054 m_pFront = pNextItem;
4055 m_ItemAllocator.Free(pFrontItem);
4059 template<
typename T>
4060 void VmaRawList<T>::Remove(ItemType* pItem)
4062 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4063 VMA_HEAVY_ASSERT(m_Count > 0);
4065 if(pItem->pPrev != VMA_NULL)
4067 pItem->pPrev->pNext = pItem->pNext;
4071 VMA_HEAVY_ASSERT(m_pFront == pItem);
4072 m_pFront = pItem->pNext;
4075 if(pItem->pNext != VMA_NULL)
4077 pItem->pNext->pPrev = pItem->pPrev;
4081 VMA_HEAVY_ASSERT(m_pBack == pItem);
4082 m_pBack = pItem->pPrev;
4085 m_ItemAllocator.Free(pItem);
4089 template<
typename T>
4090 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4092 if(pItem != VMA_NULL)
4094 ItemType*
const prevItem = pItem->pPrev;
4095 ItemType*
const newItem = m_ItemAllocator.Alloc();
4096 newItem->pPrev = prevItem;
4097 newItem->pNext = pItem;
4098 pItem->pPrev = newItem;
4099 if(prevItem != VMA_NULL)
4101 prevItem->pNext = newItem;
4105 VMA_HEAVY_ASSERT(m_pFront == pItem);
4115 template<
typename T>
4116 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4118 if(pItem != VMA_NULL)
4120 ItemType*
const nextItem = pItem->pNext;
4121 ItemType*
const newItem = m_ItemAllocator.Alloc();
4122 newItem->pNext = nextItem;
4123 newItem->pPrev = pItem;
4124 pItem->pNext = newItem;
4125 if(nextItem != VMA_NULL)
4127 nextItem->pPrev = newItem;
4131 VMA_HEAVY_ASSERT(m_pBack == pItem);
4141 template<
typename T>
4142 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4144 ItemType*
const newItem = InsertBefore(pItem);
4145 newItem->Value = value;
4149 template<
typename T>
4150 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4152 ItemType*
const newItem = InsertAfter(pItem);
4153 newItem->Value = value;
4157 template<
typename T,
typename AllocatorT>
4160 VMA_CLASS_NO_COPY(VmaList)
4171 T& operator*()
const 4173 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4174 return m_pItem->Value;
4176 T* operator->()
const 4178 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4179 return &m_pItem->Value;
4182 iterator& operator++()
4184 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4185 m_pItem = m_pItem->pNext;
4188 iterator& operator--()
4190 if(m_pItem != VMA_NULL)
4192 m_pItem = m_pItem->pPrev;
4196 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4197 m_pItem = m_pList->Back();
4202 iterator operator++(
int)
4204 iterator result = *
this;
4208 iterator operator--(
int)
4210 iterator result = *
this;
4215 bool operator==(
const iterator& rhs)
const 4217 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4218 return m_pItem == rhs.m_pItem;
4220 bool operator!=(
const iterator& rhs)
const 4222 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4223 return m_pItem != rhs.m_pItem;
4227 VmaRawList<T>* m_pList;
4228 VmaListItem<T>* m_pItem;
4230 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4236 friend class VmaList<T, AllocatorT>;
4239 class const_iterator
4248 const_iterator(
const iterator& src) :
4249 m_pList(src.m_pList),
4250 m_pItem(src.m_pItem)
4254 const T& operator*()
const 4256 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4257 return m_pItem->Value;
4259 const T* operator->()
const 4261 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4262 return &m_pItem->Value;
4265 const_iterator& operator++()
4267 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4268 m_pItem = m_pItem->pNext;
4271 const_iterator& operator--()
4273 if(m_pItem != VMA_NULL)
4275 m_pItem = m_pItem->pPrev;
4279 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4280 m_pItem = m_pList->Back();
4285 const_iterator operator++(
int)
4287 const_iterator result = *
this;
4291 const_iterator operator--(
int)
4293 const_iterator result = *
this;
4298 bool operator==(
const const_iterator& rhs)
const 4300 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4301 return m_pItem == rhs.m_pItem;
4303 bool operator!=(
const const_iterator& rhs)
const 4305 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4306 return m_pItem != rhs.m_pItem;
4310 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4316 const VmaRawList<T>* m_pList;
4317 const VmaListItem<T>* m_pItem;
4319 friend class VmaList<T, AllocatorT>;
4322 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4324 bool empty()
const {
return m_RawList.IsEmpty(); }
4325 size_t size()
const {
return m_RawList.GetCount(); }
4327 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4328 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4330 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4331 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4333 void clear() { m_RawList.Clear(); }
4334 void push_back(
const T& value) { m_RawList.PushBack(value); }
4335 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4336 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4339 VmaRawList<T> m_RawList;
4342 #endif // #if VMA_USE_STL_LIST 4350 #if VMA_USE_STL_UNORDERED_MAP 4352 #define VmaPair std::pair 4354 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4355 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4357 #else // #if VMA_USE_STL_UNORDERED_MAP 4359 template<
typename T1,
typename T2>
4365 VmaPair() : first(), second() { }
4366 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4372 template<
typename KeyT,
typename ValueT>
4376 typedef VmaPair<KeyT, ValueT> PairType;
4377 typedef PairType* iterator;
4379 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4381 iterator begin() {
return m_Vector.begin(); }
4382 iterator end() {
return m_Vector.end(); }
4384 void insert(
const PairType& pair);
4385 iterator find(
const KeyT& key);
4386 void erase(iterator it);
4389 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4392 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4394 template<
typename FirstT,
typename SecondT>
4395 struct VmaPairFirstLess
4397 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4399 return lhs.first < rhs.first;
4401 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4403 return lhs.first < rhsFirst;
4407 template<
typename KeyT,
typename ValueT>
4408 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4410 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4412 m_Vector.data() + m_Vector.size(),
4414 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4415 VmaVectorInsert(m_Vector, indexToInsert, pair);
4418 template<
typename KeyT,
typename ValueT>
4419 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4421 PairType* it = VmaBinaryFindFirstNotLess(
4423 m_Vector.data() + m_Vector.size(),
4425 VmaPairFirstLess<KeyT, ValueT>());
4426 if((it != m_Vector.end()) && (it->first == key))
4432 return m_Vector.end();
4436 template<
typename KeyT,
typename ValueT>
4437 void VmaMap<KeyT, ValueT>::erase(iterator it)
4439 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4442 #endif // #if VMA_USE_STL_UNORDERED_MAP 4448 class VmaDeviceMemoryBlock;
4450 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4452 struct VmaAllocation_T
4454 VMA_CLASS_NO_COPY(VmaAllocation_T)
4456 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4460 FLAG_USER_DATA_STRING = 0x01,
4464 enum ALLOCATION_TYPE
4466 ALLOCATION_TYPE_NONE,
4467 ALLOCATION_TYPE_BLOCK,
4468 ALLOCATION_TYPE_DEDICATED,
4471 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4474 m_pUserData(VMA_NULL),
4475 m_LastUseFrameIndex(currentFrameIndex),
4476 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4477 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4479 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4481 #if VMA_STATS_STRING_ENABLED 4482 m_CreationFrameIndex = currentFrameIndex;
4483 m_BufferImageUsage = 0;
4489 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4492 VMA_ASSERT(m_pUserData == VMA_NULL);
4495 void InitBlockAllocation(
4497 VmaDeviceMemoryBlock* block,
4498 VkDeviceSize offset,
4499 VkDeviceSize alignment,
4501 VmaSuballocationType suballocationType,
4505 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4506 VMA_ASSERT(block != VMA_NULL);
4507 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4508 m_Alignment = alignment;
4510 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4511 m_SuballocationType = (uint8_t)suballocationType;
4512 m_BlockAllocation.m_hPool = hPool;
4513 m_BlockAllocation.m_Block = block;
4514 m_BlockAllocation.m_Offset = offset;
4515 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4520 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4521 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4522 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4523 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4524 m_BlockAllocation.m_Block = VMA_NULL;
4525 m_BlockAllocation.m_Offset = 0;
4526 m_BlockAllocation.m_CanBecomeLost =
true;
4529 void ChangeBlockAllocation(
4531 VmaDeviceMemoryBlock* block,
4532 VkDeviceSize offset);
4534 void ChangeSize(VkDeviceSize newSize);
4537 void InitDedicatedAllocation(
4538 uint32_t memoryTypeIndex,
4539 VkDeviceMemory hMemory,
4540 VmaSuballocationType suballocationType,
4544 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4545 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4546 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4549 m_SuballocationType = (uint8_t)suballocationType;
4550 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4551 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4552 m_DedicatedAllocation.m_hMemory = hMemory;
4553 m_DedicatedAllocation.m_pMappedData = pMappedData;
4556 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4557 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4558 VkDeviceSize GetSize()
const {
return m_Size; }
4559 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4560 void* GetUserData()
const {
return m_pUserData; }
4561 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4562 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4564 VmaDeviceMemoryBlock* GetBlock()
const 4566 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4567 return m_BlockAllocation.m_Block;
4569 VkDeviceSize GetOffset()
const;
4570 VkDeviceMemory GetMemory()
const;
4571 uint32_t GetMemoryTypeIndex()
const;
4572 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4573 void* GetMappedData()
const;
4574 bool CanBecomeLost()
const;
4577 uint32_t GetLastUseFrameIndex()
const 4579 return m_LastUseFrameIndex.load();
4581 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4583 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4593 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4595 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4597 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4608 void BlockAllocMap();
4609 void BlockAllocUnmap();
4610 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4613 #if VMA_STATS_STRING_ENABLED 4614 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4615 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4617 void InitBufferImageUsage(uint32_t bufferImageUsage)
4619 VMA_ASSERT(m_BufferImageUsage == 0);
4620 m_BufferImageUsage = bufferImageUsage;
4623 void PrintParameters(
class VmaJsonWriter& json)
const;
4627 VkDeviceSize m_Alignment;
4628 VkDeviceSize m_Size;
4630 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4632 uint8_t m_SuballocationType;
4639 struct BlockAllocation
4642 VmaDeviceMemoryBlock* m_Block;
4643 VkDeviceSize m_Offset;
4644 bool m_CanBecomeLost;
4648 struct DedicatedAllocation
4650 uint32_t m_MemoryTypeIndex;
4651 VkDeviceMemory m_hMemory;
4652 void* m_pMappedData;
4658 BlockAllocation m_BlockAllocation;
4660 DedicatedAllocation m_DedicatedAllocation;
4663 #if VMA_STATS_STRING_ENABLED 4664 uint32_t m_CreationFrameIndex;
4665 uint32_t m_BufferImageUsage;
// VmaSuballocation: one region carved out of a device-memory block — its
// starting offset and its suballocation type (free vs. various in-use kinds).
// NOTE(review): extraction damage — interior lines (orig 4676-4682, including
// what the surrounding code implies is a size member and the closing brace)
// are missing from this chunk; confirm against the original header.
4675 struct VmaSuballocation
4677 VkDeviceSize offset;
4680 VmaSuballocationType type;
// VmaSuballocationOffsetLess: comparator ordering suballocations by ascending
// offset (used to keep suballocation sequences sorted by position in a block).
4684 struct VmaSuballocationOffsetLess
4686 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4688 return lhs.offset < rhs.offset;
// VmaSuballocationOffsetGreater: mirror of VmaSuballocationOffsetLess —
// orders suballocations by descending offset.
4691 struct VmaSuballocationOffsetGreater
4693 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4695 return lhs.offset > rhs.offset;
4699 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4702 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// VmaAllocationRequest: result of searching block metadata for a place to put
// a new allocation — the chosen offset, the free/occupied byte tallies around
// it, the suballocation-list position, and how many existing "lost-able"
// allocations would have to be sacrificed to make room.
4717 struct VmaAllocationRequest
4719 VkDeviceSize offset;
4720 VkDeviceSize sumFreeSize;
4721 VkDeviceSize sumItemSize;
4722 VmaSuballocationList::iterator item;
4723 size_t itemsToMakeLostCount;
// Cost heuristic for comparing candidate requests: bytes of existing
// allocations destroyed, plus a fixed penalty (VMA_LOST_ALLOCATION_COST,
// 1 MiB per item above) for each allocation made lost. Lower is better.
4726 VkDeviceSize CalcCost()
const 4728 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4736 class VmaBlockMetadata
4740 virtual ~VmaBlockMetadata() { }
4741 virtual void Init(VkDeviceSize size) { m_Size = size; }
4744 virtual bool Validate()
const = 0;
4745 VkDeviceSize GetSize()
const {
return m_Size; }
4746 virtual size_t GetAllocationCount()
const = 0;
4747 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4748 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4750 virtual bool IsEmpty()
const = 0;
4752 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4754 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4756 #if VMA_STATS_STRING_ENABLED 4757 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
4763 virtual bool CreateAllocationRequest(
4764 uint32_t currentFrameIndex,
4765 uint32_t frameInUseCount,
4766 VkDeviceSize bufferImageGranularity,
4767 VkDeviceSize allocSize,
4768 VkDeviceSize allocAlignment,
4770 VmaSuballocationType allocType,
4771 bool canMakeOtherLost,
4773 VmaAllocationRequest* pAllocationRequest) = 0;
4775 virtual bool MakeRequestedAllocationsLost(
4776 uint32_t currentFrameIndex,
4777 uint32_t frameInUseCount,
4778 VmaAllocationRequest* pAllocationRequest) = 0;
4780 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4782 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
4786 const VmaAllocationRequest& request,
4787 VmaSuballocationType type,
4788 VkDeviceSize allocSize,
4794 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
4797 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
4800 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
4802 #if VMA_STATS_STRING_ENABLED 4803 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4804 VkDeviceSize unusedBytes,
4805 size_t allocationCount,
4806 size_t unusedRangeCount)
const;
4807 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4808 VkDeviceSize offset,
4810 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4811 VkDeviceSize offset,
4812 VkDeviceSize size)
const;
4813 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4817 VkDeviceSize m_Size;
4818 const VkAllocationCallbacks* m_pAllocationCallbacks;
4821 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 4822 VMA_ASSERT(0 && "Validation failed: " #cond); \ 4826 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4828 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4831 virtual ~VmaBlockMetadata_Generic();
4832 virtual void Init(VkDeviceSize size);
4834 virtual bool Validate()
const;
4835 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4836 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4837 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4838 virtual bool IsEmpty()
const;
4840 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4841 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4843 #if VMA_STATS_STRING_ENABLED 4844 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4847 virtual bool CreateAllocationRequest(
4848 uint32_t currentFrameIndex,
4849 uint32_t frameInUseCount,
4850 VkDeviceSize bufferImageGranularity,
4851 VkDeviceSize allocSize,
4852 VkDeviceSize allocAlignment,
4854 VmaSuballocationType allocType,
4855 bool canMakeOtherLost,
4857 VmaAllocationRequest* pAllocationRequest);
4859 virtual bool MakeRequestedAllocationsLost(
4860 uint32_t currentFrameIndex,
4861 uint32_t frameInUseCount,
4862 VmaAllocationRequest* pAllocationRequest);
4864 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4866 virtual VkResult CheckCorruption(
const void* pBlockData);
4869 const VmaAllocationRequest& request,
4870 VmaSuballocationType type,
4871 VkDeviceSize allocSize,
4876 virtual void FreeAtOffset(VkDeviceSize offset);
4878 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
4881 uint32_t m_FreeCount;
4882 VkDeviceSize m_SumFreeSize;
4883 VmaSuballocationList m_Suballocations;
4886 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4888 bool ValidateFreeSuballocationList()
const;
4892 bool CheckAllocation(
4893 uint32_t currentFrameIndex,
4894 uint32_t frameInUseCount,
4895 VkDeviceSize bufferImageGranularity,
4896 VkDeviceSize allocSize,
4897 VkDeviceSize allocAlignment,
4898 VmaSuballocationType allocType,
4899 VmaSuballocationList::const_iterator suballocItem,
4900 bool canMakeOtherLost,
4901 VkDeviceSize* pOffset,
4902 size_t* itemsToMakeLostCount,
4903 VkDeviceSize* pSumFreeSize,
4904 VkDeviceSize* pSumItemSize)
const;
4906 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4910 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4913 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4916 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4997 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
4999 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5002 virtual ~VmaBlockMetadata_Linear();
5003 virtual void Init(VkDeviceSize size);
5005 virtual bool Validate()
const;
5006 virtual size_t GetAllocationCount()
const;
5007 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5008 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5009 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5011 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5012 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5014 #if VMA_STATS_STRING_ENABLED 5015 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5018 virtual bool CreateAllocationRequest(
5019 uint32_t currentFrameIndex,
5020 uint32_t frameInUseCount,
5021 VkDeviceSize bufferImageGranularity,
5022 VkDeviceSize allocSize,
5023 VkDeviceSize allocAlignment,
5025 VmaSuballocationType allocType,
5026 bool canMakeOtherLost,
5028 VmaAllocationRequest* pAllocationRequest);
5030 virtual bool MakeRequestedAllocationsLost(
5031 uint32_t currentFrameIndex,
5032 uint32_t frameInUseCount,
5033 VmaAllocationRequest* pAllocationRequest);
5035 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5037 virtual VkResult CheckCorruption(
const void* pBlockData);
5040 const VmaAllocationRequest& request,
5041 VmaSuballocationType type,
5042 VkDeviceSize allocSize,
5047 virtual void FreeAtOffset(VkDeviceSize offset);
5057 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5059 enum SECOND_VECTOR_MODE
5061 SECOND_VECTOR_EMPTY,
5066 SECOND_VECTOR_RING_BUFFER,
5072 SECOND_VECTOR_DOUBLE_STACK,
5075 VkDeviceSize m_SumFreeSize;
5076 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5077 uint32_t m_1stVectorIndex;
5078 SECOND_VECTOR_MODE m_2ndVectorMode;
5080 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5081 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5082 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5083 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5086 size_t m_1stNullItemsBeginCount;
5088 size_t m_1stNullItemsMiddleCount;
5090 size_t m_2ndNullItemsCount;
5092 bool ShouldCompact1st()
const;
5093 void CleanupAfterFree();
5107 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5109 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5112 virtual ~VmaBlockMetadata_Buddy();
5113 virtual void Init(VkDeviceSize size);
5115 virtual bool Validate()
const;
5116 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5117 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5118 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5119 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5121 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5122 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5124 #if VMA_STATS_STRING_ENABLED 5125 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5128 virtual bool CreateAllocationRequest(
5129 uint32_t currentFrameIndex,
5130 uint32_t frameInUseCount,
5131 VkDeviceSize bufferImageGranularity,
5132 VkDeviceSize allocSize,
5133 VkDeviceSize allocAlignment,
5135 VmaSuballocationType allocType,
5136 bool canMakeOtherLost,
5138 VmaAllocationRequest* pAllocationRequest);
5140 virtual bool MakeRequestedAllocationsLost(
5141 uint32_t currentFrameIndex,
5142 uint32_t frameInUseCount,
5143 VmaAllocationRequest* pAllocationRequest);
5145 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5147 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5150 const VmaAllocationRequest& request,
5151 VmaSuballocationType type,
5152 VkDeviceSize allocSize,
5156 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5157 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5160 static const VkDeviceSize MIN_NODE_SIZE = 32;
5161 static const size_t MAX_LEVELS = 30;
5163 struct ValidationContext
5165 size_t calculatedAllocationCount;
5166 size_t calculatedFreeCount;
5167 VkDeviceSize calculatedSumFreeSize;
5169 ValidationContext() :
5170 calculatedAllocationCount(0),
5171 calculatedFreeCount(0),
5172 calculatedSumFreeSize(0) { }
5177 VkDeviceSize offset;
5207 VkDeviceSize m_UsableSize;
5208 uint32_t m_LevelCount;
5214 } m_FreeList[MAX_LEVELS];
5216 size_t m_AllocationCount;
5220 VkDeviceSize m_SumFreeSize;
5222 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5223 void DeleteNode(Node* node);
5224 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5225 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5226 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5228 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5229 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5233 void AddToFreeListFront(uint32_t level, Node* node);
5237 void RemoveFromFreeList(uint32_t level, Node* node);
5239 #if VMA_STATS_STRING_ENABLED 5240 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5250 class VmaDeviceMemoryBlock
5252 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5254 VmaBlockMetadata* m_pMetadata;
5258 ~VmaDeviceMemoryBlock()
5260 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5261 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5267 uint32_t newMemoryTypeIndex,
5268 VkDeviceMemory newMemory,
5269 VkDeviceSize newSize,
5271 uint32_t algorithm);
5275 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5276 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5277 uint32_t GetId()
const {
return m_Id; }
5278 void* GetMappedData()
const {
return m_pMappedData; }
5281 bool Validate()
const;
5286 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5289 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5290 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5292 VkResult BindBufferMemory(
5296 VkResult BindImageMemory(
5302 uint32_t m_MemoryTypeIndex;
5304 VkDeviceMemory m_hMemory;
5309 uint32_t m_MapCount;
5310 void* m_pMappedData;
// VmaPointerLess: comparator ordering raw pointers by address, for use in
// sorted containers of pointers.
// NOTE(review): extraction damage — the comparator's return statement (orig
// lines 5316-5320) is missing from this chunk; only the signature and the
// following forward declaration of VmaDefragmentator survived.
5313 struct VmaPointerLess
5315 bool operator()(
const void* lhs,
const void* rhs)
const 5321 class VmaDefragmentator;
5329 struct VmaBlockVector
5331 VMA_CLASS_NO_COPY(VmaBlockVector)
5335 uint32_t memoryTypeIndex,
5336 VkDeviceSize preferredBlockSize,
5337 size_t minBlockCount,
5338 size_t maxBlockCount,
5339 VkDeviceSize bufferImageGranularity,
5340 uint32_t frameInUseCount,
5342 bool explicitBlockSize,
5343 uint32_t algorithm);
5346 VkResult CreateMinBlocks();
5348 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5349 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5350 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5351 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5352 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5356 bool IsEmpty()
const {
return m_Blocks.empty(); }
5357 bool IsCorruptionDetectionEnabled()
const;
5361 uint32_t currentFrameIndex,
5363 VkDeviceSize alignment,
5365 VmaSuballocationType suballocType,
5374 #if VMA_STATS_STRING_ENABLED 5375 void PrintDetailedMap(
class VmaJsonWriter& json);
5378 void MakePoolAllocationsLost(
5379 uint32_t currentFrameIndex,
5380 size_t* pLostAllocationCount);
5381 VkResult CheckCorruption();
5383 VmaDefragmentator* EnsureDefragmentator(
5385 uint32_t currentFrameIndex);
5387 VkResult Defragment(
5389 VkDeviceSize& maxBytesToMove,
5390 uint32_t& maxAllocationsToMove);
5392 void DestroyDefragmentator();
5395 friend class VmaDefragmentator;
5398 const uint32_t m_MemoryTypeIndex;
5399 const VkDeviceSize m_PreferredBlockSize;
5400 const size_t m_MinBlockCount;
5401 const size_t m_MaxBlockCount;
5402 const VkDeviceSize m_BufferImageGranularity;
5403 const uint32_t m_FrameInUseCount;
5404 const bool m_IsCustomPool;
5405 const bool m_ExplicitBlockSize;
5406 const uint32_t m_Algorithm;
5407 bool m_HasEmptyBlock;
5410 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5414 VmaDefragmentator* m_pDefragmentator;
5415 uint32_t m_NextBlockId;
5417 VkDeviceSize CalcMaxBlockSize()
const;
5420 void Remove(VmaDeviceMemoryBlock* pBlock);
5424 void IncrementallySortBlocks();
5427 VkResult AllocateFromBlock(
5428 VmaDeviceMemoryBlock* pBlock,
5430 uint32_t currentFrameIndex,
5432 VkDeviceSize alignment,
5435 VmaSuballocationType suballocType,
5439 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5444 VMA_CLASS_NO_COPY(VmaPool_T)
5446 VmaBlockVector m_BlockVector;
5451 VkDeviceSize preferredBlockSize);
5454 uint32_t GetId()
const {
return m_Id; }
5455 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5457 #if VMA_STATS_STRING_ENABLED 5465 class VmaDefragmentator
5467 VMA_CLASS_NO_COPY(VmaDefragmentator)
5470 VmaBlockVector*
const m_pBlockVector;
5471 uint32_t m_CurrentFrameIndex;
5472 VkDeviceSize m_BytesMoved;
5473 uint32_t m_AllocationsMoved;
5475 struct AllocationInfo
5478 VkBool32* m_pChanged;
5481 m_hAllocation(VK_NULL_HANDLE),
5482 m_pChanged(VMA_NULL)
5487 struct AllocationInfoSizeGreater
5489 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5491 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5496 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5500 VmaDeviceMemoryBlock* m_pBlock;
5501 bool m_HasNonMovableAllocations;
5502 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5504 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5506 m_HasNonMovableAllocations(true),
5507 m_Allocations(pAllocationCallbacks),
5508 m_pMappedDataForDefragmentation(VMA_NULL)
5512 void CalcHasNonMovableAllocations()
5514 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5515 const size_t defragmentAllocCount = m_Allocations.size();
5516 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5519 void SortAllocationsBySizeDescecnding()
5521 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5524 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5529 void* m_pMappedDataForDefragmentation;
5532 struct BlockPointerLess
5534 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5536 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5538 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5540 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
5546 struct BlockInfoCompareMoveDestination
5548 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5550 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5554 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5558 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5566 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5567 BlockInfoVector m_Blocks;
5569 VkResult DefragmentRound(
5570 VkDeviceSize maxBytesToMove,
5571 uint32_t maxAllocationsToMove);
5573 static bool MoveMakesSense(
5574 size_t dstBlockIndex, VkDeviceSize dstOffset,
5575 size_t srcBlockIndex, VkDeviceSize srcOffset);
5580 VmaBlockVector* pBlockVector,
5581 uint32_t currentFrameIndex);
5583 ~VmaDefragmentator();
5585 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5586 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5588 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5590 VkResult Defragment(
5591 VkDeviceSize maxBytesToMove,
5592 uint32_t maxAllocationsToMove);
5595 #if VMA_RECORDING_ENABLED 5602 void WriteConfiguration(
5603 const VkPhysicalDeviceProperties& devProps,
5604 const VkPhysicalDeviceMemoryProperties& memProps,
5605 bool dedicatedAllocationExtensionEnabled);
5608 void RecordCreateAllocator(uint32_t frameIndex);
5609 void RecordDestroyAllocator(uint32_t frameIndex);
5610 void RecordCreatePool(uint32_t frameIndex,
5613 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5614 void RecordAllocateMemory(uint32_t frameIndex,
5615 const VkMemoryRequirements& vkMemReq,
5618 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5619 const VkMemoryRequirements& vkMemReq,
5620 bool requiresDedicatedAllocation,
5621 bool prefersDedicatedAllocation,
5624 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5625 const VkMemoryRequirements& vkMemReq,
5626 bool requiresDedicatedAllocation,
5627 bool prefersDedicatedAllocation,
5630 void RecordFreeMemory(uint32_t frameIndex,
5632 void RecordResizeAllocation(
5633 uint32_t frameIndex,
5635 VkDeviceSize newSize);
5636 void RecordSetAllocationUserData(uint32_t frameIndex,
5638 const void* pUserData);
5639 void RecordCreateLostAllocation(uint32_t frameIndex,
5641 void RecordMapMemory(uint32_t frameIndex,
5643 void RecordUnmapMemory(uint32_t frameIndex,
5645 void RecordFlushAllocation(uint32_t frameIndex,
5646 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5647 void RecordInvalidateAllocation(uint32_t frameIndex,
5648 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5649 void RecordCreateBuffer(uint32_t frameIndex,
5650 const VkBufferCreateInfo& bufCreateInfo,
5653 void RecordCreateImage(uint32_t frameIndex,
5654 const VkImageCreateInfo& imageCreateInfo,
5657 void RecordDestroyBuffer(uint32_t frameIndex,
5659 void RecordDestroyImage(uint32_t frameIndex,
5661 void RecordTouchAllocation(uint32_t frameIndex,
5663 void RecordGetAllocationInfo(uint32_t frameIndex,
5665 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
5675 class UserDataString
5679 const char* GetString()
const {
return m_Str; }
5689 VMA_MUTEX m_FileMutex;
5691 int64_t m_StartCounter;
5693 void GetBasicParams(CallParams& outParams);
5697 #endif // #if VMA_RECORDING_ENABLED 5700 struct VmaAllocator_T
5702 VMA_CLASS_NO_COPY(VmaAllocator_T)
5705 bool m_UseKhrDedicatedAllocation;
5707 bool m_AllocationCallbacksSpecified;
5708 VkAllocationCallbacks m_AllocationCallbacks;
5712 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5713 VMA_MUTEX m_HeapSizeLimitMutex;
5715 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5716 VkPhysicalDeviceMemoryProperties m_MemProps;
5719 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
5722 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5723 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5724 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
5730 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5732 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5736 return m_VulkanFunctions;
5739 VkDeviceSize GetBufferImageGranularity()
const 5742 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5743 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5746 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5747 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5749 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5751 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5752 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
5755 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5757 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5758 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5761 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5763 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5764 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5765 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5768 bool IsIntegratedGpu()
const 5770 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5773 #if VMA_RECORDING_ENABLED 5774 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
5777 void GetBufferMemoryRequirements(
5779 VkMemoryRequirements& memReq,
5780 bool& requiresDedicatedAllocation,
5781 bool& prefersDedicatedAllocation)
const;
5782 void GetImageMemoryRequirements(
5784 VkMemoryRequirements& memReq,
5785 bool& requiresDedicatedAllocation,
5786 bool& prefersDedicatedAllocation)
const;
5789 VkResult AllocateMemory(
5790 const VkMemoryRequirements& vkMemReq,
5791 bool requiresDedicatedAllocation,
5792 bool prefersDedicatedAllocation,
5793 VkBuffer dedicatedBuffer,
5794 VkImage dedicatedImage,
5796 VmaSuballocationType suballocType,
5802 VkResult ResizeAllocation(
5804 VkDeviceSize newSize);
5806 void CalculateStats(
VmaStats* pStats);
5808 #if VMA_STATS_STRING_ENABLED 5809 void PrintDetailedMap(
class VmaJsonWriter& json);
5812 VkResult Defragment(
5814 size_t allocationCount,
5815 VkBool32* pAllocationsChanged,
5823 void DestroyPool(
VmaPool pool);
5826 void SetCurrentFrameIndex(uint32_t frameIndex);
5827 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5829 void MakePoolAllocationsLost(
5831 size_t* pLostAllocationCount);
5832 VkResult CheckPoolCorruption(
VmaPool hPool);
5833 VkResult CheckCorruption(uint32_t memoryTypeBits);
5837 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5838 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5843 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5844 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5846 void FlushOrInvalidateAllocation(
5848 VkDeviceSize offset, VkDeviceSize size,
5849 VMA_CACHE_OPERATION op);
5851 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5854 VkDeviceSize m_PreferredLargeHeapBlockSize;
5856 VkPhysicalDevice m_PhysicalDevice;
5857 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
5859 VMA_MUTEX m_PoolsMutex;
5861 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5862 uint32_t m_NextPoolId;
5866 #if VMA_RECORDING_ENABLED 5867 VmaRecorder* m_pRecorder;
5872 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5874 VkResult AllocateMemoryOfType(
5876 VkDeviceSize alignment,
5877 bool dedicatedAllocation,
5878 VkBuffer dedicatedBuffer,
5879 VkImage dedicatedImage,
5881 uint32_t memTypeIndex,
5882 VmaSuballocationType suballocType,
5886 VkResult AllocateDedicatedMemory(
5888 VmaSuballocationType suballocType,
5889 uint32_t memTypeIndex,
5891 bool isUserDataString,
5893 VkBuffer dedicatedBuffer,
5894 VkImage dedicatedImage,
// VmaMalloc (allocator-handle overload): forwards to the callback-based
// VmaMalloc using the allocator's stored VkAllocationCallbacks, so CPU-side
// helper allocations honor user-supplied allocation callbacks.
5904 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5906 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
// VmaFree (allocator-handle overload): counterpart to VmaMalloc above —
// releases `ptr` through the allocator's VkAllocationCallbacks.
5909 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5911 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Single-object allocation helper: grabs sizeof(T) bytes at T's alignment via
// VmaMalloc and casts to T*.
// NOTE(review): extraction damage — the function's signature line (orig
// 5915-5916, presumably the vma_new declaration) is missing from this chunk;
// only the template header and return statement survived.
5914 template<
typename T>
5917 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// VmaAllocateArray: raw array allocation — sizeof(T) * count bytes at T's
// alignment through the allocator's callbacks. Elements are NOT constructed;
// callers are responsible for construction/destruction (see vma_delete_array).
5920 template<
typename T>
5921 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5923 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// vma_delete: destroys and frees a single object allocated through the
// allocator's callbacks.
// NOTE(review): extraction damage — the lines between the signature and the
// VmaFree call (orig 5928-5931, presumably the explicit ~T() destructor call
// and a null check) are missing from this chunk.
5926 template<
typename T>
5927 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5932 VmaFree(hAllocator, ptr);
// vma_delete_array: destroys `count` elements of a raw array (iterating
// indices in reverse via the `i = count; i--;` idiom, matching reverse
// destruction order) and then frees the storage.
// NOTE(review): extraction damage — the loop body (orig 5942, presumably the
// per-element destructor call) and surrounding braces are missing from this
// chunk; only the loop header and the final VmaFree survived.
5936 template<
typename T>
5937 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5941 for(
size_t i = count; i--; )
5943 VmaFree(hAllocator, ptr);
// VmaStringBuilder: minimal append-only string buffer built on VmaVector<char>
// (so it allocates through the allocator's callbacks). Used by the
// stats-string / JSON-dump machinery; compiled only when
// VMA_STATS_STRING_ENABLED. The buffer is NOT NUL-terminated — length comes
// from GetLength().
5950 #if VMA_STATS_STRING_ENABLED 5952 class VmaStringBuilder
// Construct with the allocator whose callbacks back the char vector.
5955 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5956 size_t GetLength()
const {
return m_Data.size(); }
5957 const char* GetData()
const {
return m_Data.data(); }
// Append a single character.
5959 void Add(
char ch) { m_Data.push_back(ch); }
// Append a NUL-terminated string (defined out of line below).
5960 void Add(
const char* pStr);
5961 void AddNewLine() { Add(
'\n'); }
// Numeric/pointer formatting helpers, defined out of line below.
5962 void AddNumber(uint32_t num);
5963 void AddNumber(uint64_t num);
5964 void AddPointer(
const void* ptr);
5967 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Append a NUL-terminated C string: grow the vector by strlen(pStr) and
// memcpy the bytes into the new tail (no terminator is stored).
5970 void VmaStringBuilder::Add(
const char* pStr)
5972 const size_t strLen = strlen(pStr);
5975 const size_t oldCount = m_Data.size();
5976 m_Data.resize(oldCount + strLen);
5977 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Format a 32-bit unsigned number into a stack buffer via VmaUint32ToStr and
// append it.
// NOTE(review): extraction damage — the buffer declaration and the Add(buf)
// call (orig 5982-5986) are missing from this chunk.
5981 void VmaStringBuilder::AddNumber(uint32_t num)
5984 VmaUint32ToStr(buf,
sizeof(buf), num);
// 64-bit counterpart of AddNumber(uint32_t): formats via VmaUint64ToStr.
// NOTE(review): extraction damage — the buffer declaration and the Add(buf)
// call (orig 5989-5993) are missing from this chunk.
5988 void VmaStringBuilder::AddNumber(uint64_t num)
5991 VmaUint64ToStr(buf,
sizeof(buf), num);
// Format a pointer value into a stack buffer via VmaPtrToStr and append it.
// NOTE(review): extraction damage — the buffer declaration and the Add(buf)
// call (orig 5996-6000) are missing from this chunk.
5995 void VmaStringBuilder::AddPointer(
const void* ptr)
5998 VmaPtrToStr(buf,
sizeof(buf), ptr);
6002 #endif // #if VMA_STATS_STRING_ENABLED 6007 #if VMA_STATS_STRING_ENABLED 6011 VMA_CLASS_NO_COPY(VmaJsonWriter)
// Streaming JSON emitter that writes into a VmaStringBuilder. A stack of
// StackItem records tracks nesting (object vs array, element count, and
// whether the collection is printed on a single line) so commas, colons and
// indentation are inserted automatically. m_InsideString gates the
// BeginString/ContinueString/EndString protocol.
6013 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// Open "{" / "[". singleLine=true suppresses newlines/indent inside it.
6016 void BeginObject(
bool singleLine =
false);
6019 void BeginArray(
bool singleLine =
false);
// Write a complete quoted, escaped string value (or key, inside an object).
6022 void WriteString(
const char* pStr);
// Piecewise string protocol: BeginString opens the quote, ContinueString
// appends escaped content, EndString closes it.
6023 void BeginString(
const char* pStr = VMA_NULL);
6024 void ContinueString(
const char* pStr);
6025 void ContinueString(uint32_t n);
6026 void ContinueString(uint64_t n);
6027 void ContinueString_Pointer(
const void* ptr);
6028 void EndString(
const char* pStr = VMA_NULL);
// Unquoted scalar values.
6030 void WriteNumber(uint32_t n);
6031 void WriteNumber(uint64_t n);
6032 void WriteBool(
bool b);
// One indentation step (see INDENT definition below).
6036 static const char*
const INDENT;
6038 enum COLLECTION_TYPE
6040 COLLECTION_TYPE_OBJECT,
6041 COLLECTION_TYPE_ARRAY,
// Per-nesting-level bookkeeping; valueCount drives comma/colon placement
// (in objects, even counts are keys, odd counts are values).
6045 COLLECTION_TYPE type;
6046 uint32_t valueCount;
6047 bool singleLineMode;
6050 VmaStringBuilder& m_SB;
6051 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6052 bool m_InsideString;
// Emits separator/indent before a value; asserts keys are strings.
6054 void BeginValue(
bool isString);
6055 void WriteIndent(
bool oneLess =
false);
// Two-space indent per nesting level.
6058 const char*
const VmaJsonWriter::INDENT =
" ";
// Constructor: binds the output string builder (m_SB init is on a dropped
// line) and the stack's allocator to the user callbacks; starts outside any
// string.
6060 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6062 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6063 m_InsideString(false)
// Destructor asserts the document was well-formed: no open string and every
// BeginObject/BeginArray matched by an End call.
6067 VmaJsonWriter::~VmaJsonWriter()
6069 VMA_ASSERT(!m_InsideString);
6070 VMA_ASSERT(m_Stack.empty());
// Open a JSON object: emit "{" (on a dropped line) and push a StackItem so
// subsequent values get correct separators. singleLine disables pretty-print
// for this level.
6073 void VmaJsonWriter::BeginObject(
bool singleLine)
6075 VMA_ASSERT(!m_InsideString);
6081 item.type = COLLECTION_TYPE_OBJECT;
6082 item.valueCount = 0;
6083 item.singleLineMode = singleLine;
6084 m_Stack.push_back(item);
// Close the current object; the assert enforces matched Begin/End pairs of
// the right collection type.
6087 void VmaJsonWriter::EndObject()
6089 VMA_ASSERT(!m_InsideString);
6094 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Open a JSON array — same mechanics as BeginObject with "[".
6098 void VmaJsonWriter::BeginArray(
bool singleLine)
6100 VMA_ASSERT(!m_InsideString);
6106 item.type = COLLECTION_TYPE_ARRAY;
6107 item.valueCount = 0;
6108 item.singleLineMode = singleLine;
6109 m_Stack.push_back(item);
// Close the current array.
6112 void VmaJsonWriter::EndArray()
6114 VMA_ASSERT(!m_InsideString);
6119 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Write a complete quoted string value; body (Begin+End) is on dropped lines.
6123 void VmaJsonWriter::WriteString(
const char* pStr)
// Open a string value: emits separator + opening quote, sets m_InsideString,
// and appends pStr immediately when non-empty.
6129 void VmaJsonWriter::BeginString(
const char* pStr)
6131 VMA_ASSERT(!m_InsideString);
6135 m_InsideString =
true;
6136 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6138 ContinueString(pStr);
// Append characters to the open string, escaping per JSON character by
// character (the escape switch is on dropped lines 6148-6179); unsupported
// control characters trip the assert.
6142 void VmaJsonWriter::ContinueString(
const char* pStr)
6144 VMA_ASSERT(m_InsideString);
6146 const size_t strLen = strlen(pStr);
6147 for(
size_t i = 0; i < strLen; ++i)
6180 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric append variants — formatted into the open string.
6186 void VmaJsonWriter::ContinueString(uint32_t n)
6188 VMA_ASSERT(m_InsideString);
6192 void VmaJsonWriter::ContinueString(uint64_t n)
6194 VMA_ASSERT(m_InsideString);
// Pointer append variant.
6198 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6200 VMA_ASSERT(m_InsideString);
6201 m_SB.AddPointer(ptr);
// Close the open string, optionally appending a final fragment first.
6204 void VmaJsonWriter::EndString(
const char* pStr)
6206 VMA_ASSERT(m_InsideString);
6207 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6209 ContinueString(pStr);
6212 m_InsideString =
false;
// Unquoted numeric value (separator emitted by BeginValue on a dropped line).
6215 void VmaJsonWriter::WriteNumber(uint32_t n)
6217 VMA_ASSERT(!m_InsideString);
6222 void VmaJsonWriter::WriteNumber(uint64_t n)
6224 VMA_ASSERT(!m_InsideString);
// JSON boolean literal.
6229 void VmaJsonWriter::WriteBool(
bool b)
6231 VMA_ASSERT(!m_InsideString);
6233 m_SB.Add(b ?
"true" :
"false");
// JSON null literal (the Add("null") call is on a dropped line).
6236 void VmaJsonWriter::WriteNull()
6238 VMA_ASSERT(!m_InsideString);
// Called before every value: using the top StackItem, decides what separator
// to emit. Inside an object, even valueCount means a key is expected (must be
// a string — hence the assert), odd means a value follows a ':'; in arrays a
// ',' precedes every element after the first.
6243 void VmaJsonWriter::BeginValue(
bool isString)
6245 if(!m_Stack.empty())
6247 StackItem& currItem = m_Stack.back();
6248 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6249 currItem.valueCount % 2 == 0)
6251 VMA_ASSERT(isString);
6254 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6255 currItem.valueCount % 2 != 0)
6259 else if(currItem.valueCount > 0)
6268 ++currItem.valueCount;
// Emit newline + INDENT repeated once per stack level (skipped entirely when
// the innermost collection is in single-line mode); oneLess backs off one
// level, used for closing brackets.
6272 void VmaJsonWriter::WriteIndent(
bool oneLess)
6274 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6278 size_t count = m_Stack.size();
6279 if(count > 0 && oneLess)
6283 for(
size_t i = 0; i < count; ++i)
6290 #endif // #if VMA_STATS_STRING_ENABLED 6294 void VmaAllocation_T::SetUserData(
// Replace the allocation's pUserData. In string mode
// (VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING) the allocator OWNS a copy of
// the string: the old copy is freed and the new text is duplicated via
// vma_new_array. Otherwise the raw pointer is stored as-is (not owned).
VmaAllocator hAllocator,
void* pUserData)
6296 if(IsUserDataString())
// Passing the exact pointer currently stored would be freed below and then
// read — hence the assert.
6298 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
6300 FreeUserDataString(hAllocator);
6302 if(pUserData != VMA_NULL)
6304 const char*
const newStrSrc = (
char*)pUserData;
6305 const size_t newStrLen = strlen(newStrSrc);
6306 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
// +1 copies the terminating '\0' too.
6307 memcpy(newStrDst, newStrSrc, newStrLen + 1);
6308 m_pUserData = newStrDst;
6313 m_pUserData = pUserData;
// Move a block-type allocation to a (possibly different) memory block at a
// new offset — used by defragmentation. If persistently mapped, the mapping
// reference is transferred: unmap the old block, map the new one.
6317 void VmaAllocation_T::ChangeBlockAllocation(
6319 VmaDeviceMemoryBlock* block,
6320 VkDeviceSize offset)
6322 VMA_ASSERT(block != VMA_NULL);
6323 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6326 if(block != m_BlockAllocation.m_Block)
// Strip the persistent-map flag bit to get the plain map reference count.
6328 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
6329 if(IsPersistentMap())
6331 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
6332 block->Map(hAllocator, mapRefCount, VMA_NULL);
6335 m_BlockAllocation.m_Block = block;
6336 m_BlockAllocation.m_Offset = offset;
// Record a new size after an in-place resize (m_Size assignment is on a
// dropped line); size must stay positive.
6339 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
6341 VMA_ASSERT(newSize > 0);
// Accessors that dispatch on m_Type (switch headers are on dropped lines):
// block allocations read through m_BlockAllocation, dedicated allocations
// through m_DedicatedAllocation.
//
// Offset within the VkDeviceMemory block; dedicated allocations start at 0.
6345 VkDeviceSize VmaAllocation_T::GetOffset()
const 6349 case ALLOCATION_TYPE_BLOCK:
6350 return m_BlockAllocation.m_Offset;
6351 case ALLOCATION_TYPE_DEDICATED:
// Underlying VkDeviceMemory handle.
6359 VkDeviceMemory VmaAllocation_T::GetMemory()
const 6363 case ALLOCATION_TYPE_BLOCK:
6364 return m_BlockAllocation.m_Block->GetDeviceMemory();
6365 case ALLOCATION_TYPE_DEDICATED:
6366 return m_DedicatedAllocation.m_hMemory;
6369 return VK_NULL_HANDLE;
// Vulkan memory type index this allocation lives in.
6373 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 6377 case ALLOCATION_TYPE_BLOCK:
6378 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
6379 case ALLOCATION_TYPE_DEDICATED:
6380 return m_DedicatedAllocation.m_MemoryTypeIndex;
// Host pointer if mapped: block mapping is shared, so the allocation's view
// is the block's mapped base plus this allocation's offset.
6387 void* VmaAllocation_T::GetMappedData()
const 6391 case ALLOCATION_TYPE_BLOCK:
6394 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
6395 VMA_ASSERT(pBlockData != VMA_NULL);
6396 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
6403 case ALLOCATION_TYPE_DEDICATED:
// Mapped pointer and map count must agree for dedicated allocations.
6404 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
6405 return m_DedicatedAllocation.m_pMappedData;
// Only block allocations can be "lost"; dedicated ones never are.
6412 bool VmaAllocation_T::CanBecomeLost()
const 6416 case ALLOCATION_TYPE_BLOCK:
6417 return m_BlockAllocation.m_CanBecomeLost;
6418 case ALLOCATION_TYPE_DEDICATED:
// Owning custom pool — meaningful only for block allocations.
6426 VmaPool VmaAllocation_T::GetPool()
const 6428 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6429 return m_BlockAllocation.m_hPool;
// Try to mark this allocation lost via a CAS loop on the last-use frame
// index (the surrounding for(;;) is on dropped lines). Fails if already lost
// or still within the frames-in-use window; succeeds by swapping the frame
// index to VMA_FRAME_INDEX_LOST.
6432 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6434 VMA_ASSERT(CanBecomeLost());
6440 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6443 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still potentially in use by the GPU within the last frameInUseCount frames.
6448 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6454 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names indexed by VmaSuballocationType, used by the JSON
// stats dump (array elements are on dropped lines).
6464 #if VMA_STATS_STRING_ENABLED 6467 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Emit this allocation's properties as key/value pairs into an already-open
// JSON object: type name, size, optional user data (string or pointer),
// frame indices, and buffer/image usage flags when known.
6476 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6478 json.WriteString(
"Type");
6479 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6481 json.WriteString(
"Size");
6482 json.WriteNumber(m_Size);
6484 if(m_pUserData != VMA_NULL)
6486 json.WriteString(
"UserData");
// String-mode user data is printed as a JSON string; raw pointers are
// rendered via the piecewise string protocol.
6487 if(IsUserDataString())
6489 json.WriteString((
const char*)m_pUserData);
6494 json.ContinueString_Pointer(m_pUserData);
6499 json.WriteString(
"CreationFrameIndex");
6500 json.WriteNumber(m_CreationFrameIndex);
6502 json.WriteString(
"LastUseFrameIndex");
6503 json.WriteNumber(GetLastUseFrameIndex());
// 0 means usage flags were never recorded for this allocation.
6505 if(m_BufferImageUsage != 0)
6507 json.WriteString(
"Usage");
6508 json.WriteNumber(m_BufferImageUsage);
// Release the owned copy of the user-data string (string mode only) and null
// the pointer. strlen+1 must match the size passed to vma_new_array in
// SetUserData so the array delete is balanced.
6514 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6516 VMA_ASSERT(IsUserDataString());
6517 if(m_pUserData != VMA_NULL)
6519 char*
const oldStr = (
char*)m_pUserData;
6520 const size_t oldStrLen = strlen(oldStr);
6521 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6522 m_pUserData = VMA_NULL;
// Bump the map reference count of a block allocation. The low 7 bits of
// m_MapCount hold the count (0x7F max); the top bit is the persistent-map
// flag, masked out before comparing. Increment itself is on a dropped line.
6526 void VmaAllocation_T::BlockAllocMap()
6528 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6530 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6536 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrement the map reference count; asserts against unbalanced Unmap.
6540 void VmaAllocation_T::BlockAllocUnmap()
6542 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6544 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6550 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Map a dedicated allocation. If already mapped, just bump the ref count and
// return the cached pointer; otherwise call vkMapMemory and cache the result.
// Returns VK_ERROR_MEMORY_MAP_FAILED when the 7-bit ref count would overflow.
6554 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6556 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
// Already-mapped path (the m_MapCount != 0 test is on a dropped line).
6560 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6562 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6563 *ppData = m_DedicatedAllocation.m_pMappedData;
6569 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6570 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: go through the (possibly user-overridden) Vulkan function
// pointer table. Offset/size/flags arguments are on dropped lines.
6575 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6576 hAllocator->m_hDevice,
6577 m_DedicatedAllocation.m_hMemory,
6582 if(result == VK_SUCCESS)
6584 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmap counterpart: drop one reference; when it reaches zero (decrement on
// a dropped line), clear the cached pointer and call vkUnmapMemory.
6591 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6593 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6595 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6600 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6601 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6602 hAllocator->m_hDevice,
6603 m_DedicatedAllocation.m_hMemory);
6608 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serialize a VmaStatInfo as a JSON object: counters, byte totals, and
// Min/Avg/Max sub-objects for allocation and unused-range sizes. The
// WriteNumber calls pulling fields out of `stat` are on dropped lines.
6612 #if VMA_STATS_STRING_ENABLED 6614 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6618 json.WriteString(
"Blocks");
6621 json.WriteString(
"Allocations");
6624 json.WriteString(
"UnusedRanges");
6627 json.WriteString(
"UsedBytes");
6630 json.WriteString(
"UnusedBytes");
6635 json.WriteString(
"AllocationSize");
6636 json.BeginObject(
true);
6637 json.WriteString(
"Min");
6639 json.WriteString(
"Avg");
6641 json.WriteString(
"Max");
6648 json.WriteString(
"UnusedRangeSize");
6649 json.BeginObject(
true);
6650 json.WriteString(
"Min");
6652 json.WriteString(
"Avg");
6654 json.WriteString(
"Max");
6662 #endif // #if VMA_STATS_STRING_ENABLED 6664 struct VmaSuballocationItemSizeLess
// Comparator ordering free-suballocation iterators by ascending size; the
// iterator-vs-size overload lets VmaBinaryFindFirstNotLess search
// m_FreeSuballocationsBySize with a plain VkDeviceSize key.
6667 const VmaSuballocationList::iterator lhs,
6668 const VmaSuballocationList::iterator rhs)
const 6670 return lhs->size < rhs->size;
6673 const VmaSuballocationList::iterator lhs,
6674 VkDeviceSize rhsSize)
const 6676 return lhs->size < rhsSize;
// Base metadata constructor: captures the allocation callbacks so derived
// metadata classes can allocate bookkeeping structures through them.
6684 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
6686 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
6690 #if VMA_STATS_STRING_ENABLED 6692 void VmaBlockMetadata::PrintDetailedMap_Begin(
// Shared JSON header for a block's detailed map: totals followed by an open
// "Suballocations" array that the caller fills and PrintDetailedMap_End
// closes.
class VmaJsonWriter& json,
6693 VkDeviceSize unusedBytes,
6694 size_t allocationCount,
6695 size_t unusedRangeCount)
const 6699 json.WriteString(
"TotalBytes");
6700 json.WriteNumber(GetSize());
6702 json.WriteString(
"UnusedBytes");
6703 json.WriteNumber(unusedBytes);
6705 json.WriteString(
"Allocations");
6706 json.WriteNumber((uint64_t)allocationCount);
6708 json.WriteString(
"UnusedRanges");
6709 json.WriteNumber((uint64_t)unusedRangeCount);
6711 json.WriteString(
"Suballocations");
// One array element per live allocation: single-line object with the offset
// plus the allocation's own parameters (type, size, user data, ...).
6715 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6716 VkDeviceSize offset,
6719 json.BeginObject(
true);
6721 json.WriteString(
"Offset");
6722 json.WriteNumber(offset);
6724 hAllocation->PrintParameters(json);
// One array element per free gap: offset, the FREE type name, and size.
6729 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6730 VkDeviceSize offset,
6731 VkDeviceSize size)
const 6733 json.BeginObject(
true);
6735 json.WriteString(
"Offset");
6736 json.WriteNumber(offset);
6738 json.WriteString(
"Type");
6739 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6741 json.WriteString(
"Size");
6742 json.WriteNumber(size);
// Closes the "Suballocations" array and outer object opened by _Begin.
6747 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
// Generic (free-list) metadata constructor: suballocation list and the
// size-sorted vector of free-iterator entries both use the user callbacks.
const 6753 #endif // #if VMA_STATS_STRING_ENABLED 6758 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
6759 VmaBlockMetadata(hAllocator),
6762 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6763 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6767 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initialize a fresh block: one FREE suballocation spanning the whole size,
// registered in the by-size vector (the --suballocItem stepping back from
// end() is on a dropped line).
6771 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6773 VmaBlockMetadata::Init(size);
6776 m_SumFreeSize = size;
6778 VmaSuballocation suballoc = {};
6779 suballoc.offset = 0;
6780 suballoc.size = size;
6781 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6782 suballoc.hAllocation = VK_NULL_HANDLE;
// Whole-block free range must be big enough to be tracked by size.
6784 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
6785 m_Suballocations.push_back(suballoc);
6786 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6788 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check of the metadata invariants. Walks the suballocation
// list recomputing offsets/free totals, then verifies the by-size vector is
// sorted and the cached aggregates (m_SumFreeSize, m_FreeCount) match.
6791 bool VmaBlockMetadata_Generic::Validate()
const 6793 VMA_VALIDATE(!m_Suballocations.empty());
6796 VkDeviceSize calculatedOffset = 0;
6798 uint32_t calculatedFreeCount = 0;
6800 VkDeviceSize calculatedSumFreeSize = 0;
// Number of free ranges large enough to appear in m_FreeSuballocationsBySize.
6803 size_t freeSuballocationsToRegister = 0;
6805 bool prevFree =
false;
6807 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6808 suballocItem != m_Suballocations.cend();
6811 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must be contiguous.
6814 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
6816 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Adjacent free ranges must have been merged.
6818 VMA_VALIDATE(!prevFree || !currFree);
6820 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
6824 calculatedSumFreeSize += subAlloc.size;
6825 ++calculatedFreeCount;
6826 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6828 ++freeSuballocationsToRegister;
// Every free range must fit at least the debug margin.
6832 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
// Used ranges must agree with their VmaAllocation's view of offset/size.
6836 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
6837 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With a debug margin, every used range must be preceded by a free one.
6840 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
6843 calculatedOffset += subAlloc.size;
6844 prevFree = currFree;
6849 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
// The by-size vector must reference FREE items in non-decreasing size order.
6851 VkDeviceSize lastSize = 0;
6852 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6854 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6857 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6859 VMA_VALIDATE(suballocItem->size >= lastSize);
6861 lastSize = suballocItem->size;
6865 VMA_VALIDATE(ValidateFreeSuballocationList());
6866 VMA_VALIDATE(calculatedOffset == GetSize());
6867 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
6868 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range: last element of the size-sorted vector (0 when empty —
// that return is on a dropped line).
6873 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6875 if(!m_FreeSuballocationsBySize.empty())
6877 return m_FreeSuballocationsBySize.back()->size;
// Empty block == exactly one suballocation and it is free.
6885 bool VmaBlockMetadata_Generic::IsEmpty()
const 6887 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Accumulate per-block statistics into outInfo; iterates suballocations,
// classifying used vs unused (the accumulation lines are dropped).
6890 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6894 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6906 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6907 suballocItem != m_Suballocations.cend();
6910 const VmaSuballocation& suballoc = *suballocItem;
6911 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Fold this block's totals into running pool statistics (the remaining
// inoutStats field updates are on dropped lines).
6924 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6926 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6928 inoutStats.
size += GetSize();
// Detailed JSON dump: header via PrintDetailedMap_Begin, then one entry per
// suballocation (unused range or live allocation), then the footer.
6935 #if VMA_STATS_STRING_ENABLED 6937 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6939 PrintDetailedMap_Begin(json,
6941 m_Suballocations.size() - (size_t)m_FreeCount,
6945 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6946 suballocItem != m_Suballocations.cend();
6947 ++suballocItem, ++i)
6949 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6951 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6955 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6959 PrintDetailedMap_End(json);
6962 #endif // #if VMA_STATS_STRING_ENABLED 6964 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
// Find space for a new allocation of allocSize/allocAlignment. Fast path:
// binary-search m_FreeSuballocationsBySize for the smallest adequate free
// range (best-fit) or scan largest-first (worst-fit — the strategy switch is
// on dropped lines). Slow path (canMakeOtherLost): consider evicting lost-
// able allocations, picking the candidate with the lowest CalcCost().
// Returns true and fills *pAllocationRequest on success.
6965 uint32_t currentFrameIndex,
6966 uint32_t frameInUseCount,
6967 VkDeviceSize bufferImageGranularity,
6968 VkDeviceSize allocSize,
6969 VkDeviceSize allocAlignment,
6971 VmaSuballocationType allocType,
6972 bool canMakeOtherLost,
6974 VmaAllocationRequest* pAllocationRequest)
6976 VMA_ASSERT(allocSize > 0);
6977 VMA_ASSERT(!upperAddress);
6978 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6979 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6980 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, total free space (plus both debug margins)
// must at least cover the request.
6983 if(canMakeOtherLost ==
false &&
6984 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6990 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6991 if(freeSuballocCount > 0)
// Best-fit: first free range >= allocSize + margins, then walk upward until
// one satisfies alignment/granularity via CheckAllocation.
6996 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6997 m_FreeSuballocationsBySize.data(),
6998 m_FreeSuballocationsBySize.data() + freeSuballocCount,
6999 allocSize + 2 * VMA_DEBUG_MARGIN,
7000 VmaSuballocationItemSizeLess());
7001 size_t index = it - m_FreeSuballocationsBySize.data();
7002 for(; index < freeSuballocCount; ++index)
7007 bufferImageGranularity,
7011 m_FreeSuballocationsBySize[index],
7013 &pAllocationRequest->offset,
7014 &pAllocationRequest->itemsToMakeLostCount,
7015 &pAllocationRequest->sumFreeSize,
7016 &pAllocationRequest->sumItemSize))
7018 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternate strategy: iterate free ranges from largest to smallest.
7026 for(
size_t index = freeSuballocCount; index--; )
7031 bufferImageGranularity,
7035 m_FreeSuballocationsBySize[index],
7037 &pAllocationRequest->offset,
7038 &pAllocationRequest->itemsToMakeLostCount,
7039 &pAllocationRequest->sumFreeSize,
7040 &pAllocationRequest->sumItemSize))
7042 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: brute-force every start position that is free or lost-able,
// keeping the cheapest request (VK_WHOLE_SIZE sentinels mean "none yet").
7049 if(canMakeOtherLost)
7053 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
7054 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
7056 VmaAllocationRequest tmpAllocRequest = {};
7057 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7058 suballocIt != m_Suballocations.end();
7061 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7062 suballocIt->hAllocation->CanBecomeLost())
7067 bufferImageGranularity,
7073 &tmpAllocRequest.offset,
7074 &tmpAllocRequest.itemsToMakeLostCount,
7075 &tmpAllocRequest.sumFreeSize,
7076 &tmpAllocRequest.sumItemSize))
7078 tmpAllocRequest.item = suballocIt;
7080 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7083 *pAllocationRequest = tmpAllocRequest;
// Success iff some candidate replaced the VK_WHOLE_SIZE sentinel.
7089 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Carry out the evictions a CreateAllocationRequest promised: starting at
// request->item, mark itemsToMakeLostCount lost-able allocations as lost,
// merging each freed range (FreeSuballocation may merge neighbors, so the
// iterator is refreshed from its return value). Returns false (on a dropped
// line) if an allocation refuses to become lost.
7098 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7099 uint32_t currentFrameIndex,
7100 uint32_t frameInUseCount,
7101 VmaAllocationRequest* pAllocationRequest)
7103 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over already-free ranges between victims.
7105 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7107 ++pAllocationRequest->item;
7109 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7110 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7111 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7112 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7114 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7115 --pAllocationRequest->itemsToMakeLostCount;
// Post-conditions: the request now points at a FREE range ready for Alloc.
7123 VMA_HEAVY_ASSERT(Validate());
7124 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7125 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Aggressively mark every lost-able allocation in this block as lost,
// freeing (and merging) its range; returns how many were evicted.
7130 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7132 uint32_t lostAllocationCount = 0;
7133 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7134 it != m_Suballocations.end();
7137 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7138 it->hAllocation->CanBecomeLost() &&
7139 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge with neighbors; continue from its result.
7141 it = FreeSuballocation(it);
7142 ++lostAllocationCount;
7145 return lostAllocationCount;
// Corruption detection (used with VMA_DEBUG_DETECT_CORRUPTION): every used
// suballocation must have intact magic values written into the debug margins
// immediately before its offset and after its end. pBlockData is the mapped
// block memory.
7148 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7150 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7151 it != m_Suballocations.end();
7154 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7156 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7158 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7159 return VK_ERROR_VALIDATION_FAILED_EXT;
7161 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7163 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7164 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commit a previously computed allocation request: carve the target FREE
// range into [padding-before][allocation][padding-after]. The target item is
// repurposed as the allocation; leftover padding becomes new FREE items
// registered in the by-size vector. Aggregate counters updated at the end.
7172 void VmaBlockMetadata_Generic::Alloc(
7173 const VmaAllocationRequest& request,
7174 VmaSuballocationType type,
7175 VkDeviceSize allocSize,
7179 VMA_ASSERT(!upperAddress);
7180 VMA_ASSERT(request.item != m_Suballocations.end());
7181 VmaSuballocation& suballoc = *request.item;
7183 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// request.offset may exceed suballoc.offset due to alignment/margins.
7185 VMA_ASSERT(request.offset >= suballoc.offset);
7186 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7187 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7188 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item stops being free — take it out of the by-size index first.
7192 UnregisterFreeSuballocation(request.item);
7194 suballoc.offset = request.offset;
7195 suballoc.size = allocSize;
7196 suballoc.type = type;
7197 suballoc.hAllocation = hAllocation;
// Tail padding inserted AFTER the allocation (guarded by paddingEnd > 0 on a
// dropped line).
7202 VmaSuballocation paddingSuballoc = {};
7203 paddingSuballoc.offset = request.offset + allocSize;
7204 paddingSuballoc.size = paddingEnd;
7205 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7206 VmaSuballocationList::iterator next = request.item;
7208 const VmaSuballocationList::iterator paddingEndItem =
7209 m_Suballocations.insert(next, paddingSuballoc);
7210 RegisterFreeSuballocation(paddingEndItem);
// Head padding inserted BEFORE the allocation (guarded by paddingBegin > 0).
7216 VmaSuballocation paddingSuballoc = {};
7217 paddingSuballoc.offset = request.offset - paddingBegin;
7218 paddingSuballoc.size = paddingBegin;
7219 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7220 const VmaSuballocationList::iterator paddingBeginItem =
7221 m_Suballocations.insert(request.item, paddingSuballoc);
7222 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; each non-zero padding adds one back (increments
// are on dropped lines).
7226 m_FreeCount = m_FreeCount - 1;
7227 if(paddingBegin > 0)
7235 m_SumFreeSize -= allocSize;
// Free by allocation handle: linear search for the matching suballocation,
// then FreeSuballocation handles merging and re-registration. Asserts if the
// handle does not belong to this block.
7238 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7240 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7241 suballocItem != m_Suballocations.end();
7244 VmaSuballocation& suballoc = *suballocItem;
7245 if(suballoc.hAllocation == allocation)
7247 FreeSuballocation(suballocItem);
7248 VMA_HEAVY_ASSERT(Validate());
7252 VMA_ASSERT(0 &&
"Not found!");
// Same as Free but keyed by byte offset instead of allocation handle.
7255 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7257 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7258 suballocItem != m_Suballocations.end();
7261 VmaSuballocation& suballoc = *suballocItem;
7262 if(suballoc.offset == offset)
7264 FreeSuballocation(suballocItem);
7268 VMA_ASSERT(0 &&
"Not found!");
// Try to grow or shrink `alloc` in place to newSize; returns true on success
// (the final returns are on dropped lines). Shrinking always succeeds —
// freed space is merged into the following free item or becomes a new one.
// Growing succeeds only when the next item is a free range large enough
// (considering VMA_DEBUG_MARGIN) to absorb the difference.
7271 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
7273 typedef VmaSuballocationList::iterator iter_type;
7274 for(iter_type suballocItem = m_Suballocations.begin();
7275 suballocItem != m_Suballocations.end();
7278 VmaSuballocation& suballoc = *suballocItem;
7279 if(suballoc.hAllocation == alloc)
// nextItem is advanced past the found item on a dropped line (++nextItem).
7281 iter_type nextItem = suballocItem;
7285 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// ---- Shrink path ----
7288 if(newSize < alloc->GetSize())
7290 const VkDeviceSize sizeDiff = suballoc.size - newSize;
7293 if(nextItem != m_Suballocations.end())
// Next item is free: extend it backward to cover the released bytes.
7296 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Re-register because its size changes and the by-size vector is sorted.
7299 UnregisterFreeSuballocation(nextItem);
7300 nextItem->offset -= sizeDiff;
7301 nextItem->size += sizeDiff;
7302 RegisterFreeSuballocation(nextItem);
// Next item is used: insert a brand-new free range between them.
7308 VmaSuballocation newFreeSuballoc;
7309 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
7310 newFreeSuballoc.offset = suballoc.offset + newSize;
7311 newFreeSuballoc.size = sizeDiff;
7312 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7313 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
7314 RegisterFreeSuballocation(newFreeSuballocIt);
// Item was last in the block: append the free range at the end.
7323 VmaSuballocation newFreeSuballoc;
7324 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
7325 newFreeSuballoc.offset = suballoc.offset + newSize;
7326 newFreeSuballoc.size = sizeDiff;
7327 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7328 m_Suballocations.push_back(newFreeSuballoc);
7330 iter_type newFreeSuballocIt = m_Suballocations.end();
7331 RegisterFreeSuballocation(--newFreeSuballocIt);
7336 suballoc.size = newSize;
7337 m_SumFreeSize += sizeDiff;
// ---- Grow path ----
7342 const VkDeviceSize sizeDiff = newSize - suballoc.size;
7345 if(nextItem != m_Suballocations.end())
7348 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Next free range too small to donate sizeDiff (keeping the margin): fail.
7351 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
// Strictly larger: shrink the free neighbor from the front.
7357 if(nextItem->size > sizeDiff)
7360 UnregisterFreeSuballocation(nextItem);
7361 nextItem->offset += sizeDiff;
7362 nextItem->size -= sizeDiff;
7363 RegisterFreeSuballocation(nextItem);
// Exactly equal: the free neighbor is consumed entirely.
7369 UnregisterFreeSuballocation(nextItem);
7370 m_Suballocations.erase(nextItem);
7386 suballoc.size = newSize;
7387 m_SumFreeSize -= sizeDiff;
7394 VMA_ASSERT(0 &&
"Not found!");
// Invariants of the by-size index alone: every entry is FREE, at least the
// registration threshold, and sizes are non-decreasing (sorted for binary
// search).
7398 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 7400 VkDeviceSize lastSize = 0;
7401 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7403 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7405 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
7406 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7407 VMA_VALIDATE(it->size >= lastSize);
7408 lastSize = it->size;
// Test whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem, computing the final *pOffset after debug
// margin, alignment, and bufferImageGranularity adjustments. Two major
// branches: canMakeOtherLost (start item may be used but lost-able; may span
// several successors, accumulating *pSumFreeSize/*pSumItemSize and counting
// victims in *itemsToMakeLostCount) and the simple free-range-only check.
// Returns via dropped `return true/false` lines.
7413 bool VmaBlockMetadata_Generic::CheckAllocation(
7414 uint32_t currentFrameIndex,
7415 uint32_t frameInUseCount,
7416 VkDeviceSize bufferImageGranularity,
7417 VkDeviceSize allocSize,
7418 VkDeviceSize allocAlignment,
7419 VmaSuballocationType allocType,
7420 VmaSuballocationList::const_iterator suballocItem,
7421 bool canMakeOtherLost,
7422 VkDeviceSize* pOffset,
7423 size_t* itemsToMakeLostCount,
7424 VkDeviceSize* pSumFreeSize,
7425 VkDeviceSize* pSumItemSize)
const 7427 VMA_ASSERT(allocSize > 0);
7428 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7429 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7430 VMA_ASSERT(pOffset != VMA_NULL);
7432 *itemsToMakeLostCount = 0;
// ---- Eviction-aware branch ----
7436 if(canMakeOtherLost)
7438 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7440 *pSumFreeSize = suballocItem->size;
// Used start item: only usable if lost-able and outside the in-use window.
7444 if(suballocItem->hAllocation->CanBecomeLost() &&
7445 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7447 ++*itemsToMakeLostCount;
7448 *pSumItemSize = suballocItem->size;
// Not enough room left in the whole block from this offset.
7457 if(GetSize() - suballocItem->offset < allocSize)
7463 *pOffset = suballocItem->offset;
// Reserve the leading debug margin, then align up.
7466 if(VMA_DEBUG_MARGIN > 0)
7468 *pOffset += VMA_DEBUG_MARGIN;
7472 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Walk earlier neighbors: if a conflicting resource type shares the same
// "page" (bufferImageGranularity), bump alignment to the granularity.
7476 if(bufferImageGranularity > 1)
7478 bool bufferImageGranularityConflict =
false;
7479 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7480 while(prevSuballocItem != m_Suballocations.cbegin())
7483 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7484 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7486 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7488 bufferImageGranularityConflict =
true;
7496 if(bufferImageGranularityConflict)
7498 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed us past the start item entirely: not placeable here.
7504 if(*pOffset >= suballocItem->offset + suballocItem->size)
7510 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7513 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7515 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
7517 if(suballocItem->offset + totalSize > GetSize())
// Consume successor items until totalSize is covered, tallying free bytes
// vs victim bytes; any non-evictable item aborts (return on dropped lines).
7524 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7525 if(totalSize > suballocItem->size)
7527 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7528 while(remainingSize > 0)
7531 if(lastSuballocItem == m_Suballocations.cend())
7535 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7537 *pSumFreeSize += lastSuballocItem->size;
7541 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7542 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7543 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7545 ++*itemsToMakeLostCount;
7546 *pSumItemSize += lastSuballocItem->size;
7553 remainingSize = (lastSuballocItem->size < remainingSize) ?
7554 remainingSize - lastSuballocItem->size : 0;
// Later neighbors on the same granularity page that conflict must also be
// evictable, adding to the victim count.
7560 if(bufferImageGranularity > 1)
7562 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7564 while(nextSuballocItem != m_Suballocations.cend())
7566 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7567 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7569 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7571 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7572 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7573 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7575 ++*itemsToMakeLostCount;
// ---- Simple branch: start item must already be FREE ----
7594 const VmaSuballocation& suballoc = *suballocItem;
7595 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7597 *pSumFreeSize = suballoc.size;
7600 if(suballoc.size < allocSize)
7606 *pOffset = suballoc.offset;
// Same margin/alignment/granularity adjustments as above.
7609 if(VMA_DEBUG_MARGIN > 0)
7611 *pOffset += VMA_DEBUG_MARGIN;
7615 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7619 if(bufferImageGranularity > 1)
7621 bool bufferImageGranularityConflict =
false;
7622 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7623 while(prevSuballocItem != m_Suballocations.cbegin())
7626 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7627 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7629 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7631 bufferImageGranularityConflict =
true;
7639 if(bufferImageGranularityConflict)
7641 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7646 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7649 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fail if the adjusted allocation no longer fits inside this free range.
7652 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A later conflicting neighbor on the same page makes this spot unusable
// (no eviction allowed in this branch).
7659 if(bufferImageGranularity > 1)
7661 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7663 while(nextSuballocItem != m_Suballocations.cend())
7665 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7666 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7668 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merge a FREE item with its (also FREE) successor: absorb the successor's
// size and erase it. Caller guarantees both are free and unregistered from
// the by-size index as needed. (++nextItem advance is on a dropped line.)
7687 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7689 VMA_ASSERT(item != m_Suballocations.end());
7690 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7692 VmaSuballocationList::iterator nextItem = item;
7694 VMA_ASSERT(nextItem != m_Suballocations.end());
7695 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7697 item->size += nextItem->size;
7699 m_Suballocations.erase(nextItem);
// Turn a used suballocation back into FREE, update aggregates, coalesce with
// free neighbors on either side, and (re)register the resulting range in the
// by-size index. Returns an iterator to the surviving merged free item.
7702 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7705 VmaSuballocation& suballoc = *suballocItem;
7706 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7707 suballoc.hAllocation = VK_NULL_HANDLE;
7711 m_SumFreeSize += suballoc.size;
// Probe both neighbors for free ranges to merge with.
7714 bool mergeWithNext =
false;
7715 bool mergeWithPrev =
false;
7717 VmaSuballocationList::iterator nextItem = suballocItem;
7719 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7721 mergeWithNext =
true;
7724 VmaSuballocationList::iterator prevItem = suballocItem;
7725 if(suballocItem != m_Suballocations.begin())
7728 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7730 mergeWithPrev =
true;
// Neighbors leave the by-size index before their sizes change.
7736 UnregisterFreeSuballocation(nextItem);
7737 MergeFreeWithNext(suballocItem);
// When merging into the previous item, that item survives and is returned
// (the `return prevItem;` is on a dropped line).
7742 UnregisterFreeSuballocation(prevItem);
7743 MergeFreeWithNext(prevItem);
7744 RegisterFreeSuballocation(prevItem);
7749 RegisterFreeSuballocation(suballocItem);
7750 return suballocItem;
// Insert a FREE item into the size-sorted index — only ranges at least
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are tracked (smaller ones are
// deliberately left out to keep the vector small).
7754 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7756 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7757 VMA_ASSERT(item->size > 0);
7761 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7763 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7765 if(m_FreeSuballocationsBySize.empty())
7767 m_FreeSuballocationsBySize.push_back(item);
// Keep the vector sorted so binary search stays valid.
7771 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Remove a FREE item from the size-sorted index. Binary-search to the first
// entry of equal size, then scan forward through the equal-size run until
// the exact iterator is found; asserts if it is missing.
7779 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7781 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7782 VMA_ASSERT(item->size > 0);
7786 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Items below the threshold were never registered — nothing to remove.
7788 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7790 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7791 m_FreeSuballocationsBySize.data(),
7792 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7794 VmaSuballocationItemSizeLess());
7795 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7796 index < m_FreeSuballocationsBySize.size();
7799 if(m_FreeSuballocationsBySize[index] == item)
7801 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Walked past the equal-size run without finding the item: index corrupt.
7804 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7806 VMA_ASSERT(0 &&
"Not found.");
// Constructs linear block metadata. Both suballocation vectors use the
// allocator's allocation callbacks; vector 0 starts as the "1st" vector,
// the "2nd" vector starts empty, and all null-item counters start at zero.
7815 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7816 VmaBlockMetadata(hAllocator),
7818 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7819 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7820 m_1stVectorIndex(0),
7821 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7822 m_1stNullItemsBeginCount(0),
7823 m_1stNullItemsMiddleCount(0),
7824 m_2ndNullItemsCount(0)
// Trivial destructor; the suballocation vectors release their storage via
// their VmaStlAllocator.
7828 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
7832 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7834 VmaBlockMetadata::Init(size);
7835 m_SumFreeSize = size;
// Exhaustive self-check of the linear metadata (used via VMA_HEAVY_ASSERT).
// Verifies mode/emptiness invariants, that null-item counters do not exceed
// vector sizes, that offsets strictly increase front-to-back across
// (ring-buffer 2nd part) -> (1st vector) -> (double-stack 2nd part), that
// "free item" <=> "null hAllocation", and finally that cached m_SumFreeSize
// equals block size minus the sum of used sizes.
// NOTE(review): this extraction is missing structural lines (braces, else
// branches, null-item counting increments); code lines below are preserved
// byte-identical.
7838 bool VmaBlockMetadata_Linear::Validate()
const 7840 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7841 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is empty exactly when the mode says so; a ring buffer cannot
// have a non-empty 2nd vector with an empty 1st vector.
7843 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
7844 VMA_VALIDATE(!suballocations1st.empty() ||
7845 suballocations2nd.empty() ||
7846 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
7848 if(!suballocations1st.empty())
// First item past the leading null run must be live; last item must be live
// (null items are never left at the back).
7851 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
7853 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
7855 if(!suballocations2nd.empty())
7858 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
7861 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7862 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7864 VkDeviceSize sumUsedSize = 0;
7865 const size_t suballoc1stCount = suballocations1st.size();
// Running offset cursor; every suballocation must start at or after it.
7866 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Phase 1: 2nd vector when used as a ring buffer (occupies lowest offsets).
7868 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7870 const size_t suballoc2ndCount = suballocations2nd.size();
7871 size_t nullItem2ndCount = 0;
7872 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7874 const VmaSuballocation& suballoc = suballocations2nd[i];
7875 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7877 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7878 VMA_VALIDATE(suballoc.offset >= offset);
// Live items must agree with the VmaAllocation they reference.
7882 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7883 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7884 sumUsedSize += suballoc.size;
7891 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7894 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Phase 2a: the leading run of the 1st vector must be all-null.
7897 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7899 const VmaSuballocation& suballoc = suballocations1st[i];
7900 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7901 suballoc.hAllocation == VK_NULL_HANDLE);
7904 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Phase 2b: remainder of the 1st vector.
7906 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7908 const VmaSuballocation& suballoc = suballocations1st[i];
7909 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7911 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7912 VMA_VALIDATE(suballoc.offset >= offset);
7913 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7917 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7918 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7919 sumUsedSize += suballoc.size;
7926 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7928 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Phase 3: 2nd vector when used as the upper stack of a double stack;
// stored top-down, so iterate indices backwards for increasing offsets.
7930 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7932 const size_t suballoc2ndCount = suballocations2nd.size();
7933 size_t nullItem2ndCount = 0;
7934 for(
size_t i = suballoc2ndCount; i--; )
7936 const VmaSuballocation& suballoc = suballocations2nd[i];
7937 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7939 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7940 VMA_VALIDATE(suballoc.offset >= offset);
7944 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7945 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7946 sumUsedSize += suballoc.size;
7953 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7956 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Final totals: cursor within the block, and free-size bookkeeping exact.
7959 VMA_VALIDATE(offset <= GetSize());
7960 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
7965 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7967 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7968 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free range, computed per mode
// from the extreme suballocations only (the linear layout guarantees the
// biggest gaps are at the ends / between the two vectors).
// NOTE(review): lines elided by this extraction include the empty-block
// early-outs and the enclosing switch braces; code kept byte-identical.
7971 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7973 const VkDeviceSize size = GetSize();
7985 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7987 switch(m_2ndVectorMode)
// Only the 1st vector in use: free space is before its first and after its
// last used item; the larger of the two is returned.
7989 case SECOND_VECTOR_EMPTY:
7995 const size_t suballocations1stCount = suballocations1st.size();
7996 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
7997 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
7998 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8000 firstSuballoc.offset,
8001 size - (lastSuballoc.offset + lastSuballoc.size));
// Ring buffer: the only gap that can grow is between the end of the 2nd
// vector's last item and the 1st vector's first used item.
8005 case SECOND_VECTOR_RING_BUFFER:
8010 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8011 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8012 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8013 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: the gap is between the top of the lower (1st) stack and the
// bottom of the upper (2nd) stack.
8017 case SECOND_VECTOR_DOUBLE_STACK:
8022 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8023 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
8024 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
8025 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with per-block statistics by walking the block front-to-back
// in three phases: (1) the 2nd vector when it is a ring buffer (lowest
// offsets), (2) the 1st vector up to the block end or the bottom of the
// upper stack, (3) the 2nd vector when it is the upper stack of a double
// stack (walked by decreasing index = increasing offset). Gaps between live
// allocations are recorded as unused ranges.
// NOTE(review): the outInfo accumulation statements were elided by this
// extraction; code lines below are preserved byte-identical.
8035 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 8037 const VkDeviceSize size = GetSize();
8038 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8039 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8040 const size_t suballoc1stCount = suballocations1st.size();
8041 const size_t suballoc2ndCount = suballocations2nd.size();
8052 VkDeviceSize lastOffset = 0;
// Phase 1: ring-buffer part of the 2nd vector, ending where the 1st
// vector's first used item begins.
8054 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8056 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8057 size_t nextAlloc2ndIndex = 0;
8058 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items.
8061 while(nextAlloc2ndIndex < suballoc2ndCount &&
8062 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8064 ++nextAlloc2ndIndex;
8068 if(nextAlloc2ndIndex < suballoc2ndCount)
8070 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8073 if(lastOffset < suballoc.offset)
8076 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8090 lastOffset = suballoc.offset + suballoc.size;
8091 ++nextAlloc2ndIndex;
8097 if(lastOffset < freeSpace2ndTo1stEnd)
8099 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8107 lastOffset = freeSpace2ndTo1stEnd;
// Phase 2: the 1st vector.
8112 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8113 const VkDeviceSize freeSpace1stTo2ndEnd =
8114 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8115 while(lastOffset < freeSpace1stTo2ndEnd)
8118 while(nextAlloc1stIndex < suballoc1stCount &&
8119 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8121 ++nextAlloc1stIndex;
8125 if(nextAlloc1stIndex < suballoc1stCount)
8127 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8130 if(lastOffset < suballoc.offset)
8133 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8147 lastOffset = suballoc.offset + suballoc.size;
8148 ++nextAlloc1stIndex;
8154 if(lastOffset < freeSpace1stTo2ndEnd)
8156 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8164 lastOffset = freeSpace1stTo2ndEnd;
// Phase 3: upper stack (2nd vector in double-stack mode), top index first.
8168 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8170 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8171 while(lastOffset < size)
8174 while(nextAlloc2ndIndex != SIZE_MAX &&
8175 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8177 --nextAlloc2ndIndex;
8181 if(nextAlloc2ndIndex != SIZE_MAX)
8183 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8186 if(lastOffset < suballoc.offset)
8189 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8203 lastOffset = suballoc.offset + suballoc.size;
8204 --nextAlloc2ndIndex;
8210 if(lastOffset < size)
8212 const VkDeviceSize unusedRangeSize = size - lastOffset;
8228 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8230 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8231 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8232 const VkDeviceSize size = GetSize();
8233 const size_t suballoc1stCount = suballocations1st.size();
8234 const size_t suballoc2ndCount = suballocations2nd.size();
8236 inoutStats.
size += size;
8238 VkDeviceSize lastOffset = 0;
8240 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8242 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8243 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8244 while(lastOffset < freeSpace2ndTo1stEnd)
8247 while(nextAlloc2ndIndex < suballoc2ndCount &&
8248 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8250 ++nextAlloc2ndIndex;
8254 if(nextAlloc2ndIndex < suballoc2ndCount)
8256 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8259 if(lastOffset < suballoc.offset)
8262 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8273 lastOffset = suballoc.offset + suballoc.size;
8274 ++nextAlloc2ndIndex;
8279 if(lastOffset < freeSpace2ndTo1stEnd)
8282 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8289 lastOffset = freeSpace2ndTo1stEnd;
8294 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8295 const VkDeviceSize freeSpace1stTo2ndEnd =
8296 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8297 while(lastOffset < freeSpace1stTo2ndEnd)
8300 while(nextAlloc1stIndex < suballoc1stCount &&
8301 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8303 ++nextAlloc1stIndex;
8307 if(nextAlloc1stIndex < suballoc1stCount)
8309 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8312 if(lastOffset < suballoc.offset)
8315 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8326 lastOffset = suballoc.offset + suballoc.size;
8327 ++nextAlloc1stIndex;
8332 if(lastOffset < freeSpace1stTo2ndEnd)
8335 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8342 lastOffset = freeSpace1stTo2ndEnd;
8346 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8348 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8349 while(lastOffset < size)
8352 while(nextAlloc2ndIndex != SIZE_MAX &&
8353 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8355 --nextAlloc2ndIndex;
8359 if(nextAlloc2ndIndex != SIZE_MAX)
8361 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8364 if(lastOffset < suballoc.offset)
8367 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8378 lastOffset = suballoc.offset + suballoc.size;
8379 --nextAlloc2ndIndex;
8384 if(lastOffset < size)
8387 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Writes a detailed JSON map of this block: a first pass counts allocations,
// unused ranges and used bytes (needed by PrintDetailedMap_Begin), a second
// pass emits each allocation and unused range in offset order. Both passes
// use the same three-phase walk as CalcAllocationStatInfo.
// NOTE(review): counting statements and structural lines were elided by this
// extraction; code lines below are preserved byte-identical.
#if VMA_STATS_STRING_ENABLED 8401 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 8403 const VkDeviceSize size = GetSize();
8404 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8405 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8406 const size_t suballoc1stCount = suballocations1st.size();
8407 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: count.
8411 size_t unusedRangeCount = 0;
8412 VkDeviceSize usedBytes = 0;
8414 VkDeviceSize lastOffset = 0;
8416 size_t alloc2ndCount = 0;
8417 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8419 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8420 size_t nextAlloc2ndIndex = 0;
8421 while(lastOffset < freeSpace2ndTo1stEnd)
8424 while(nextAlloc2ndIndex < suballoc2ndCount &&
8425 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8427 ++nextAlloc2ndIndex;
8431 if(nextAlloc2ndIndex < suballoc2ndCount)
8433 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8436 if(lastOffset < suballoc.offset)
8445 usedBytes += suballoc.size;
8448 lastOffset = suballoc.offset + suballoc.size;
8449 ++nextAlloc2ndIndex;
8454 if(lastOffset < freeSpace2ndTo1stEnd)
8461 lastOffset = freeSpace2ndTo1stEnd;
8466 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8467 size_t alloc1stCount = 0;
8468 const VkDeviceSize freeSpace1stTo2ndEnd =
8469 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8470 while(lastOffset < freeSpace1stTo2ndEnd)
8473 while(nextAlloc1stIndex < suballoc1stCount &&
8474 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8476 ++nextAlloc1stIndex;
8480 if(nextAlloc1stIndex < suballoc1stCount)
8482 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8485 if(lastOffset < suballoc.offset)
8494 usedBytes += suballoc.size;
8497 lastOffset = suballoc.offset + suballoc.size;
8498 ++nextAlloc1stIndex;
// NOTE(review): this check uses `size` while the matching second-pass check
// (line 8652) uses freeSpace1stTo2ndEnd -- looks inconsistent in
// double-stack mode; verify against upstream before changing.
8503 if(lastOffset < size)
8510 lastOffset = freeSpace1stTo2ndEnd;
8514 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8516 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8517 while(lastOffset < size)
8520 while(nextAlloc2ndIndex != SIZE_MAX &&
8521 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8523 --nextAlloc2ndIndex;
8527 if(nextAlloc2ndIndex != SIZE_MAX)
8529 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8532 if(lastOffset < suballoc.offset)
8541 usedBytes += suballoc.size;
8544 lastOffset = suballoc.offset + suballoc.size;
8545 --nextAlloc2ndIndex;
8550 if(lastOffset < size)
// SECOND PASS: emit JSON using the counts gathered above.
8562 const VkDeviceSize unusedBytes = size - usedBytes;
8563 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
8568 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8570 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8571 size_t nextAlloc2ndIndex = 0;
8572 while(lastOffset < freeSpace2ndTo1stEnd)
8575 while(nextAlloc2ndIndex < suballoc2ndCount &&
8576 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8578 ++nextAlloc2ndIndex;
8582 if(nextAlloc2ndIndex < suballoc2ndCount)
8584 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8587 if(lastOffset < suballoc.offset)
8590 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8591 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8596 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8599 lastOffset = suballoc.offset + suballoc.size;
8600 ++nextAlloc2ndIndex;
8605 if(lastOffset < freeSpace2ndTo1stEnd)
8608 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8609 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8613 lastOffset = freeSpace2ndTo1stEnd;
8618 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8619 while(lastOffset < freeSpace1stTo2ndEnd)
8622 while(nextAlloc1stIndex < suballoc1stCount &&
8623 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8625 ++nextAlloc1stIndex;
8629 if(nextAlloc1stIndex < suballoc1stCount)
8631 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8634 if(lastOffset < suballoc.offset)
8637 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8638 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8643 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8646 lastOffset = suballoc.offset + suballoc.size;
8647 ++nextAlloc1stIndex;
8652 if(lastOffset < freeSpace1stTo2ndEnd)
8655 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8656 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8660 lastOffset = freeSpace1stTo2ndEnd;
8664 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8666 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8667 while(lastOffset < size)
8670 while(nextAlloc2ndIndex != SIZE_MAX &&
8671 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8673 --nextAlloc2ndIndex;
8677 if(nextAlloc2ndIndex != SIZE_MAX)
8679 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8682 if(lastOffset < suballoc.offset)
8685 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8686 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8691 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8694 lastOffset = suballoc.offset + suballoc.size;
8695 --nextAlloc2ndIndex;
8700 if(lastOffset < size)
8703 const VkDeviceSize unusedRangeSize = size - lastOffset;
8704 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8713 PrintDetailedMap_End(json);
// Tries to find space for a new allocation of allocSize/allocAlignment and
// fills *pAllocationRequest on success. Three placement strategies:
// (a) upper-address: push onto the upper stack (allocate downward from the
//     block end / below the current top of the 2nd vector);
// (b) normal: append after the 1st vector's last item, limited by the block
//     end or the bottom of the upper stack;
// (c) ring-buffer wrap: place after the 2nd vector's end, before the 1st
//     vector's first used item, optionally making other allocations lost
//     (canMakeOtherLost) when they block the way.
// bufferImageGranularity conflicts (linear vs non-linear resources on the
// same page) force extra alignment or reject the candidate offset.
// NOTE(review): several lines are elided by this extraction, including the
// `bool upperAddress` parameter (original line 8723) that guards strategy
// (a) and the early `return false` branches; code kept byte-identical.
#endif // #if VMA_STATS_STRING_ENABLED 8717 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8718 uint32_t currentFrameIndex,
8719 uint32_t frameInUseCount,
8720 VkDeviceSize bufferImageGranularity,
8721 VkDeviceSize allocSize,
8722 VkDeviceSize allocAlignment,
8724 VmaSuballocationType allocType,
8725 bool canMakeOtherLost,
8727 VmaAllocationRequest* pAllocationRequest)
8729 VMA_ASSERT(allocSize > 0);
8730 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8731 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8732 VMA_HEAVY_ASSERT(Validate());
8734 const VkDeviceSize size = GetSize();
8735 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8736 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Strategy (a): upper-address allocation. Invalid while the 2nd vector is a
// ring buffer.
8740 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8742 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8747 if(allocSize > size)
// Candidate: just below the current top of the upper stack (or block end).
8751 VkDeviceSize resultBaseOffset = size - allocSize;
8752 if(!suballocations2nd.empty())
8754 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8755 resultBaseOffset = lastSuballoc.offset - allocSize;
8756 if(allocSize > lastSuballoc.offset)
8763 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin and alignment, moving DOWNWARD.
8766 if(VMA_DEBUG_MARGIN > 0)
8768 if(resultOffset < VMA_DEBUG_MARGIN)
8772 resultOffset -= VMA_DEBUG_MARGIN;
8776 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Granularity conflict with the next (higher) 2nd-vector items forces
// alignment down to a page boundary.
8780 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8782 bool bufferImageGranularityConflict =
false;
8783 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8785 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8786 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8788 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8790 bufferImageGranularityConflict =
true;
8798 if(bufferImageGranularityConflict)
8800 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity)<
8805 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8806 suballocations1st.back().offset + suballocations1st.back().size :
// Fits between the end of the 1st vector and the candidate offset?
8808 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Check granularity conflict against the top of the 1st vector too.
8812 if(bufferImageGranularity > 1)
8814 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8816 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8817 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8819 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
8833 pAllocationRequest->offset = resultOffset;
8834 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8835 pAllocationRequest->sumItemSize = 0;
8837 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy (b): append after the 1st vector.
8843 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8847 VkDeviceSize resultBaseOffset = 0;
8848 if(!suballocations1st.empty())
8850 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8851 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8855 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin and alignment, moving UPWARD.
8858 if(VMA_DEBUG_MARGIN > 0)
8860 resultOffset += VMA_DEBUG_MARGIN;
8864 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8868 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8870 bool bufferImageGranularityConflict =
false;
8871 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8873 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8874 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8876 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8878 bufferImageGranularityConflict =
true;
8886 if(bufferImageGranularityConflict)
8888 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// The free region ends at the block end, or at the bottom of the upper stack.
8892 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8893 suballocations2nd.back().offset : size;
8896 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// In double-stack mode, also reject conflicts with upper-stack neighbors.
8900 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8902 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8904 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8905 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8907 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8921 pAllocationRequest->offset = resultOffset;
8922 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8923 pAllocationRequest->sumItemSize = 0;
8925 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy (c): wrap around -- place after the 2nd vector (ring buffer),
// before the 1st vector's first used item.
8932 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8934 VMA_ASSERT(!suballocations1st.empty());
8936 VkDeviceSize resultBaseOffset = 0;
8937 if(!suballocations2nd.empty())
8939 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8940 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8944 VkDeviceSize resultOffset = resultBaseOffset;
8947 if(VMA_DEBUG_MARGIN > 0)
8949 resultOffset += VMA_DEBUG_MARGIN;
8953 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8957 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8959 bool bufferImageGranularityConflict =
false;
8960 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8962 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8963 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8965 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8967 bufferImageGranularityConflict =
true;
8975 if(bufferImageGranularityConflict)
8977 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8981 pAllocationRequest->itemsToMakeLostCount = 0;
8982 pAllocationRequest->sumItemSize = 0;
8983 size_t index1st = m_1stNullItemsBeginCount;
// Collect 1st-vector allocations that overlap the candidate range and could
// be made lost; abort if any of them cannot.
8985 if(canMakeOtherLost)
8987 while(index1st < suballocations1st.size() &&
8988 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8991 const VmaSuballocation& suballoc = suballocations1st[index1st];
8992 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
8998 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8999 if(suballoc.hAllocation->CanBecomeLost() &&
9000 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9002 ++pAllocationRequest->itemsToMakeLostCount;
9003 pAllocationRequest->sumItemSize += suballoc.size;
// Also lose allocations that merely share a granularity page with us.
9015 if(bufferImageGranularity > 1)
9017 while(index1st < suballocations1st.size())
9019 const VmaSuballocation& suballoc = suballocations1st[index1st];
9020 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
9022 if(suballoc.hAllocation != VK_NULL_HANDLE)
9025 if(suballoc.hAllocation->CanBecomeLost() &&
9026 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9028 ++pAllocationRequest->itemsToMakeLostCount;
9029 pAllocationRequest->sumItemSize += suballoc.size;
// Success if the range now fits before the block end or before the next
// surviving 1st-vector item.
9048 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
9049 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
9053 if(bufferImageGranularity > 1)
9055 for(
size_t nextSuballocIndex = index1st;
9056 nextSuballocIndex < suballocations1st.size();
9057 nextSuballocIndex++)
9059 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9060 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9062 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9076 pAllocationRequest->offset = resultOffset;
9077 pAllocationRequest->sumFreeSize =
9078 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
9080 - pAllocationRequest->sumItemSize;
// Makes lost the allocations counted in pAllocationRequest->itemsToMakeLostCount
// (prepared by CreateAllocationRequest with canMakeOtherLost). Scans the 1st
// vector from the first non-null item, converting eligible live items to
// free/null ones and updating m_SumFreeSize and the middle-null counter.
// NOTE(review): the failure branch (MakeLost returning false) and the final
// return are elided by this extraction; code kept byte-identical.
9090 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
9091 uint32_t currentFrameIndex,
9092 uint32_t frameInUseCount,
9093 VmaAllocationRequest* pAllocationRequest)
// Nothing to do when the request required no sacrifices.
9095 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Losing allocations is only meaningful before/while using the ring buffer.
9100 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
9102 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9103 size_t index1st = m_1stNullItemsBeginCount;
9104 size_t madeLostCount = 0;
9105 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
9107 VMA_ASSERT(index1st < suballocations1st.size());
9108 VmaSuballocation& suballoc = suballocations1st[index1st];
9109 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9111 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9112 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
9113 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the item to a free/null one and update the bookkeeping.
9115 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9116 suballoc.hAllocation = VK_NULL_HANDLE;
9117 m_SumFreeSize += suballoc.size;
9118 ++m_1stNullItemsMiddleCount;
9135 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9137 uint32_t lostAllocationCount = 0;
9139 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9140 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9142 VmaSuballocation& suballoc = suballocations1st[i];
9143 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9144 suballoc.hAllocation->CanBecomeLost() &&
9145 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9147 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9148 suballoc.hAllocation = VK_NULL_HANDLE;
9149 ++m_1stNullItemsMiddleCount;
9150 m_SumFreeSize += suballoc.size;
9151 ++lostAllocationCount;
9155 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9156 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9158 VmaSuballocation& suballoc = suballocations2nd[i];
9159 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9160 suballoc.hAllocation->CanBecomeLost() &&
9161 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9163 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9164 suballoc.hAllocation = VK_NULL_HANDLE;
9165 ++m_2ndNullItemsCount;
9166 ++lostAllocationCount;
9170 if(lostAllocationCount)
9175 return lostAllocationCount;
9178 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
9180 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9181 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9183 const VmaSuballocation& suballoc = suballocations1st[i];
9184 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9186 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9188 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9189 return VK_ERROR_VALIDATION_FAILED_EXT;
9191 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9193 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9194 return VK_ERROR_VALIDATION_FAILED_EXT;
9199 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9200 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9202 const VmaSuballocation& suballoc = suballocations2nd[i];
9203 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9205 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9207 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9208 return VK_ERROR_VALIDATION_FAILED_EXT;
9210 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9212 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9213 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request into the metadata.
// Upper-address requests push onto the 2nd vector (switching it to
// double-stack mode); otherwise the item goes onto the 1st vector when it
// extends it, or onto the 2nd vector as a ring buffer when it wraps before
// the 1st vector's first used item. Finally m_SumFreeSize is reduced.
// NOTE(review): the `bool upperAddress` and `VmaAllocation hAllocation`
// parameters (original lines 9225-9227) and the if/else/switch structure
// are elided by this extraction; code kept byte-identical.
9221 void VmaBlockMetadata_Linear::Alloc(
9222 const VmaAllocationRequest& request,
9223 VmaSuballocationType type,
9224 VkDeviceSize allocSize,
9228 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address path: must not mix with ring-buffer usage.
9232 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
9233 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9234 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9235 suballocations2nd.push_back(newSuballoc);
9236 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Lower-address path.
9240 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9243 if(suballocations1st.empty())
9245 suballocations1st.push_back(newSuballoc);
// Extends the 1st vector at its end?
9250 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
9253 VMA_ASSERT(request.offset + allocSize <= GetSize());
9254 suballocations1st.push_back(newSuballoc);
// Otherwise it must fit before the 1st vector's first used item: wrap-around
// into the 2nd vector, entering (or continuing) ring-buffer mode.
9257 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
9259 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9261 switch(m_2ndVectorMode)
9263 case SECOND_VECTOR_EMPTY:
9265 VMA_ASSERT(suballocations2nd.empty());
9266 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
9268 case SECOND_VECTOR_RING_BUFFER:
9270 VMA_ASSERT(!suballocations2nd.empty());
9272 case SECOND_VECTOR_DOUBLE_STACK:
9273 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
9279 suballocations2nd.push_back(newSuballoc);
9283 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
9288 m_SumFreeSize -= newSuballoc.size;
9291 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
9293 FreeAtOffset(allocation->GetOffset());
9296 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
9298 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9299 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9301 if(!suballocations1st.empty())
9304 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9305 if(firstSuballoc.offset == offset)
9307 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9308 firstSuballoc.hAllocation = VK_NULL_HANDLE;
9309 m_SumFreeSize += firstSuballoc.size;
9310 ++m_1stNullItemsBeginCount;
9317 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
9318 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9320 VmaSuballocation& lastSuballoc = suballocations2nd.back();
9321 if(lastSuballoc.offset == offset)
9323 m_SumFreeSize += lastSuballoc.size;
9324 suballocations2nd.pop_back();
9330 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
9332 VmaSuballocation& lastSuballoc = suballocations1st.back();
9333 if(lastSuballoc.offset == offset)
9335 m_SumFreeSize += lastSuballoc.size;
9336 suballocations1st.pop_back();
9344 VmaSuballocation refSuballoc;
9345 refSuballoc.offset = offset;
9347 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
9348 suballocations1st.begin() + m_1stNullItemsBeginCount,
9349 suballocations1st.end(),
9351 if(it != suballocations1st.end())
9353 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9354 it->hAllocation = VK_NULL_HANDLE;
9355 ++m_1stNullItemsMiddleCount;
9356 m_SumFreeSize += it->size;
9362 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
9365 VmaSuballocation refSuballoc;
9366 refSuballoc.offset = offset;
9368 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
9369 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
9370 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
9371 if(it != suballocations2nd.end())
9373 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9374 it->hAllocation = VK_NULL_HANDLE;
9375 ++m_2ndNullItemsCount;
9376 m_SumFreeSize += it->size;
9382 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
9385 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 9387 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9388 const size_t suballocCount = AccessSuballocations1st().size();
9389 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Post-free maintenance for linear metadata: drops leading/trailing null
// items, optionally compacts the 1st vector, and when the 1st vector is fully
// consumed promotes the 2nd vector (ring buffer) to be the new 1st vector.
// NOTE(review): braces and a few statements are elided in this capture.
9392 void VmaBlockMetadata_Linear::CleanupAfterFree()
9394 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9395 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Whole-block-empty branch (its guarding condition is elided): reset all state.
9399 suballocations1st.clear();
9400 suballocations2nd.clear();
9401 m_1stNullItemsBeginCount = 0;
9402 m_1stNullItemsMiddleCount = 0;
9403 m_2ndNullItemsCount = 0;
9404 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9408 const size_t suballoc1stCount = suballocations1st.size();
9409 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9410 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Fold middle null items adjacent to the beginning into the begin-count.
9413 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9414 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9416 ++m_1stNullItemsBeginCount;
9417 --m_1stNullItemsMiddleCount;
// Drop null items from the end of the 1st vector.
9421 while(m_1stNullItemsMiddleCount > 0 &&
9422 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9424 --m_1stNullItemsMiddleCount;
9425 suballocations1st.pop_back();
// Drop null items from the end of the 2nd vector.
9429 while(m_2ndNullItemsCount > 0 &&
9430 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9432 --m_2ndNullItemsCount;
9433 suballocations2nd.pop_back();
// Too many holes: move live items of the 1st vector to the front and shrink.
9436 if(ShouldCompact1st())
9438 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9439 size_t srcIndex = m_1stNullItemsBeginCount;
9440 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
// Skip null source items (the srcIndex increments are elided in this capture).
9442 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9446 if(dstIndex != srcIndex)
9448 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9452 suballocations1st.resize(nonNullItemCount);
9453 m_1stNullItemsBeginCount = 0;
9454 m_1stNullItemsMiddleCount = 0;
// 2nd vector exhausted: return to plain single-vector mode.
9458 if(suballocations2nd.empty())
9460 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector fully consumed: swap in the 2nd vector as the new 1st by
// toggling m_1stVectorIndex, carrying its null-item counters across.
9464 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9466 suballocations1st.clear();
9467 m_1stNullItemsBeginCount = 0;
9469 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9472 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9473 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9474 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9475 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9477 ++m_1stNullItemsBeginCount;
9478 --m_1stNullItemsMiddleCount;
9480 m_2ndNullItemsCount = 0;
9481 m_1stVectorIndex ^= 1;
9486 VMA_HEAVY_ASSERT(Validate());
// --- VmaBlockMetadata_Buddy: construction, init, validation -------------------

// Constructor: zeroes counters and clears the per-level free lists.
// NOTE(review): part of the initializer list is elided in this capture.
9493 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
9494 VmaBlockMetadata(hAllocator),
9496 m_AllocationCount(0),
9500 memset(m_FreeList, 0,
sizeof(m_FreeList));

// Destructor (body elided in this capture — presumably deletes the node tree;
// confirm against upstream).
9503 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()

// Initializes metadata for a block of `size` bytes. Only the largest power of
// two that fits (m_UsableSize) is managed by the buddy system; a single free
// root node covering level 0 is created and put on the level-0 free list.
9508 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
9510 VmaBlockMetadata::Init(size);
9512 m_UsableSize = VmaPrevPow2(size);
9513 m_SumFreeSize = m_UsableSize;
// Derive the level count: bounded by MAX_LEVELS and MIN_NODE_SIZE.
9517 while(m_LevelCount < MAX_LEVELS &&
9518 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
9523 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
9524 rootNode->offset = 0;
9525 rootNode->type = Node::TYPE_FREE;
9526 rootNode->parent = VMA_NULL;
9527 rootNode->buddy = VMA_NULL;
// (Assignment of rootNode to m_Root is elided in this capture.)
9530 AddToFreeListFront(0, rootNode);

// Validates the full node tree and the free-list invariants; VMA_VALIDATE
// returns false from this function on the first violated condition.
9533 bool VmaBlockMetadata_Buddy::Validate()
const 9536 ValidationContext ctx;
9537 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
9539 VMA_VALIDATE(
false &&
"ValidateNode failed.");
9541 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9542 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Each level's free list must be a consistent doubly-linked list of FREE nodes.
9545 for(uint32_t level = 0; level < m_LevelCount; ++level)
9547 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9548 m_FreeList[level].front->free.prev == VMA_NULL);
9550 for(Node* node = m_FreeList[level].front;
9552 node = node->free.next)
9554 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9556 if(node->free.next == VMA_NULL)
9558 VMA_VALIDATE(m_FreeList[level].back == node);
9562 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must remain unused.
9568 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9570 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Largest free contiguous range = node size of the shallowest level that has
// any free node. (The fallback return for "no free node" is elided here.)
9576 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 9578 for(uint32_t level = 0; level < m_LevelCount; ++level)
9580 if(m_FreeList[level].front != VMA_NULL)
9582 return LevelToNodeSize(level);

// Aggregates allocation statistics by walking the tree from the root; the
// tail beyond the usable power-of-two size is accounted separately.
9588 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9590 const VkDeviceSize unusableSize = GetUnusableSize();
9601 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
9603 if(unusableSize > 0)

// Adds this block's totals into pool-level statistics; the unusable tail
// counts toward unused size.
9612 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 9614 const VkDeviceSize unusableSize = GetUnusableSize();
9616 inoutStats.
size += GetSize();
9617 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
9622 if(unusableSize > 0)

// JSON dump of the whole block (stats-string builds only): stats header, the
// node tree, then the unusable tail as one unused range.
9629 #if VMA_STATS_STRING_ENABLED 9631 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 9635 CalcAllocationStatInfo(stat);
9637 PrintDetailedMap_Begin(
9643 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9645 const VkDeviceSize unusableSize = GetUnusableSize();
9646 if(unusableSize > 0)
9648 PrintDetailedMap_UnusedRange(json,
9653 PrintDetailedMap_End(json);
// Builds an allocation request for the buddy algorithm: conservatively rounds
// size/alignment up to bufferImageGranularity for image-like types, then scans
// the free lists from the target level upward (toward larger nodes) for a
// suitably aligned free node. The chosen level is stashed in customData so
// Alloc() knows where to start splitting.
9656 #endif // #if VMA_STATS_STRING_ENABLED 9658 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
9659 uint32_t currentFrameIndex,
9660 uint32_t frameInUseCount,
9661 VkDeviceSize bufferImageGranularity,
9662 VkDeviceSize allocSize,
9663 VkDeviceSize allocAlignment,
9665 VmaSuballocationType allocType,
9666 bool canMakeOtherLost,
9668 VmaAllocationRequest* pAllocationRequest)
// Upper-address allocation is a linear-algorithm-only feature.
9670 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Respect bufferImageGranularity for (possibly) image allocations.
9674 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9675 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9676 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9678 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
9679 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
// Cannot satisfy a request larger than the managed power-of-two area.
9682 if(allocSize > m_UsableSize)
// Downward-counting loop visits levels targetLevel, targetLevel-1, ..., 0.
9687 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9688 for(uint32_t level = targetLevel + 1; level--; )
9690 for(Node* freeNode = m_FreeList[level].front;
9691 freeNode != VMA_NULL;
9692 freeNode = freeNode->free.next)
9694 if(freeNode->offset % allocAlignment == 0)
9696 pAllocationRequest->offset = freeNode->offset;
9697 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
9698 pAllocationRequest->sumItemSize = 0;
9699 pAllocationRequest->itemsToMakeLostCount = 0;
// Level at which the free node was found, consumed by Alloc().
9700 pAllocationRequest->customData = (
void*)(uintptr_t)level;

// The buddy algorithm never produces requests that require making other
// allocations lost, so this only succeeds when none were requested.
9709 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
9710 uint32_t currentFrameIndex,
9711 uint32_t frameInUseCount,
9712 VmaAllocationRequest* pAllocationRequest)
9718 return pAllocationRequest->itemsToMakeLostCount == 0;

// Lost allocations unsupported (body elided in this capture — presumably
// returns 0; confirm against upstream).
9721 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a request made by CreateAllocationRequest: finds the chosen free
// node on the recorded level, splits it into buddy pairs until the node size
// matches the target level, then marks the final node as an allocation.
9730 void VmaBlockMetadata_Buddy::Alloc(
9731 const VmaAllocationRequest& request,
9732 VmaSuballocationType type,
9733 VkDeviceSize allocSize,
9737 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9738 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Locate the free node with the requested offset on that level's free list.
9740 Node* currNode = m_FreeList[currLevel].front;
9741 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9742 while(currNode->offset != request.offset)
9744 currNode = currNode->free.next;
9745 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down until the node size matches the requested level.
9749 while(currLevel < targetLevel)
// The node becomes a split parent, so it leaves the free list.
9753 RemoveFromFreeList(currLevel, currNode);
9755 const uint32_t childrenLevel = currLevel + 1;
// Create the buddy pair covering the two halves of currNode.
9758 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
9759 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
9761 leftChild->offset = currNode->offset;
9762 leftChild->type = Node::TYPE_FREE;
9763 leftChild->parent = currNode;
9764 leftChild->buddy = rightChild;
9766 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9767 rightChild->type = Node::TYPE_FREE;
9768 rightChild->parent = currNode;
9769 rightChild->buddy = leftChild;
// Convert the current node into a split node.
9772 currNode->type = Node::TYPE_SPLIT;
9773 currNode->split.leftChild = leftChild;
// Left child is pushed last so it sits at the front for the next iteration.
9776 AddToFreeListFront(childrenLevel, rightChild);
9777 AddToFreeListFront(childrenLevel, leftChild);
9782 currNode = m_FreeList[currLevel].front;
// (The currLevel increment is elided in this capture.)
9791 VMA_ASSERT(currLevel == targetLevel &&
9792 currNode != VMA_NULL &&
9793 currNode->type == Node::TYPE_FREE);
9794 RemoveFromFreeList(currLevel, currNode);
// Mark as allocated and update counters.
9797 currNode->type = Node::TYPE_ALLOCATION;
9798 currNode->allocation.alloc = hAllocation;
9800 ++m_AllocationCount;
9802 m_SumFreeSize -= allocSize;

// Recursively deletes a subtree; for split nodes both children are deleted
// (right child reached via leftChild->buddy) before the node itself.
9805 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
9807 if(node->type == Node::TYPE_SPLIT)
9809 DeleteNode(node->split.leftChild->buddy);
9810 DeleteNode(node->split.leftChild);
9813 vma_delete(GetAllocationCallbacks(), node);
// Recursive structural check of one node: parent/buddy links plus per-type
// invariants (free-size accounting, allocation handle, children offsets).
// NOTE(review): the switch statement over curr->type is elided in this capture.
9816 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 9818 VMA_VALIDATE(level < m_LevelCount);
9819 VMA_VALIDATE(curr->parent == parent);
// Only the root may lack a buddy, and buddies must point at each other.
9820 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9821 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9824 case Node::TYPE_FREE:
// Free node contributes its whole level size to the free total.
9826 ctx.calculatedSumFreeSize += levelNodeSize;
9827 ++ctx.calculatedFreeCount;
9829 case Node::TYPE_ALLOCATION:
// Allocation node contributes only its internal padding as free space.
9830 ++ctx.calculatedAllocationCount;
9831 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
9832 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
9834 case Node::TYPE_SPLIT:
9836 const uint32_t childrenLevel = level + 1;
9837 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
9838 const Node*
const leftChild = curr->split.leftChild;
9839 VMA_VALIDATE(leftChild != VMA_NULL);
9840 VMA_VALIDATE(leftChild->offset == curr->offset);
9841 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
9843 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
9845 const Node*
const rightChild = leftChild->buddy;
9846 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9847 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
9849 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");

// Maps an allocation size to the deepest level whose node size still fits it
// by repeatedly halving from m_UsableSize. (Declaration and return of the
// `level` counter are elided in this capture.)
9860 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 9864 VkDeviceSize currLevelNodeSize = m_UsableSize;
9865 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
9866 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
9869 currLevelNodeSize = nextLevelNodeSize;
9870 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at `offset`: walk from the root down the split tree to
// the owning leaf, mark it free, then merge with its buddy up the tree while
// both halves are free, finally re-inserting the surviving node into its
// level's free list.
9875 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
9878 Node* node = m_Root;
9879 VkDeviceSize nodeOffset = 0;
9881 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend: choose left or right child by comparing offset with the midpoint.
9882 while(node->type == Node::TYPE_SPLIT)
9884 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
9885 if(offset < nodeOffset + nextLevelSize)
9887 node = node->split.leftChild;
9891 node = node->split.leftChild->buddy;
9892 nodeOffset += nextLevelSize;
9895 levelNodeSize = nextLevelSize;
9898 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
9899 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
9902 --m_AllocationCount;
// NOTE(review): alloc is dereferenced here although the assert above permits
// alloc == VK_NULL_HANDLE — confirm against upstream whether that path exists.
9903 m_SumFreeSize += alloc->GetSize();
9905 node->type = Node::TYPE_FREE;
// Merge free buddy pairs bottom-up; the parent becomes the free node.
// (The `node = parent; --level;` advance is elided in this capture.)
9908 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
9910 RemoveFromFreeList(level, node->buddy);
9911 Node*
const parent = node->parent;
9913 vma_delete(GetAllocationCallbacks(), node->buddy);
9914 vma_delete(GetAllocationCallbacks(), node);
9915 parent->type = Node::TYPE_FREE;
9923 AddToFreeListFront(level, node);

// Recursive statistics walk mirroring the tree; an allocation node also
// reports its internal padding (levelNodeSize - allocSize) as unused space.
// NOTE(review): the switch over node->type is elided in this capture.
9926 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 9930 case Node::TYPE_FREE:
9936 case Node::TYPE_ALLOCATION:
9938 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
9944 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
9945 if(unusedRangeSize > 0)
9954 case Node::TYPE_SPLIT:
9956 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9957 const Node*
const leftChild = node->split.leftChild;
9958 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
9959 const Node*
const rightChild = leftChild->buddy;
9960 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a FREE node onto the front of its level's doubly-linked free list.
9968 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
9970 VMA_ASSERT(node->type == Node::TYPE_FREE);
9973 Node*
const frontNode = m_FreeList[level].front;
9974 if(frontNode == VMA_NULL)
// Empty list: the node becomes both front and back.
9976 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9977 node->free.prev = node->free.next = VMA_NULL;
9978 m_FreeList[level].front = m_FreeList[level].back = node;
9982 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9983 node->free.prev = VMA_NULL;
9984 node->free.next = frontNode;
9985 frontNode->free.prev = node;
9986 m_FreeList[level].front = node;

// Unlinks a node from its level's free list, patching neighbor links and the
// list's front/back pointers as needed.
9990 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
9992 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Fix the predecessor link, or the list front if node was first.
9995 if(node->free.prev == VMA_NULL)
9997 VMA_ASSERT(m_FreeList[level].front == node);
9998 m_FreeList[level].front = node->free.next;
10002 Node*
const prevFreeNode = node->free.prev;
10003 VMA_ASSERT(prevFreeNode->free.next == node);
10004 prevFreeNode->free.next = node->free.next;
// Fix the successor link, or the list back if node was last.
10008 if(node->free.next == VMA_NULL)
10010 VMA_ASSERT(m_FreeList[level].back == node);
10011 m_FreeList[level].back = node->free.prev;
10015 Node*
const nextFreeNode = node->free.next;
10016 VMA_ASSERT(nextFreeNode->free.prev == node);
10017 nextFreeNode->free.prev = node->free.prev;

// JSON dump of one subtree (stats-string builds only); mirrors the stats walk.
// NOTE(review): the switch over node->type is elided in this capture.
10021 #if VMA_STATS_STRING_ENABLED 10022 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 10026 case Node::TYPE_FREE:
10027 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
10029 case Node::TYPE_ALLOCATION:
10031 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
10032 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Internal fragmentation within the node is reported as an unused range.
10033 if(allocSize < levelNodeSize)
10035 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
10039 case Node::TYPE_SPLIT:
10041 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10042 const Node*
const leftChild = node->split.leftChild;
10043 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
10044 const Node*
const rightChild = leftChild->buddy;
10045 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// --- VmaDeviceMemoryBlock: one VkDeviceMemory chunk plus its metadata ---------

// Constructor: members start empty; real setup happens in Init().
10052 #endif // #if VMA_STATS_STRING_ENABLED 10058 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
10059 m_pMetadata(VMA_NULL),
10060 m_MemoryTypeIndex(UINT32_MAX),
10062 m_hMemory(VK_NULL_HANDLE),
10064 m_pMappedData(VMA_NULL)

// Adopts a freshly allocated VkDeviceMemory and creates the metadata object
// matching the requested algorithm: linear, buddy, or generic by default.
// NOTE(review): the switch over `algorithm` is elided in this capture.
10068 void VmaDeviceMemoryBlock::Init(
10070 uint32_t newMemoryTypeIndex,
10071 VkDeviceMemory newMemory,
10072 VkDeviceSize newSize,
10074 uint32_t algorithm)
10076 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
10078 m_MemoryTypeIndex = newMemoryTypeIndex;
10080 m_hMemory = newMemory;
10085 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
10088 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
10094 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
10096 m_pMetadata->Init(newSize);

// Returns the VkDeviceMemory to the allocator and destroys the metadata.
// All suballocations must have been freed first (asserted below).
10099 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
10103 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
10105 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
10106 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
10107 m_hMemory = VK_NULL_HANDLE;
10109 vma_delete(allocator, m_pMetadata);
10110 m_pMetadata = VMA_NULL;

// Basic sanity: memory handle present and nonzero size, then delegate to the
// metadata object's own validation.
10113 bool VmaDeviceMemoryBlock::Validate()
const 10115 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
10116 (m_pMetadata->GetSize() != 0));
10118 return m_pMetadata->Validate();

// Maps the block, asks the metadata to verify the magic-value margins of all
// allocations, then unmaps. Returns the mapping error if Map fails.
10121 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
10123 void* pData =
nullptr;
10124 VkResult res = Map(hAllocator, 1, &pData);
10125 if(res != VK_SUCCESS)
10130 res = m_pMetadata->CheckCorruption(pData);
10132 Unmap(hAllocator, 1);
// Reference-counted vkMapMemory wrapper: the first Map() actually maps the
// memory; subsequent calls only bump m_MapCount and return the cached pointer.
// Guarded by m_Mutex when the allocator was created with mutexes enabled.
10137 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
10144 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10145 if(m_MapCount != 0)
// Already mapped: reuse the cached mapping.
10147 m_MapCount += count;
10148 VMA_ASSERT(m_pMappedData != VMA_NULL);
10149 if(ppData != VMA_NULL)
10151 *ppData = m_pMappedData;
// First mapping: call into Vulkan. (Some call arguments are elided here.)
10157 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
10158 hAllocator->m_hDevice,
10164 if(result == VK_SUCCESS)
10166 if(ppData != VMA_NULL)
10168 *ppData = m_pMappedData;
10170 m_MapCount = count;

// Decrements the map count; the final Unmap() issues vkUnmapMemory. Unmapping
// an unmapped block is a caller bug (asserted below).
10176 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
10183 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10184 if(m_MapCount >= count)
10186 m_MapCount -= count;
10187 if(m_MapCount == 0)
10189 m_pMappedData = VMA_NULL;
10190 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
10195 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");

// Writes corruption-detection magic values into the debug margins immediately
// before and after the allocation range. Requires VMA_DEBUG_MARGIN > 0 and
// corruption detection enabled (asserted).
10199 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10201 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10202 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10205 VkResult res = Map(hAllocator, 1, &pData);
10206 if(res != VK_SUCCESS)
10211 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
10212 VmaWriteMagicValue(pData, allocOffset + allocSize);
10214 Unmap(hAllocator, 1);

// Verifies the magic values around an allocation (typically before freeing
// it); asserts with a diagnostic if either side was overwritten.
10219 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10221 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10222 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10225 VkResult res = Map(hAllocator, 1, &pData);
10226 if(res != VK_SUCCESS)
10231 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
10233 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
10235 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
10237 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
10240 Unmap(hAllocator, 1);

// Binds a buffer to this block's memory at the allocation's offset, under the
// block mutex (presumably to serialize binds on the same VkDeviceMemory —
// confirm rationale against upstream comments).
10245 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
10250 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10251 hAllocation->GetBlock() ==
this);
10253 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10254 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
10255 hAllocator->m_hDevice,
10258 hAllocation->GetOffset());

// Image counterpart of BindBufferMemory.
10261 VkResult VmaDeviceMemoryBlock::BindImageMemory(
10266 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10267 hAllocation->GetBlock() ==
this);
10269 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10270 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
10271 hAllocator->m_hDevice,
10274 hAllocation->GetOffset());
// Fragment of a statistics helper whose signature is elided in this capture:
// zero-initializes a VmaStatInfo before values are accumulated into it.
10279 memset(&outInfo, 0,
sizeof(outInfo));

// Post-processing of accumulated statistics (body elided in this capture —
// presumably computes derived/average fields; confirm against upstream).
10298 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)

// VmaPool_T constructor: forwards pool-creation parameters to the embedded
// block vector, substituting preferredBlockSize when the user passed 0 and
// recording whether the block size was explicit (blockSize != 0).
10306 VmaPool_T::VmaPool_T(
10309 VkDeviceSize preferredBlockSize) :
10312 createInfo.memoryTypeIndex,
10313 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
10314 createInfo.minBlockCount,
10315 createInfo.maxBlockCount,
10317 createInfo.frameInUseCount,
10319 createInfo.blockSize != 0,
// Pool destructor (body elided in this capture).
10325 VmaPool_T::~VmaPool_T()
// --- VmaBlockVector: sequence of VmaDeviceMemoryBlock for one memory type -----

// Constructor: stores configuration only; no blocks are created here (see
// CreateMinBlocks). NOTE(review): some parameters and trailing initializers
// are elided in this capture.
10329 #if VMA_STATS_STRING_ENABLED 10331 #endif // #if VMA_STATS_STRING_ENABLED 10333 VmaBlockVector::VmaBlockVector(
10335 uint32_t memoryTypeIndex,
10336 VkDeviceSize preferredBlockSize,
10337 size_t minBlockCount,
10338 size_t maxBlockCount,
10339 VkDeviceSize bufferImageGranularity,
10340 uint32_t frameInUseCount,
10342 bool explicitBlockSize,
10343 uint32_t algorithm) :
10344 m_hAllocator(hAllocator),
10345 m_MemoryTypeIndex(memoryTypeIndex),
10346 m_PreferredBlockSize(preferredBlockSize),
10347 m_MinBlockCount(minBlockCount),
10348 m_MaxBlockCount(maxBlockCount),
10349 m_BufferImageGranularity(bufferImageGranularity),
10350 m_FrameInUseCount(frameInUseCount),
10351 m_IsCustomPool(isCustomPool),
10352 m_ExplicitBlockSize(explicitBlockSize),
10353 m_Algorithm(algorithm),
10354 m_HasEmptyBlock(false),
10355 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
10356 m_pDefragmentator(VMA_NULL),

// Destructor: defragmentation must be finished; destroys every block.
10361 VmaBlockVector::~VmaBlockVector()
10363 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
10365 for(
size_t i = m_Blocks.size(); i--; )
10367 m_Blocks[i]->Destroy(m_hAllocator);
10368 vma_delete(m_hAllocator, m_Blocks[i]);

// Pre-creates m_MinBlockCount blocks of preferred size; stops on first failure.
10372 VkResult VmaBlockVector::CreateMinBlocks()
10374 for(
size_t i = 0; i < m_MinBlockCount; ++i)
10376 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
10377 if(res != VK_SUCCESS)

// Accumulates pool statistics across all blocks, under the vector mutex.
10385 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
10387 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10389 const size_t blockCount = m_Blocks.size();
10398 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10400 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10401 VMA_ASSERT(pBlock);
10402 VMA_HEAVY_ASSERT(pBlock->Validate());
10403 pBlock->m_pMetadata->AddPoolStats(*pStats);

// Corruption detection is active only with a nonzero debug margin and a
// HOST_VISIBLE | HOST_COHERENT memory type (required to read margins back).
10407 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 10409 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
10410 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
10411 (VMA_DEBUG_MARGIN > 0) &&
10412 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retries when allocating by making other allocations lost.
10415 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;

// Core block-vector allocation. Order of attempts: (1) the newest block,
// (2/2b) forward or backward scans over existing blocks, (3) a new block with
// progressive size halving, (4) last resort — make other allocations lost and
// retry up to VMA_ALLOCATION_TRY_COUNT times.
// NOTE(review): many arguments, braces, and branch conditions are elided in
// this capture; treat the annotations as a reading aid, not a full contract.
10417 VkResult VmaBlockVector::Allocate(
10419 uint32_t currentFrameIndex,
10421 VkDeviceSize alignment,
10423 VmaSuballocationType suballocType,
10430 const bool canCreateNewBlock =
10432 (m_Blocks.size() < m_MaxBlockCount);
// Lost allocations are disabled in some configurations (condition elided).
10439 canMakeOtherLost =
false;
// Upper-address allocation is only meaningful for the linear algorithm.
10443 if(isUpperAddress &&
10446 return VK_ERROR_FEATURE_NOT_PRESENT;
10460 return VK_ERROR_FEATURE_NOT_PRESENT;
// The request plus both debug margins must fit in one block.
10464 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
10466 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10469 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10476 if(!canMakeOtherLost || canCreateNewBlock)
// 1. Try the newest (last) block first.
10485 if(!m_Blocks.empty())
10487 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
10488 VMA_ASSERT(pCurrBlock);
10489 VkResult res = AllocateFromBlock(
10500 if(res == VK_SUCCESS)
10502 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 2. Forward scan over all existing blocks.
10512 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10514 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10515 VMA_ASSERT(pCurrBlock);
10516 VkResult res = AllocateFromBlock(
10527 if(res == VK_SUCCESS)
10529 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2b. Backward scan variant (strategy-selection logic elided).
10537 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10539 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10540 VMA_ASSERT(pCurrBlock);
10541 VkResult res = AllocateFromBlock(
10552 if(res == VK_SUCCESS)
10554 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 3. Create a new block; size may be halved up to 3 times.
10562 if(canCreateNewBlock)
10565 VkDeviceSize newBlockSize = m_PreferredBlockSize;
10566 uint32_t newBlockSizeShift = 0;
10567 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
10569 if(!m_ExplicitBlockSize)
// Start smaller while existing blocks are small and the request still fits.
10572 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
10573 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
10575 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10576 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
10578 newBlockSize = smallerNewBlockSize;
10579 ++newBlockSizeShift;
10588 size_t newBlockIndex = 0;
10589 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On failure, retry creation with progressively halved sizes.
10591 if(!m_ExplicitBlockSize)
10593 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
10595 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10596 if(smallerNewBlockSize >= size)
10598 newBlockSize = smallerNewBlockSize;
10599 ++newBlockSizeShift;
10600 res = CreateBlock(newBlockSize, &newBlockIndex);
10609 if(res == VK_SUCCESS)
10611 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
10612 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
10614 res = AllocateFromBlock(
10625 if(res == VK_SUCCESS)
10627 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
10633 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 4. Last resort: make other (lost-enabled) allocations lost and retry.
10640 if(canMakeOtherLost)
10642 uint32_t tryIndex = 0;
10643 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
10645 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
10646 VmaAllocationRequest bestRequest = {};
10647 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: choose the block whose request would lose the least.
10653 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10655 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10656 VMA_ASSERT(pCurrBlock);
10657 VmaAllocationRequest currRequest = {};
10658 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10661 m_BufferImageGranularity,
10670 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10671 if(pBestRequestBlock == VMA_NULL ||
10672 currRequestCost < bestRequestCost)
10674 pBestRequestBlock = pCurrBlock;
10675 bestRequest = currRequest;
10676 bestRequestCost = currRequestCost;
// Zero cost means nothing would be lost — cannot do better.
10678 if(bestRequestCost == 0)
// Backward scan variant (strategy-selection logic elided).
10689 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10691 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10692 VMA_ASSERT(pCurrBlock);
10693 VmaAllocationRequest currRequest = {};
10694 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10697 m_BufferImageGranularity,
10706 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10707 if(pBestRequestBlock == VMA_NULL ||
10708 currRequestCost < bestRequestCost ||
10711 pBestRequestBlock = pCurrBlock;
10712 bestRequest = currRequest;
10713 bestRequestCost = currRequestCost;
10715 if(bestRequestCost == 0 ||
10725 if(pBestRequestBlock != VMA_NULL)
// Map up front (presumably for persistently mapped allocations; the guarding
// condition is elided in this capture).
10729 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
10730 if(res != VK_SUCCESS)
// Making the victims lost can still fail if they were used too recently.
10736 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
10742 if(pBestRequestBlock->m_pMetadata->IsEmpty())
10744 m_HasEmptyBlock =
false;
// Commit the allocation and initialize the returned handle.
10747 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10748 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
10749 (*pAllocation)->InitBlockAllocation(
10752 bestRequest.offset,
10758 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
10759 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
10760 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
10761 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10763 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10765 if(IsCorruptionDetectionEnabled())
10767 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
10768 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// All retries exhausted.
10783 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
10785 return VK_ERROR_TOO_MANY_OBJECTS;
10789 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10792 void VmaBlockVector::Free(
10795 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
10799 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10801 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
10803 if(IsCorruptionDetectionEnabled())
10805 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
10806 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
10809 if(hAllocation->IsPersistentMap())
10811 pBlock->Unmap(m_hAllocator, 1);
10814 pBlock->m_pMetadata->Free(hAllocation);
10815 VMA_HEAVY_ASSERT(pBlock->Validate());
10817 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
10820 if(pBlock->m_pMetadata->IsEmpty())
10823 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
10825 pBlockToDelete = pBlock;
10831 m_HasEmptyBlock =
true;
10836 else if(m_HasEmptyBlock)
10838 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
10839 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
10841 pBlockToDelete = pLastBlock;
10842 m_Blocks.pop_back();
10843 m_HasEmptyBlock =
false;
10847 IncrementallySortBlocks();
10852 if(pBlockToDelete != VMA_NULL)
10854 VMA_DEBUG_LOG(
" Deleted empty allocation");
10855 pBlockToDelete->Destroy(m_hAllocator);
10856 vma_delete(m_hAllocator, pBlockToDelete);
10860 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 10862 VkDeviceSize result = 0;
10863 for(
size_t i = m_Blocks.size(); i--; )
10865 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
10866 if(result >= m_PreferredBlockSize)
10874 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
10876 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10878 if(m_Blocks[blockIndex] == pBlock)
10880 VmaVectorRemove(m_Blocks, blockIndex);
10887 void VmaBlockVector::IncrementallySortBlocks()
10892 for(
size_t i = 1; i < m_Blocks.size(); ++i)
10894 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
10896 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Tries to carve an allocation out of a single given block.
// NOTE(review): this region is garbled by extraction — the numeric prefixes are
// leaked original line numbers and several lines (parameters `size`, `isUpperAddress`,
// `mapped`, `isUserDataString`, `pUserData`, `pAllocation`; closing braces) are elided.
// Text below is preserved byte-for-byte pending recovery from the upstream file.
10903 VkResult VmaBlockVector::AllocateFromBlock(
10904 VmaDeviceMemoryBlock* pBlock,
10906 uint32_t currentFrameIndex,
10908 VkDeviceSize alignment,
10911 VmaSuballocationType suballocType,
// Ask the block's metadata whether a suitable free region exists.
10920 VmaAllocationRequest currRequest = {};
10921 if(pBlock->m_pMetadata->CreateAllocationRequest(
10924 m_BufferImageGranularity,
// canMakeOtherLost is false on this path, so nothing may be lost.
10934 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-map the block if the allocation requested mapping.
10938 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
10939 if(res != VK_SUCCESS)
// Allocating from an empty block consumes the cached empty block.
10946 if(pBlock->m_pMetadata->IsEmpty())
10948 m_HasEmptyBlock =
false;
10951 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10952 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
10953 (*pAllocation)->InitBlockAllocation(
10956 currRequest.offset,
10962 VMA_HEAVY_ASSERT(pBlock->Validate());
10963 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
// Optionally fill new memory with a debug pattern.
10964 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10966 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Optionally write magic values around the allocation for corruption detection.
10968 if(IsCorruptionDetectionEnabled())
10970 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
10971 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Falls through here when no allocation request could be created in this block.
10975 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of blockSize for this memory type, wraps it in a
// VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally returns its index.
// NOTE(review): garbled extraction — the `res < 0` early-return after
// AllocateVulkanMemory and most of the `pBlock->Init(...)` argument list are
// elided; text preserved byte-for-byte pending recovery from the upstream file.
10978 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
10980 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
10981 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
10982 allocInfo.allocationSize = blockSize;
10983 VkDeviceMemory mem = VK_NULL_HANDLE;
10984 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// New VkDeviceMemory successfully created — wrap it in a block object.
10993 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
10998 allocInfo.allocationSize,
11002 m_Blocks.push_back(pBlock);
11003 if(pNewBlockIndex != VMA_NULL)
11005 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector's configuration and per-block detailed maps as JSON.
// NOTE(review): garbled extraction — the branch selecting the custom-pool vs.
// default-pool output shape, plus closing braces/EndObject calls, are elided;
// text preserved byte-for-byte pending recovery from the upstream file.
11011 #if VMA_STATS_STRING_ENABLED 11013 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
11015 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11017 json.BeginObject();
// Fields below (MemoryTypeIndex/BlockSize/BlockCount/FrameInUseCount/Algorithm)
// describe the pool's configuration.
11021 json.WriteString(
"MemoryTypeIndex");
11022 json.WriteNumber(m_MemoryTypeIndex);
11024 json.WriteString(
"BlockSize");
11025 json.WriteNumber(m_PreferredBlockSize);
11027 json.WriteString(
"BlockCount");
11028 json.BeginObject(
true);
11029 if(m_MinBlockCount > 0)
11031 json.WriteString(
"Min");
11032 json.WriteNumber((uint64_t)m_MinBlockCount);
11034 if(m_MaxBlockCount < SIZE_MAX)
11036 json.WriteString(
"Max");
11037 json.WriteNumber((uint64_t)m_MaxBlockCount);
11039 json.WriteString(
"Cur");
11040 json.WriteNumber((uint64_t)m_Blocks.size());
11043 if(m_FrameInUseCount > 0)
11045 json.WriteString(
"FrameInUseCount");
11046 json.WriteNumber(m_FrameInUseCount);
11049 if(m_Algorithm != 0)
11051 json.WriteString(
"Algorithm");
11052 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Alternate branch output (default pool): only the preferred block size.
11057 json.WriteString(
"PreferredBlockSize");
11058 json.WriteNumber(m_PreferredBlockSize);
// Emit each block's detailed map keyed by its numeric id.
11061 json.WriteString(
"Blocks");
11062 json.BeginObject();
11063 for(
size_t i = 0; i < m_Blocks.size(); ++i)
11065 json.BeginString();
11066 json.ContinueString(m_Blocks[i]->GetId());
11069 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
11076 #endif // #if VMA_STATS_STRING_ENABLED 11078 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
11080 uint32_t currentFrameIndex)
11082 if(m_pDefragmentator == VMA_NULL)
11084 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
11087 currentFrameIndex);
11090 return m_pDefragmentator;
11093 VkResult VmaBlockVector::Defragment(
11095 VkDeviceSize& maxBytesToMove,
11096 uint32_t& maxAllocationsToMove)
11098 if(m_pDefragmentator == VMA_NULL)
11103 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11106 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
11109 if(pDefragmentationStats != VMA_NULL)
11111 const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
11112 const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
11113 pDefragmentationStats->
bytesMoved += bytesMoved;
11115 VMA_ASSERT(bytesMoved <= maxBytesToMove);
11116 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
11117 maxBytesToMove -= bytesMoved;
11118 maxAllocationsToMove -= allocationsMoved;
11122 m_HasEmptyBlock =
false;
11123 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11125 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11126 if(pBlock->m_pMetadata->IsEmpty())
11128 if(m_Blocks.size() > m_MinBlockCount)
11130 if(pDefragmentationStats != VMA_NULL)
11133 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
11136 VmaVectorRemove(m_Blocks, blockIndex);
11137 pBlock->Destroy(m_hAllocator);
11138 vma_delete(m_hAllocator, pBlock);
11142 m_HasEmptyBlock =
true;
11150 void VmaBlockVector::DestroyDefragmentator()
11152 if(m_pDefragmentator != VMA_NULL)
11154 vma_delete(m_hAllocator, m_pDefragmentator);
11155 m_pDefragmentator = VMA_NULL;
11159 void VmaBlockVector::MakePoolAllocationsLost(
11160 uint32_t currentFrameIndex,
11161 size_t* pLostAllocationCount)
11163 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11164 size_t lostAllocationCount = 0;
11165 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11167 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11168 VMA_ASSERT(pBlock);
11169 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
11171 if(pLostAllocationCount != VMA_NULL)
11173 *pLostAllocationCount = lostAllocationCount;
11177 VkResult VmaBlockVector::CheckCorruption()
11179 if(!IsCorruptionDetectionEnabled())
11181 return VK_ERROR_FEATURE_NOT_PRESENT;
11184 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11185 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11187 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11188 VMA_ASSERT(pBlock);
11189 VkResult res = pBlock->CheckCorruption(m_hAllocator);
11190 if(res != VK_SUCCESS)
11198 void VmaBlockVector::AddStats(
VmaStats* pStats)
11200 const uint32_t memTypeIndex = m_MemoryTypeIndex;
11201 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
11203 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11205 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11207 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11208 VMA_ASSERT(pBlock);
11209 VMA_HEAVY_ASSERT(pBlock->Validate());
11211 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
11212 VmaAddStatInfo(pStats->
total, allocationStatInfo);
11213 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
11214 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
11221 VmaDefragmentator::VmaDefragmentator(
11223 VmaBlockVector* pBlockVector,
11224 uint32_t currentFrameIndex) :
11225 m_hAllocator(hAllocator),
11226 m_pBlockVector(pBlockVector),
11227 m_CurrentFrameIndex(currentFrameIndex),
11229 m_AllocationsMoved(0),
11230 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
11231 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
11233 VMA_ASSERT(pBlockVector->GetAlgorithm() == 0);
11236 VmaDefragmentator::~VmaDefragmentator()
11238 for(
size_t i = m_Blocks.size(); i--; )
11240 vma_delete(m_hAllocator, m_Blocks[i]);
11244 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
11246 AllocationInfo allocInfo;
11247 allocInfo.m_hAllocation = hAlloc;
11248 allocInfo.m_pChanged = pChanged;
11249 m_Allocations.push_back(allocInfo);
11252 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
11255 if(m_pMappedDataForDefragmentation)
11257 *ppMappedData = m_pMappedDataForDefragmentation;
11262 if(m_pBlock->GetMappedData())
11264 *ppMappedData = m_pBlock->GetMappedData();
11269 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
11270 *ppMappedData = m_pMappedDataForDefragmentation;
11274 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
11276 if(m_pMappedDataForDefragmentation != VMA_NULL)
11278 m_pBlock->Unmap(hAllocator, 1);
// One round of defragmentation: walks source allocations from the back of the
// (sorted) block list and tries to re-place each one earlier/lower via
// memcpy between mapped blocks, until the byte/count budgets run out.
// NOTE(review): garbled extraction — numeric prefixes are leaked original line
// numbers; loop scaffolding, the memmove/memcpy head at 11370, early returns,
// and index-decrement bookkeeping are elided. Text preserved byte-for-byte.
11282 VkResult VmaDefragmentator::DefragmentRound(
11283 VkDeviceSize maxBytesToMove,
11284 uint32_t maxAllocationsToMove)
11286 if(m_Blocks.empty())
// Start from the last block / last allocation and walk backwards.
11291 size_t srcBlockIndex = m_Blocks.size() - 1;
11292 size_t srcAllocIndex = SIZE_MAX;
// Find the next non-empty source position (srcAllocIndex == SIZE_MAX means
// "start over at the end of the current block's allocation list").
11298 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
11300 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
11303 if(srcBlockIndex == 0)
11310 srcAllocIndex = SIZE_MAX;
11315 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
11319 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
11320 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
11322 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
11323 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
11324 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
11325 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every destination block at or before the source block.
11328 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
11330 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
11331 VmaAllocationRequest dstAllocRequest;
11332 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
11333 m_CurrentFrameIndex,
11334 m_pBlockVector->GetFrameInUseCount(),
11335 m_pBlockVector->GetBufferImageGranularity(),
11342 &dstAllocRequest) &&
// MoveMakesSense() call is partly elided here.
11344 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
11346 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: stop the round when either limit would be exceeded.
11349 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
11350 (m_BytesMoved + size > maxBytesToMove))
11352 return VK_INCOMPLETE;
// Map both blocks so the payload can be copied on the host.
11355 void* pDstMappedData = VMA_NULL;
11356 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
11357 if(res != VK_SUCCESS)
11362 void* pSrcMappedData = VMA_NULL;
11363 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
11364 if(res != VK_SUCCESS)
// Copy payload (the memcpy/memmove call head is elided above these args).
11371 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
11372 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
11373 static_cast<size_t>(size));
// Re-write the debug margin magic values around the new location.
11375 if(VMA_DEBUG_MARGIN > 0)
11377 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
11378 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Commit: allocate at destination, free at source, repoint the allocation.
11381 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
11386 allocInfo.m_hAllocation);
11387 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
11389 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
11391 if(allocInfo.m_pChanged != VMA_NULL)
11393 *allocInfo.m_pChanged = VK_TRUE;
11396 ++m_AllocationsMoved;
11397 m_BytesMoved += size;
11399 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous source allocation / block.
11407 if(srcAllocIndex > 0)
11413 if(srcBlockIndex > 0)
11416 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation: builds per-block info, distributes registered
// allocations into their blocks, sorts blocks for move destinations, and runs
// up to two DefragmentRound passes; finally unmaps everything it mapped.
// NOTE(review): garbled extraction — closing braces, an early `return` for the
// empty case, and some branch bodies are elided; text preserved byte-for-byte.
11426 VkResult VmaDefragmentator::Defragment(
11427 VkDeviceSize maxBytesToMove,
11428 uint32_t maxAllocationsToMove)
11430 if(m_Allocations.empty())
// Create one BlockInfo per block of the owning block vector.
11436 const size_t blockCount = m_pBlockVector->m_Blocks.size();
11437 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11439 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
11440 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
11441 m_Blocks.push_back(pBlockInfo);
// Sort block infos by block pointer so they can be binary-searched below.
11445 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move each registered (non-lost) allocation into its block's list.
11448 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
11450 AllocationInfo& allocInfo = m_Allocations[blockIndex];
11452 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11454 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
11455 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
11456 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
11458 (*it)->m_Allocations.push_back(allocInfo);
11466 m_Allocations.clear();
// Precompute per-block movability and order allocations largest-first.
11468 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11470 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
11471 pBlockInfo->CalcHasNonMovableAllocations();
11472 pBlockInfo->SortAllocationsBySizeDescecnding();
// Sort blocks so preferred move destinations come first.
11476 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Execute up to 2 rounds, stopping early on VK_INCOMPLETE or failure.
11479 VkResult result = VK_SUCCESS;
11480 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
11482 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Unmap any blocks that were mapped only for defragmentation.
11486 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11488 m_Blocks[blockIndex]->Unmap(m_hAllocator);
11494 bool VmaDefragmentator::MoveMakesSense(
11495 size_t dstBlockIndex, VkDeviceSize dstOffset,
11496 size_t srcBlockIndex, VkDeviceSize srcOffset)
11498 if(dstBlockIndex < srcBlockIndex)
11502 if(dstBlockIndex > srcBlockIndex)
11506 if(dstOffset < srcOffset)
// VmaRecorder constructor plus Init(): opens the CSV recording file and writes
// the file header ("Vulkan Memory Allocator,Calls recording" / format "1,4").
// Windows-only: uses QueryPerformanceCounter for timestamps and fopen_s.
// NOTE(review): garbled extraction — Init()'s signature (settings, useMutex)
// and the fopen_s error branch are elided; text preserved byte-for-byte.
11516 #if VMA_RECORDING_ENABLED 11518 VmaRecorder::VmaRecorder() :
11523 m_StartCounter(INT64_MAX)
11529 m_UseMutex = useMutex;
11530 m_Flags = settings.
flags;
// High-resolution timer baseline for the per-call timestamps.
11532 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
11533 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
// Open the recording file for binary write; failure aborts initialization.
11536 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
11539 return VK_ERROR_INITIALIZATION_FAILED;
// Write file header: magic line and format version.
11543 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
11544 fprintf(m_File,
"%s\n",
"1,4");
11549 VmaRecorder::~VmaRecorder()
11551 if(m_File != VMA_NULL)
11557 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
11559 CallParams callParams;
11560 GetBasicParams(callParams);
11562 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11563 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
11567 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
11569 CallParams callParams;
11570 GetBasicParams(callParams);
11572 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11573 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// ---- VmaRecorder::Record* methods ----------------------------------------
// Each method below records one VMA API call as a single CSV line:
// threadId, time, frameIndex, function name, then call-specific fields.
// All take the file mutex (when m_UseMutex) before writing.
// NOTE(review): garbled extraction throughout this run — several function
// signatures and the trailing fprintf argument lists (allocation/pool handles,
// Flush() calls, closing braces) are elided. Text preserved byte-for-byte
// pending recovery from the upstream file.

// RecordCreatePool (signature elided above this line).
11579 CallParams callParams;
11580 GetBasicParams(callParams);
11582 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11583 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordDestroyPool.
11594 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
11596 CallParams callParams;
11597 GetBasicParams(callParams);
11599 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11600 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordAllocateMemory (createInfo/allocation params elided).
11605 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
11606 const VkMemoryRequirements& vkMemReq,
11610 CallParams callParams;
11611 GetBasicParams(callParams);
11613 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11614 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11615 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11617 vkMemReq.alignment,
11618 vkMemReq.memoryTypeBits,
11626 userDataStr.GetString());
// RecordAllocateMemoryForBuffer.
11630 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
11631 const VkMemoryRequirements& vkMemReq,
11632 bool requiresDedicatedAllocation,
11633 bool prefersDedicatedAllocation,
11637 CallParams callParams;
11638 GetBasicParams(callParams);
11640 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11641 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11642 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11644 vkMemReq.alignment,
11645 vkMemReq.memoryTypeBits,
11646 requiresDedicatedAllocation ? 1 : 0,
11647 prefersDedicatedAllocation ? 1 : 0,
11655 userDataStr.GetString());
// RecordAllocateMemoryForImage.
11659 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
11660 const VkMemoryRequirements& vkMemReq,
11661 bool requiresDedicatedAllocation,
11662 bool prefersDedicatedAllocation,
11666 CallParams callParams;
11667 GetBasicParams(callParams);
11669 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11670 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11671 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11673 vkMemReq.alignment,
11674 vkMemReq.memoryTypeBits,
11675 requiresDedicatedAllocation ? 1 : 0,
11676 prefersDedicatedAllocation ? 1 : 0,
11684 userDataStr.GetString());
// RecordFreeMemory.
11688 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
11691 CallParams callParams;
11692 GetBasicParams(callParams);
11694 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11695 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordResizeAllocation.
11700 void VmaRecorder::RecordResizeAllocation(
11701 uint32_t frameIndex,
11703 VkDeviceSize newSize)
11705 CallParams callParams;
11706 GetBasicParams(callParams);
11708 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11709 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
11710 allocation, newSize);
// RecordSetAllocationUserData.
11714 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
11716 const void* pUserData)
11718 CallParams callParams;
11719 GetBasicParams(callParams);
11721 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11722 UserDataString userDataStr(
11725 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11727 userDataStr.GetString());
// RecordCreateLostAllocation.
11731 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
11734 CallParams callParams;
11735 GetBasicParams(callParams);
11737 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11738 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordMapMemory.
11743 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
11746 CallParams callParams;
11747 GetBasicParams(callParams);
11749 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11750 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordUnmapMemory.
11755 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
11758 CallParams callParams;
11759 GetBasicParams(callParams);
11761 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11762 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordFlushAllocation.
11767 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
11768 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11770 CallParams callParams;
11771 GetBasicParams(callParams);
11773 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11774 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// RecordInvalidateAllocation.
11781 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
11782 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11784 CallParams callParams;
11785 GetBasicParams(callParams);
11787 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11788 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// RecordCreateBuffer: logs buffer create info + allocation create info.
11795 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
11796 const VkBufferCreateInfo& bufCreateInfo,
11800 CallParams callParams;
11801 GetBasicParams(callParams);
11803 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11804 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11805 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11806 bufCreateInfo.flags,
11807 bufCreateInfo.size,
11808 bufCreateInfo.usage,
11809 bufCreateInfo.sharingMode,
11810 allocCreateInfo.
flags,
11811 allocCreateInfo.
usage,
11815 allocCreateInfo.
pool,
11817 userDataStr.GetString());
// RecordCreateImage: logs full image create info + allocation create info.
11821 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
11822 const VkImageCreateInfo& imageCreateInfo,
11826 CallParams callParams;
11827 GetBasicParams(callParams);
11829 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11830 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11831 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11832 imageCreateInfo.flags,
11833 imageCreateInfo.imageType,
11834 imageCreateInfo.format,
11835 imageCreateInfo.extent.width,
11836 imageCreateInfo.extent.height,
11837 imageCreateInfo.extent.depth,
11838 imageCreateInfo.mipLevels,
11839 imageCreateInfo.arrayLayers,
11840 imageCreateInfo.samples,
11841 imageCreateInfo.tiling,
11842 imageCreateInfo.usage,
11843 imageCreateInfo.sharingMode,
11844 imageCreateInfo.initialLayout,
11845 allocCreateInfo.
flags,
11846 allocCreateInfo.
usage,
11850 allocCreateInfo.
pool,
11852 userDataStr.GetString());
// RecordDestroyBuffer.
11856 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
11859 CallParams callParams;
11860 GetBasicParams(callParams);
11862 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11863 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordDestroyImage.
11868 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
11871 CallParams callParams;
11872 GetBasicParams(callParams);
11874 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11875 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordTouchAllocation.
11880 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
11883 CallParams callParams;
11884 GetBasicParams(callParams);
11886 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11887 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordGetAllocationInfo.
11892 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
11895 CallParams callParams;
11896 GetBasicParams(callParams);
11898 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11899 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordMakePoolAllocationsLost.
11904 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
11907 CallParams callParams;
11908 GetBasicParams(callParams);
11910 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11911 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
11918 if(pUserData != VMA_NULL)
11922 m_Str = (
const char*)pUserData;
11926 sprintf_s(m_PtrStr,
"%p", pUserData);
11936 void VmaRecorder::WriteConfiguration(
11937 const VkPhysicalDeviceProperties& devProps,
11938 const VkPhysicalDeviceMemoryProperties& memProps,
11939 bool dedicatedAllocationExtensionEnabled)
11941 fprintf(m_File,
"Config,Begin\n");
11943 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
11944 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
11945 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
11946 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
11947 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
11948 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
11950 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
11951 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
11952 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
11954 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
11955 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
11957 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
11958 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
11960 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
11961 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
11963 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
11964 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
11967 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
11969 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
11970 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
11971 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
11972 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
11973 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
11974 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
11975 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
11976 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
11977 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
11979 fprintf(m_File,
"Config,End\n");
11982 void VmaRecorder::GetBasicParams(CallParams& outParams)
11984 outParams.threadId = GetCurrentThreadId();
11986 LARGE_INTEGER counter;
11987 QueryPerformanceCounter(&counter);
11988 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
11991 void VmaRecorder::Flush()
// VmaAllocator_T constructor (its signature and first initializers are elided
// above this point): validates debug-macro configuration, zeroes all state,
// imports Vulkan function pointers, queries device/memory properties, applies
// optional per-heap size limits, creates one default VmaBlockVector and one
// dedicated-allocation list per memory type, and optionally starts recording.
// NOTE(review): garbled extraction — numeric prefixes are leaked original
// line numbers; braces and several statements are elided. Text preserved
// byte-for-byte pending recovery from the upstream file.
11999 #endif // #if VMA_RECORDING_ENABLED 12007 m_hDevice(pCreateInfo->device),
12008 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
12009 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
12010 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
12011 m_PreferredLargeHeapBlockSize(0),
12012 m_PhysicalDevice(pCreateInfo->physicalDevice),
12013 m_CurrentFrameIndex(0),
12014 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
12017 ,m_pRecorder(VMA_NULL)
// Corruption detection requires a nonzero margin that is a multiple of 4 bytes.
12020 if(VMA_DEBUG_DETECT_CORRUPTION)
12023 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Dedicated-allocation flag requires the KHR extensions to be compiled in.
12028 #if !(VMA_DEDICATED_ALLOCATION) 12031 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
// Zero-initialize all aggregate members before use.
12035 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
12036 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
12037 memset(&m_MemProps, 0,
sizeof(m_MemProps));
12039 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
12040 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
12042 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
12044 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device properties through the (possibly user-supplied) function table.
12055 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
12056 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Sanity checks: all alignments/granularities must be powers of two.
12058 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
12059 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
12060 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
12061 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply user-specified per-heap size limits, clamping the reported heap sizes.
12068 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
12070 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
12071 if(limit != VK_WHOLE_SIZE)
12073 m_HeapSizeLimit[heapIndex] = limit;
12074 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
12076 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create the default block vector and dedicated-allocation list per memory type.
12082 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12084 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
12086 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
12089 preferredBlockSize,
12092 GetBufferImageGranularity(),
12099 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
12106 VkResult res = VK_SUCCESS;
// Optional call recording (Windows-only feature).
12111 #if VMA_RECORDING_ENABLED 12112 m_pRecorder = vma_new(
this, VmaRecorder)();
12114 if(res != VK_SUCCESS)
12118 m_pRecorder->WriteConfiguration(
12119 m_PhysicalDeviceProperties,
12121 m_UseKhrDedicatedAllocation);
12122 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
// Recording requested but compiled out: fail explicitly rather than silently.
12124 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
12125 return VK_ERROR_FEATURE_NOT_PRESENT;
12132 VmaAllocator_T::~VmaAllocator_T()
12134 #if VMA_RECORDING_ENABLED 12135 if(m_pRecorder != VMA_NULL)
12137 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
12138 vma_delete(
this, m_pRecorder);
12142 VMA_ASSERT(m_Pools.empty());
12144 for(
size_t i = GetMemoryTypeCount(); i--; )
12146 vma_delete(
this, m_pDedicatedAllocations[i]);
12147 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions: first from statically linked Vulkan prototypes
// (when VMA_STATIC_VULKAN_FUNCTIONS == 1), then overrides any entry the
// caller supplied in pVulkanFunctions, and finally asserts that every
// required pointer is set.
12151 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
// --- Step 1: defaults from the statically linked loader. ---
12153 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 12154 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
12155 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
12156 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
12157 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
12158 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
12159 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
12160 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
12161 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
12162 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
12163 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
12164 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
12165 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
12166 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
12167 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
12168 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
12169 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// The KHR "get memory requirements 2" entry points are extension functions,
// so they are fetched via vkGetDeviceProcAddr rather than linked statically.
12170 #if VMA_DEDICATED_ALLOCATION 12171 if(m_UseKhrDedicatedAllocation)
12173 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
12174 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
12175 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
12176 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// --- Step 2: caller-supplied overrides (only non-null entries are copied). ---
12178 #endif // #if VMA_DEDICATED_ALLOCATION 12179 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 12181 #define VMA_COPY_IF_NOT_NULL(funcName) \ 12182 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 12184 if(pVulkanFunctions != VMA_NULL)
12186 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
12187 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
12188 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
12189 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
12190 VMA_COPY_IF_NOT_NULL(vkMapMemory);
12191 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
12192 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
12193 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
12194 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
12195 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
12196 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
12197 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
12198 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
12199 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
12200 VMA_COPY_IF_NOT_NULL(vkCreateImage);
12201 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
12202 #if VMA_DEDICATED_ALLOCATION 12203 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
12204 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// --- Step 3: validate that every required function pointer is now set. ---
12208 #undef VMA_COPY_IF_NOT_NULL 12212 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
12213 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
12214 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
12215 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
12216 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
12217 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
12218 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
12219 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
12220 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
12221 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
12222 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
12223 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
12224 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
12225 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
12226 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
12227 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
// The extension pointers are only required when dedicated allocation is used.
12228 #if VMA_DEDICATED_ALLOCATION 12229 if(m_UseKhrDedicatedAllocation)
12231 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
12232 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
12237 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
12239 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12240 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
12241 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
12242 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory from a specific memory type: tries the type's block
// vector first, falling back to (or preferring) a dedicated VkDeviceMemory
// allocation. NOTE(review): several interior lines (argument lists, braces)
// were dropped by the extraction; confirm control flow against upstream VMA.
12245 VkResult VmaAllocator_T::AllocateMemoryOfType(
12247 VkDeviceSize alignment,
12248 bool dedicatedAllocation,
12249 VkBuffer dedicatedBuffer,
12250 VkImage dedicatedImage,
12252 uint32_t memTypeIndex,
12253 VmaSuballocationType suballocType,
12256 VMA_ASSERT(pAllocation != VMA_NULL);
12257 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapped allocations only make sense on HOST_VISIBLE memory types.
12263 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12268 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
12269 VMA_ASSERT(blockVector);
12271 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: go straight to a dedicated allocation when forced by debug
// settings, requested/required by the caller, or when the request is larger
// than half a block (it would waste block space otherwise).
12272 bool preferDedicatedMemory =
12273 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
12274 dedicatedAllocation ||
12276 size > preferredBlockSize / 2;
// Dedicated path is only taken for the default pools (pool == VK_NULL_HANDLE).
12278 if(preferDedicatedMemory &&
12280 finalCreateInfo.
pool == VK_NULL_HANDLE)
12289 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12293 return AllocateDedicatedMemory(
// Otherwise: suballocate from the block vector.
12307 VkResult res = blockVector->Allocate(
12309 m_CurrentFrameIndex.load(),
12315 if(res == VK_SUCCESS)
12323 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block-vector allocation failed: fall back to dedicated memory.
12327 res = AllocateDedicatedMemory(
12333 finalCreateInfo.pUserData,
12337 if(res == VK_SUCCESS)
12340 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
12346 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates a dedicated (non-suballocated) VkDeviceMemory for one allocation,
// optionally chaining VkMemoryDedicatedAllocateInfoKHR for a specific buffer
// or image, optionally mapping it, then registers it in the per-type
// dedicated-allocations list. NOTE(review): some interior lines are missing
// from the extraction (braces, early returns, mapping arguments).
12353 VkResult VmaAllocator_T::AllocateDedicatedMemory(
12355 VmaSuballocationType suballocType,
12356 uint32_t memTypeIndex,
12358 bool isUserDataString,
12360 VkBuffer dedicatedBuffer,
12361 VkImage dedicatedImage,
12364 VMA_ASSERT(pAllocation);
12366 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12367 allocInfo.memoryTypeIndex = memTypeIndex;
12368 allocInfo.allocationSize = size;
// Chain the dedicated-allocation info when VK_KHR_dedicated_allocation is in
// use. At most one of dedicatedBuffer/dedicatedImage may be set.
12370 #if VMA_DEDICATED_ALLOCATION 12371 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
12372 if(m_UseKhrDedicatedAllocation)
12374 if(dedicatedBuffer != VK_NULL_HANDLE)
12376 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
12377 dedicatedAllocInfo.buffer = dedicatedBuffer;
12378 allocInfo.pNext = &dedicatedAllocInfo;
12380 else if(dedicatedImage != VK_NULL_HANDLE)
12382 dedicatedAllocInfo.image = dedicatedImage;
12383 allocInfo.pNext = &dedicatedAllocInfo;
12386 #endif // #if VMA_DEDICATED_ALLOCATION 12389 VkDeviceMemory hMemory = VK_NULL_HANDLE;
12390 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
12393 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistently map the memory if requested; on failure the fresh
// VkDeviceMemory is released again before returning.
12397 void* pMappedData = VMA_NULL;
12400 res = (*m_VulkanFunctions.vkMapMemory)(
12409 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
12410 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in a VmaAllocation_T and fill debug pattern if enabled.
12415 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
12416 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
12417 (*pAllocation)->SetUserData(
this, pUserData);
12418 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12420 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register in the sorted per-memory-type list, under its mutex.
12425 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12426 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12427 VMA_ASSERT(pDedicatedAllocations);
12428 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
12431 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. When VK_KHR_dedicated_allocation
// is enabled, uses vkGetBufferMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR so the driver can report whether a
// dedicated allocation is required/preferred; otherwise falls back to the
// core query and reports false for both flags.
12436 void VmaAllocator_T::GetBufferMemoryRequirements(
12438 VkMemoryRequirements& memReq,
12439 bool& requiresDedicatedAllocation,
12440 bool& prefersDedicatedAllocation)
const 12442 #if VMA_DEDICATED_ALLOCATION 12443 if(m_UseKhrDedicatedAllocation)
12445 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
12446 memReqInfo.buffer = hBuffer;
// memDedicatedReq is chained into memReq2.pNext to receive the
// dedicated-allocation preference from the driver.
12448 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12450 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12451 memReq2.pNext = &memDedicatedReq;
12453 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12455 memReq = memReq2.memoryRequirements;
12456 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12457 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: core Vulkan query, no dedicated-allocation information.
12460 #endif // #if VMA_DEDICATED_ALLOCATION 12462 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
12463 requiresDedicatedAllocation =
false;
12464 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: queries requirements via
// vkGetImageMemoryRequirements2KHR (with VkMemoryDedicatedRequirementsKHR
// chained) when the KHR extension is enabled, otherwise the core query with
// both dedicated-allocation flags reported as false.
12468 void VmaAllocator_T::GetImageMemoryRequirements(
12470 VkMemoryRequirements& memReq,
12471 bool& requiresDedicatedAllocation,
12472 bool& prefersDedicatedAllocation)
const 12474 #if VMA_DEDICATED_ALLOCATION 12475 if(m_UseKhrDedicatedAllocation)
12477 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
12478 memReqInfo.image = hImage;
// Chained output structure for the driver's dedicated-allocation preference.
12480 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12482 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12483 memReq2.pNext = &memDedicatedReq;
12485 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12487 memReq = memReq2.memoryRequirements;
12488 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12489 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: core Vulkan query, no dedicated-allocation information.
12492 #endif // #if VMA_DEDICATED_ALLOCATION 12494 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
12495 requiresDedicatedAllocation =
false;
12496 prefersDedicatedAllocation =
false;
// Top-level allocation entry point: validates the createInfo flag
// combinations, dispatches to a custom pool's block vector when a pool is
// specified, otherwise searches suitable memory types and delegates to
// AllocateMemoryOfType, retrying with the failed type masked out.
// NOTE(review): several interior lines (flag tests, memory-type search via
// vmaFindMemoryTypeIndex, loop structure) were dropped by the extraction.
12500 VkResult VmaAllocator_T::AllocateMemory(
12501 const VkMemoryRequirements& vkMemReq,
12502 bool requiresDedicatedAllocation,
12503 bool prefersDedicatedAllocation,
12504 VkBuffer dedicatedBuffer,
12505 VkImage dedicatedImage,
12507 VmaSuballocationType suballocType,
// Alignment from Vulkan is always a power of two.
12510 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
12512 if(vkMemReq.size == 0)
12514 return VK_ERROR_VALIDATION_FAILED_EXT;
// --- Mutually exclusive / invalid flag combinations. ---
12519 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
12520 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12525 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
12526 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A driver-required dedicated allocation is incompatible with
// NEVER_ALLOCATE and with custom pools.
12528 if(requiresDedicatedAllocation)
12532 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
12533 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12535 if(createInfo.
pool != VK_NULL_HANDLE)
12537 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
12538 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12541 if((createInfo.
pool != VK_NULL_HANDLE) &&
12544 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
12545 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// --- Custom pool path: allocate directly from the pool's block vector,
// honoring the memory type's minimum alignment. ---
12548 if(createInfo.
pool != VK_NULL_HANDLE)
12550 const VkDeviceSize alignmentForPool = VMA_MAX(
12551 vkMemReq.alignment,
12552 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
12553 return createInfo.
pool->m_BlockVector.Allocate(
12555 m_CurrentFrameIndex.load(),
// --- Default-pool path: iterate candidate memory types from
// vkMemReq.memoryTypeBits. ---
12565 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
12566 uint32_t memTypeIndex = UINT32_MAX;
12568 if(res == VK_SUCCESS)
12570 VkDeviceSize alignmentForMemType = VMA_MAX(
12571 vkMemReq.alignment,
12572 GetMemoryTypeMinAlignment(memTypeIndex));
12574 res = AllocateMemoryOfType(
12576 alignmentForMemType,
12577 requiresDedicatedAllocation || prefersDedicatedAllocation,
12585 if(res == VK_SUCCESS)
// Mask out the memory type that failed and retry with the next candidate.
12595 memoryTypeBits &= ~(1u << memTypeIndex);
12598 if(res == VK_SUCCESS)
12600 alignmentForMemType = VMA_MAX(
12601 vkMemReq.alignment,
12602 GetMemoryTypeMinAlignment(memTypeIndex));
12604 res = AllocateMemoryOfType(
12606 alignmentForMemType,
12607 requiresDedicatedAllocation || prefersDedicatedAllocation,
12615 if(res == VK_SUCCESS)
// All candidate memory types exhausted.
12625 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a single allocation: fills the destroyed-pattern if debug fill is
// enabled, returns the memory to its block vector (pool or default) or frees
// the dedicated VkDeviceMemory, then destroys the VmaAllocation_T object.
12636 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
12638 VMA_ASSERT(allocation);
// TouchAllocation returns whether the allocation is still valid (not lost);
// only then is the underlying memory actually released.
12640 if(TouchAllocation(allocation))
12642 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12644 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
12647 switch(allocation->GetType())
12649 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block suballocation: return it to the owning block vector — the custom
// pool's if any, otherwise the default vector for its memory type.
12651 VmaBlockVector* pBlockVector = VMA_NULL;
12652 VmaPool hPool = allocation->GetPool();
12653 if(hPool != VK_NULL_HANDLE)
12655 pBlockVector = &hPool->m_BlockVector;
12659 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12660 pBlockVector = m_pBlockVectors[memTypeIndex];
12662 pBlockVector->Free(allocation);
12665 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12666 FreeDedicatedMemory(allocation);
// Finally destroy the handle object itself (clearing user data first).
12673 allocation->SetUserData(
this, VMA_NULL);
12674 vma_delete(
this, allocation);
// Attempts to change an allocation's size in place. Only block
// suballocations can be resized (delegated to the block metadata);
// dedicated allocations report VK_ERROR_FEATURE_NOT_PRESENT.
12677 VkResult VmaAllocator_T::ResizeAllocation(
12679 VkDeviceSize newSize)
// Zero size or a lost allocation is an invalid request.
12681 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
12683 return VK_ERROR_VALIDATION_FAILED_EXT;
// No-op when the size is unchanged.
12685 if(newSize == alloc->GetSize())
12690 switch(alloc->GetType())
12692 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12693 return VK_ERROR_FEATURE_NOT_PRESENT;
12694 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// The block metadata decides whether the neighboring space allows the
// resize; on success the allocation object records its new size.
12695 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
12697 alloc->ChangeSize(newSize);
12698 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
12703 return VK_ERROR_OUT_OF_POOL_MEMORY;
12707 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates statistics across default block vectors, custom pools, and
// dedicated allocations into pStats (per-type, per-heap, and total),
// then post-processes each StatInfo (averages etc.).
12711 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Initialize total and all per-type / per-heap entries.
12714 InitStatInfo(pStats->
total);
12715 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
12717 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default (non-pool) block vectors, one per memory type.
12721 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12723 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12724 VMA_ASSERT(pBlockVector);
12725 pBlockVector->AddStats(pStats);
// Custom pools, guarded by the pools mutex.
12730 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12731 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12733 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations: each contributes a single-allocation StatInfo,
// added to the total, its memory type, and its heap.
12738 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12740 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12741 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12742 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12743 VMA_ASSERT(pDedicatedAllocVector);
12744 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
12747 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
12748 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12749 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12750 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages / final figures for every filled entry.
12755 VmaPostprocessCalcStatInfo(pStats->
total);
12756 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
12757 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
12758 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
12759 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, the PCI vendor ID of AMD.
12762 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Legacy defragmentation: registers each eligible allocation with a
// per-block-vector VmaDefragmentator, runs defragmentation over default
// vectors and custom pools within the given move budgets, then destroys the
// defragmentators. NOTE(review): interior lines (loop headers, argument
// lists) were dropped by the extraction; confirm against upstream VMA.
12764 VkResult VmaAllocator_T::Defragment(
12766 size_t allocationCount,
12767 VkBool32* pAllocationsChanged,
// Clear the caller-visible outputs up front.
12771 if(pAllocationsChanged != VMA_NULL)
12773 memset(pAllocationsChanged, 0, allocationCount *
sizeof(VkBool32));
12775 if(pDefragmentationStats != VMA_NULL)
12777 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
12780 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
12782 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
12784 const size_t poolCount = m_Pools.size();
// Phase 1: collect eligible allocations into defragmentators.
12787 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12790 VMA_ASSERT(hAlloc);
12791 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only block suballocations in HOST_VISIBLE|HOST_COHERENT memory that are
// not lost can be moved by this implementation.
12793 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12794 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
12796 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
12798 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
12800 VmaBlockVector* pAllocBlockVector = VMA_NULL;
12802 const VmaPool hAllocPool = hAlloc->GetPool();
12804 if(hAllocPool != VK_NULL_HANDLE)
// Pools with a non-default algorithm (e.g. linear) are not defragmented.
12807 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
12809 pAllocBlockVector = &hAllocPool->m_BlockVector;
12815 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
12818 if(pAllocBlockVector != VMA_NULL)
12820 VmaDefragmentator*
const pDefragmentator =
12821 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
12822 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
12823 &pAllocationsChanged[allocIndex] : VMA_NULL;
12824 pDefragmentator->AddAllocation(hAlloc, pChanged);
12829 VkResult result = VK_SUCCESS;
// Phase 2: run defragmentation within the caller's budgets (defaults are
// effectively unlimited).
12833 VkDeviceSize maxBytesToMove = SIZE_MAX;
12834 uint32_t maxAllocationsToMove = UINT32_MAX;
12835 if(pDefragmentationInfo != VMA_NULL)
12842 for(uint32_t memTypeIndex = 0;
12843 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
12847 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12849 result = m_pBlockVectors[memTypeIndex]->Defragment(
12850 pDefragmentationStats,
12852 maxAllocationsToMove);
12857 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
12859 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
12860 pDefragmentationStats,
12862 maxAllocationsToMove);
// Phase 3: tear down the defragmentators (pools first, reverse order).
12868 for(
size_t poolIndex = poolCount; poolIndex--; )
12870 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
12874 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
12876 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12878 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Body of VmaAllocator_T::GetAllocationInfo (the signature line was dropped
// by the extraction). Fills a VmaAllocationInfo from the allocation. For
// allocations that can become lost, the last-use frame index is advanced to
// the current frame via compare-exchange; lost allocations report zeroed
// memory fields with their original size preserved.
12887 if(hAllocation->CanBecomeLost())
12893 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12894 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: report null memory/offset but keep the size and user data.
12897 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12901 pAllocationInfo->
offset = 0;
12902 pAllocationInfo->
size = hAllocation->GetSize();
12904 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report the live parameters directly.
12907 else if(localLastUseFrameIndex == localCurrFrameIndex)
12909 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12910 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12911 pAllocationInfo->
offset = hAllocation->GetOffset();
12912 pAllocationInfo->
size = hAllocation->GetSize();
12914 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise: try to bump the last-use frame index; on CAS failure another
// thread changed it, so re-read and loop.
12919 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12921 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocation: in stats builds still touch the last-use
// frame index (for bookkeeping), then report all parameters.
12928 #if VMA_STATS_STRING_ENABLED 12929 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12930 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12933 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12934 if(localLastUseFrameIndex == localCurrFrameIndex)
12940 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12942 localLastUseFrameIndex = localCurrFrameIndex;
12948 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12949 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12950 pAllocationInfo->
offset = hAllocation->GetOffset();
12951 pAllocationInfo->
size = hAllocation->GetSize();
12952 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
12953 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame and returns whether it
// is still valid (false once a lost-capable allocation has become lost).
// Uses a compare-exchange loop because other threads may touch concurrently.
12957 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
12960 if(hAllocation->CanBecomeLost())
12962 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12963 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: report failure.
12966 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Already touched this frame: nothing to update.
12970 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Try to advance the last-use frame; retry on CAS failure.
12976 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12978 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocations: in stats builds still advance the
// last-use frame index for bookkeeping; always valid.
12985 #if VMA_STATS_STRING_ENABLED 12986 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12987 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12990 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12991 if(localLastUseFrameIndex == localCurrFrameIndex)
12997 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12999 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature line dropped by the
// extraction). Creates a custom pool: validates/normalizes the create info,
// builds the VmaPool_T with a preferred block size, pre-creates the minimum
// block count, assigns an ID, and registers the pool in the sorted list.
13011 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
// Invalid create-info combinations are rejected before any allocation.
13021 return VK_ERROR_INITIALIZATION_FAILED;
13024 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
13026 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Pre-create minBlockCount blocks; roll back the pool object on failure.
13028 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
13029 if(res != VK_SUCCESS)
13031 vma_delete(
this, *pPool);
// Register under the pools mutex with a unique, monotonically growing ID.
13038 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13039 (*pPool)->SetId(m_NextPoolId++);
13040 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Destroys a custom pool: unregisters it from the sorted pool list under the
// pools mutex, then deletes the pool object (which frees its blocks).
13046 void VmaAllocator_T::DestroyPool(
VmaPool pool)
// Scope the lock to the list removal only; deletion happens outside it.
13050 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13051 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
13052 VMA_ASSERT(success &&
"Pool not found in Allocator.");
13055 vma_delete(
this, pool);
// Body of VmaAllocator_T::GetPoolStats (signature dropped by the
// extraction): delegates statistics gathering to the pool's block vector.
13060 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index; read by the
// lost-allocation logic (TouchAllocation, GetAllocationInfo, etc.).
13063 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
13065 m_CurrentFrameIndex.store(frameIndex);
// Marks eligible allocations in a pool as lost, delegating to the pool's
// block vector with the current frame index. NOTE(review): the pool-handle
// parameter line was dropped by the extraction.
13068 void VmaAllocator_T::MakePoolAllocationsLost(
13070 size_t* pLostAllocationCount)
13072 hPool->m_BlockVector.MakePoolAllocationsLost(
13073 m_CurrentFrameIndex.load(),
13074 pLostAllocationCount);
13077 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
13079 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption checks over all default block vectors and custom pools
// whose memory type is selected by memoryTypeBits. Starts from
// FEATURE_NOT_PRESENT and upgrades to VK_SUCCESS once any vector actually
// performed a check. NOTE(review): switch cases besides FEATURE_NOT_PRESENT
// were dropped by the extraction.
13082 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
13084 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default block vectors, filtered by the memory-type mask.
13087 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13089 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
13091 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
13092 VMA_ASSERT(pBlockVector);
13093 VkResult localRes = pBlockVector->CheckCorruption();
13096 case VK_ERROR_FEATURE_NOT_PRESENT:
13099 finalRes = VK_SUCCESS;
// Custom pools, under the pools mutex, with the same mask filter.
13109 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13110 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
13112 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
13114 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
13117 case VK_ERROR_FEATURE_NOT_PRESENT:
13120 finalRes = VK_SUCCESS;
// Creates a placeholder allocation that is already in the "lost" state,
// useful as a sentinel the application can swap in for lost allocations.
13132 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
// Constructed with VMA_FRAME_INDEX_LOST and no user-data string.
13134 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
13135 (*pAllocation)->InitLost();
// Thin wrapper around vkAllocateMemory that enforces the optional per-heap
// size limit (m_HeapSizeLimit) and fires the user's pfnAllocate device
// memory callback on success.
13138 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
13140 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Limited heap: check and update the remaining budget under its mutex.
13143 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13145 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13146 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
13148 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
13149 if(res == VK_SUCCESS)
13151 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exhausted: simulate device-out-of-memory without calling Vulkan.
13156 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited heap: allocate directly.
13161 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user callback about the successful device allocation.
13164 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
13166 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Counterpart of AllocateVulkanMemory: fires the user's pfnFree callback,
// frees the device memory, and returns the size to the heap budget when a
// per-heap limit is configured.
13172 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
// Callback is invoked before the memory is actually freed.
13174 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
13176 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
13179 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Give the freed bytes back to the limited heap's remaining budget.
13181 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
13182 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13184 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13185 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation into host address space. Block suballocations share a
// reference-counted mapping of the whole block and return a pointer offset
// into it; dedicated allocations map their own VkDeviceMemory. Lost-capable
// allocations cannot be mapped.
13189 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
13191 if(hAllocation->CanBecomeLost())
13193 return VK_ERROR_MEMORY_MAP_FAILED;
13196 switch(hAllocation->GetType())
13198 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13200 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
13201 char *pBytes = VMA_NULL;
// Map the whole block (ref-counted) and offset to this suballocation.
13202 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
13203 if(res == VK_SUCCESS)
13205 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
// Track the per-allocation map count for balanced Unmap calls.
13206 hAllocation->BlockAllocMap();
13210 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13211 return hAllocation->DedicatedAllocMap(
this, ppData);
13214 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (signature dropped by the extraction).
// Mirrors Map: decrements the allocation's map count and releases one
// reference on the block mapping, or unmaps the dedicated memory.
13220 switch(hAllocation->GetType())
13222 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13224 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
13225 hAllocation->BlockAllocUnmap();
13226 pBlock->Unmap(
this, 1);
13229 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13230 hAllocation->DedicatedAllocUnmap(
this);
// Binds a buffer to the allocation's memory. Dedicated allocations bind the
// whole VkDeviceMemory directly; block suballocations delegate to the block
// so the suballocation offset is applied (and the bind is serialized).
13237 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
13239 VkResult res = VK_SUCCESS;
13240 switch(hAllocation->GetType())
13242 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13243 res = GetVulkanFunctions().vkBindBufferMemory(
13246 hAllocation->GetMemory(),
13249 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13251 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13252 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
13253 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: binds an image either directly to
// the dedicated VkDeviceMemory or through the owning block (which applies
// the suballocation offset).
13262 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
13264 VkResult res = VK_SUCCESS;
13265 switch(hAllocation->GetType())
13267 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13268 res = GetVulkanFunctions().vkBindImageMemory(
13271 hAllocation->GetMemory(),
13274 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13276 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13277 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
13278 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a byte range of an allocation on non-coherent
// memory types. The range is expanded to nonCoherentAtomSize boundaries as
// required by Vulkan, translated by the suballocation offset for block
// allocations, and clamped to the allocation/block size. No-op for
// coherent memory or size == 0.
13287 void VmaAllocator_T::FlushOrInvalidateAllocation(
13289 VkDeviceSize offset, VkDeviceSize size,
13290 VMA_CACHE_OPERATION op)
13292 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
13293 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
13295 const VkDeviceSize allocationSize = hAllocation->GetSize();
13296 VMA_ASSERT(offset <= allocationSize);
13298 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13300 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13301 memRange.memory = hAllocation->GetMemory();
13303 switch(hAllocation->GetType())
13305 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Dedicated: align the start down and the size up to atom boundaries,
// clamped to the end of the allocation.
13306 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13307 if(size == VK_WHOLE_SIZE)
13309 memRange.size = allocationSize - memRange.offset;
13313 VMA_ASSERT(offset + size <= allocationSize);
13314 memRange.size = VMA_MIN(
13315 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
13316 allocationSize - memRange.offset);
13320 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block suballocation: first align within the allocation...
13323 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13324 if(size == VK_WHOLE_SIZE)
13326 size = allocationSize - offset;
13330 VMA_ASSERT(offset + size <= allocationSize);
13332 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// ...then translate by the suballocation's offset inside the block and
// clamp to the block size (the allocation offset is atom-aligned).
13335 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
13336 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
13337 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
13338 memRange.offset += allocationOffset;
13339 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch to the requested cache operation.
13350 case VMA_CACHE_FLUSH:
13351 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
13353 case VMA_CACHE_INVALIDATE:
13354 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Releases a dedicated allocation: removes it from the per-memory-type
// registry (under its mutex) and frees the underlying VkDeviceMemory.
// NOTE(review): lines 13377-13387 (e.g. unmapping before free) were dropped
// by the extraction.
13363 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
13365 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
13367 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
// Unregister from the sorted dedicated-allocations list.
13369 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13370 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
13371 VMA_ASSERT(pDedicatedAllocations);
13372 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
13373 VMA_ASSERT(success);
13376 VkDeviceMemory hMemory = allocation->GetMemory();
13388 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
13390 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills the allocation's memory with a byte pattern (created/
// destroyed markers). Only applies when VMA_DEBUG_INITIALIZE_ALLOCATIONS is
// enabled, the allocation cannot become lost, and its memory is
// HOST_VISIBLE; the fill is flushed for non-coherent memory.
13393 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
13395 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
13396 !hAllocation->CanBecomeLost() &&
13397 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
// Temporarily map, fill, flush, and unmap.
13399 void* pData = VMA_NULL;
13400 VkResult res = Map(hAllocation, &pData);
13401 if(res == VK_SUCCESS)
13403 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
13404 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
13405 Unmap(hAllocation);
// A HOST_VISIBLE allocation that cannot be mapped is unexpected here.
13409 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Emits the detailed JSON map: dedicated allocations per memory type,
// default block vectors per memory type, and custom pools keyed by pool ID.
// Section objects are opened lazily (only when the first non-empty entry is
// found). NOTE(review): some closing Begin/End calls were dropped by the
// extraction.
13414 #if VMA_STATS_STRING_ENABLED 13416 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// --- Section 1: "DedicatedAllocations", grouped by memory type. ---
13418 bool dedicatedAllocationsStarted =
false;
13419 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13421 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13422 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
13423 VMA_ASSERT(pDedicatedAllocVector);
13424 if(pDedicatedAllocVector->empty() ==
false)
// Open the section object on the first non-empty memory type.
13426 if(dedicatedAllocationsStarted ==
false)
13428 dedicatedAllocationsStarted =
true;
13429 json.WriteString(
"DedicatedAllocations");
13430 json.BeginObject();
13433 json.BeginString(
"Type ");
13434 json.ContinueString(memTypeIndex);
13439 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
13441 json.BeginObject(
true);
13443 hAlloc->PrintParameters(json);
13450 if(dedicatedAllocationsStarted)
// --- Section 2: "DefaultPools" — non-empty default block vectors. ---
13456 bool allocationsStarted =
false;
13457 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13459 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
13461 if(allocationsStarted ==
false)
13463 allocationsStarted =
true;
13464 json.WriteString(
"DefaultPools");
13465 json.BeginObject();
13468 json.BeginString(
"Type ");
13469 json.ContinueString(memTypeIndex);
13472 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
13475 if(allocationsStarted)
// --- Section 3: "Pools" — custom pools keyed by their numeric ID. ---
13483 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13484 const size_t poolCount = m_Pools.size();
13487 json.WriteString(
"Pools");
13488 json.BeginObject();
13489 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13491 json.BeginString();
13492 json.ContinueString(m_Pools[poolIndex]->GetId());
13495 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
13502 #endif // #if VMA_STATS_STRING_ENABLED 13511 VMA_ASSERT(pCreateInfo && pAllocator);
13512 VMA_DEBUG_LOG(
"vmaCreateAllocator");
13514 return (*pAllocator)->Init(pCreateInfo);
13520 if(allocator != VK_NULL_HANDLE)
13522 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
13523 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
13524 vma_delete(&allocationCallbacks, allocator);
13530 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
13532 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
13533 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
13538 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
13540 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
13541 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
13546 uint32_t memoryTypeIndex,
13547 VkMemoryPropertyFlags* pFlags)
13549 VMA_ASSERT(allocator && pFlags);
13550 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
13551 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
13556 uint32_t frameIndex)
13558 VMA_ASSERT(allocator);
13559 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
13561 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13563 allocator->SetCurrentFrameIndex(frameIndex);
13570 VMA_ASSERT(allocator && pStats);
13571 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13572 allocator->CalculateStats(pStats);
13575 #if VMA_STATS_STRING_ENABLED 13579 char** ppStatsString,
13580 VkBool32 detailedMap)
13582 VMA_ASSERT(allocator && ppStatsString);
13583 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13585 VmaStringBuilder sb(allocator);
13587 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
13588 json.BeginObject();
13591 allocator->CalculateStats(&stats);
13593 json.WriteString(
"Total");
13594 VmaPrintStatInfo(json, stats.
total);
13596 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
13598 json.BeginString(
"Heap ");
13599 json.ContinueString(heapIndex);
13601 json.BeginObject();
13603 json.WriteString(
"Size");
13604 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
13606 json.WriteString(
"Flags");
13607 json.BeginArray(
true);
13608 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
13610 json.WriteString(
"DEVICE_LOCAL");
13616 json.WriteString(
"Stats");
13617 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
13620 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
13622 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
13624 json.BeginString(
"Type ");
13625 json.ContinueString(typeIndex);
13628 json.BeginObject();
13630 json.WriteString(
"Flags");
13631 json.BeginArray(
true);
13632 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
13633 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
13635 json.WriteString(
"DEVICE_LOCAL");
13637 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13639 json.WriteString(
"HOST_VISIBLE");
13641 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
13643 json.WriteString(
"HOST_COHERENT");
13645 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
13647 json.WriteString(
"HOST_CACHED");
13649 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
13651 json.WriteString(
"LAZILY_ALLOCATED");
13657 json.WriteString(
"Stats");
13658 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
13667 if(detailedMap == VK_TRUE)
13669 allocator->PrintDetailedMap(json);
13675 const size_t len = sb.GetLength();
13676 char*
const pChars = vma_new_array(allocator,
char, len + 1);
13679 memcpy(pChars, sb.GetData(), len);
13681 pChars[len] =
'\0';
13682 *ppStatsString = pChars;
13687 char* pStatsString)
13689 if(pStatsString != VMA_NULL)
13691 VMA_ASSERT(allocator);
13692 size_t len = strlen(pStatsString);
13693 vma_delete_array(allocator, pStatsString, len + 1);
13697 #endif // #if VMA_STATS_STRING_ENABLED 13704 uint32_t memoryTypeBits,
13706 uint32_t* pMemoryTypeIndex)
13708 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13709 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13710 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13717 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
13718 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
13723 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13727 switch(pAllocationCreateInfo->
usage)
13732 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13734 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13738 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13741 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13742 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13744 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13748 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13749 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
13755 *pMemoryTypeIndex = UINT32_MAX;
13756 uint32_t minCost = UINT32_MAX;
13757 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
13758 memTypeIndex < allocator->GetMemoryTypeCount();
13759 ++memTypeIndex, memTypeBit <<= 1)
13762 if((memTypeBit & memoryTypeBits) != 0)
13764 const VkMemoryPropertyFlags currFlags =
13765 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
13767 if((requiredFlags & ~currFlags) == 0)
13770 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
13772 if(currCost < minCost)
13774 *pMemoryTypeIndex = memTypeIndex;
13779 minCost = currCost;
13784 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
13789 const VkBufferCreateInfo* pBufferCreateInfo,
13791 uint32_t* pMemoryTypeIndex)
13793 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13794 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
13795 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13796 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13798 const VkDevice hDev = allocator->m_hDevice;
13799 VkBuffer hBuffer = VK_NULL_HANDLE;
13800 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
13801 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
13802 if(res == VK_SUCCESS)
13804 VkMemoryRequirements memReq = {};
13805 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
13806 hDev, hBuffer, &memReq);
13810 memReq.memoryTypeBits,
13811 pAllocationCreateInfo,
13814 allocator->GetVulkanFunctions().vkDestroyBuffer(
13815 hDev, hBuffer, allocator->GetAllocationCallbacks());
13822 const VkImageCreateInfo* pImageCreateInfo,
13824 uint32_t* pMemoryTypeIndex)
13826 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13827 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
13828 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13829 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13831 const VkDevice hDev = allocator->m_hDevice;
13832 VkImage hImage = VK_NULL_HANDLE;
13833 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
13834 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
13835 if(res == VK_SUCCESS)
13837 VkMemoryRequirements memReq = {};
13838 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
13839 hDev, hImage, &memReq);
13843 memReq.memoryTypeBits,
13844 pAllocationCreateInfo,
13847 allocator->GetVulkanFunctions().vkDestroyImage(
13848 hDev, hImage, allocator->GetAllocationCallbacks());
13858 VMA_ASSERT(allocator && pCreateInfo && pPool);
13860 VMA_DEBUG_LOG(
"vmaCreatePool");
13862 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13864 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
13866 #if VMA_RECORDING_ENABLED 13867 if(allocator->GetRecorder() != VMA_NULL)
13869 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
13880 VMA_ASSERT(allocator);
13882 if(pool == VK_NULL_HANDLE)
13887 VMA_DEBUG_LOG(
"vmaDestroyPool");
13889 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13891 #if VMA_RECORDING_ENABLED 13892 if(allocator->GetRecorder() != VMA_NULL)
13894 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
13898 allocator->DestroyPool(pool);
13906 VMA_ASSERT(allocator && pool && pPoolStats);
13908 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13910 allocator->GetPoolStats(pool, pPoolStats);
13916 size_t* pLostAllocationCount)
13918 VMA_ASSERT(allocator && pool);
13920 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13922 #if VMA_RECORDING_ENABLED 13923 if(allocator->GetRecorder() != VMA_NULL)
13925 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
13929 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
13934 VMA_ASSERT(allocator && pool);
13936 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13938 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
13940 return allocator->CheckPoolCorruption(pool);
13945 const VkMemoryRequirements* pVkMemoryRequirements,
13950 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
13952 VMA_DEBUG_LOG(
"vmaAllocateMemory");
13954 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13956 VkResult result = allocator->AllocateMemory(
13957 *pVkMemoryRequirements,
13963 VMA_SUBALLOCATION_TYPE_UNKNOWN,
13966 #if VMA_RECORDING_ENABLED 13967 if(allocator->GetRecorder() != VMA_NULL)
13969 allocator->GetRecorder()->RecordAllocateMemory(
13970 allocator->GetCurrentFrameIndex(),
13971 *pVkMemoryRequirements,
13977 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
13979 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13992 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13994 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
13996 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13998 VkMemoryRequirements vkMemReq = {};
13999 bool requiresDedicatedAllocation =
false;
14000 bool prefersDedicatedAllocation =
false;
14001 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
14002 requiresDedicatedAllocation,
14003 prefersDedicatedAllocation);
14005 VkResult result = allocator->AllocateMemory(
14007 requiresDedicatedAllocation,
14008 prefersDedicatedAllocation,
14012 VMA_SUBALLOCATION_TYPE_BUFFER,
14015 #if VMA_RECORDING_ENABLED 14016 if(allocator->GetRecorder() != VMA_NULL)
14018 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
14019 allocator->GetCurrentFrameIndex(),
14021 requiresDedicatedAllocation,
14022 prefersDedicatedAllocation,
14028 if(pAllocationInfo && result == VK_SUCCESS)
14030 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14043 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
14045 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
14047 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14049 VkMemoryRequirements vkMemReq = {};
14050 bool requiresDedicatedAllocation =
false;
14051 bool prefersDedicatedAllocation =
false;
14052 allocator->GetImageMemoryRequirements(image, vkMemReq,
14053 requiresDedicatedAllocation, prefersDedicatedAllocation);
14055 VkResult result = allocator->AllocateMemory(
14057 requiresDedicatedAllocation,
14058 prefersDedicatedAllocation,
14062 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
14065 #if VMA_RECORDING_ENABLED 14066 if(allocator->GetRecorder() != VMA_NULL)
14068 allocator->GetRecorder()->RecordAllocateMemoryForImage(
14069 allocator->GetCurrentFrameIndex(),
14071 requiresDedicatedAllocation,
14072 prefersDedicatedAllocation,
14078 if(pAllocationInfo && result == VK_SUCCESS)
14080 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14090 VMA_ASSERT(allocator);
14092 if(allocation == VK_NULL_HANDLE)
14097 VMA_DEBUG_LOG(
"vmaFreeMemory");
14099 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14101 #if VMA_RECORDING_ENABLED 14102 if(allocator->GetRecorder() != VMA_NULL)
14104 allocator->GetRecorder()->RecordFreeMemory(
14105 allocator->GetCurrentFrameIndex(),
14110 allocator->FreeMemory(allocation);
14116 VkDeviceSize newSize)
14118 VMA_ASSERT(allocator && allocation);
14120 VMA_DEBUG_LOG(
"vmaResizeAllocation");
14122 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14124 #if VMA_RECORDING_ENABLED 14125 if(allocator->GetRecorder() != VMA_NULL)
14127 allocator->GetRecorder()->RecordResizeAllocation(
14128 allocator->GetCurrentFrameIndex(),
14134 return allocator->ResizeAllocation(allocation, newSize);
14142 VMA_ASSERT(allocator && allocation && pAllocationInfo);
14144 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14146 #if VMA_RECORDING_ENABLED 14147 if(allocator->GetRecorder() != VMA_NULL)
14149 allocator->GetRecorder()->RecordGetAllocationInfo(
14150 allocator->GetCurrentFrameIndex(),
14155 allocator->GetAllocationInfo(allocation, pAllocationInfo);
14162 VMA_ASSERT(allocator && allocation);
14164 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14166 #if VMA_RECORDING_ENABLED 14167 if(allocator->GetRecorder() != VMA_NULL)
14169 allocator->GetRecorder()->RecordTouchAllocation(
14170 allocator->GetCurrentFrameIndex(),
14175 return allocator->TouchAllocation(allocation);
14183 VMA_ASSERT(allocator && allocation);
14185 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14187 allocation->SetUserData(allocator, pUserData);
14189 #if VMA_RECORDING_ENABLED 14190 if(allocator->GetRecorder() != VMA_NULL)
14192 allocator->GetRecorder()->RecordSetAllocationUserData(
14193 allocator->GetCurrentFrameIndex(),
14204 VMA_ASSERT(allocator && pAllocation);
14206 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
14208 allocator->CreateLostAllocation(pAllocation);
14210 #if VMA_RECORDING_ENABLED 14211 if(allocator->GetRecorder() != VMA_NULL)
14213 allocator->GetRecorder()->RecordCreateLostAllocation(
14214 allocator->GetCurrentFrameIndex(),
14225 VMA_ASSERT(allocator && allocation && ppData);
14227 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14229 VkResult res = allocator->Map(allocation, ppData);
14231 #if VMA_RECORDING_ENABLED 14232 if(allocator->GetRecorder() != VMA_NULL)
14234 allocator->GetRecorder()->RecordMapMemory(
14235 allocator->GetCurrentFrameIndex(),
14247 VMA_ASSERT(allocator && allocation);
14249 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14251 #if VMA_RECORDING_ENABLED 14252 if(allocator->GetRecorder() != VMA_NULL)
14254 allocator->GetRecorder()->RecordUnmapMemory(
14255 allocator->GetCurrentFrameIndex(),
14260 allocator->Unmap(allocation);
14265 VMA_ASSERT(allocator && allocation);
14267 VMA_DEBUG_LOG(
"vmaFlushAllocation");
14269 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14271 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
14273 #if VMA_RECORDING_ENABLED 14274 if(allocator->GetRecorder() != VMA_NULL)
14276 allocator->GetRecorder()->RecordFlushAllocation(
14277 allocator->GetCurrentFrameIndex(),
14278 allocation, offset, size);
14285 VMA_ASSERT(allocator && allocation);
14287 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
14289 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14291 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
14293 #if VMA_RECORDING_ENABLED 14294 if(allocator->GetRecorder() != VMA_NULL)
14296 allocator->GetRecorder()->RecordInvalidateAllocation(
14297 allocator->GetCurrentFrameIndex(),
14298 allocation, offset, size);
14305 VMA_ASSERT(allocator);
14307 VMA_DEBUG_LOG(
"vmaCheckCorruption");
14309 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14311 return allocator->CheckCorruption(memoryTypeBits);
14317 size_t allocationCount,
14318 VkBool32* pAllocationsChanged,
14322 VMA_ASSERT(allocator && pAllocations);
14324 VMA_DEBUG_LOG(
"vmaDefragment");
14326 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14328 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
14336 VMA_ASSERT(allocator && allocation && buffer);
14338 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
14340 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14342 return allocator->BindBufferMemory(allocation, buffer);
14350 VMA_ASSERT(allocator && allocation && image);
14352 VMA_DEBUG_LOG(
"vmaBindImageMemory");
14354 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14356 return allocator->BindImageMemory(allocation, image);
14361 const VkBufferCreateInfo* pBufferCreateInfo,
14367 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
14369 if(pBufferCreateInfo->size == 0)
14371 return VK_ERROR_VALIDATION_FAILED_EXT;
14374 VMA_DEBUG_LOG(
"vmaCreateBuffer");
14376 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14378 *pBuffer = VK_NULL_HANDLE;
14379 *pAllocation = VK_NULL_HANDLE;
14382 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
14383 allocator->m_hDevice,
14385 allocator->GetAllocationCallbacks(),
14390 VkMemoryRequirements vkMemReq = {};
14391 bool requiresDedicatedAllocation =
false;
14392 bool prefersDedicatedAllocation =
false;
14393 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
14394 requiresDedicatedAllocation, prefersDedicatedAllocation);
14398 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
14400 VMA_ASSERT(vkMemReq.alignment %
14401 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
14403 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
14405 VMA_ASSERT(vkMemReq.alignment %
14406 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
14408 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
14410 VMA_ASSERT(vkMemReq.alignment %
14411 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
14415 res = allocator->AllocateMemory(
14417 requiresDedicatedAllocation,
14418 prefersDedicatedAllocation,
14421 *pAllocationCreateInfo,
14422 VMA_SUBALLOCATION_TYPE_BUFFER,
14425 #if VMA_RECORDING_ENABLED 14426 if(allocator->GetRecorder() != VMA_NULL)
14428 allocator->GetRecorder()->RecordCreateBuffer(
14429 allocator->GetCurrentFrameIndex(),
14430 *pBufferCreateInfo,
14431 *pAllocationCreateInfo,
14439 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
14443 #if VMA_STATS_STRING_ENABLED 14444 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
14446 if(pAllocationInfo != VMA_NULL)
14448 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14453 allocator->FreeMemory(*pAllocation);
14454 *pAllocation = VK_NULL_HANDLE;
14455 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14456 *pBuffer = VK_NULL_HANDLE;
14459 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14460 *pBuffer = VK_NULL_HANDLE;
14471 VMA_ASSERT(allocator);
14473 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14478 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
14480 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14482 #if VMA_RECORDING_ENABLED 14483 if(allocator->GetRecorder() != VMA_NULL)
14485 allocator->GetRecorder()->RecordDestroyBuffer(
14486 allocator->GetCurrentFrameIndex(),
14491 if(buffer != VK_NULL_HANDLE)
14493 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
14496 if(allocation != VK_NULL_HANDLE)
14498 allocator->FreeMemory(allocation);
14504 const VkImageCreateInfo* pImageCreateInfo,
14510 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
14512 if(pImageCreateInfo->extent.width == 0 ||
14513 pImageCreateInfo->extent.height == 0 ||
14514 pImageCreateInfo->extent.depth == 0 ||
14515 pImageCreateInfo->mipLevels == 0 ||
14516 pImageCreateInfo->arrayLayers == 0)
14518 return VK_ERROR_VALIDATION_FAILED_EXT;
14521 VMA_DEBUG_LOG(
"vmaCreateImage");
14523 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14525 *pImage = VK_NULL_HANDLE;
14526 *pAllocation = VK_NULL_HANDLE;
14529 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
14530 allocator->m_hDevice,
14532 allocator->GetAllocationCallbacks(),
14536 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
14537 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
14538 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
14541 VkMemoryRequirements vkMemReq = {};
14542 bool requiresDedicatedAllocation =
false;
14543 bool prefersDedicatedAllocation =
false;
14544 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
14545 requiresDedicatedAllocation, prefersDedicatedAllocation);
14547 res = allocator->AllocateMemory(
14549 requiresDedicatedAllocation,
14550 prefersDedicatedAllocation,
14553 *pAllocationCreateInfo,
14557 #if VMA_RECORDING_ENABLED 14558 if(allocator->GetRecorder() != VMA_NULL)
14560 allocator->GetRecorder()->RecordCreateImage(
14561 allocator->GetCurrentFrameIndex(),
14563 *pAllocationCreateInfo,
14571 res = allocator->BindImageMemory(*pAllocation, *pImage);
14575 #if VMA_STATS_STRING_ENABLED 14576 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
14578 if(pAllocationInfo != VMA_NULL)
14580 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14585 allocator->FreeMemory(*pAllocation);
14586 *pAllocation = VK_NULL_HANDLE;
14587 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14588 *pImage = VK_NULL_HANDLE;
14591 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14592 *pImage = VK_NULL_HANDLE;
14603 VMA_ASSERT(allocator);
14605 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14610 VMA_DEBUG_LOG(
"vmaDestroyImage");
14612 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14614 #if VMA_RECORDING_ENABLED 14615 if(allocator->GetRecorder() != VMA_NULL)
14617 allocator->GetRecorder()->RecordDestroyImage(
14618 allocator->GetCurrentFrameIndex(),
14623 if(image != VK_NULL_HANDLE)
14625 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
14627 if(allocation != VK_NULL_HANDLE)
14629 allocator->FreeMemory(allocation);
14633 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1584
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1885
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1641
@@ -82,7 +82,7 @@ $(function() {
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1588
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2307
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1638
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2552
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2577
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2096
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1485
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
@@ -102,13 +102,13 @@ $(function() {
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1775
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1593
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1774
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2556
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2581
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1667
VmaStatInfo total
Definition: vk_mem_alloc.h:1784
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2564
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2589
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1979
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2547
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2572
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1594
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1519
Represents main object of this library initialized.
@@ -131,10 +131,10 @@ $(function() {
VmaMemoryUsage
Definition: vk_mem_alloc.h:1820
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2542
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2567
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2560
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2585
Definition: vk_mem_alloc.h:1859
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2003
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1592
@@ -151,7 +151,7 @@ $(function() {
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1617
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1551
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2562
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2587
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1990
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2204
@@ -210,7 +210,7 @@ $(function() {
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2312
Definition: vk_mem_alloc.h:1960
Definition: vk_mem_alloc.h:1972
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2558
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2583
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1583
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1770
@@ -230,6 +230,7 @@ $(function() {
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1586
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2113
+
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2293
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
diff --git a/src/Tests.cpp b/src/Tests.cpp
index e339f9c..f7536c4 100644
--- a/src/Tests.cpp
+++ b/src/Tests.cpp
@@ -2711,6 +2711,159 @@ static void TestPool_SameSize()
vmaDestroyPool(g_hAllocator, pool);
}
+static void TestResize()
+{
+ wprintf(L"Testing vmaResizeAllocation...\n");
+
+ const VkDeviceSize KILOBYTE = 1024ull;
+ const VkDeviceSize MEGABYTE = KILOBYTE * 1024;
+
+ VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+ bufCreateInfo.size = 2 * MEGABYTE;
+ bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+ VmaAllocationCreateInfo allocCreateInfo = {};
+ allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
+
+ uint32_t memTypeIndex = UINT32_MAX;
+ TEST( vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS );
+
+ VmaPoolCreateInfo poolCreateInfo = {};
+ poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
+ poolCreateInfo.blockSize = 8 * MEGABYTE;
+ poolCreateInfo.minBlockCount = 1;
+ poolCreateInfo.maxBlockCount = 1;
+ poolCreateInfo.memoryTypeIndex = memTypeIndex;
+
+ VmaPool pool;
+ TEST( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS );
+
+ allocCreateInfo.pool = pool;
+
+ // Fill 8 MB pool with 4 * 2 MB allocations.
+ VmaAllocation allocs[4] = {};
+
+ VkMemoryRequirements memReq = {};
+ memReq.memoryTypeBits = UINT32_MAX;
+ memReq.alignment = 4;
+ memReq.size = bufCreateInfo.size;
+
+ VmaAllocationInfo allocInfo = {};
+
+ for(uint32_t i = 0; i < 4; ++i)
+ {
+ TEST( vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &allocs[i], nullptr) == VK_SUCCESS );
+ }
+
+ // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 2MB
+
+ // Case: Resize to the same size always succeeds.
+ {
+ TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS);
+        vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
+ TEST(allocInfo.size == 2ull * 1024 * 1024);
+ }
+
+ // Case: Shrink allocation at the end.
+ {
+ TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
+ vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
+ TEST(allocInfo.size == 1ull * 1024 * 1024);
+ }
+
+ // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
+
+ // Case: Shrink allocation before free space.
+ {
+ TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 512 * KILOBYTE) == VK_SUCCESS );
+ vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
+ TEST(allocInfo.size == 512 * KILOBYTE);
+ }
+
+ // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
+
+ // Case: Shrink allocation before next allocation.
+ {
+ TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 1 * MEGABYTE) == VK_SUCCESS );
+ vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
+ TEST(allocInfo.size == 1 * MEGABYTE);
+ }
+
+ // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
+
+ // Case: Grow allocation while there is even more space available.
+ {
+ TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
+ vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
+ TEST(allocInfo.size == 1 * MEGABYTE);
+ }
+
+ // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
+
+ // Case: Grow allocation while there is exact amount of free space available.
+ {
+ TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS );
+ vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
+ TEST(allocInfo.size == 2 * MEGABYTE);
+ }
+
+ // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
+
+ // Case: Fail to grow when there is not enough free space due to next allocation.
+ {
+ TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
+ vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
+ TEST(allocInfo.size == 2 * MEGABYTE);
+ }
+
+ // Case: Fail to grow when there is not enough free space due to end of memory block.
+ {
+ TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
+ vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
+ TEST(allocInfo.size == 1 * MEGABYTE);
+ }
+
+ for(uint32_t i = 4; i--; )
+ {
+ vmaFreeMemory(g_hAllocator, allocs[i]);
+ }
+
+ vmaDestroyPool(g_hAllocator, pool);
+
+ // Test dedicated allocation
+ {
+ VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
+ dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
+ dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
+
+ VmaAllocation dedicatedAlloc = VK_NULL_HANDLE;
+ TEST( vmaAllocateMemory(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, &dedicatedAlloc, nullptr) == VK_SUCCESS );
+
+ // Case: Resize to the same size always succeeds.
+ {
+ TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 2 * MEGABYTE) == VK_SUCCESS);
+ vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
+ TEST(allocInfo.size == 2ull * 1024 * 1024);
+ }
+
+ // Case: Shrinking fails.
+ {
+ TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 1 * MEGABYTE) < VK_SUCCESS);
+ vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
+ TEST(allocInfo.size == 2ull * 1024 * 1024);
+ }
+
+ // Case: Growing fails.
+ {
+ TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 3 * MEGABYTE) < VK_SUCCESS);
+ vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
+ TEST(allocInfo.size == 2ull * 1024 * 1024);
+ }
+
+ vmaFreeMemory(g_hAllocator, dedicatedAlloc);
+ }
+}
+
static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
{
const uint8_t* pBytes = (const uint8_t*)pMemory;
@@ -4275,7 +4428,7 @@ void Test()
// ########################################
// ########################################
- BasicTestBuddyAllocator();
+ TestResize();
return;
}
@@ -4287,6 +4440,7 @@ void Test()
#else
TestPool_SameSize();
TestHeapSizeLimit();
+ TestResize();
#endif
#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
TestAllocationsInitialization();
diff --git a/src/VmaReplay/VmaReplay.cpp b/src/VmaReplay/VmaReplay.cpp
index 9635b03..a9acfa5 100644
--- a/src/VmaReplay/VmaReplay.cpp
+++ b/src/VmaReplay/VmaReplay.cpp
@@ -82,6 +82,7 @@ enum class VMA_FUNCTION
TouchAllocation,
GetAllocationInfo,
MakePoolAllocationsLost,
+ ResizeAllocation,
Count
};
static const char* VMA_FUNCTION_NAMES[] = {
@@ -104,6 +105,7 @@ static const char* VMA_FUNCTION_NAMES[] = {
"vmaTouchAllocation",
"vmaGetAllocationInfo",
"vmaMakePoolAllocationsLost",
+ "vmaResizeAllocation",
};
static_assert(
_countof(VMA_FUNCTION_NAMES) == (size_t)VMA_FUNCTION::Count,
@@ -143,7 +145,7 @@ static size_t g_DumpStatsAfterLineNextIndex = 0;
static bool ValidateFileVersion()
{
if(GetVersionMajor(g_FileVersion) == 1 &&
- GetVersionMinor(g_FileVersion) <= 3)
+ GetVersionMinor(g_FileVersion) <= 4)
{
return true;
}
@@ -1015,6 +1017,7 @@ private:
void ExecuteTouchAllocation(size_t lineNumber, const CsvSplit& csvSplit);
void ExecuteGetAllocationInfo(size_t lineNumber, const CsvSplit& csvSplit);
void ExecuteMakePoolAllocationsLost(size_t lineNumber, const CsvSplit& csvSplit);
+ void ExecuteResizeAllocation(size_t lineNumber, const CsvSplit& csvSplit);
void DestroyAllocation(size_t lineNumber, const CsvSplit& csvSplit);
};
@@ -1156,6 +1159,8 @@ void Player::ExecuteLine(size_t lineNumber, const StrRange& line)
ExecuteGetAllocationInfo(lineNumber, csvSplit);
else if(StrRangeEq(functionName, "vmaMakePoolAllocationsLost"))
ExecuteMakePoolAllocationsLost(lineNumber, csvSplit);
+ else if(StrRangeEq(functionName, "vmaResizeAllocation"))
+ ExecuteResizeAllocation(lineNumber, csvSplit);
else
{
if(IssueWarning())
@@ -2599,6 +2604,45 @@ void Player::ExecuteMakePoolAllocationsLost(size_t lineNumber, const CsvSplit& c
}
}
+void Player::ExecuteResizeAllocation(size_t lineNumber, const CsvSplit& csvSplit)
+{
+ m_Stats.RegisterFunctionCall(VMA_FUNCTION::ResizeAllocation);
+
+ if(ValidateFunctionParameterCount(lineNumber, csvSplit, 2, false))
+ {
+ uint64_t origPtr = 0;
+ uint64_t newSize = 0;
+
+ if(StrRangeToPtr(csvSplit.GetRange(FIRST_PARAM_INDEX), origPtr) &&
+ StrRangeToUint(csvSplit.GetRange(FIRST_PARAM_INDEX + 1), newSize))
+ {
+ if(origPtr != 0)
+ {
+ const auto it = m_Allocations.find(origPtr);
+ if(it != m_Allocations.end())
+ {
+ vmaResizeAllocation(m_Allocator, it->second.allocation, newSize);
+ UpdateMemStats();
+ }
+ else
+ {
+ if(IssueWarning())
+ {
+ printf("Line %zu: Allocation %llX not found.\n", lineNumber, origPtr);
+ }
+ }
+ }
+ }
+ else
+ {
+ if(IssueWarning())
+ {
+ printf("Line %zu: Invalid parameters for vmaResizeAllocation.\n", lineNumber);
+ }
+ }
+ }
+}
+
////////////////////////////////////////////////////////////////////////////////
// Main functions
diff --git a/src/VulkanSample.cpp b/src/VulkanSample.cpp
index 99727f3..8c7b2c6 100644
--- a/src/VulkanSample.cpp
+++ b/src/VulkanSample.cpp
@@ -1306,6 +1306,15 @@ static void InitializeApplication()
allocatorInfo.pAllocationCallbacks = &cpuAllocationCallbacks;
}
+ // Uncomment to enable recording to CSV file.
+ /*
+ {
+ VmaRecordSettings recordSettings = {};
+ recordSettings.pFilePath = "VulkanSample.csv";
+ allocatorInfo.pRecordSettings = &recordSettings;
+ }
+ */
+
ERR_GUARD_VULKAN( vmaCreateAllocator(&allocatorInfo, &g_hAllocator) );
// Retrieve queue (doesn't need to be destroyed)
diff --git a/src/vk_mem_alloc.h b/src/vk_mem_alloc.h
index 6fb57d0..6326324 100644
--- a/src/vk_mem_alloc.h
+++ b/src/vk_mem_alloc.h
@@ -2374,6 +2374,31 @@ void vmaFreeMemory(
VmaAllocator allocator,
VmaAllocation allocation);
+/** \brief Tries to resize an allocation in place, if there is enough free memory after it.
+
+Tries to change allocation's size without moving or reallocating it.
+You can both shrink and grow allocation size.
+When growing, it succeeds only when the allocation belongs to a memory block with enough
+free space after it.
+
+Returns `VK_SUCCESS` if allocation's size has been successfully changed.
+Returns `VK_ERROR_OUT_OF_POOL_MEMORY` if allocation's size could not be changed.
+
+After successful call to this function, VmaAllocationInfo::size of this allocation changes.
+All other parameters stay the same: memory pool and type, alignment, offset, mapped pointer.
+
+- Calling this function on allocation that is in lost state fails with result `VK_ERROR_VALIDATION_FAILED_EXT`.
+- Calling this function with `newSize` same as current allocation size does nothing and returns `VK_SUCCESS`.
+- Resizing dedicated allocations is not supported; the function returns
+  `VK_ERROR_FEATURE_NOT_PRESENT` in that case. Resizing allocations created in pools
+  that use linear or buddy algorithm is also unsupported and currently fails with
+  `VK_ERROR_OUT_OF_POOL_MEMORY`. Support may be added in the future.
+*/
+VkResult vmaResizeAllocation(
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VkDeviceSize newSize);
+
/** \brief Returns current information about specified allocation and atomically marks it as used in current frame.
Current paramters of given allocation are returned in `pAllocationInfo`.
@@ -4504,7 +4529,9 @@ public:
void ChangeBlockAllocation(
VmaAllocator hAllocator,
VmaDeviceMemoryBlock* block,
- VkDeviceSize offset);
+ VkDeviceSize offset);
+
+ void ChangeSize(VkDeviceSize newSize);
// pMappedData not null means allocation is created with MAPPED flag.
void InitDedicatedAllocation(
@@ -4766,6 +4793,9 @@ public:
virtual void Free(const VmaAllocation allocation) = 0;
virtual void FreeAtOffset(VkDeviceSize offset) = 0;
+ // Tries to resize (grow or shrink) space for given allocation, in place.
+ virtual bool ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize) { return false; }
+
protected:
const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }
@@ -4845,6 +4875,8 @@ public:
virtual void Free(const VmaAllocation allocation);
virtual void FreeAtOffset(VkDeviceSize offset);
+ virtual bool ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize);
+
private:
uint32_t m_FreeCount;
VkDeviceSize m_SumFreeSize;
@@ -5597,6 +5629,10 @@ public:
VmaAllocation allocation);
void RecordFreeMemory(uint32_t frameIndex,
VmaAllocation allocation);
+ void RecordResizeAllocation(
+ uint32_t frameIndex,
+ VmaAllocation allocation,
+ VkDeviceSize newSize);
void RecordSetAllocationUserData(uint32_t frameIndex,
VmaAllocation allocation,
const void* pUserData);
@@ -5763,6 +5799,10 @@ public:
// Main deallocation function.
void FreeMemory(const VmaAllocation allocation);
+ VkResult ResizeAllocation(
+ const VmaAllocation alloc,
+ VkDeviceSize newSize);
+
void CalculateStats(VmaStats* pStats);
#if VMA_STATS_STRING_ENABLED
@@ -6296,6 +6336,12 @@ void VmaAllocation_T::ChangeBlockAllocation(
m_BlockAllocation.m_Offset = offset;
}
+void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
+{
+ VMA_ASSERT(newSize > 0);
+ m_Size = newSize;
+}
+
VkDeviceSize VmaAllocation_T::GetOffset() const
{
switch(m_Type)
@@ -7222,6 +7268,133 @@ void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
VMA_ASSERT(0 && "Not found!");
}
+bool VmaBlockMetadata_Generic::ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize)
+{
+ typedef VmaSuballocationList::iterator iter_type;
+ for(iter_type suballocItem = m_Suballocations.begin();
+ suballocItem != m_Suballocations.end();
+ ++suballocItem)
+ {
+ VmaSuballocation& suballoc = *suballocItem;
+ if(suballoc.hAllocation == alloc)
+ {
+ iter_type nextItem = suballocItem;
+ ++nextItem;
+
+ // Should have been ensured on higher level.
+ VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
+
+ // Shrinking.
+ if(newSize < alloc->GetSize())
+ {
+ const VkDeviceSize sizeDiff = suballoc.size - newSize;
+
+ // There is next item.
+ if(nextItem != m_Suballocations.end())
+ {
+ // Next item is free.
+ if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ // Grow this next item backward.
+ UnregisterFreeSuballocation(nextItem);
+ nextItem->offset -= sizeDiff;
+ nextItem->size += sizeDiff;
+ RegisterFreeSuballocation(nextItem);
+ }
+ // Next item is not free.
+ else
+ {
+ // Create free item after current one.
+ VmaSuballocation newFreeSuballoc;
+ newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
+ newFreeSuballoc.offset = suballoc.offset + newSize;
+ newFreeSuballoc.size = sizeDiff;
+ newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
+ RegisterFreeSuballocation(newFreeSuballocIt);
+
+ ++m_FreeCount;
+ }
+ }
+ // This is the last item.
+ else
+ {
+ // Create free item at the end.
+ VmaSuballocation newFreeSuballoc;
+ newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
+ newFreeSuballoc.offset = suballoc.offset + newSize;
+ newFreeSuballoc.size = sizeDiff;
+ newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ m_Suballocations.push_back(newFreeSuballoc);
+
+ iter_type newFreeSuballocIt = m_Suballocations.end();
+ RegisterFreeSuballocation(--newFreeSuballocIt);
+
+ ++m_FreeCount;
+ }
+
+ suballoc.size = newSize;
+ m_SumFreeSize += sizeDiff;
+ }
+ // Growing.
+ else
+ {
+ const VkDeviceSize sizeDiff = newSize - suballoc.size;
+
+ // There is next item.
+ if(nextItem != m_Suballocations.end())
+ {
+ // Next item is free.
+ if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ // There is not enough free space, including margin.
+ if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
+ {
+ return false;
+ }
+
+ // There is more free space than required.
+ if(nextItem->size > sizeDiff)
+ {
+ // Move and shrink this next item.
+ UnregisterFreeSuballocation(nextItem);
+ nextItem->offset += sizeDiff;
+ nextItem->size -= sizeDiff;
+ RegisterFreeSuballocation(nextItem);
+ }
+ // There is exactly the amount of free space required.
+ else
+ {
+ // Remove this next free item.
+ UnregisterFreeSuballocation(nextItem);
+ m_Suballocations.erase(nextItem);
+ --m_FreeCount;
+ }
+ }
+ // Next item is not free - there is no space to grow.
+ else
+ {
+ return false;
+ }
+ }
+ // This is the last item - there is no space to grow.
+ else
+ {
+ return false;
+ }
+
+ suballoc.size = newSize;
+ m_SumFreeSize -= sizeDiff;
+ }
+
+ // We cannot call Validate() here because alloc object is updated to new size outside of this call.
+ return true;
+ }
+ }
+ VMA_ASSERT(0 && "Not found!");
+ return false;
+}
+
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
VkDeviceSize lastSize = 0;
@@ -11368,7 +11541,7 @@ VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
// Write header.
fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
- fprintf(m_File, "%s\n", "1,3");
+ fprintf(m_File, "%s\n", "1,4");
return VK_SUCCESS;
}
@@ -11524,6 +11697,20 @@ void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
Flush();
}
+void VmaRecorder::RecordResizeAllocation(
+ uint32_t frameIndex,
+ VmaAllocation allocation,
+ VkDeviceSize newSize)
+{
+ CallParams callParams;
+ GetBasicParams(callParams);
+
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
+ allocation, newSize);
+ Flush();
+}
+
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
VmaAllocation allocation,
const void* pUserData)
@@ -12487,6 +12674,40 @@ void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
vma_delete(this, allocation);
}
+VkResult VmaAllocator_T::ResizeAllocation(
+ const VmaAllocation alloc,
+ VkDeviceSize newSize)
+{
+ if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
+ {
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+ if(newSize == alloc->GetSize())
+ {
+ return VK_SUCCESS;
+ }
+
+ switch(alloc->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ return VK_ERROR_FEATURE_NOT_PRESENT;
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
+ {
+ alloc->ChangeSize(newSize);
+ VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
+ return VK_SUCCESS;
+ }
+ else
+ {
+ return VK_ERROR_OUT_OF_POOL_MEMORY;
+ }
+ default:
+ VMA_ASSERT(0);
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+}
+
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
// Initialize.
@@ -13889,6 +14110,30 @@ void vmaFreeMemory(
allocator->FreeMemory(allocation);
}
+VkResult vmaResizeAllocation(
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VkDeviceSize newSize)
+{
+ VMA_ASSERT(allocator && allocation);
+
+ VMA_DEBUG_LOG("vmaResizeAllocation");
+
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+#if VMA_RECORDING_ENABLED
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordResizeAllocation(
+ allocator->GetCurrentFrameIndex(),
+ allocation,
+ newSize);
+ }
+#endif
+
+ return allocator->ResizeAllocation(allocation, newSize);
+}
+
void vmaGetAllocationInfo(
VmaAllocator allocator,
VmaAllocation allocation,