#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifndef VMA_RECORDING_ENABLED
    #ifdef _WIN32
        #define VMA_RECORDING_ENABLED 1
    #else
        #define VMA_RECORDING_ENABLED 0
    #endif
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif

#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
#if VMA_DEDICATED_ALLOCATION
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
/// PhysicalDeviceProperties are fetched from physicalDevice by the allocator.
void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

/// PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator.
void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

/// Given a memory type index, returns the property flags of this memory type.
void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

/// Sets index of the current frame.
void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);

/// Define this macro to 0 to disable vmaBuildStatsString and vmaFreeStatsString.
#define VMA_STATS_STRING_ENABLED 1

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as a string in JSON format.
void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);
#endif // #if VMA_STATS_STRING_ENABLED

/// Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

/// Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
VkResult vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

/// Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
VkResult vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
/// Marks all allocations in given pool as lost if they are not used in the current frame or within frameInUseCount back from it.
void vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

/// General purpose memory allocation.
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Tries to resize an allocation in place, if there is enough free memory after it.
VkResult vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);

/// Compacts memory by moving allocations.
VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <algorithm>

#ifndef VMA_NULL
    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    #define VMA_NULL   nullptr
#endif

#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__)
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef _DEBUG
        #define VMA_ASSERT(expr)         assert(expr)
    #else
        #define VMA_ASSERT(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type)       (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (aligned_alloc((alignment), (size) ))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr)   _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr)   free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif

// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif

#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

// Must be at least Vulkan 1.0 atomic type of 32 bits.
#ifndef VMA_ATOMIC_UINT32
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define to 1 to fill new allocations and destroyed ones with a bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define to 1 together with non-zero VMA_DEBUG_MARGIN to write and validate magic values.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // For debugging only: single mutex protecting all entry calls to the library.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value of VkPhysicalDeviceLimits::bufferImageGranularity, for debugging only.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// Decimal 2139062143, float for this value is about 9.999999e27.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };

// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >>  2) & 0x33333333) + (c & 0x33333333);
    c = ((c >>  4) + c) & 0x0F0F0F0F;
    c = ((c >>  8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
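/*
Example (an illustration, not part of the library): VmaCountBitsSet() is a SWAR
population count. For a memory-type mask such as
VkMemoryRequirements::memoryTypeBits == 0x0000000Bu (binary 1011) it returns 3,
i.e. the number of memory types the resource may use:

    const uint32_t allowedTypeCount = VmaCountBitsSet(0x0000000Bu); // == 3
*/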
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}

// Returns true if given number is a power of two.
// T must be an unsigned integer (or a signed one that is always nonnegative).
// For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}

// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}

static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}
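/*
Usage sketch (an illustration, not part of the library): these helpers carry the
offset/size arithmetic used when placing suballocations on a required alignment:

    VkDeviceSize alignedUp   = VmaAlignUp<VkDeviceSize>(11, 8);   // == 16
    VkDeviceSize alignedDown = VmaAlignDown<VkDeviceSize>(11, 8); // == 8
    uint32_t     nextPow2    = VmaNextPow2(33u);                  // == 64
    bool         isPow2      = VmaIsPow2<uint32_t>(256u);         // == true
*/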
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}
#ifndef VMA_SORT
#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
#endif // #ifndef VMA_SORT

/*
Returns true if two memory blocks occupy very close in virtual memory addresses
(on the same page according to pageSize, which is typically bufferImageGranularity).
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
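/*
Example (an illustration, not part of the library): with pageSize == 0x10000
(a 64 KiB bufferImageGranularity), a resource ending at offset 0xFFFF and a
resource starting at offset 0x10000 land on different pages, so no granularity
conflict is possible between them:

    bool samePage = VmaBlocksOnSamePage(0, 0x10000, 0x10000, 0x10000); // == false
*/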
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and the other one is optimal image.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
    return true;
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    {
        if(m_pMutex)
        {
            m_pMutex->Lock();
        }
    }

    ~VmaMutexLock()
    {
        if(m_pMutex)
        {
            m_pMutex->Unlock();
        }
    }

private:
    VMA_MUTEX* m_pMutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the collection of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp). Cmp should return true if the
first argument is less than the second argument. Returned value is the found
element, if present in the collection, or the place where a new element with
value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
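/*
Usage sketch (an illustration, not part of the library): VmaBinaryFindFirstNotLess
behaves like std::lower_bound over a sorted, contiguous range. The comparator and
data below are hypothetical, introduced only for this example:

    struct ExampleLess { bool operator()(uint32_t a, uint32_t b) const { return a < b; } };

    const uint32_t sorted[] = { 1, 4, 4, 9 };
    const uint32_t* it = VmaBinaryFindFirstNotLess(sorted, sorted + 4, 4u, ExampleLess());
    // it points to the first 4 (index 1); for key 5 it would point to 9 (index 3).
*/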
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}
#endif // #if VMA_USE_STL_VECTOR

template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}

template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaVectorFindSorted(const IterT& beg, const IterT& end, const KeyT& value)
{
    CmpLess comparator;
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, comparator);
    if(it == end ||
        (!comparator(*it, value) && !comparator(value, *it)))
    {
        return it;
    }
    return end;
}
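/*
Usage sketch (an illustration, not part of the library; the element type,
comparator and "callbacks" pointer below are hypothetical): VmaVectorInsertSorted
keeps a VmaVector ordered under the given comparator, and VmaVectorRemoveSorted
removes an element found by binary search:

    typedef VmaVector< uint32_t, VmaStlAllocator<uint32_t> > ExampleU32Vector;
    struct ExampleU32Less { bool operator()(uint32_t a, uint32_t b) const { return a < b; } };

    // callbacks is a const VkAllocationCallbacks* (may be null).
    ExampleU32Vector vec((VmaStlAllocator<uint32_t>(callbacks)));
    VmaVectorInsertSorted<ExampleU32Less>(vec, 9u);
    VmaVectorInsertSorted<ExampleU32Less>(vec, 4u); // vec == { 4, 9 }
    VmaVectorRemoveSorted<ExampleU32Less>(vec, 9u); // vec == { 4 }
*/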
////////////////////////////////////////////////////////////////////////////////
// class VmaPoolAllocator

/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    void Clear();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}

template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}

template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
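/*
Usage sketch (an illustration, not part of the library; the element type and the
"callbacks" pointer are hypothetical): VmaPoolAllocator hands out objects from
fixed-size item blocks and threads free slots through the Item union, so Alloc()
reuses a free slot from an existing block when one is available and only
allocates a new block when all blocks are full; Free() returns the slot to its
owning block's free list.

    struct ExampleItem { uint32_t a; uint32_t b; };

    // callbacks is a const VkAllocationCallbacks* (may be null).
    VmaPoolAllocator<ExampleItem> pool(callbacks, 128); // 128 items per block
    ExampleItem* item = pool.Alloc();
    // ... use *item ...
    pool.Free(item);
*/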
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
3940 template<
typename T>
3941 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3942 m_pAllocationCallbacks(pAllocationCallbacks),
3943 m_ItemAllocator(pAllocationCallbacks, 128),
3950 template<
typename T>
3951 VmaRawList<T>::~VmaRawList()
3957 template<
typename T>
3958 void VmaRawList<T>::Clear()
3960 if(IsEmpty() ==
false)
3962 ItemType* pItem = m_pBack;
3963 while(pItem != VMA_NULL)
3965 ItemType*
const pPrevItem = pItem->pPrev;
3966 m_ItemAllocator.Free(pItem);
3969 m_pFront = VMA_NULL;
3975 template<
typename T>
3976 VmaListItem<T>* VmaRawList<T>::PushBack()
3978 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3979 pNewItem->pNext = VMA_NULL;
3982 pNewItem->pPrev = VMA_NULL;
3983 m_pFront = pNewItem;
3989 pNewItem->pPrev = m_pBack;
3990 m_pBack->pNext = pNewItem;
3997 template<
typename T>
3998 VmaListItem<T>* VmaRawList<T>::PushFront()
4000 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4001 pNewItem->pPrev = VMA_NULL;
4004 pNewItem->pNext = VMA_NULL;
4005 m_pFront = pNewItem;
4011 pNewItem->pNext = m_pFront;
4012 m_pFront->pPrev = pNewItem;
4013 m_pFront = pNewItem;
4019 template<
typename T>
4020 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4022 ItemType*
const pNewItem = PushBack();
4023 pNewItem->Value = value;
4027 template<
typename T>
4028 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4030 ItemType*
const pNewItem = PushFront();
4031 pNewItem->Value = value;
4035 template<
typename T>
4036 void VmaRawList<T>::PopBack()
4038 VMA_HEAVY_ASSERT(m_Count > 0);
4039 ItemType*
const pBackItem = m_pBack;
4040 ItemType*
const pPrevItem = pBackItem->pPrev;
4041 if(pPrevItem != VMA_NULL)
4043 pPrevItem->pNext = VMA_NULL;
4045 m_pBack = pPrevItem;
4046 m_ItemAllocator.Free(pBackItem);
4050 template<
typename T>
4051 void VmaRawList<T>::PopFront()
4053 VMA_HEAVY_ASSERT(m_Count > 0);
4054 ItemType*
const pFrontItem = m_pFront;
4055 ItemType*
const pNextItem = pFrontItem->pNext;
4056 if(pNextItem != VMA_NULL)
4058 pNextItem->pPrev = VMA_NULL;
4060 m_pFront = pNextItem;
4061 m_ItemAllocator.Free(pFrontItem);
4065 template<
typename T>
4066 void VmaRawList<T>::Remove(ItemType* pItem)
4068 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4069 VMA_HEAVY_ASSERT(m_Count > 0);
4071 if(pItem->pPrev != VMA_NULL)
4073 pItem->pPrev->pNext = pItem->pNext;
4077 VMA_HEAVY_ASSERT(m_pFront == pItem);
4078 m_pFront = pItem->pNext;
4081 if(pItem->pNext != VMA_NULL)
4083 pItem->pNext->pPrev = pItem->pPrev;
4087 VMA_HEAVY_ASSERT(m_pBack == pItem);
4088 m_pBack = pItem->pPrev;
4091 m_ItemAllocator.Free(pItem);
4095 template<
typename T>
4096 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4098 if(pItem != VMA_NULL)
4100 ItemType*
const prevItem = pItem->pPrev;
4101 ItemType*
const newItem = m_ItemAllocator.Alloc();
4102 newItem->pPrev = prevItem;
4103 newItem->pNext = pItem;
4104 pItem->pPrev = newItem;
4105 if(prevItem != VMA_NULL)
4107 prevItem->pNext = newItem;
4111 VMA_HEAVY_ASSERT(m_pFront == pItem);
4121 template<
typename T>
4122 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4124 if(pItem != VMA_NULL)
4126 ItemType*
const nextItem = pItem->pNext;
4127 ItemType*
const newItem = m_ItemAllocator.Alloc();
4128 newItem->pNext = nextItem;
4129 newItem->pPrev = pItem;
4130 pItem->pNext = newItem;
4131 if(nextItem != VMA_NULL)
4133 nextItem->pPrev = newItem;
4137 VMA_HEAVY_ASSERT(m_pBack == pItem);
4147 template<
typename T>
4148 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4150 ItemType*
const newItem = InsertBefore(pItem);
4151 newItem->Value = value;
4155 template<
typename T>
4156 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4158 ItemType*
const newItem = InsertAfter(pItem);
4159 newItem->Value = value;
4163 template<
typename T,
typename AllocatorT>
4166 VMA_CLASS_NO_COPY(VmaList)
4177 T& operator*()
const 4179 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4180 return m_pItem->Value;
4182 T* operator->()
const 4184 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4185 return &m_pItem->Value;
4188 iterator& operator++()
4190 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4191 m_pItem = m_pItem->pNext;
4194 iterator& operator--()
4196 if(m_pItem != VMA_NULL)
4198 m_pItem = m_pItem->pPrev;
4202 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4203 m_pItem = m_pList->Back();
4208 iterator operator++(
int)
4210 iterator result = *
this;
4214 iterator operator--(
int)
4216 iterator result = *
this;
4221 bool operator==(
const iterator& rhs)
const 4223 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4224 return m_pItem == rhs.m_pItem;
4226 bool operator!=(
const iterator& rhs)
const 4228 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4229 return m_pItem != rhs.m_pItem;
4233 VmaRawList<T>* m_pList;
4234 VmaListItem<T>* m_pItem;
4236 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4242 friend class VmaList<T, AllocatorT>;
4245 class const_iterator
4254 const_iterator(
const iterator& src) :
4255 m_pList(src.m_pList),
4256 m_pItem(src.m_pItem)
4260 const T& operator*()
const 4262 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4263 return m_pItem->Value;
4265 const T* operator->()
const 4267 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4268 return &m_pItem->Value;
4271 const_iterator& operator++()
4273 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4274 m_pItem = m_pItem->pNext;
4277 const_iterator& operator--()
4279 if(m_pItem != VMA_NULL)
4281 m_pItem = m_pItem->pPrev;
4285 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4286 m_pItem = m_pList->Back();
4291 const_iterator operator++(
int)
4293 const_iterator result = *
this;
4297 const_iterator operator--(
int)
4299 const_iterator result = *
this;
4304 bool operator==(
const const_iterator& rhs)
const 4306 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4307 return m_pItem == rhs.m_pItem;
4309 bool operator!=(
const const_iterator& rhs)
const 4311 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4312 return m_pItem != rhs.m_pItem;
4316 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4322 const VmaRawList<T>* m_pList;
4323 const VmaListItem<T>* m_pItem;
4325 friend class VmaList<T, AllocatorT>;
4328 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4330 bool empty()
const {
return m_RawList.IsEmpty(); }
4331 size_t size()
const {
return m_RawList.GetCount(); }
4333 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4334 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4336 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4337 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4339 void clear() { m_RawList.Clear(); }
4340 void push_back(
const T& value) { m_RawList.PushBack(value); }
4341 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4342 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4345 VmaRawList<T> m_RawList;
4348 #endif // #if VMA_USE_STL_LIST 4356 #if VMA_USE_STL_UNORDERED_MAP 4358 #define VmaPair std::pair 4360 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4361 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4363 #else // #if VMA_USE_STL_UNORDERED_MAP 4365 template<
typename T1,
typename T2>
4371 VmaPair() : first(), second() { }
4372 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4378 template<
typename KeyT,
typename ValueT>
4382 typedef VmaPair<KeyT, ValueT> PairType;
4383 typedef PairType* iterator;
4385 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4387 iterator begin() {
return m_Vector.begin(); }
4388 iterator end() {
return m_Vector.end(); }
4390 void insert(
const PairType& pair);
4391 iterator find(
const KeyT& key);
4392 void erase(iterator it);
4395 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4398 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4400 template<
typename FirstT,
typename SecondT>
4401 struct VmaPairFirstLess
4403 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4405 return lhs.first < rhs.first;
4407 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4409 return lhs.first < rhsFirst;
4413 template<
typename KeyT,
typename ValueT>
4414 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4416 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4418 m_Vector.data() + m_Vector.size(),
4420 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4421 VmaVectorInsert(m_Vector, indexToInsert, pair);
4424 template<
typename KeyT,
typename ValueT>
4425 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4427 PairType* it = VmaBinaryFindFirstNotLess(
4429 m_Vector.data() + m_Vector.size(),
4431 VmaPairFirstLess<KeyT, ValueT>());
4432 if((it != m_Vector.end()) && (it->first == key))
4438 return m_Vector.end();
4442 template<
typename KeyT,
typename ValueT>
4443 void VmaMap<KeyT, ValueT>::erase(iterator it)
4445 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4448 #endif // #if VMA_USE_STL_UNORDERED_MAP 4454 class VmaDeviceMemoryBlock;
4456 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4458 struct VmaAllocation_T
4460 VMA_CLASS_NO_COPY(VmaAllocation_T)
4462 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4466 FLAG_USER_DATA_STRING = 0x01,
4470 enum ALLOCATION_TYPE
4472 ALLOCATION_TYPE_NONE,
4473 ALLOCATION_TYPE_BLOCK,
4474 ALLOCATION_TYPE_DEDICATED,
4477 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4480 m_pUserData(VMA_NULL),
4481 m_LastUseFrameIndex(currentFrameIndex),
4482 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4483 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4485 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4487 #if VMA_STATS_STRING_ENABLED 4488 m_CreationFrameIndex = currentFrameIndex;
4489 m_BufferImageUsage = 0;
4495 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4498 VMA_ASSERT(m_pUserData == VMA_NULL);
4501 void InitBlockAllocation(
4503 VmaDeviceMemoryBlock* block,
4504 VkDeviceSize offset,
4505 VkDeviceSize alignment,
4507 VmaSuballocationType suballocationType,
4511 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4512 VMA_ASSERT(block != VMA_NULL);
4513 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4514 m_Alignment = alignment;
4516 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4517 m_SuballocationType = (uint8_t)suballocationType;
4518 m_BlockAllocation.m_hPool = hPool;
4519 m_BlockAllocation.m_Block = block;
4520 m_BlockAllocation.m_Offset = offset;
4521 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4526 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4527 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4528 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4529 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4530 m_BlockAllocation.m_Block = VMA_NULL;
4531 m_BlockAllocation.m_Offset = 0;
4532 m_BlockAllocation.m_CanBecomeLost =
true;
4535 void ChangeBlockAllocation(
4537 VmaDeviceMemoryBlock* block,
4538 VkDeviceSize offset);
4540 void ChangeSize(VkDeviceSize newSize);
4543 void InitDedicatedAllocation(
4544 uint32_t memoryTypeIndex,
4545 VkDeviceMemory hMemory,
4546 VmaSuballocationType suballocationType,
4550 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4551 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4552 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4555 m_SuballocationType = (uint8_t)suballocationType;
4556 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4557 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4558 m_DedicatedAllocation.m_hMemory = hMemory;
4559 m_DedicatedAllocation.m_pMappedData = pMappedData;
4562 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4563 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4564 VkDeviceSize GetSize()
const {
return m_Size; }
4565 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4566 void* GetUserData()
const {
return m_pUserData; }
4567 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4568 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4570 VmaDeviceMemoryBlock* GetBlock()
const 4572 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4573 return m_BlockAllocation.m_Block;
4575 VkDeviceSize GetOffset()
const;
4576 VkDeviceMemory GetMemory()
const;
4577 uint32_t GetMemoryTypeIndex()
const;
4578 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4579 void* GetMappedData()
const;
4580 bool CanBecomeLost()
const;
4583 uint32_t GetLastUseFrameIndex()
const 4585 return m_LastUseFrameIndex.load();
4587 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4589 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4599 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4601 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4603 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4614 void BlockAllocMap();
4615 void BlockAllocUnmap();
4616 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4619 #if VMA_STATS_STRING_ENABLED 4620 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4621 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4623 void InitBufferImageUsage(uint32_t bufferImageUsage)
4625 VMA_ASSERT(m_BufferImageUsage == 0);
4626 m_BufferImageUsage = bufferImageUsage;
4629 void PrintParameters(
class VmaJsonWriter& json)
const;
4633 VkDeviceSize m_Alignment;
4634 VkDeviceSize m_Size;
4636 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4638 uint8_t m_SuballocationType;
4645 struct BlockAllocation
4648 VmaDeviceMemoryBlock* m_Block;
4649 VkDeviceSize m_Offset;
4650 bool m_CanBecomeLost;
4654 struct DedicatedAllocation
4656 uint32_t m_MemoryTypeIndex;
4657 VkDeviceMemory m_hMemory;
4658 void* m_pMappedData;
4664 BlockAllocation m_BlockAllocation;
4666 DedicatedAllocation m_DedicatedAllocation;
4669 #if VMA_STATS_STRING_ENABLED 4670 uint32_t m_CreationFrameIndex;
4671 uint32_t m_BufferImageUsage;
4681 struct VmaSuballocation
4683 VkDeviceSize offset;
4686 VmaSuballocationType type;
4690 struct VmaSuballocationOffsetLess
4692 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4694 return lhs.offset < rhs.offset;
4697 struct VmaSuballocationOffsetGreater
4699 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4701 return lhs.offset > rhs.offset;
4705 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4708 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4723 struct VmaAllocationRequest
4725 VkDeviceSize offset;
4726 VkDeviceSize sumFreeSize;
4727 VkDeviceSize sumItemSize;
4728 VmaSuballocationList::iterator item;
4729 size_t itemsToMakeLostCount;
4732 VkDeviceSize CalcCost()
const 4734 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4742 class VmaBlockMetadata
4746 virtual ~VmaBlockMetadata() { }
4747 virtual void Init(VkDeviceSize size) { m_Size = size; }
4750 virtual bool Validate()
const = 0;
4751 VkDeviceSize GetSize()
const {
return m_Size; }
4752 virtual size_t GetAllocationCount()
const = 0;
4753 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4754 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4756 virtual bool IsEmpty()
const = 0;
4758 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4760 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4762 #if VMA_STATS_STRING_ENABLED 4763 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
4769 virtual bool CreateAllocationRequest(
4770 uint32_t currentFrameIndex,
4771 uint32_t frameInUseCount,
4772 VkDeviceSize bufferImageGranularity,
4773 VkDeviceSize allocSize,
4774 VkDeviceSize allocAlignment,
4776 VmaSuballocationType allocType,
4777 bool canMakeOtherLost,
4779 VmaAllocationRequest* pAllocationRequest) = 0;
4781 virtual bool MakeRequestedAllocationsLost(
4782 uint32_t currentFrameIndex,
4783 uint32_t frameInUseCount,
4784 VmaAllocationRequest* pAllocationRequest) = 0;
4786 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4788 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
4792 const VmaAllocationRequest& request,
4793 VmaSuballocationType type,
4794 VkDeviceSize allocSize,
4800 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
4803 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
4806 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
4808 #if VMA_STATS_STRING_ENABLED 4809 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4810 VkDeviceSize unusedBytes,
4811 size_t allocationCount,
4812 size_t unusedRangeCount)
const;
4813 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4814 VkDeviceSize offset,
4816 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4817 VkDeviceSize offset,
4818 VkDeviceSize size)
const;
4819 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4823 VkDeviceSize m_Size;
4824 const VkAllocationCallbacks* m_pAllocationCallbacks;
4827 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 4828 VMA_ASSERT(0 && "Validation failed: " #cond); \ 4832 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4834 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4837 virtual ~VmaBlockMetadata_Generic();
4838 virtual void Init(VkDeviceSize size);
4840 virtual bool Validate()
const;
4841 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4842 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4843 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4844 virtual bool IsEmpty()
const;
4846 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4847 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4849 #if VMA_STATS_STRING_ENABLED 4850 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4853 virtual bool CreateAllocationRequest(
4854 uint32_t currentFrameIndex,
4855 uint32_t frameInUseCount,
4856 VkDeviceSize bufferImageGranularity,
4857 VkDeviceSize allocSize,
4858 VkDeviceSize allocAlignment,
4860 VmaSuballocationType allocType,
4861 bool canMakeOtherLost,
4863 VmaAllocationRequest* pAllocationRequest);
4865 virtual bool MakeRequestedAllocationsLost(
4866 uint32_t currentFrameIndex,
4867 uint32_t frameInUseCount,
4868 VmaAllocationRequest* pAllocationRequest);
4870 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4872 virtual VkResult CheckCorruption(
const void* pBlockData);
4875 const VmaAllocationRequest& request,
4876 VmaSuballocationType type,
4877 VkDeviceSize allocSize,
4882 virtual void FreeAtOffset(VkDeviceSize offset);
4884 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
4887 uint32_t m_FreeCount;
4888 VkDeviceSize m_SumFreeSize;
4889 VmaSuballocationList m_Suballocations;
4892 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4894 bool ValidateFreeSuballocationList()
const;
4898 bool CheckAllocation(
4899 uint32_t currentFrameIndex,
4900 uint32_t frameInUseCount,
4901 VkDeviceSize bufferImageGranularity,
4902 VkDeviceSize allocSize,
4903 VkDeviceSize allocAlignment,
4904 VmaSuballocationType allocType,
4905 VmaSuballocationList::const_iterator suballocItem,
4906 bool canMakeOtherLost,
4907 VkDeviceSize* pOffset,
4908 size_t* itemsToMakeLostCount,
4909 VkDeviceSize* pSumFreeSize,
4910 VkDeviceSize* pSumItemSize)
const;
4912 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4916 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4919 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4922 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5003 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5005 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5008 virtual ~VmaBlockMetadata_Linear();
5009 virtual void Init(VkDeviceSize size);
5011 virtual bool Validate()
const;
5012 virtual size_t GetAllocationCount()
const;
5013 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5014 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5015 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5017 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5018 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5020 #if VMA_STATS_STRING_ENABLED 5021 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5024 virtual bool CreateAllocationRequest(
5025 uint32_t currentFrameIndex,
5026 uint32_t frameInUseCount,
5027 VkDeviceSize bufferImageGranularity,
5028 VkDeviceSize allocSize,
5029 VkDeviceSize allocAlignment,
5031 VmaSuballocationType allocType,
5032 bool canMakeOtherLost,
5034 VmaAllocationRequest* pAllocationRequest);
5036 virtual bool MakeRequestedAllocationsLost(
5037 uint32_t currentFrameIndex,
5038 uint32_t frameInUseCount,
5039 VmaAllocationRequest* pAllocationRequest);
5041 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5043 virtual VkResult CheckCorruption(
const void* pBlockData);
5046 const VmaAllocationRequest& request,
5047 VmaSuballocationType type,
5048 VkDeviceSize allocSize,
5053 virtual void FreeAtOffset(VkDeviceSize offset);
5063 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5065 enum SECOND_VECTOR_MODE
5067 SECOND_VECTOR_EMPTY,
5072 SECOND_VECTOR_RING_BUFFER,
5078 SECOND_VECTOR_DOUBLE_STACK,
5081 VkDeviceSize m_SumFreeSize;
5082 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5083 uint32_t m_1stVectorIndex;
5084 SECOND_VECTOR_MODE m_2ndVectorMode;
5086 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5087 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5088 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5089 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5092 size_t m_1stNullItemsBeginCount;
5094 size_t m_1stNullItemsMiddleCount;
5096 size_t m_2ndNullItemsCount;
5098 bool ShouldCompact1st()
const;
5099 void CleanupAfterFree();
5113 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5115 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5118 virtual ~VmaBlockMetadata_Buddy();
5119 virtual void Init(VkDeviceSize size);
5121 virtual bool Validate()
const;
5122 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5123 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5124 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5125 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5127 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5128 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5130 #if VMA_STATS_STRING_ENABLED 5131 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5134 virtual bool CreateAllocationRequest(
5135 uint32_t currentFrameIndex,
5136 uint32_t frameInUseCount,
5137 VkDeviceSize bufferImageGranularity,
5138 VkDeviceSize allocSize,
5139 VkDeviceSize allocAlignment,
5141 VmaSuballocationType allocType,
5142 bool canMakeOtherLost,
5144 VmaAllocationRequest* pAllocationRequest);
5146 virtual bool MakeRequestedAllocationsLost(
5147 uint32_t currentFrameIndex,
5148 uint32_t frameInUseCount,
5149 VmaAllocationRequest* pAllocationRequest);
5151 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5153 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5156 const VmaAllocationRequest& request,
5157 VmaSuballocationType type,
5158 VkDeviceSize allocSize,
5162 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5163 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5166 static const VkDeviceSize MIN_NODE_SIZE = 32;
5167 static const size_t MAX_LEVELS = 30;
5169 struct ValidationContext
5171 size_t calculatedAllocationCount;
5172 size_t calculatedFreeCount;
5173 VkDeviceSize calculatedSumFreeSize;
5175 ValidationContext() :
5176 calculatedAllocationCount(0),
5177 calculatedFreeCount(0),
5178 calculatedSumFreeSize(0) { }
5183 VkDeviceSize offset;
5213 VkDeviceSize m_UsableSize;
5214 uint32_t m_LevelCount;
5220 } m_FreeList[MAX_LEVELS];
5222 size_t m_AllocationCount;
5226 VkDeviceSize m_SumFreeSize;
5228 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5229 void DeleteNode(Node* node);
5230 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5231 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5232 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5234 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5235 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5239 void AddToFreeListFront(uint32_t level, Node* node);
5243 void RemoveFromFreeList(uint32_t level, Node* node);
5245 #if VMA_STATS_STRING_ENABLED 5246 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5256 class VmaDeviceMemoryBlock
5258 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5260 VmaBlockMetadata* m_pMetadata;
5264 ~VmaDeviceMemoryBlock()
5266 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5267 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5273 uint32_t newMemoryTypeIndex,
5274 VkDeviceMemory newMemory,
5275 VkDeviceSize newSize,
5277 uint32_t algorithm);
5281 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5282 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5283 uint32_t GetId()
const {
return m_Id; }
5284 void* GetMappedData()
const {
return m_pMappedData; }
5287 bool Validate()
const;
5292 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5295 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5296 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5298 VkResult BindBufferMemory(
5302 VkResult BindImageMemory(
5308 uint32_t m_MemoryTypeIndex;
5310 VkDeviceMemory m_hMemory;
5315 uint32_t m_MapCount;
5316 void* m_pMappedData;
5319 struct VmaPointerLess
5321 bool operator()(
const void* lhs,
const void* rhs)
const 5327 class VmaDefragmentator;
5335 struct VmaBlockVector
5337 VMA_CLASS_NO_COPY(VmaBlockVector)
5341 uint32_t memoryTypeIndex,
5342 VkDeviceSize preferredBlockSize,
5343 size_t minBlockCount,
5344 size_t maxBlockCount,
5345 VkDeviceSize bufferImageGranularity,
5346 uint32_t frameInUseCount,
5348 bool explicitBlockSize,
5349 uint32_t algorithm);
5352 VkResult CreateMinBlocks();
5354 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5355 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5356 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5357 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5358 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5362 bool IsEmpty()
const {
return m_Blocks.empty(); }
5363 bool IsCorruptionDetectionEnabled()
const;
5367 uint32_t currentFrameIndex,
5369 VkDeviceSize alignment,
5371 VmaSuballocationType suballocType,
5380 #if VMA_STATS_STRING_ENABLED 5381 void PrintDetailedMap(
class VmaJsonWriter& json);
5384 void MakePoolAllocationsLost(
5385 uint32_t currentFrameIndex,
5386 size_t* pLostAllocationCount);
5387 VkResult CheckCorruption();
5389 VmaDefragmentator* EnsureDefragmentator(
5391 uint32_t currentFrameIndex);
5393 VkResult Defragment(
5395 VkDeviceSize& maxBytesToMove,
5396 uint32_t& maxAllocationsToMove);
5398 void DestroyDefragmentator();
5401 friend class VmaDefragmentator;
5404 const uint32_t m_MemoryTypeIndex;
5405 const VkDeviceSize m_PreferredBlockSize;
5406 const size_t m_MinBlockCount;
5407 const size_t m_MaxBlockCount;
5408 const VkDeviceSize m_BufferImageGranularity;
5409 const uint32_t m_FrameInUseCount;
5410 const bool m_IsCustomPool;
5411 const bool m_ExplicitBlockSize;
5412 const uint32_t m_Algorithm;
5413 bool m_HasEmptyBlock;
5416 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5420 VmaDefragmentator* m_pDefragmentator;
5421 uint32_t m_NextBlockId;
5423 VkDeviceSize CalcMaxBlockSize()
const;
5426 void Remove(VmaDeviceMemoryBlock* pBlock);
5430 void IncrementallySortBlocks();
5433 VkResult AllocateFromBlock(
5434 VmaDeviceMemoryBlock* pBlock,
5436 uint32_t currentFrameIndex,
5438 VkDeviceSize alignment,
5441 VmaSuballocationType suballocType,
5445 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5450 VMA_CLASS_NO_COPY(VmaPool_T)
5452 VmaBlockVector m_BlockVector;
5457 VkDeviceSize preferredBlockSize);
5460 uint32_t GetId()
const {
return m_Id; }
5461 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5463 #if VMA_STATS_STRING_ENABLED 5471 class VmaDefragmentator
5473 VMA_CLASS_NO_COPY(VmaDefragmentator)
5476 VmaBlockVector*
const m_pBlockVector;
5477 uint32_t m_CurrentFrameIndex;
5478 VkDeviceSize m_BytesMoved;
5479 uint32_t m_AllocationsMoved;
5481 struct AllocationInfo
5484 VkBool32* m_pChanged;
5487 m_hAllocation(VK_NULL_HANDLE),
5488 m_pChanged(VMA_NULL)
5493 struct AllocationInfoSizeGreater
5495 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5497 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5502 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5506 VmaDeviceMemoryBlock* m_pBlock;
5507 bool m_HasNonMovableAllocations;
5508 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5510 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5512 m_HasNonMovableAllocations(true),
5513 m_Allocations(pAllocationCallbacks),
5514 m_pMappedDataForDefragmentation(VMA_NULL)
5518 void CalcHasNonMovableAllocations()
5520 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5521 const size_t defragmentAllocCount = m_Allocations.size();
5522 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5525 void SortAllocationsBySizeDescecnding()
5527 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5530 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5535 void* m_pMappedDataForDefragmentation;
5538 struct BlockPointerLess
5540 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5542 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5544 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5546 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
5552 struct BlockInfoCompareMoveDestination
5554 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5556 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5560 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5564 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5572 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5573 BlockInfoVector m_Blocks;
5575 VkResult DefragmentRound(
5576 VkDeviceSize maxBytesToMove,
5577 uint32_t maxAllocationsToMove);
5579 static bool MoveMakesSense(
5580 size_t dstBlockIndex, VkDeviceSize dstOffset,
5581 size_t srcBlockIndex, VkDeviceSize srcOffset);
5586 VmaBlockVector* pBlockVector,
5587 uint32_t currentFrameIndex);
5589 ~VmaDefragmentator();
5591 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5592 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5594 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5596 VkResult Defragment(
5597 VkDeviceSize maxBytesToMove,
5598 uint32_t maxAllocationsToMove);
#if VMA_RECORDING_ENABLED

// Records VMA calls to a file when recording is enabled at allocator creation.
class VmaRecorder
{
public:
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        bool dedicatedAllocationExtensionEnabled);

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex,
        VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordResizeAllocation(
        uint32_t frameIndex,
        VmaAllocation allocation,
        VkDeviceSize newSize);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);

private:
    // Formats pUserData as a string for the recording, handling both the
    // copied-string and raw-pointer modes.
    class UserDataString
    {
    public:
        const char* GetString() const { return m_Str; }
    private:
        const char* m_Str;
    };

    VMA_MUTEX m_FileMutex;
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);
};

#endif // #if VMA_RECORDING_ENABLED

// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    bool m_UseKhrDedicatedAllocation;
    VkDevice m_hDevice;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaVulkanFunctions m_VulkanFunctions;

    // Number of bytes free out of the heap size limit, or VK_WHOLE_SIZE if no limit.
    VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
    VMA_MUTEX m_HeapSizeLimitMutex;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools: one block vector per memory type.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True if the memory type is HOST_VISIBLE but not HOST_COHERENT, so mapped
    // access requires explicit flush/invalidate.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in the given memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult Defragment(
        VmaAllocation* pAllocations,
        size_t allocationCount,
        VkBool32* pAllocationsChanged,
        const VmaDefragmentationInfo* pDefragmentationInfo,
        VmaDefragmentationStats* pDefragmentationStats);

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // Call to the Vulkan function vkAllocateMemory, with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to the Vulkan function vkFreeMemory, with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult BindBufferMemory(VmaAllocation hAllocation, VkBuffer hBuffer);
    VkResult BindImageMemory(VmaAllocation hAllocation, VkImage hImage);

    void FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;

    VMA_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif
    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for a single allocation.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        VmaAllocation* pAllocation);
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
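/*
Illustrative sketch only, not part of the library: how the helpers above are meant
to be paired through the vma_new_array / vma_delete_array convenience macros used
elsewhere in this file (see e.g. VmaAllocation_T::SetUserData below). strLen and
src are hypothetical.

    char* const str = vma_new_array(hAllocator, char, strLen + 1); // allocate + construct
    memcpy(str, src, strLen + 1);
    // ...
    vma_delete_array(hAllocator, str, strLen + 1);                 // destroy + free
*/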
#if VMA_STATS_STRING_ENABLED

// Helper that accumulates the statistics/JSON string in memory.
class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    VmaUint32ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    VmaUint64ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}
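/*
Illustrative sketch only, not part of the library: typical use of VmaStringBuilder,
in the spirit of how vmaBuildStatsString() composes its output. totalBytes is a
hypothetical value.

    VmaStringBuilder sb(hAllocator);
    sb.Add("Total bytes: ");
    sb.AddNumber((uint64_t)totalBytes);
    sb.AddNewLine();
    // GetData() is not null-terminated; GetLength() gives the number of characters.
    printf("%.*s", (int)sb.GetLength(), sb.GetData());
*/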
#endif // #if VMA_STATS_STRING_ENABLED

#if VMA_STATS_STRING_ENABLED

// Writes correctly formed JSON into a VmaStringBuilder.
class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";
VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}
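/*
Illustrative sketch only, not part of the library: how the writer above emits a
small JSON object into a VmaStringBuilder. Output shown approximately.

    VmaStringBuilder sb(hAllocator);
    {
        VmaJsonWriter json(hAllocator->GetAllocationCallbacks(), sb);
        json.BeginObject();
        json.WriteString("TotalBytes");     // key (must be a string)
        json.WriteNumber((uint64_t)1024);   // value
        json.WriteString("Empty");
        json.WriteBool(false);
        json.EndObject();
    }
    // sb now holds roughly: { "TotalBytes": 1024, "Empty": false }
*/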
#endif // #if VMA_STATS_STRING_ENABLED

void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            const char* const newStrSrc = (char*)pUserData;
            const size_t newStrLen = strlen(newStrSrc);
            char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
            memcpy(newStrDst, newStrSrc, newStrLen + 1);
            m_pUserData = newStrDst;
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}
void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from the old block to the new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        {
            ++mapRefCount;
        }
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
{
    VMA_ASSERT(newSize > 0);
    m_Size = newSize;
}
VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_MemoryTypeIndex;
    default:
        VMA_ASSERT(0);
        return UINT32_MAX;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

VmaPool VmaAllocation_T::GetPool() const
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    return m_BlockAllocation.m_hPool;
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting the atomic to VMA_FRAME_INDEX_LOST is enough to mark the allocation as lost.
                return true;
            }
        }
    }
}

#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};
void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}
void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    if(m_pUserData != VMA_NULL)
    {
        char* const oldStr = (char*)m_pUserData;
        const size_t oldStrLen = strlen(oldStr);
        vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
        m_pUserData = VMA_NULL;
    }
}
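/*
Illustrative sketch only, not part of the library: the two pUserData modes handled
by SetUserData() / FreeUserDataString() above. hAllocator, hAllocation and
myStruct are hypothetical.

    // Default mode: the pointer is stored as-is; nothing is copied or freed.
    hAllocation->SetUserData(hAllocator, &myStruct);

    // With VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT (IsUserDataString()),
    // the string is copied with vma_new_array and later released by
    // FreeUserDataString().
    hAllocation->SetUserData(hAllocator, (void*)"Texture name");
*/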
void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}
VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}
#endif // #if VMA_STATS_STRING_ENABLED

struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED

VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}
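/*
Illustrative sketch only, not part of the library: the state of m_Suballocations
after Init() and after one allocation/free cycle, assuming a 64 MiB block and a
1 MiB allocation at offset 0 (sizes are hypothetical).

    Init():           [ FREE offset=0 size=64Mi ]
    Alloc(1Mi):       [ USED offset=0 size=1Mi ][ FREE offset=1Mi size=63Mi ]
    Free(that alloc): [ FREE offset=0 size=64Mi ]   // merged with the next free range

m_FreeSuballocationsBySize holds iterators to the FREE items sorted by size, so
CreateAllocationRequest() can binary-search for the smallest sufficient range.
*/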
6797 bool VmaBlockMetadata_Generic::Validate()
const 6799 VMA_VALIDATE(!m_Suballocations.empty());
6802 VkDeviceSize calculatedOffset = 0;
6804 uint32_t calculatedFreeCount = 0;
6806 VkDeviceSize calculatedSumFreeSize = 0;
6809 size_t freeSuballocationsToRegister = 0;
6811 bool prevFree =
false;
6813 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6814 suballocItem != m_Suballocations.cend();
6817 const VmaSuballocation& subAlloc = *suballocItem;
6820 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
6822 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
6824 VMA_VALIDATE(!prevFree || !currFree);
6826 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
6830 calculatedSumFreeSize += subAlloc.size;
6831 ++calculatedFreeCount;
6832 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6834 ++freeSuballocationsToRegister;
6838 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
6842 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
6843 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
6846 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
6849 calculatedOffset += subAlloc.size;
6850 prevFree = currFree;
6855 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
6857 VkDeviceSize lastSize = 0;
6858 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6860 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6863 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6865 VMA_VALIDATE(suballocItem->size >= lastSize);
6867 lastSize = suballocItem->size;
6871 VMA_VALIDATE(ValidateFreeSuballocationList());
6872 VMA_VALIDATE(calculatedOffset == GetSize());
6873 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
6874 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
6879 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6881 if(!m_FreeSuballocationsBySize.empty())
6883 return m_FreeSuballocationsBySize.back()->size;
6891 bool VmaBlockMetadata_Generic::IsEmpty()
const 6893 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
6896 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6900 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6912 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6913 suballocItem != m_Suballocations.cend();
6916 const VmaSuballocation& suballoc = *suballocItem;
6917 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
6930 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6932 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6934 inoutStats.
size += GetSize();
6941 #if VMA_STATS_STRING_ENABLED 6943 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6945 PrintDetailedMap_Begin(json,
6947 m_Suballocations.size() - (size_t)m_FreeCount,
6951 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6952 suballocItem != m_Suballocations.cend();
6953 ++suballocItem, ++i)
6955 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6957 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6961 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6965 PrintDetailedMap_End(json);
6968 #endif // #if VMA_STATS_STRING_ENABLED 6970 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6971 uint32_t currentFrameIndex,
6972 uint32_t frameInUseCount,
6973 VkDeviceSize bufferImageGranularity,
6974 VkDeviceSize allocSize,
6975 VkDeviceSize allocAlignment,
6977 VmaSuballocationType allocType,
6978 bool canMakeOtherLost,
6980 VmaAllocationRequest* pAllocationRequest)
6982 VMA_ASSERT(allocSize > 0);
6983 VMA_ASSERT(!upperAddress);
6984 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6985 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6986 VMA_HEAVY_ASSERT(Validate());
6989 if(canMakeOtherLost ==
false &&
6990 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6996 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6997 if(freeSuballocCount > 0)
7002 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7003 m_FreeSuballocationsBySize.data(),
7004 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7005 allocSize + 2 * VMA_DEBUG_MARGIN,
7006 VmaSuballocationItemSizeLess());
7007 size_t index = it - m_FreeSuballocationsBySize.data();
7008 for(; index < freeSuballocCount; ++index)
7013 bufferImageGranularity,
7017 m_FreeSuballocationsBySize[index],
7019 &pAllocationRequest->offset,
7020 &pAllocationRequest->itemsToMakeLostCount,
7021 &pAllocationRequest->sumFreeSize,
7022 &pAllocationRequest->sumItemSize))
7024 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7032 for(
size_t index = freeSuballocCount; index--; )
7037 bufferImageGranularity,
7041 m_FreeSuballocationsBySize[index],
7043 &pAllocationRequest->offset,
7044 &pAllocationRequest->itemsToMakeLostCount,
7045 &pAllocationRequest->sumFreeSize,
7046 &pAllocationRequest->sumItemSize))
7048 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7055 if(canMakeOtherLost)
7059 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
7060 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
7062 VmaAllocationRequest tmpAllocRequest = {};
7063 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7064 suballocIt != m_Suballocations.end();
7067 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7068 suballocIt->hAllocation->CanBecomeLost())
7073 bufferImageGranularity,
7079 &tmpAllocRequest.offset,
7080 &tmpAllocRequest.itemsToMakeLostCount,
7081 &tmpAllocRequest.sumFreeSize,
7082 &tmpAllocRequest.sumItemSize))
7084 tmpAllocRequest.item = suballocIt;
7086 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7089 *pAllocationRequest = tmpAllocRequest;
7095 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
7104 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7105 uint32_t currentFrameIndex,
7106 uint32_t frameInUseCount,
7107 VmaAllocationRequest* pAllocationRequest)
7109 while(pAllocationRequest->itemsToMakeLostCount > 0)
7111 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7113 ++pAllocationRequest->item;
7115 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7116 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7117 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7118 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7120 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7121 --pAllocationRequest->itemsToMakeLostCount;
7129 VMA_HEAVY_ASSERT(Validate());
7130 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7131 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
7136 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7138 uint32_t lostAllocationCount = 0;
7139 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7140 it != m_Suballocations.end();
7143 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7144 it->hAllocation->CanBecomeLost() &&
7145 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7147 it = FreeSuballocation(it);
7148 ++lostAllocationCount;
7151 return lostAllocationCount;
7154 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7156 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7157 it != m_Suballocations.end();
7160 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7162 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7164 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7165 return VK_ERROR_VALIDATION_FAILED_EXT;
7167 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7169 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7170 return VK_ERROR_VALIDATION_FAILED_EXT;
7178 void VmaBlockMetadata_Generic::Alloc(
7179 const VmaAllocationRequest& request,
7180 VmaSuballocationType type,
7181 VkDeviceSize allocSize,
7185 VMA_ASSERT(!upperAddress);
7186 VMA_ASSERT(request.item != m_Suballocations.end());
7187 VmaSuballocation& suballoc = *request.item;
7189 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7191 VMA_ASSERT(request.offset >= suballoc.offset);
7192 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7193 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7194 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
7198 UnregisterFreeSuballocation(request.item);
7200 suballoc.offset = request.offset;
7201 suballoc.size = allocSize;
7202 suballoc.type = type;
7203 suballoc.hAllocation = hAllocation;
7208 VmaSuballocation paddingSuballoc = {};
7209 paddingSuballoc.offset = request.offset + allocSize;
7210 paddingSuballoc.size = paddingEnd;
7211 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7212 VmaSuballocationList::iterator next = request.item;
7214 const VmaSuballocationList::iterator paddingEndItem =
7215 m_Suballocations.insert(next, paddingSuballoc);
7216 RegisterFreeSuballocation(paddingEndItem);
7222 VmaSuballocation paddingSuballoc = {};
7223 paddingSuballoc.offset = request.offset - paddingBegin;
7224 paddingSuballoc.size = paddingBegin;
7225 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7226 const VmaSuballocationList::iterator paddingBeginItem =
7227 m_Suballocations.insert(request.item, paddingSuballoc);
7228 RegisterFreeSuballocation(paddingBeginItem);
7232 m_FreeCount = m_FreeCount - 1;
7233 if(paddingBegin > 0)
7241 m_SumFreeSize -= allocSize;
7244 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7246 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7247 suballocItem != m_Suballocations.end();
7250 VmaSuballocation& suballoc = *suballocItem;
7251 if(suballoc.hAllocation == allocation)
7253 FreeSuballocation(suballocItem);
7254 VMA_HEAVY_ASSERT(Validate());
7258 VMA_ASSERT(0 &&
"Not found!");
7261 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7263 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7264 suballocItem != m_Suballocations.end();
7267 VmaSuballocation& suballoc = *suballocItem;
7268 if(suballoc.offset == offset)
7270 FreeSuballocation(suballocItem);
7274 VMA_ASSERT(0 &&
"Not found!");
7277 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
7279 typedef VmaSuballocationList::iterator iter_type;
7280 for(iter_type suballocItem = m_Suballocations.begin();
7281 suballocItem != m_Suballocations.end();
7284 VmaSuballocation& suballoc = *suballocItem;
7285 if(suballoc.hAllocation == alloc)
7287 iter_type nextItem = suballocItem;
7291 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
7294 if(newSize < alloc->GetSize())
7296 const VkDeviceSize sizeDiff = suballoc.size - newSize;
7299 if(nextItem != m_Suballocations.end())
7302 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7305 UnregisterFreeSuballocation(nextItem);
7306 nextItem->offset -= sizeDiff;
7307 nextItem->size += sizeDiff;
7308 RegisterFreeSuballocation(nextItem);
7314 VmaSuballocation newFreeSuballoc;
7315 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
7316 newFreeSuballoc.offset = suballoc.offset + newSize;
7317 newFreeSuballoc.size = sizeDiff;
7318 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7319 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
7320 RegisterFreeSuballocation(newFreeSuballocIt);
7329 VmaSuballocation newFreeSuballoc;
7330 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
7331 newFreeSuballoc.offset = suballoc.offset + newSize;
7332 newFreeSuballoc.size = sizeDiff;
7333 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7334 m_Suballocations.push_back(newFreeSuballoc);
7336 iter_type newFreeSuballocIt = m_Suballocations.end();
7337 RegisterFreeSuballocation(--newFreeSuballocIt);
7342 suballoc.size = newSize;
7343 m_SumFreeSize += sizeDiff;
7348 const VkDeviceSize sizeDiff = newSize - suballoc.size;
7351 if(nextItem != m_Suballocations.end())
7354 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7357 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
7363 if(nextItem->size > sizeDiff)
7366 UnregisterFreeSuballocation(nextItem);
7367 nextItem->offset += sizeDiff;
7368 nextItem->size -= sizeDiff;
7369 RegisterFreeSuballocation(nextItem);
7375 UnregisterFreeSuballocation(nextItem);
7376 m_Suballocations.erase(nextItem);
7392 suballoc.size = newSize;
7393 m_SumFreeSize -= sizeDiff;
7400 VMA_ASSERT(0 &&
"Not found!");
7404 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 7406 VkDeviceSize lastSize = 0;
7407 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7409 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7411 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
7412 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7413 VMA_VALIDATE(it->size >= lastSize);
7414 lastSize = it->size;
7419 bool VmaBlockMetadata_Generic::CheckAllocation(
7420 uint32_t currentFrameIndex,
7421 uint32_t frameInUseCount,
7422 VkDeviceSize bufferImageGranularity,
7423 VkDeviceSize allocSize,
7424 VkDeviceSize allocAlignment,
7425 VmaSuballocationType allocType,
7426 VmaSuballocationList::const_iterator suballocItem,
7427 bool canMakeOtherLost,
7428 VkDeviceSize* pOffset,
7429 size_t* itemsToMakeLostCount,
7430 VkDeviceSize* pSumFreeSize,
7431 VkDeviceSize* pSumItemSize)
const 7433 VMA_ASSERT(allocSize > 0);
7434 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7435 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7436 VMA_ASSERT(pOffset != VMA_NULL);
7438 *itemsToMakeLostCount = 0;
7442 if(canMakeOtherLost)
7444 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7446 *pSumFreeSize = suballocItem->size;
7450 if(suballocItem->hAllocation->CanBecomeLost() &&
7451 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7453 ++*itemsToMakeLostCount;
7454 *pSumItemSize = suballocItem->size;
7463 if(GetSize() - suballocItem->offset < allocSize)
7469 *pOffset = suballocItem->offset;
7472 if(VMA_DEBUG_MARGIN > 0)
7474 *pOffset += VMA_DEBUG_MARGIN;
7478 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7482 if(bufferImageGranularity > 1)
7484 bool bufferImageGranularityConflict =
false;
7485 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7486 while(prevSuballocItem != m_Suballocations.cbegin())
7489 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7490 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7492 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7494 bufferImageGranularityConflict =
true;
7502 if(bufferImageGranularityConflict)
7504 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7510 if(*pOffset >= suballocItem->offset + suballocItem->size)
7516 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7519 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7521 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
7523 if(suballocItem->offset + totalSize > GetSize())
7530 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7531 if(totalSize > suballocItem->size)
7533 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7534 while(remainingSize > 0)
7537 if(lastSuballocItem == m_Suballocations.cend())
7541 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7543 *pSumFreeSize += lastSuballocItem->size;
7547 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7548 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7549 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7551 ++*itemsToMakeLostCount;
7552 *pSumItemSize += lastSuballocItem->size;
7559 remainingSize = (lastSuballocItem->size < remainingSize) ?
7560 remainingSize - lastSuballocItem->size : 0;
7566 if(bufferImageGranularity > 1)
7568 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7570 while(nextSuballocItem != m_Suballocations.cend())
7572 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7573 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7575 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7577 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7578 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7579 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7581 ++*itemsToMakeLostCount;
7600 const VmaSuballocation& suballoc = *suballocItem;
7601 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7603 *pSumFreeSize = suballoc.size;
7606 if(suballoc.size < allocSize)
7612 *pOffset = suballoc.offset;
7615 if(VMA_DEBUG_MARGIN > 0)
7617 *pOffset += VMA_DEBUG_MARGIN;
7621 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7625 if(bufferImageGranularity > 1)
7627 bool bufferImageGranularityConflict =
false;
7628 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7629 while(prevSuballocItem != m_Suballocations.cbegin())
7632 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7633 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7635 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7637 bufferImageGranularityConflict =
true;
7645 if(bufferImageGranularityConflict)
7647 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7652 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7655 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7658 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
7665 if(bufferImageGranularity > 1)
7667 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7669 while(nextSuballocItem != m_Suballocations.cend())
7671 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7672 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7674 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}

VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }
}

void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
}
7821 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7822 VmaBlockMetadata(hAllocator),
7824 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7825 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7826 m_1stVectorIndex(0),
7827 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7828 m_1stNullItemsBeginCount(0),
7829 m_1stNullItemsMiddleCount(0),
7830 m_2ndNullItemsCount(0)
7834 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
7838 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7840 VmaBlockMetadata::Init(size);
7841 m_SumFreeSize = size;
7844 bool VmaBlockMetadata_Linear::Validate()
const 7846 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7847 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7849 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
7850 VMA_VALIDATE(!suballocations1st.empty() ||
7851 suballocations2nd.empty() ||
7852 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
7854 if(!suballocations1st.empty())
7857 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
7859 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
7861 if(!suballocations2nd.empty())
7864 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
7867 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7868 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7870 VkDeviceSize sumUsedSize = 0;
7871 const size_t suballoc1stCount = suballocations1st.size();
7872 VkDeviceSize offset = VMA_DEBUG_MARGIN;
7874 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7876 const size_t suballoc2ndCount = suballocations2nd.size();
7877 size_t nullItem2ndCount = 0;
7878 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7880 const VmaSuballocation& suballoc = suballocations2nd[i];
7881 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7883 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7884 VMA_VALIDATE(suballoc.offset >= offset);
7888 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7889 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7890 sumUsedSize += suballoc.size;
7897 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7900 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
7903 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7905 const VmaSuballocation& suballoc = suballocations1st[i];
7906 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7907 suballoc.hAllocation == VK_NULL_HANDLE);
7910 size_t nullItem1stCount = m_1stNullItemsBeginCount;
7912 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7914 const VmaSuballocation& suballoc = suballocations1st[i];
7915 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7917 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7918 VMA_VALIDATE(suballoc.offset >= offset);
7919 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7923 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7924 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7925 sumUsedSize += suballoc.size;
7932 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7934 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
7936 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7938 const size_t suballoc2ndCount = suballocations2nd.size();
7939 size_t nullItem2ndCount = 0;
7940 for(
size_t i = suballoc2ndCount; i--; )
7942 const VmaSuballocation& suballoc = suballocations2nd[i];
7943 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7945 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7946 VMA_VALIDATE(suballoc.offset >= offset);
7950 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7951 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7952 sumUsedSize += suballoc.size;
7959 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7962 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
7965 VMA_VALIDATE(offset <= GetSize());
7966 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
7971 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7973 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7974 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
7977 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7979 const VkDeviceSize size = GetSize();
7991 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7993 switch(m_2ndVectorMode)
7995 case SECOND_VECTOR_EMPTY:
8001 const size_t suballocations1stCount = suballocations1st.size();
8002 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8003 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8004 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8006 firstSuballoc.offset,
8007 size - (lastSuballoc.offset + lastSuballoc.size));
8011 case SECOND_VECTOR_RING_BUFFER:
8016 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8017 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8018 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8019 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
8023 case SECOND_VECTOR_DOUBLE_STACK:
8028 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8029 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
8030 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
8031 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
8041 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 8043 const VkDeviceSize size = GetSize();
8044 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8045 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8046 const size_t suballoc1stCount = suballocations1st.size();
8047 const size_t suballoc2ndCount = suballocations2nd.size();
8058 VkDeviceSize lastOffset = 0;
8060 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8062 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8063 size_t nextAlloc2ndIndex = 0;
8064 while(lastOffset < freeSpace2ndTo1stEnd)
8067 while(nextAlloc2ndIndex < suballoc2ndCount &&
8068 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8070 ++nextAlloc2ndIndex;
8074 if(nextAlloc2ndIndex < suballoc2ndCount)
8076 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8079 if(lastOffset < suballoc.offset)
8082 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8096 lastOffset = suballoc.offset + suballoc.size;
8097 ++nextAlloc2ndIndex;
8103 if(lastOffset < freeSpace2ndTo1stEnd)
8105 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8113 lastOffset = freeSpace2ndTo1stEnd;
8118 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8119 const VkDeviceSize freeSpace1stTo2ndEnd =
8120 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8121 while(lastOffset < freeSpace1stTo2ndEnd)
8124 while(nextAlloc1stIndex < suballoc1stCount &&
8125 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8127 ++nextAlloc1stIndex;
8131 if(nextAlloc1stIndex < suballoc1stCount)
8133 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8136 if(lastOffset < suballoc.offset)
8139 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8153 lastOffset = suballoc.offset + suballoc.size;
8154 ++nextAlloc1stIndex;
8160 if(lastOffset < freeSpace1stTo2ndEnd)
8162 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8170 lastOffset = freeSpace1stTo2ndEnd;
8174 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8176 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8177 while(lastOffset < size)
8180 while(nextAlloc2ndIndex != SIZE_MAX &&
8181 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8183 --nextAlloc2ndIndex;
8187 if(nextAlloc2ndIndex != SIZE_MAX)
8189 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8192 if(lastOffset < suballoc.offset)
8195 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8209 lastOffset = suballoc.offset + suballoc.size;
8210 --nextAlloc2ndIndex;
8216 if(lastOffset < size)
8218 const VkDeviceSize unusedRangeSize = size - lastOffset;
8234 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8236 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8237 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8238 const VkDeviceSize size = GetSize();
8239 const size_t suballoc1stCount = suballocations1st.size();
8240 const size_t suballoc2ndCount = suballocations2nd.size();
8242 inoutStats.
size += size;
8244 VkDeviceSize lastOffset = 0;
8246 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8248 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8249 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8250 while(lastOffset < freeSpace2ndTo1stEnd)
8253 while(nextAlloc2ndIndex < suballoc2ndCount &&
8254 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8256 ++nextAlloc2ndIndex;
8260 if(nextAlloc2ndIndex < suballoc2ndCount)
8262 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8265 if(lastOffset < suballoc.offset)
8268 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8279 lastOffset = suballoc.offset + suballoc.size;
8280 ++nextAlloc2ndIndex;
8285 if(lastOffset < freeSpace2ndTo1stEnd)
8288 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8295 lastOffset = freeSpace2ndTo1stEnd;
8300 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8301 const VkDeviceSize freeSpace1stTo2ndEnd =
8302 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8303 while(lastOffset < freeSpace1stTo2ndEnd)
8306 while(nextAlloc1stIndex < suballoc1stCount &&
8307 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8309 ++nextAlloc1stIndex;
8313 if(nextAlloc1stIndex < suballoc1stCount)
8315 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8318 if(lastOffset < suballoc.offset)
8321 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8332 lastOffset = suballoc.offset + suballoc.size;
8333 ++nextAlloc1stIndex;
8338 if(lastOffset < freeSpace1stTo2ndEnd)
8341 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8348 lastOffset = freeSpace1stTo2ndEnd;
8352 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8354 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8355 while(lastOffset < size)
8358 while(nextAlloc2ndIndex != SIZE_MAX &&
8359 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8361 --nextAlloc2ndIndex;
8365 if(nextAlloc2ndIndex != SIZE_MAX)
8367 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8370 if(lastOffset < suballoc.offset)
8373 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8384 lastOffset = suballoc.offset + suballoc.size;
8385 --nextAlloc2ndIndex;
8390 if(lastOffset < size)
8393 const VkDeviceSize unusedRangeSize = size - lastOffset;
8406 #if VMA_STATS_STRING_ENABLED 8407 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 8409 const VkDeviceSize size = GetSize();
8410 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8411 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8412 const size_t suballoc1stCount = suballocations1st.size();
8413 const size_t suballoc2ndCount = suballocations2nd.size();
8417 size_t unusedRangeCount = 0;
8418 VkDeviceSize usedBytes = 0;
8420 VkDeviceSize lastOffset = 0;
8422 size_t alloc2ndCount = 0;
8423 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8425 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8426 size_t nextAlloc2ndIndex = 0;
8427 while(lastOffset < freeSpace2ndTo1stEnd)
8430 while(nextAlloc2ndIndex < suballoc2ndCount &&
8431 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8433 ++nextAlloc2ndIndex;
8437 if(nextAlloc2ndIndex < suballoc2ndCount)
8439 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8442 if(lastOffset < suballoc.offset)
8451 usedBytes += suballoc.size;
8454 lastOffset = suballoc.offset + suballoc.size;
8455 ++nextAlloc2ndIndex;
8460 if(lastOffset < freeSpace2ndTo1stEnd)
8467 lastOffset = freeSpace2ndTo1stEnd;
8472 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8473 size_t alloc1stCount = 0;
8474 const VkDeviceSize freeSpace1stTo2ndEnd =
8475 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8476 while(lastOffset < freeSpace1stTo2ndEnd)
8479 while(nextAlloc1stIndex < suballoc1stCount &&
8480 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8482 ++nextAlloc1stIndex;
8486 if(nextAlloc1stIndex < suballoc1stCount)
8488 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8491 if(lastOffset < suballoc.offset)
8500 usedBytes += suballoc.size;
8503 lastOffset = suballoc.offset + suballoc.size;
8504 ++nextAlloc1stIndex;
8509 if(lastOffset < size)
8516 lastOffset = freeSpace1stTo2ndEnd;
8520 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8522 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8523 while(lastOffset < size)
8526 while(nextAlloc2ndIndex != SIZE_MAX &&
8527 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8529 --nextAlloc2ndIndex;
8533 if(nextAlloc2ndIndex != SIZE_MAX)
8535 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8538 if(lastOffset < suballoc.offset)
8547 usedBytes += suballoc.size;
8550 lastOffset = suballoc.offset + suballoc.size;
8551 --nextAlloc2ndIndex;
8556 if(lastOffset < size)
8568 const VkDeviceSize unusedBytes = size - usedBytes;
8569 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
8574 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8576 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8577 size_t nextAlloc2ndIndex = 0;
8578 while(lastOffset < freeSpace2ndTo1stEnd)
8581 while(nextAlloc2ndIndex < suballoc2ndCount &&
8582 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8584 ++nextAlloc2ndIndex;
8588 if(nextAlloc2ndIndex < suballoc2ndCount)
8590 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8593 if(lastOffset < suballoc.offset)
8596 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8597 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8602 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8605 lastOffset = suballoc.offset + suballoc.size;
8606 ++nextAlloc2ndIndex;
8611 if(lastOffset < freeSpace2ndTo1stEnd)
8614 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8615 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8619 lastOffset = freeSpace2ndTo1stEnd;
8624 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8625 while(lastOffset < freeSpace1stTo2ndEnd)
8628 while(nextAlloc1stIndex < suballoc1stCount &&
8629 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8631 ++nextAlloc1stIndex;
8635 if(nextAlloc1stIndex < suballoc1stCount)
8637 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8640 if(lastOffset < suballoc.offset)
8643 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8644 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8649 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8652 lastOffset = suballoc.offset + suballoc.size;
8653 ++nextAlloc1stIndex;
8658 if(lastOffset < freeSpace1stTo2ndEnd)
8661 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8662 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8666 lastOffset = freeSpace1stTo2ndEnd;
8670 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8672 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8673 while(lastOffset < size)
8676 while(nextAlloc2ndIndex != SIZE_MAX &&
8677 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8679 --nextAlloc2ndIndex;
8683 if(nextAlloc2ndIndex != SIZE_MAX)
8685 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8688 if(lastOffset < suballoc.offset)
8691 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8692 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8697 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8700 lastOffset = suballoc.offset + suballoc.size;
8701 --nextAlloc2ndIndex;
8706 if(lastOffset < size)
8709 const VkDeviceSize unusedRangeSize = size - lastOffset;
8710 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8719 PrintDetailedMap_End(json);
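// CreateAllocationRequest for the linear algorithm distinguishes three placements:
// an upper-address request grows the 2nd vector downwards as a double stack, a
// lower-address request normally appends at the end of the 1st vector (or below the
// bottom of the 2nd stack), and in ring-buffer mode it wraps around into the 2nd vector,
// optionally making old allocations at the front of the 1st vector lost.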
#endif // #if VMA_STATS_STRING_ENABLED

bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    VmaAllocationRequest* pAllocationRequest)
8735 VMA_ASSERT(allocSize > 0);
8736 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8737 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8738 VMA_HEAVY_ASSERT(Validate());
8740 const VkDeviceSize size = GetSize();
8741 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8742 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8746 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8753 if(allocSize > size)
8757 VkDeviceSize resultBaseOffset = size - allocSize;
8758 if(!suballocations2nd.empty())
8760 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8761 resultBaseOffset = lastSuballoc.offset - allocSize;
8762 if(allocSize > lastSuballoc.offset)
8769 VkDeviceSize resultOffset = resultBaseOffset;
8772 if(VMA_DEBUG_MARGIN > 0)
8774 if(resultOffset < VMA_DEBUG_MARGIN)
8778 resultOffset -= VMA_DEBUG_MARGIN;
8782 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
8786 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8791 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8792 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8794 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                    bufferImageGranularityConflict = true;
8804 if(bufferImageGranularityConflict)
8806 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
8811 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8812 suballocations1st.back().offset + suballocations1st.back().size :
8814 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
8818 if(bufferImageGranularity > 1)
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8822 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8823 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8825 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
8839 pAllocationRequest->offset = resultOffset;
8840 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8841 pAllocationRequest->sumItemSize = 0;
8843 pAllocationRequest->itemsToMakeLostCount = 0;
8849 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8853 VkDeviceSize resultBaseOffset = 0;
8854 if(!suballocations1st.empty())
8856 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8857 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8861 VkDeviceSize resultOffset = resultBaseOffset;
8864 if(VMA_DEBUG_MARGIN > 0)
8866 resultOffset += VMA_DEBUG_MARGIN;
8870 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8874 if(bufferImageGranularity > 1 && !suballocations1st.empty())
        bool bufferImageGranularityConflict = false;
        for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8879 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8880 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8882 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    bufferImageGranularityConflict = true;
8892 if(bufferImageGranularityConflict)
8894 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8898 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8899 suballocations2nd.back().offset : size;
8902 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
8906 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8910 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8911 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8913 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8927 pAllocationRequest->offset = resultOffset;
8928 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8929 pAllocationRequest->sumItemSize = 0;
8931 pAllocationRequest->itemsToMakeLostCount = 0;
8938 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8940 VMA_ASSERT(!suballocations1st.empty());
8942 VkDeviceSize resultBaseOffset = 0;
8943 if(!suballocations2nd.empty())
8945 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8946 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8950 VkDeviceSize resultOffset = resultBaseOffset;
8953 if(VMA_DEBUG_MARGIN > 0)
8955 resultOffset += VMA_DEBUG_MARGIN;
8959 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8963 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        bool bufferImageGranularityConflict = false;
        for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8968 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8969 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8971 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    bufferImageGranularityConflict = true;
8981 if(bufferImageGranularityConflict)
8983 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8987 pAllocationRequest->itemsToMakeLostCount = 0;
8988 pAllocationRequest->sumItemSize = 0;
8989 size_t index1st = m_1stNullItemsBeginCount;
8991 if(canMakeOtherLost)
8993 while(index1st < suballocations1st.size() &&
8994 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8997 const VmaSuballocation& suballoc = suballocations1st[index1st];
8998 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
9004 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9005 if(suballoc.hAllocation->CanBecomeLost() &&
9006 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9008 ++pAllocationRequest->itemsToMakeLostCount;
9009 pAllocationRequest->sumItemSize += suballoc.size;
9021 if(bufferImageGranularity > 1)
9023 while(index1st < suballocations1st.size())
9025 const VmaSuballocation& suballoc = suballocations1st[index1st];
9026 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
9028 if(suballoc.hAllocation != VK_NULL_HANDLE)
9031 if(suballoc.hAllocation->CanBecomeLost() &&
9032 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9034 ++pAllocationRequest->itemsToMakeLostCount;
9035 pAllocationRequest->sumItemSize += suballoc.size;
9054 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
9055 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
9059 if(bufferImageGranularity > 1)
            for(size_t nextSuballocIndex = index1st;
                nextSuballocIndex < suballocations1st.size();
                nextSuballocIndex++)
9065 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9066 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9068 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9082 pAllocationRequest->offset = resultOffset;
9083 pAllocationRequest->sumFreeSize =
9084 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
9086 - pAllocationRequest->sumItemSize;
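// MakeRequestedAllocationsLost walks the 1st vector starting at m_1stNullItemsBeginCount
// and turns allocations that can become lost into free null items until
// itemsToMakeLostCount of them have been released, updating m_SumFreeSize and
// m_1stNullItemsMiddleCount along the way.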
9096 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
9097 uint32_t currentFrameIndex,
9098 uint32_t frameInUseCount,
9099 VmaAllocationRequest* pAllocationRequest)
9101 if(pAllocationRequest->itemsToMakeLostCount == 0)
9106 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
9108 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9109 size_t index1st = m_1stNullItemsBeginCount;
9110 size_t madeLostCount = 0;
9111 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
9113 VMA_ASSERT(index1st < suballocations1st.size());
9114 VmaSuballocation& suballoc = suballocations1st[index1st];
9115 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9117 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9118 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
9119 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9121 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9122 suballoc.hAllocation = VK_NULL_HANDLE;
9123 m_SumFreeSize += suballoc.size;
9124 ++m_1stNullItemsMiddleCount;
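// MakeAllocationsLost scans both suballocation vectors and makes every allocation that
// reports CanBecomeLost() lost, returning how many allocations were released.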
9141 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9143 uint32_t lostAllocationCount = 0;
9145 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9148 VmaSuballocation& suballoc = suballocations1st[i];
9149 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9150 suballoc.hAllocation->CanBecomeLost() &&
9151 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9153 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9154 suballoc.hAllocation = VK_NULL_HANDLE;
9155 ++m_1stNullItemsMiddleCount;
9156 m_SumFreeSize += suballoc.size;
9157 ++lostAllocationCount;
9161 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9164 VmaSuballocation& suballoc = suballocations2nd[i];
9165 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9166 suballoc.hAllocation->CanBecomeLost() &&
9167 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9169 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9170 suballoc.hAllocation = VK_NULL_HANDLE;
9171 ++m_2ndNullItemsCount;
9172 ++lostAllocationCount;
9176 if(lostAllocationCount)
9181 return lostAllocationCount;
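// CheckCorruption validates the magic values written into the margins before and after
// every live allocation in both suballocation vectors; pBlockData must point to the
// mapped memory of the whole block.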
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
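// Alloc commits a previously computed request: upper-address requests are pushed onto
// the 2nd vector (switching it to double-stack mode), lower-address requests are
// appended to the 1st vector or, when they wrap around, to the 2nd vector in
// ring-buffer mode.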
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    bool upperAddress,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
        VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
            "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9240 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9241 suballocations2nd.push_back(newSuballoc);
9242 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
9246 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9249 if(suballocations1st.empty())
9251 suballocations1st.push_back(newSuballoc);
9256 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
9259 VMA_ASSERT(request.offset + allocSize <= GetSize());
9260 suballocations1st.push_back(newSuballoc);
9263 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
9265 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9267 switch(m_2ndVectorMode)
9269 case SECOND_VECTOR_EMPTY:
9271 VMA_ASSERT(suballocations2nd.empty());
9272 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
9274 case SECOND_VECTOR_RING_BUFFER:
9276 VMA_ASSERT(!suballocations2nd.empty());
9278 case SECOND_VECTOR_DOUBLE_STACK:
            VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
9285 suballocations2nd.push_back(newSuballoc);
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
9294 m_SumFreeSize -= newSuballoc.size;
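// Free/FreeAtOffset check the cheap cases first (the first live item of the 1st vector
// and the top of whichever vector currently acts as a stack) and only then fall back to
// a binary search by offset; freed items become null items that CleanupAfterFree()
// compacts away later.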
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}
9302 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
9304 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9305 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9307 if(!suballocations1st.empty())
9310 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9311 if(firstSuballoc.offset == offset)
9313 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9314 firstSuballoc.hAllocation = VK_NULL_HANDLE;
9315 m_SumFreeSize += firstSuballoc.size;
9316 ++m_1stNullItemsBeginCount;
9323 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
9324 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9326 VmaSuballocation& lastSuballoc = suballocations2nd.back();
9327 if(lastSuballoc.offset == offset)
9329 m_SumFreeSize += lastSuballoc.size;
9330 suballocations2nd.pop_back();
9336 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
9338 VmaSuballocation& lastSuballoc = suballocations1st.back();
9339 if(lastSuballoc.offset == offset)
9341 m_SumFreeSize += lastSuballoc.size;
9342 suballocations1st.pop_back();
9350 VmaSuballocation refSuballoc;
9351 refSuballoc.offset = offset;
        SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc);
9357 if(it != suballocations1st.end())
9359 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9360 it->hAllocation = VK_NULL_HANDLE;
9361 ++m_1stNullItemsMiddleCount;
9362 m_SumFreeSize += it->size;
9368 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
9371 VmaSuballocation refSuballoc;
9372 refSuballoc.offset = offset;
9374 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
9375 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
9376 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
9377 if(it != suballocations2nd.end())
9379 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9380 it->hAllocation = VK_NULL_HANDLE;
9381 ++m_2ndNullItemsCount;
9382 m_SumFreeSize += it->size;
    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
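// CleanupAfterFree trims null items from the beginning and end of the 1st vector and
// from the end of the 2nd vector, optionally compacts the 1st vector (see
// ShouldCompact1st above), and finally swaps the two vectors once the 1st one has
// drained while the ring buffer still holds allocations.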
9398 void VmaBlockMetadata_Linear::CleanupAfterFree()
9400 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9401 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9405 suballocations1st.clear();
9406 suballocations2nd.clear();
9407 m_1stNullItemsBeginCount = 0;
9408 m_1stNullItemsMiddleCount = 0;
9409 m_2ndNullItemsCount = 0;
9410 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9414 const size_t suballoc1stCount = suballocations1st.size();
9415 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9416 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
9419 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9420 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9422 ++m_1stNullItemsBeginCount;
9423 --m_1stNullItemsMiddleCount;
9427 while(m_1stNullItemsMiddleCount > 0 &&
9428 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9430 --m_1stNullItemsMiddleCount;
9431 suballocations1st.pop_back();
9435 while(m_2ndNullItemsCount > 0 &&
9436 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9438 --m_2ndNullItemsCount;
9439 suballocations2nd.pop_back();
9442 if(ShouldCompact1st())
9444 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9445 size_t srcIndex = m_1stNullItemsBeginCount;
        for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
9448 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9452 if(dstIndex != srcIndex)
9454 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9458 suballocations1st.resize(nonNullItemCount);
9459 m_1stNullItemsBeginCount = 0;
9460 m_1stNullItemsMiddleCount = 0;
9464 if(suballocations2nd.empty())
9466 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9470 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9472 suballocations1st.clear();
9473 m_1stNullItemsBeginCount = 0;
9475 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9478 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9479 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9480 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9481 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9483 ++m_1stNullItemsBeginCount;
9484 --m_1stNullItemsMiddleCount;
9486 m_2ndNullItemsCount = 0;
9487 m_1stVectorIndex ^= 1;
9492 VMA_HEAVY_ASSERT(Validate());
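// VmaBlockMetadata_Buddy manages the block as a binary buddy system: the usable size is
// rounded down to a power of two, a node at level L spans GetSize() >> L bytes, and free
// nodes are kept on per-level free lists, so allocating and freeing only walk a single
// path of the tree.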
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_AllocationCount(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}
9509 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
9514 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
9516 VmaBlockMetadata::Init(size);
9518 m_UsableSize = VmaPrevPow2(size);
9519 m_SumFreeSize = m_UsableSize;
9523 while(m_LevelCount < MAX_LEVELS &&
9524 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
9529 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
9530 rootNode->offset = 0;
9531 rootNode->type = Node::TYPE_FREE;
9532 rootNode->parent = VMA_NULL;
9533 rootNode->buddy = VMA_NULL;
9536 AddToFreeListFront(0, rootNode);
bool VmaBlockMetadata_Buddy::Validate() const
{
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
9547 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9548 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
9551 for(uint32_t level = 0; level < m_LevelCount; ++level)
9553 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9554 m_FreeList[level].front->free.prev == VMA_NULL);
9556 for(Node* node = m_FreeList[level].front;
9558 node = node->free.next)
9560 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9562 if(node->free.next == VMA_NULL)
9564 VMA_VALIDATE(m_FreeList[level].back == node);
9568 VMA_VALIDATE(node->free.next->free.prev == node);
9574 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9576 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL)
            return LevelToNodeSize(level);
    }
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();
9607 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
9609 if(unusableSize > 0)
void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
9628 if(unusableSize > 0)
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);
9643 PrintDetailedMap_Begin(
9649 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9651 const VkDeviceSize unusableSize = GetUnusableSize();
9652 if(unusableSize > 0)
9654 PrintDetailedMap_UnusedRange(json,
9659 PrintDetailedMap_End(json);
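// For the buddy algorithm an allocation request is served from the level whose node size
// matches the (granularity-adjusted) size and alignment: AllocSizeToLevel picks the
// target level and the search below walks the free lists from that level towards level 0
// (larger nodes), taking the first suitably aligned free node it finds.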
#endif // #if VMA_STATS_STRING_ENABLED

bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
9665 uint32_t currentFrameIndex,
9666 uint32_t frameInUseCount,
9667 VkDeviceSize bufferImageGranularity,
9668 VkDeviceSize allocSize,
9669 VkDeviceSize allocAlignment,
9671 VmaSuballocationType allocType,
9672 bool canMakeOtherLost,
9674 VmaAllocationRequest* pAllocationRequest)
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
9680 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9681 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9682 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9684 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
9685 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
9688 if(allocSize > m_UsableSize)
9693 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9694 for(uint32_t level = targetLevel + 1; level--; )
9696 for(Node* freeNode = m_FreeList[level].front;
9697 freeNode != VMA_NULL;
9698 freeNode = freeNode->free.next)
9700 if(freeNode->offset % allocAlignment == 0)
9702 pAllocationRequest->offset = freeNode->offset;
9703 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
9704 pAllocationRequest->sumItemSize = 0;
9705 pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
9715 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
9716 uint32_t currentFrameIndex,
9717 uint32_t frameInUseCount,
9718 VmaAllocationRequest* pAllocationRequest)
9724 return pAllocationRequest->itemsToMakeLostCount == 0;
9727 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9736 void VmaBlockMetadata_Buddy::Alloc(
9737 const VmaAllocationRequest& request,
9738 VmaSuballocationType type,
9739 VkDeviceSize allocSize,
9743 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9744 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
9746 Node* currNode = m_FreeList[currLevel].front;
9747 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9748 while(currNode->offset != request.offset)
9750 currNode = currNode->free.next;
9751 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9755 while(currLevel < targetLevel)
9759 RemoveFromFreeList(currLevel, currNode);
9761 const uint32_t childrenLevel = currLevel + 1;
9764 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
9765 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
9767 leftChild->offset = currNode->offset;
9768 leftChild->type = Node::TYPE_FREE;
9769 leftChild->parent = currNode;
9770 leftChild->buddy = rightChild;
9772 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9773 rightChild->type = Node::TYPE_FREE;
9774 rightChild->parent = currNode;
9775 rightChild->buddy = leftChild;
9778 currNode->type = Node::TYPE_SPLIT;
9779 currNode->split.leftChild = leftChild;
9782 AddToFreeListFront(childrenLevel, rightChild);
9783 AddToFreeListFront(childrenLevel, leftChild);
9788 currNode = m_FreeList[currLevel].front;
9797 VMA_ASSERT(currLevel == targetLevel &&
9798 currNode != VMA_NULL &&
9799 currNode->type == Node::TYPE_FREE);
9800 RemoveFromFreeList(currLevel, currNode);
9803 currNode->type = Node::TYPE_ALLOCATION;
9804 currNode->allocation.alloc = hAllocation;
9806 ++m_AllocationCount;
9808 m_SumFreeSize -= allocSize;
9811 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
9813 if(node->type == Node::TYPE_SPLIT)
9815 DeleteNode(node->split.leftChild->buddy);
9816 DeleteNode(node->split.leftChild);
9819 vma_delete(GetAllocationCallbacks(), node);
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr,
    uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
9825 VMA_VALIDATE(curr->parent == parent);
9826 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9827 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9830 case Node::TYPE_FREE:
9832 ctx.calculatedSumFreeSize += levelNodeSize;
9833 ++ctx.calculatedFreeCount;
9835 case Node::TYPE_ALLOCATION:
9836 ++ctx.calculatedAllocationCount;
9837 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
9838 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
9840 case Node::TYPE_SPLIT:
9842 const uint32_t childrenLevel = level + 1;
9843 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
9845 VMA_VALIDATE(leftChild != VMA_NULL);
9846 VMA_VALIDATE(leftChild->offset == curr->offset);
9847 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            const Node* const rightChild = leftChild->buddy;
9852 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9853 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
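// FreeAtOffset descends from the root, picking the left or right child depending on
// which half contains the offset, frees the leaf node and then repeatedly merges it with
// its buddy while the buddy is also free, putting the merged node back on the free list.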
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
9884 Node* node = m_Root;
9885 VkDeviceSize nodeOffset = 0;
9887 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
9888 while(node->type == Node::TYPE_SPLIT)
9890 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
9891 if(offset < nodeOffset + nextLevelSize)
9893 node = node->split.leftChild;
9897 node = node->split.leftChild->buddy;
9898 nodeOffset += nextLevelSize;
9901 levelNodeSize = nextLevelSize;
9904 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
9905 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
9908 --m_AllocationCount;
9909 m_SumFreeSize += alloc->GetSize();
9911 node->type = Node::TYPE_FREE;
9914 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
9916 RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;
9919 vma_delete(GetAllocationCallbacks(), node->buddy);
9920 vma_delete(GetAllocationCallbacks(), node);
9921 parent->type = Node::TYPE_FREE;
9929 AddToFreeListFront(level, node);
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
9942 case Node::TYPE_ALLOCATION:
9944 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
9950 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
9951 if(unusedRangeSize > 0)
9960 case Node::TYPE_SPLIT:
9962 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
9964 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
9966 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
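// The per-level free lists are doubly linked through Node::free.prev/next:
// AddToFreeListFront pushes a node at the head, RemoveFromFreeList unlinks an arbitrary
// node and patches the front/back pointers as needed.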
9974 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
9976 VMA_ASSERT(node->type == Node::TYPE_FREE);
    Node* const frontNode = m_FreeList[level].front;
9980 if(frontNode == VMA_NULL)
9982 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9983 node->free.prev = node->free.next = VMA_NULL;
9984 m_FreeList[level].front = m_FreeList[level].back = node;
9988 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9989 node->free.prev = VMA_NULL;
9990 node->free.next = frontNode;
9991 frontNode->free.prev = node;
9992 m_FreeList[level].front = node;
9996 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
9998 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
10001 if(node->free.prev == VMA_NULL)
10003 VMA_ASSERT(m_FreeList[level].front == node);
10004 m_FreeList[level].front = node->free.next;
        Node* const prevFreeNode = node->free.prev;
10009 VMA_ASSERT(prevFreeNode->free.next == node);
10010 prevFreeNode->free.next = node->free.next;
10014 if(node->free.next == VMA_NULL)
10016 VMA_ASSERT(m_FreeList[level].back == node);
10017 m_FreeList[level].back = node->free.prev;
        Node* const nextFreeNode = node->free.next;
10022 VMA_ASSERT(nextFreeNode->free.prev == node);
10023 nextFreeNode->free.prev = node->free.prev;
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
10033 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
10035 case Node::TYPE_ALLOCATION:
10037 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
10038 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
10039 if(allocSize < levelNodeSize)
10041 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
10045 case Node::TYPE_SPLIT:
10047 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
10049 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
10051 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
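// VmaDeviceMemoryBlock wraps a single VkDeviceMemory handle together with the metadata
// object that tracks suballocations inside it (generic, linear, or buddy, selected by
// the algorithm passed to Init) and a mutex-protected map/unmap reference count.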
#endif // #if VMA_STATS_STRING_ENABLED

VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_hMemory(VK_NULL_HANDLE),
    m_pMappedData(VMA_NULL)
{
}
10074 void VmaDeviceMemoryBlock::Init(
10076 uint32_t newMemoryTypeIndex,
10077 VkDeviceMemory newMemory,
10078 VkDeviceSize newSize,
10080 uint32_t algorithm)
10082 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
10084 m_MemoryTypeIndex = newMemoryTypeIndex;
10086 m_hMemory = newMemory;
10091 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
10094 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
10100 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
10102 m_pMetadata->Init(newSize);
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
10111 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
10112 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
10113 m_hMemory = VK_NULL_HANDLE;
10115 vma_delete(allocator, m_pMetadata);
10116 m_pMetadata = VMA_NULL;
bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
10130 VkResult res = Map(hAllocator, 1, &pData);
10131 if(res != VK_SUCCESS)
10136 res = m_pMetadata->CheckCorruption(pData);
10138 Unmap(hAllocator, 1);
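// Map/Unmap are reference counted: vkMapMemory is called only when the count goes from
// zero to non-zero and vkUnmapMemory only when it drops back to zero, so persistently
// mapped allocations and temporary maps can share one mapping of the block.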
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
10150 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10151 if(m_MapCount != 0)
10153 m_MapCount += count;
10154 VMA_ASSERT(m_pMappedData != VMA_NULL);
10155 if(ppData != VMA_NULL)
10157 *ppData = m_pMappedData;
10163 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
10164 hAllocator->m_hDevice,
10170 if(result == VK_SUCCESS)
10172 if(ppData != VMA_NULL)
10174 *ppData = m_pMappedData;
10176 m_MapCount = count;
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
10189 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10190 if(m_MapCount >= count)
10192 m_MapCount -= count;
10193 if(m_MapCount == 0)
10195 m_pMappedData = VMA_NULL;
10196 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
10207 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10208 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10211 VkResult res = Map(hAllocator, 1, &pData);
10212 if(res != VK_SUCCESS)
10217 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
10218 VmaWriteMagicValue(pData, allocOffset + allocSize);
10220 Unmap(hAllocator, 1);
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
10227 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10228 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10231 VkResult res = Map(hAllocator, 1, &pData);
10232 if(res != VK_SUCCESS)
10237 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
10241 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }
10246 Unmap(hAllocator, 1);
10251 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
10259 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10260 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
10261 hAllocator->m_hDevice,
10264 hAllocation->GetOffset());
10267 VkResult VmaDeviceMemoryBlock::BindImageMemory(
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
10275 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10276 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
10277 hAllocator->m_hDevice,
10280 hAllocation->GetOffset());
    memset(&outInfo, 0, sizeof(outInfo));
static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
10312 VmaPool_T::VmaPool_T(
10315 VkDeviceSize preferredBlockSize) :
10318 createInfo.memoryTypeIndex,
10319 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
10320 createInfo.minBlockCount,
10321 createInfo.maxBlockCount,
10323 createInfo.frameInUseCount,
10325 createInfo.blockSize != 0,
10331 VmaPool_T::~VmaPool_T()
#if VMA_STATS_STRING_ENABLED
#endif // #if VMA_STATS_STRING_ENABLED

VmaBlockVector::VmaBlockVector(
10341 uint32_t memoryTypeIndex,
10342 VkDeviceSize preferredBlockSize,
10343 size_t minBlockCount,
10344 size_t maxBlockCount,
10345 VkDeviceSize bufferImageGranularity,
10346 uint32_t frameInUseCount,
10348 bool explicitBlockSize,
10349 uint32_t algorithm) :
10350 m_hAllocator(hAllocator),
10351 m_MemoryTypeIndex(memoryTypeIndex),
10352 m_PreferredBlockSize(preferredBlockSize),
10353 m_MinBlockCount(minBlockCount),
10354 m_MaxBlockCount(maxBlockCount),
10355 m_BufferImageGranularity(bufferImageGranularity),
10356 m_FrameInUseCount(frameInUseCount),
10357 m_IsCustomPool(isCustomPool),
10358 m_ExplicitBlockSize(explicitBlockSize),
10359 m_Algorithm(algorithm),
10360 m_HasEmptyBlock(false),
10361 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
10362 m_pDefragmentator(VMA_NULL),
10367 VmaBlockVector::~VmaBlockVector()
10369 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
    for(size_t i = m_Blocks.size(); i--; )
10373 m_Blocks[i]->Destroy(m_hAllocator);
10374 vma_delete(m_hAllocator, m_Blocks[i]);
10378 VkResult VmaBlockVector::CreateMinBlocks()
    for(size_t i = 0; i < m_MinBlockCount; ++i)
10382 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
10383 if(res != VK_SUCCESS)
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
10393 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10395 const size_t blockCount = m_Blocks.size();
10404 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
10407 VMA_ASSERT(pBlock);
10408 VMA_HEAVY_ASSERT(pBlock->Validate());
10409 pBlock->m_pMetadata->AddPoolStats(*pStats);
bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
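// A minimal usage sketch (assuming the standard VMA configuration macros): corruption
// detection only becomes active for HOST_VISIBLE | HOST_COHERENT memory types and when
// the implementation is compiled with a non-zero margin, e.g.
//
//   #define VMA_DEBUG_MARGIN 16
//   #define VMA_DEBUG_DETECT_CORRUPTION 1
//   #define VMA_IMPLEMENTATION
//   #include "vk_mem_alloc.h"
//
// With such defines, vmaCheckCorruption() ends up in the per-block CheckCorruption()
// calls that validate the magic values around each allocation.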
10421 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
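// Allocate tries, in this order: the most recently created block, the existing blocks
// (one loop scans forward, another backward, depending on the requested strategy),
// creating a new block (halving the preferred size up to NEW_BLOCK_SIZE_SHIFT_MAX times
// if allocation fails), and finally, if the caller allows it, making other allocations
// lost.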
10423 VkResult VmaBlockVector::Allocate(
10425 uint32_t currentFrameIndex,
10427 VkDeviceSize alignment,
10429 VmaSuballocationType suballocType,
10436 const bool canCreateNewBlock =
10438 (m_Blocks.size() < m_MaxBlockCount);
        canMakeOtherLost = false;
10449 if(isUpperAddress &&
10452 return VK_ERROR_FEATURE_NOT_PRESENT;
10466 return VK_ERROR_FEATURE_NOT_PRESENT;
10470 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
10472 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10475 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10482 if(!canMakeOtherLost || canCreateNewBlock)
10491 if(!m_Blocks.empty())
            VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
10494 VMA_ASSERT(pCurrBlock);
10495 VkResult res = AllocateFromBlock(
10506 if(res == VK_SUCCESS)
                VMA_DEBUG_LOG(" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
        for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
            VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
10521 VMA_ASSERT(pCurrBlock);
10522 VkResult res = AllocateFromBlock(
10533 if(res == VK_SUCCESS)
                VMA_DEBUG_LOG(" Returned from existing block #%u", (uint32_t)blockIndex);
        for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
            VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
10546 VMA_ASSERT(pCurrBlock);
10547 VkResult res = AllocateFromBlock(
10558 if(res == VK_SUCCESS)
                VMA_DEBUG_LOG(" Returned from existing block #%u", (uint32_t)blockIndex);
10568 if(canCreateNewBlock)
10571 VkDeviceSize newBlockSize = m_PreferredBlockSize;
10572 uint32_t newBlockSizeShift = 0;
10573 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
10575 if(!m_ExplicitBlockSize)
10578 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
10579 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
10581 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10582 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
10584 newBlockSize = smallerNewBlockSize;
10585 ++newBlockSizeShift;
10594 size_t newBlockIndex = 0;
10595 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
10597 if(!m_ExplicitBlockSize)
10599 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
10601 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10602 if(smallerNewBlockSize >= size)
10604 newBlockSize = smallerNewBlockSize;
10605 ++newBlockSizeShift;
10606 res = CreateBlock(newBlockSize, &newBlockIndex);
10615 if(res == VK_SUCCESS)
            VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
10618 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
10620 res = AllocateFromBlock(
10631 if(res == VK_SUCCESS)
                VMA_DEBUG_LOG(" Created new block Size=%llu", newBlockSize);
10639 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10646 if(canMakeOtherLost)
10648 uint32_t tryIndex = 0;
10649 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
10651 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
10652 VmaAllocationRequest bestRequest = {};
10653 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
            for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
10662 VMA_ASSERT(pCurrBlock);
10663 VmaAllocationRequest currRequest = {};
10664 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10667 m_BufferImageGranularity,
10676 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10677 if(pBestRequestBlock == VMA_NULL ||
10678 currRequestCost < bestRequestCost)
10680 pBestRequestBlock = pCurrBlock;
10681 bestRequest = currRequest;
10682 bestRequestCost = currRequestCost;
10684 if(bestRequestCost == 0)
            for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
10698 VMA_ASSERT(pCurrBlock);
10699 VmaAllocationRequest currRequest = {};
10700 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10703 m_BufferImageGranularity,
10712 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10713 if(pBestRequestBlock == VMA_NULL ||
10714 currRequestCost < bestRequestCost ||
10717 pBestRequestBlock = pCurrBlock;
10718 bestRequest = currRequest;
10719 bestRequestCost = currRequestCost;
10721 if(bestRequestCost == 0 ||
10731 if(pBestRequestBlock != VMA_NULL)
10735 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
10736 if(res != VK_SUCCESS)
10742 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
10748 if(pBestRequestBlock->m_pMetadata->IsEmpty())
                m_HasEmptyBlock = false;
10753 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10754 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
10755 (*pAllocation)->InitBlockAllocation(
10758 bestRequest.offset,
10764 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
                (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
10767 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10769 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10771 if(IsCorruptionDetectionEnabled())
10773 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                    VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
10789 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
10791 return VK_ERROR_TOO_MANY_OBJECTS;
10795 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10798 void VmaBlockVector::Free(
10801 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
10805 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10807 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
10809 if(IsCorruptionDetectionEnabled())
10811 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
10815 if(hAllocation->IsPersistentMap())
10817 pBlock->Unmap(m_hAllocator, 1);
10820 pBlock->m_pMetadata->Free(hAllocation);
10821 VMA_HEAVY_ASSERT(pBlock->Validate());
        VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", memTypeIndex);
10826 if(pBlock->m_pMetadata->IsEmpty())
10829 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
10831 pBlockToDelete = pBlock;
                m_HasEmptyBlock = true;
10842 else if(m_HasEmptyBlock)
10844 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
10845 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
10847 pBlockToDelete = pLastBlock;
10848 m_Blocks.pop_back();
                m_HasEmptyBlock = false;
10853 IncrementallySortBlocks();
10858 if(pBlockToDelete != VMA_NULL)
        VMA_DEBUG_LOG(" Deleted empty allocation");
10861 pBlockToDelete->Destroy(m_hAllocator);
10862 vma_delete(m_hAllocator, pBlockToDelete);
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
10871 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
10872 if(result >= m_PreferredBlockSize)
10880 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
10882 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10884 if(m_Blocks[blockIndex] == pBlock)
10886 VmaVectorRemove(m_Blocks, blockIndex);
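// IncrementallySortBlocks performs a single bubble-sort pass, swapping adjacent blocks so
// that blocks with more free space gradually drift towards the end of m_Blocks.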
10893 void VmaBlockVector::IncrementallySortBlocks()
    for(size_t i = 1; i < m_Blocks.size(); ++i)
10900 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
10902 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
10909 VkResult VmaBlockVector::AllocateFromBlock(
10910 VmaDeviceMemoryBlock* pBlock,
10912 uint32_t currentFrameIndex,
10914 VkDeviceSize alignment,
10917 VmaSuballocationType suballocType,
10926 VmaAllocationRequest currRequest = {};
10927 if(pBlock->m_pMetadata->CreateAllocationRequest(
10930 m_BufferImageGranularity,
10940 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
10944 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
10945 if(res != VK_SUCCESS)
10952 if(pBlock->m_pMetadata->IsEmpty())
            m_HasEmptyBlock = false;
10957 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10958 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
10959 (*pAllocation)->InitBlockAllocation(
10962 currRequest.offset,
10968 VMA_HEAVY_ASSERT(pBlock->Validate());
10969 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
10970 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10972 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10974 if(IsCorruptionDetectionEnabled())
10976 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
10981 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
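// CreateBlock allocates a new VkDeviceMemory of the requested size from this vector's
// memory type, wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks and reports
// the new index through pNewBlockIndex.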
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
10986 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
10987 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
10988 allocInfo.allocationSize = blockSize;
10989 VkDeviceMemory mem = VK_NULL_HANDLE;
10990 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
11004 allocInfo.allocationSize,
11008 m_Blocks.push_back(pBlock);
11009 if(pNewBlockIndex != VMA_NULL)
11011 *pNewBlockIndex = m_Blocks.size() - 1;
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    json.WriteString("MemoryTypeIndex");
    json.WriteNumber(m_MemoryTypeIndex);

    json.WriteString("BlockSize");
    json.WriteNumber(m_PreferredBlockSize);

    json.WriteString("BlockCount");
    json.BeginObject(true);
    if(m_MinBlockCount > 0)
    {
        json.WriteString("Min");
        json.WriteNumber((uint64_t)m_MinBlockCount);
    }
    if(m_MaxBlockCount < SIZE_MAX)
    {
        json.WriteString("Max");
        json.WriteNumber((uint64_t)m_MaxBlockCount);
    }
    json.WriteString("Cur");
    json.WriteNumber((uint64_t)m_Blocks.size());

    if(m_FrameInUseCount > 0)
    {
        json.WriteString("FrameInUseCount");
        json.WriteNumber(m_FrameInUseCount);
    }

    if(m_Algorithm != 0)
    {
        json.WriteString("Algorithm");
        json.WriteString(VmaAlgorithmToStr(m_Algorithm));
    }

    json.WriteString("PreferredBlockSize");
    json.WriteNumber(m_PreferredBlockSize);

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
#endif // #if VMA_STATS_STRING_ENABLED

VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
11086 uint32_t currentFrameIndex)
11088 if(m_pDefragmentator == VMA_NULL)
11090 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
11093 currentFrameIndex);
11096 return m_pDefragmentator;
11099 VkResult VmaBlockVector::Defragment(
11101 VkDeviceSize& maxBytesToMove,
11102 uint32_t& maxAllocationsToMove)
11104 if(m_pDefragmentator == VMA_NULL)
11109 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11112 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
11115 if(pDefragmentationStats != VMA_NULL)
11117 const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
11118 const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
        pDefragmentationStats->bytesMoved += bytesMoved;
11121 VMA_ASSERT(bytesMoved <= maxBytesToMove);
11122 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
11123 maxBytesToMove -= bytesMoved;
11124 maxAllocationsToMove -= allocationsMoved;
    m_HasEmptyBlock = false;
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
11131 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11132 if(pBlock->m_pMetadata->IsEmpty())
11134 if(m_Blocks.size() > m_MinBlockCount)
11136 if(pDefragmentationStats != VMA_NULL)
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
11142 VmaVectorRemove(m_Blocks, blockIndex);
11143 pBlock->Destroy(m_hAllocator);
11144 vma_delete(m_hAllocator, pBlock);
                m_HasEmptyBlock = true;
11156 void VmaBlockVector::DestroyDefragmentator()
11158 if(m_pDefragmentator != VMA_NULL)
11160 vma_delete(m_hAllocator, m_pDefragmentator);
11161 m_pDefragmentator = VMA_NULL;
11165 void VmaBlockVector::MakePoolAllocationsLost(
11166 uint32_t currentFrameIndex,
11167 size_t* pLostAllocationCount)
11169 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11170 size_t lostAllocationCount = 0;
11171 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
11174 VMA_ASSERT(pBlock);
11175 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
11177 if(pLostAllocationCount != VMA_NULL)
11179 *pLostAllocationCount = lostAllocationCount;
11183 VkResult VmaBlockVector::CheckCorruption()
11185 if(!IsCorruptionDetectionEnabled())
11187 return VK_ERROR_FEATURE_NOT_PRESENT;
11190 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11191 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
11194 VMA_ASSERT(pBlock);
11195 VkResult res = pBlock->CheckCorruption(m_hAllocator);
11196 if(res != VK_SUCCESS)
void VmaBlockVector::AddStats(VmaStats* pStats)
{
11206 const uint32_t memTypeIndex = m_MemoryTypeIndex;
11207 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
11209 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11211 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
11214 VMA_ASSERT(pBlock);
11215 VMA_HEAVY_ASSERT(pBlock->Validate());
11217 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
11227 VmaDefragmentator::VmaDefragmentator(
11229 VmaBlockVector* pBlockVector,
11230 uint32_t currentFrameIndex) :
11231 m_hAllocator(hAllocator),
11232 m_pBlockVector(pBlockVector),
11233 m_CurrentFrameIndex(currentFrameIndex),
11235 m_AllocationsMoved(0),
11236 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
11237 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
11239 VMA_ASSERT(pBlockVector->GetAlgorithm() == 0);
11242 VmaDefragmentator::~VmaDefragmentator()
    for(size_t i = m_Blocks.size(); i--; )
11246 vma_delete(m_hAllocator, m_Blocks[i]);
void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
11252 AllocationInfo allocInfo;
11253 allocInfo.m_hAllocation = hAlloc;
11254 allocInfo.m_pChanged = pChanged;
11255 m_Allocations.push_back(allocInfo);
VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator, void** ppMappedData)
{
11261 if(m_pMappedDataForDefragmentation)
11263 *ppMappedData = m_pMappedDataForDefragmentation;
11268 if(m_pBlock->GetMappedData())
11270 *ppMappedData = m_pBlock->GetMappedData();
11275 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
11276 *ppMappedData = m_pMappedDataForDefragmentation;
void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
{
11282 if(m_pMappedDataForDefragmentation != VMA_NULL)
11284 m_pBlock->Unmap(hAllocator, 1);
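// DefragmentRound repeatedly takes the last allocation of the last (emptiest) block and
// tries to re-create it in an earlier block: the data is memcpy'd between the mapped
// blocks and the allocation is rebound to its new offset, until the byte or move limits
// are reached (returning VK_INCOMPLETE) or no further moves are possible.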
11288 VkResult VmaDefragmentator::DefragmentRound(
11289 VkDeviceSize maxBytesToMove,
11290 uint32_t maxAllocationsToMove)
11292 if(m_Blocks.empty())
11297 size_t srcBlockIndex = m_Blocks.size() - 1;
11298 size_t srcAllocIndex = SIZE_MAX;
11304 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
11306 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
11309 if(srcBlockIndex == 0)
11316 srcAllocIndex = SIZE_MAX;
11321 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
11325 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
11326 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
11328 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
11329 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
11330 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
11331 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
    for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
11336 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
11337 VmaAllocationRequest dstAllocRequest;
11338 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
11339 m_CurrentFrameIndex,
11340 m_pBlockVector->GetFrameInUseCount(),
11341 m_pBlockVector->GetBufferImageGranularity(),
11348 &dstAllocRequest) &&
11350 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
11352 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
11355 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
11356 (m_BytesMoved + size > maxBytesToMove))
11358 return VK_INCOMPLETE;
11361 void* pDstMappedData = VMA_NULL;
11362 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
11363 if(res != VK_SUCCESS)
11368 void* pSrcMappedData = VMA_NULL;
11369 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
11370 if(res != VK_SUCCESS)
11377 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
11378 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
11379 static_cast<size_t>(size));
11381 if(VMA_DEBUG_MARGIN > 0)
11383 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
11384 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
11387 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
11392 allocInfo.m_hAllocation);
11393 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
11395 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
11397 if(allocInfo.m_pChanged != VMA_NULL)
11399 *allocInfo.m_pChanged = VK_TRUE;
11402 ++m_AllocationsMoved;
11403 m_BytesMoved += size;
11405 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
11413 if(srcAllocIndex > 0)
11419 if(srcBlockIndex > 0)
11422 srcAllocIndex = SIZE_MAX;
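// Top-level defragmentator entry point: builds a BlockInfo per block, distributes the
// registered allocations to the blocks that own them, sorts blocks so that the best move
// destinations come first, runs up to two DefragmentRound() passes, and finally unmaps
// any blocks that were mapped only for defragmentation.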
VkResult VmaDefragmentator::Defragment(
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Allocations.empty())
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());

    for(size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
    {
        AllocationInfo& allocInfo = m_Allocations[blockIndex];
        if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
        {
            VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
            BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
            if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
            {
                (*it)->m_Allocations.push_back(allocInfo);
            }
        }
    }
    m_Allocations.clear();

    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];
        pBlockInfo->CalcHasNonMovableAllocations();
        pBlockInfo->SortAllocationsBySizeDescecnding();
    }

    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    VkResult result = VK_SUCCESS;
    for(size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
    }

    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        m_Blocks[blockIndex]->Unmap(m_hAllocator);
    }

    return result;
}
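// Heuristic used by DefragmentRound(): a move makes sense only if it brings the
// allocation closer to the beginning of memory, i.e. to a block with a lower index, or
// to a lower offset within the same block.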
bool VmaDefragmentator::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
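// VmaRecorder (compiled only when VMA_RECORDING_ENABLED is 1, and Win32-only in this
// version because of fopen_s/QueryPerformanceCounter/GetCurrentThreadId): appends one
// CSV line per recorded VMA call to the file named in VmaRecordSettings::pFilePath,
// prefixed with thread id, timestamp and frame index, so a workload can be replayed later.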
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}
VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,4");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
11585 CallParams callParams;
11586 GetBasicParams(callParams);
11588 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11589 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
11600 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
11602 CallParams callParams;
11603 GetBasicParams(callParams);
11605 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11606 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
11611 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
11612 const VkMemoryRequirements& vkMemReq,
11616 CallParams callParams;
11617 GetBasicParams(callParams);
11619 VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
11621 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11623 vkMemReq.alignment,
11624 vkMemReq.memoryTypeBits,
11632 userDataStr.GetString());
11636 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
11637 const VkMemoryRequirements& vkMemReq,
11638 bool requiresDedicatedAllocation,
11639 bool prefersDedicatedAllocation,
11643 CallParams callParams;
11644 GetBasicParams(callParams);
11646 VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
11648 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11650 vkMemReq.alignment,
11651 vkMemReq.memoryTypeBits,
11652 requiresDedicatedAllocation ? 1 : 0,
11653 prefersDedicatedAllocation ? 1 : 0,
11661 userDataStr.GetString());
11665 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
11666 const VkMemoryRequirements& vkMemReq,
11667 bool requiresDedicatedAllocation,
11668 bool prefersDedicatedAllocation,
11672 CallParams callParams;
11673 GetBasicParams(callParams);
11675 VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
11677 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11679 vkMemReq.alignment,
11680 vkMemReq.memoryTypeBits,
11681 requiresDedicatedAllocation ? 1 : 0,
11682 prefersDedicatedAllocation ? 1 : 0,
11690 userDataStr.GetString());
11694 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
11697 CallParams callParams;
11698 GetBasicParams(callParams);
11700 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11701 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11706 void VmaRecorder::RecordResizeAllocation(
11707 uint32_t frameIndex,
11709 VkDeviceSize newSize)
11711 CallParams callParams;
11712 GetBasicParams(callParams);
11714 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11715 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
11716 allocation, newSize);
11720 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
11722 const void* pUserData)
11724 CallParams callParams;
11725 GetBasicParams(callParams);
11727 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11728 UserDataString userDataStr(
11731 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11733 userDataStr.GetString());
11737 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
11740 CallParams callParams;
11741 GetBasicParams(callParams);
11743 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11744 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11749 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
11752 CallParams callParams;
11753 GetBasicParams(callParams);
11755 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11756 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11761 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
11764 CallParams callParams;
11765 GetBasicParams(callParams);
11767 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11768 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11773 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
11774 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11776 CallParams callParams;
11777 GetBasicParams(callParams);
11779 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11780 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
11787 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
11788 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11790 CallParams callParams;
11791 GetBasicParams(callParams);
11793 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11794 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
11801 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
11802 const VkBufferCreateInfo& bufCreateInfo,
11806 CallParams callParams;
11807 GetBasicParams(callParams);
11809 VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
11811 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11812 bufCreateInfo.flags,
11813 bufCreateInfo.size,
11814 bufCreateInfo.usage,
11815 bufCreateInfo.sharingMode,
11816 allocCreateInfo.
flags,
11817 allocCreateInfo.
usage,
11821 allocCreateInfo.
pool,
11823 userDataStr.GetString());
11827 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
11828 const VkImageCreateInfo& imageCreateInfo,
11832 CallParams callParams;
11833 GetBasicParams(callParams);
11835 VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
11837 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11838 imageCreateInfo.flags,
11839 imageCreateInfo.imageType,
11840 imageCreateInfo.format,
11841 imageCreateInfo.extent.width,
11842 imageCreateInfo.extent.height,
11843 imageCreateInfo.extent.depth,
11844 imageCreateInfo.mipLevels,
11845 imageCreateInfo.arrayLayers,
11846 imageCreateInfo.samples,
11847 imageCreateInfo.tiling,
11848 imageCreateInfo.usage,
11849 imageCreateInfo.sharingMode,
11850 imageCreateInfo.initialLayout,
11851 allocCreateInfo.
flags,
11852 allocCreateInfo.
usage,
11856 allocCreateInfo.
pool,
11858 userDataStr.GetString());
11862 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
11865 CallParams callParams;
11866 GetBasicParams(callParams);
11868 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11869 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
11874 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
11877 CallParams callParams;
11878 GetBasicParams(callParams);
11880 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11881 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
11886 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
11889 CallParams callParams;
11890 GetBasicParams(callParams);
11892 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11893 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11898 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
11901 CallParams callParams;
11902 GetBasicParams(callParams);
11904 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11905 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
11910 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
11913 CallParams callParams;
11914 GetBasicParams(callParams);
11916 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11917 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
11924 if(pUserData != VMA_NULL)
        m_Str = (const char*)pUserData;
        sprintf_s(m_PtrStr, "%p", pUserData);
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    bool dedicatedAllocationExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}

void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED

    m_hDevice(pCreateInfo->device),
12014 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
12015 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
12016 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
12017 m_PreferredLargeHeapBlockSize(0),
12018 m_PhysicalDevice(pCreateInfo->physicalDevice),
12019 m_CurrentFrameIndex(0),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
12023 ,m_pRecorder(VMA_NULL)
12026 if(VMA_DEBUG_DETECT_CORRUPTION)
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
#if !(VMA_DEDICATED_ALLOCATION)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
    }
#endif
    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
12048 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
12050 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
12061 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
12062 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
12064 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
12065 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
12066 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
12067 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
12074 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
12077 if(limit != VK_WHOLE_SIZE)
12079 m_HeapSizeLimit[heapIndex] = limit;
12080 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
12082 m_MemProps.memoryHeaps[heapIndex].size = limit;
12088 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12090 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
12095 preferredBlockSize,
12098 GetBufferImageGranularity(),
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
12112 VkResult res = VK_SUCCESS;
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
12120 if(res != VK_SUCCESS)
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_UseKhrDedicatedAllocation);
12128 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
12131 return VK_ERROR_FEATURE_NOT_PRESENT;
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
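// Fills m_VulkanFunctions in three steps: take the statically linked Vulkan entry points
// when VMA_STATIC_VULKAN_FUNCTIONS == 1, then override individual members with any
// non-null pointers supplied in VmaAllocatorCreateInfo::pVulkanFunctions, and finally
// assert that every function pointer the allocator needs ended up non-null.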
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
12161 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
12162 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
12163 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
12164 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
12165 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
12166 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
12167 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
12168 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
12169 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
12170 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
12171 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
12172 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
12173 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
12174 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
12175 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif // #if VMA_DEDICATED_ALLOCATION
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
12192 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
12193 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
12194 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
12195 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
12196 VMA_COPY_IF_NOT_NULL(vkMapMemory);
12197 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
12198 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
12199 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
12200 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
12201 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
12202 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
12203 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
12204 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
12205 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
12206 VMA_COPY_IF_NOT_NULL(vkCreateImage);
12207 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
#if VMA_DEDICATED_ALLOCATION
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
    }

#undef VMA_COPY_IF_NOT_NULL

    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
12219 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
12220 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
12221 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
12222 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
12223 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
12224 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
12225 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
12226 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
12227 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
12228 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
12229 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
12230 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
12231 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
12232 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
12233 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif
}

VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
}
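// Example (assuming the default macro values defined earlier in this header:
// VMA_SMALL_HEAP_MAX_SIZE = 1 GiB, VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 MiB): a
// 512 MiB heap counts as "small", so its blocks are heapSize / 8 = 64 MiB, while heaps
// larger than 1 GiB use the 256 MiB preferred block size.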
12251 VkResult VmaAllocator_T::AllocateMemoryOfType(
12253 VkDeviceSize alignment,
12254 bool dedicatedAllocation,
12255 VkBuffer dedicatedBuffer,
12256 VkImage dedicatedImage,
12258 uint32_t memTypeIndex,
12259 VmaSuballocationType suballocType,
12262 VMA_ASSERT(pAllocation != VMA_NULL);
    VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
12269 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
12275 VMA_ASSERT(blockVector);
12277 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
12278 bool preferDedicatedMemory =
12279 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
12280 dedicatedAllocation ||
12282 size > preferredBlockSize / 2;
    if(preferDedicatedMemory &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
12295 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12299 return AllocateDedicatedMemory(
12313 VkResult res = blockVector->Allocate(
12315 m_CurrentFrameIndex.load(),
12321 if(res == VK_SUCCESS)
12329 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12333 res = AllocateDedicatedMemory(
12339 finalCreateInfo.pUserData,
12343 if(res == VK_SUCCESS)
            VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
            VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
12359 VkResult VmaAllocator_T::AllocateDedicatedMemory(
12361 VmaSuballocationType suballocType,
12362 uint32_t memTypeIndex,
12364 bool isUserDataString,
12366 VkBuffer dedicatedBuffer,
12367 VkImage dedicatedImage,
12370 VMA_ASSERT(pAllocation);
12372 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12373 allocInfo.memoryTypeIndex = memTypeIndex;
12374 allocInfo.allocationSize = size;
#if VMA_DEDICATED_ALLOCATION
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
12378 if(m_UseKhrDedicatedAllocation)
12380 if(dedicatedBuffer != VK_NULL_HANDLE)
12382 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
12383 dedicatedAllocInfo.buffer = dedicatedBuffer;
12384 allocInfo.pNext = &dedicatedAllocInfo;
12386 else if(dedicatedImage != VK_NULL_HANDLE)
12388 dedicatedAllocInfo.image = dedicatedImage;
12389 allocInfo.pNext = &dedicatedAllocInfo;
#endif // #if VMA_DEDICATED_ALLOCATION

    VkDeviceMemory hMemory = VK_NULL_HANDLE;
12396 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
        VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
12403 void* pMappedData = VMA_NULL;
12406 res = (*m_VulkanFunctions.vkMapMemory)(
            VMA_DEBUG_LOG(" vkMapMemory FAILED");
12416 FreeVulkanMemory(memTypeIndex, size, hMemory);
    *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
12424 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12426 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12431 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12432 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12433 VMA_ASSERT(pDedicatedAllocations);
12434 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
    VMA_DEBUG_LOG(" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
12451 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
12452 memReqInfo.buffer = hBuffer;
12454 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12456 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12457 memReq2.pNext = &memDedicatedReq;
12459 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12461 memReq = memReq2.memoryRequirements;
12462 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12463 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
12483 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
12484 memReqInfo.image = hImage;
12486 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12488 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12489 memReq2.pNext = &memDedicatedReq;
12491 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12493 memReq = memReq2.memoryRequirements;
12494 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12495 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
12506 VkResult VmaAllocator_T::AllocateMemory(
12507 const VkMemoryRequirements& vkMemReq,
12508 bool requiresDedicatedAllocation,
12509 bool prefersDedicatedAllocation,
12510 VkBuffer dedicatedBuffer,
12511 VkImage dedicatedImage,
12513 VmaSuballocationType suballocType,
12516 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
12518 if(vkMemReq.size == 0)
12520 return VK_ERROR_VALIDATION_FAILED_EXT;
12525 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
12526 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12531 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
12532 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12534 if(requiresDedicatedAllocation)
12538 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
12539 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12541 if(createInfo.
pool != VK_NULL_HANDLE)
12543 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
12544 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12547 if((createInfo.
pool != VK_NULL_HANDLE) &&
12550 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
12551 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12554 if(createInfo.
pool != VK_NULL_HANDLE)
12556 const VkDeviceSize alignmentForPool = VMA_MAX(
12557 vkMemReq.alignment,
12558 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
12559 return createInfo.
pool->m_BlockVector.Allocate(
12561 m_CurrentFrameIndex.load(),
12571 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
12572 uint32_t memTypeIndex = UINT32_MAX;
12574 if(res == VK_SUCCESS)
12576 VkDeviceSize alignmentForMemType = VMA_MAX(
12577 vkMemReq.alignment,
12578 GetMemoryTypeMinAlignment(memTypeIndex));
12580 res = AllocateMemoryOfType(
12582 alignmentForMemType,
12583 requiresDedicatedAllocation || prefersDedicatedAllocation,
12591 if(res == VK_SUCCESS)
12601 memoryTypeBits &= ~(1u << memTypeIndex);
12604 if(res == VK_SUCCESS)
12606 alignmentForMemType = VMA_MAX(
12607 vkMemReq.alignment,
12608 GetMemoryTypeMinAlignment(memTypeIndex));
12610 res = AllocateMemoryOfType(
12612 alignmentForMemType,
12613 requiresDedicatedAllocation || prefersDedicatedAllocation,
12621 if(res == VK_SUCCESS)
12631 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
{
12644 VMA_ASSERT(allocation);
12646 if(TouchAllocation(allocation))
12648 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12650 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
12653 switch(allocation->GetType())
12655 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12657 VmaBlockVector* pBlockVector = VMA_NULL;
12658 VmaPool hPool = allocation->GetPool();
12659 if(hPool != VK_NULL_HANDLE)
12661 pBlockVector = &hPool->m_BlockVector;
12665 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12666 pBlockVector = m_pBlockVectors[memTypeIndex];
12668 pBlockVector->Free(allocation);
12671 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12672 FreeDedicatedMemory(allocation);
    allocation->SetUserData(this, VMA_NULL);
    vma_delete(this, allocation);
}
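// Tries to change the size of an existing allocation in place. Dedicated allocations
// cannot be resized (VK_ERROR_FEATURE_NOT_PRESENT); block allocations succeed only if the
// block metadata can grow or shrink the suballocation without moving it, otherwise
// VK_ERROR_OUT_OF_POOL_MEMORY is returned.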
12683 VkResult VmaAllocator_T::ResizeAllocation(
12685 VkDeviceSize newSize)
12687 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
12689 return VK_ERROR_VALIDATION_FAILED_EXT;
12691 if(newSize == alloc->GetSize())
12696 switch(alloc->GetType())
12698 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12699 return VK_ERROR_FEATURE_NOT_PRESENT;
12700 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12701 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
12703 alloc->ChangeSize(newSize);
12704 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
12709 return VK_ERROR_OUT_OF_POOL_MEMORY;
12713 return VK_ERROR_VALIDATION_FAILED_EXT;
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);
12727 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12729 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12730 VMA_ASSERT(pBlockVector);
12731 pBlockVector->AddStats(pStats);
12736 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12737 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12739 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
12744 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12746 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12747 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12748 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12749 VMA_ASSERT(pDedicatedAllocVector);
12750 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
12753 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);

    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
12768 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
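// Allocator-level defragmentation: each allocation passed by the user is routed to the
// block vector that owns it (the default vector of its memory type, or a custom pool's
// vector if that pool uses the default algorithm) and registered with that vector's
// VmaDefragmentator. Only HOST_VISIBLE + HOST_COHERENT memory types are considered,
// because the moves are performed with memcpy through mapped pointers.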
12770 VkResult VmaAllocator_T::Defragment(
12772 size_t allocationCount,
12773 VkBool32* pAllocationsChanged,
12777 if(pAllocationsChanged != VMA_NULL)
12779 memset(pAllocationsChanged, 0, allocationCount *
sizeof(VkBool32));
12781 if(pDefragmentationStats != VMA_NULL)
12783 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
12786 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
12788 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
12790 const size_t poolCount = m_Pools.size();
12793 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12796 VMA_ASSERT(hAlloc);
12797 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
12799 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12800 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
12802 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
12804 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
12806 VmaBlockVector* pAllocBlockVector = VMA_NULL;
12808 const VmaPool hAllocPool = hAlloc->GetPool();
12810 if(hAllocPool != VK_NULL_HANDLE)
12813 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
12815 pAllocBlockVector = &hAllocPool->m_BlockVector;
12821 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
12824 if(pAllocBlockVector != VMA_NULL)
            VmaDefragmentator* const pDefragmentator =
                pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);
            VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                &pAllocationsChanged[allocIndex] : VMA_NULL;
            pDefragmentator->AddAllocation(hAlloc, pChanged);
12835 VkResult result = VK_SUCCESS;
12839 VkDeviceSize maxBytesToMove = SIZE_MAX;
12840 uint32_t maxAllocationsToMove = UINT32_MAX;
12841 if(pDefragmentationInfo != VMA_NULL)
12848 for(uint32_t memTypeIndex = 0;
12849 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
12853 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12855 result = m_pBlockVectors[memTypeIndex]->Defragment(
12856 pDefragmentationStats,
12858 maxAllocationsToMove);
12863 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
12865 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
12866 pDefragmentationStats,
12868 maxAllocationsToMove);
12874 for(
size_t poolIndex = poolCount; poolIndex--; )
12876 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
12880 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
12882 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12884 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
12893 if(hAllocation->CanBecomeLost())
12899 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12900 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12903 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            pAllocationInfo->offset = 0;
            pAllocationInfo->size = hAllocation->GetSize();
            pAllocationInfo->pUserData = hAllocation->GetUserData();
            return VK_SUCCESS;
        }
        else if(localLastUseFrameIndex == localCurrFrameIndex)
        {
            pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
            pAllocationInfo->deviceMemory = hAllocation->GetMemory();
            pAllocationInfo->offset = hAllocation->GetOffset();
            pAllocationInfo->size = hAllocation->GetSize();
            pAllocationInfo->pUserData = hAllocation->GetUserData();
            return VK_SUCCESS;
        }
12925 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12927 localLastUseFrameIndex = localCurrFrameIndex;
12934 #if VMA_STATS_STRING_ENABLED 12935 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12936 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12939 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12940 if(localLastUseFrameIndex == localCurrFrameIndex)
12946 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12948 localLastUseFrameIndex = localCurrFrameIndex;
    pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
    pAllocationInfo->deviceMemory = hAllocation->GetMemory();
    pAllocationInfo->offset = hAllocation->GetOffset();
    pAllocationInfo->size = hAllocation->GetSize();
    pAllocationInfo->pMappedData = hAllocation->GetMappedData();
    pAllocationInfo->pUserData = hAllocation->GetUserData();
}
12963 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
12966 if(hAllocation->CanBecomeLost())
12968 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12969 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12972 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12976 else if(localLastUseFrameIndex == localCurrFrameIndex)
12982 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12984 localLastUseFrameIndex = localCurrFrameIndex;
12991 #if VMA_STATS_STRING_ENABLED 12992 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12993 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12996 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12997 if(localLastUseFrameIndex == localCurrFrameIndex)
13003 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
13005 localLastUseFrameIndex = localCurrFrameIndex;
13017 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
13027 return VK_ERROR_INITIALIZATION_FAILED;
13030 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
13032 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
13034 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
13035 if(res != VK_SUCCESS)
13037 vma_delete(
this, *pPool);
13044 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13045 (*pPool)->SetId(m_NextPoolId++);
13046 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
13052 void VmaAllocator_T::DestroyPool(
VmaPool pool)
13056 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13057 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
13058 VMA_ASSERT(success &&
"Pool not found in Allocator.");
13061 vma_delete(
this, pool);
13066 pool->m_BlockVector.GetPoolStats(pPoolStats);
13069 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
13071 m_CurrentFrameIndex.store(frameIndex);
13074 void VmaAllocator_T::MakePoolAllocationsLost(
13076 size_t* pLostAllocationCount)
13078 hPool->m_BlockVector.MakePoolAllocationsLost(
13079 m_CurrentFrameIndex.load(),
13080 pLostAllocationCount);
13083 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
13085 return hPool->m_BlockVector.CheckCorruption();
13088 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
13090 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
13093 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13095 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
13097 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
13098 VMA_ASSERT(pBlockVector);
13099 VkResult localRes = pBlockVector->CheckCorruption();
13102 case VK_ERROR_FEATURE_NOT_PRESENT:
13105 finalRes = VK_SUCCESS;
13115 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13116 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
13118 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
13120 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
13123 case VK_ERROR_FEATURE_NOT_PRESENT:
13126 finalRes = VK_SUCCESS;
13138 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
13140 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
13141 (*pAllocation)->InitLost();
13144 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
13146 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
13149 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13151 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13152 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
13154 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
13155 if(res == VK_SUCCESS)
13157 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
13162 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
13167 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
13170 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
13172 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
13178 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
13180 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
13182 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
13185 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
13187 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
13188 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13190 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13191 m_HeapSizeLimit[heapIndex] += size;
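// Maps the memory backing the allocation and returns a pointer already offset to the
// allocation's own bytes. For block allocations the mapping is reference-counted on the
// whole VkDeviceMemory block, so multiple mapped allocations from one block are fine.
// Allocations that can become lost are refused with VK_ERROR_MEMORY_MAP_FAILED.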
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
13197 if(hAllocation->CanBecomeLost())
13199 return VK_ERROR_MEMORY_MAP_FAILED;
13202 switch(hAllocation->GetType())
13204 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char *pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
13209 if(res == VK_SUCCESS)
13211 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
13212 hAllocation->BlockAllocMap();
13216 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
13220 return VK_ERROR_MEMORY_MAP_FAILED;
13226 switch(hAllocation->GetType())
13228 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13230 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
13231 hAllocation->BlockAllocUnmap();
13232 pBlock->Unmap(
this, 1);
13235 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13236 hAllocation->DedicatedAllocUnmap(
this);
13243 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
13245 VkResult res = VK_SUCCESS;
13246 switch(hAllocation->GetType())
13248 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13249 res = GetVulkanFunctions().vkBindBufferMemory(
13252 hAllocation->GetMemory(),
13255 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13257 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13258 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
13259 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
13268 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
13270 VkResult res = VK_SUCCESS;
13271 switch(hAllocation->GetType())
13273 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13274 res = GetVulkanFunctions().vkBindImageMemory(
13277 hAllocation->GetMemory(),
13280 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13282 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13283 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
13284 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
13293 void VmaAllocator_T::FlushOrInvalidateAllocation(
13295 VkDeviceSize offset, VkDeviceSize size,
13296 VMA_CACHE_OPERATION op)
13298 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
13299 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
13301 const VkDeviceSize allocationSize = hAllocation->GetSize();
13302 VMA_ASSERT(offset <= allocationSize);
13304 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13306 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13307 memRange.memory = hAllocation->GetMemory();
13309 switch(hAllocation->GetType())
13311 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13312 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13313 if(size == VK_WHOLE_SIZE)
13315 memRange.size = allocationSize - memRange.offset;
13319 VMA_ASSERT(offset + size <= allocationSize);
13320 memRange.size = VMA_MIN(
13321 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
13322 allocationSize - memRange.offset);
13326 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13329 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13330 if(size == VK_WHOLE_SIZE)
13332 size = allocationSize - offset;
13336 VMA_ASSERT(offset + size <= allocationSize);
13338 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
13341 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
13342 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
13343 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
13344 memRange.offset += allocationOffset;
13345 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
13356 case VMA_CACHE_FLUSH:
13357 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
13359 case VMA_CACHE_INVALIDATE:
13360 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
13369 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
13371 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
13373 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
13375 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13376 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
13377 VMA_ASSERT(pDedicatedAllocations);
13378 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
13379 VMA_ASSERT(success);
13382 VkDeviceMemory hMemory = allocation->GetMemory();
13394 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
13396 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
13401 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
13402 !hAllocation->CanBecomeLost() &&
13403 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13405 void* pData = VMA_NULL;
13406 VkResult res = Map(hAllocation, &pData);
13407 if(res == VK_SUCCESS)
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
13410 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
13411 Unmap(hAllocation);
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
13420 #if VMA_STATS_STRING_ENABLED 13422 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
13424 bool dedicatedAllocationsStarted =
false;
13425 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13427 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13428 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
13429 VMA_ASSERT(pDedicatedAllocVector);
13430 if(pDedicatedAllocVector->empty() ==
false)
13432 if(dedicatedAllocationsStarted ==
false)
13434 dedicatedAllocationsStarted =
true;
13435 json.WriteString(
"DedicatedAllocations");
13436 json.BeginObject();
13439 json.BeginString(
"Type ");
13440 json.ContinueString(memTypeIndex);
13445 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
13447 json.BeginObject(
true);
13449 hAlloc->PrintParameters(json);
13456 if(dedicatedAllocationsStarted)
13462 bool allocationsStarted =
false;
13463 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13465 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
13467 if(allocationsStarted ==
false)
13469 allocationsStarted =
true;
13470 json.WriteString(
"DefaultPools");
13471 json.BeginObject();
13474 json.BeginString(
"Type ");
13475 json.ContinueString(memTypeIndex);
13478 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
13481 if(allocationsStarted)
13489 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13490 const size_t poolCount = m_Pools.size();
13493 json.WriteString(
"Pools");
13494 json.BeginObject();
13495 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13497 json.BeginString();
13498 json.ContinueString(m_Pools[poolIndex]->GetId());
13501 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
13508 #endif // #if VMA_STATS_STRING_ENABLED 13517 VMA_ASSERT(pCreateInfo && pAllocator);
13518 VMA_DEBUG_LOG(
"vmaCreateAllocator");
13520 return (*pAllocator)->Init(pCreateInfo);
13526 if(allocator != VK_NULL_HANDLE)
13528 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
13529 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
13530 vma_delete(&allocationCallbacks, allocator);
13536 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
13538 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
13539 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
13544 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
13546 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
13547 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
13552 uint32_t memoryTypeIndex,
13553 VkMemoryPropertyFlags* pFlags)
13555 VMA_ASSERT(allocator && pFlags);
13556 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
13557 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
13562 uint32_t frameIndex)
13564 VMA_ASSERT(allocator);
13565 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
13567 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13569 allocator->SetCurrentFrameIndex(frameIndex);
13576 VMA_ASSERT(allocator && pStats);
13577 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13578 allocator->CalculateStats(pStats);
13581 #if VMA_STATS_STRING_ENABLED 13585 char** ppStatsString,
13586 VkBool32 detailedMap)
13588 VMA_ASSERT(allocator && ppStatsString);
13589 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13591 VmaStringBuilder sb(allocator);
13593 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
13594 json.BeginObject();
13597 allocator->CalculateStats(&stats);
13599 json.WriteString(
"Total");
13600 VmaPrintStatInfo(json, stats.
total);
13602 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
13604 json.BeginString(
"Heap ");
13605 json.ContinueString(heapIndex);
13607 json.BeginObject();
13609 json.WriteString(
"Size");
13610 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
13612 json.WriteString(
"Flags");
13613 json.BeginArray(
true);
13614 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
13616 json.WriteString(
"DEVICE_LOCAL");
13622 json.WriteString(
"Stats");
13623 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
13626 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
13628 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
13630 json.BeginString("Type ");
13631 json.ContinueString(typeIndex);
13634 json.BeginObject();
13636 json.WriteString("Flags");
13637 json.BeginArray(true);
13638 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
13639 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
13641 json.WriteString("DEVICE_LOCAL");
13643 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13645 json.WriteString("HOST_VISIBLE");
13647 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
13649 json.WriteString("HOST_COHERENT");
13651 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
13653 json.WriteString("HOST_CACHED");
13655 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
13657 json.WriteString("LAZILY_ALLOCATED");
13663 json.WriteString("Stats");
13664 VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
13673 if(detailedMap == VK_TRUE)
13675 allocator->PrintDetailedMap(json);
13681 const size_t len = sb.GetLength();
13682 char* const pChars = vma_new_array(allocator, char, len + 1);
13685 memcpy(pChars, sb.GetData(), len);
13687 pChars[len] = '\0';
13688 *ppStatsString = pChars;
13693 char* pStatsString)
13695 if(pStatsString != VMA_NULL)
13697 VMA_ASSERT(allocator);
13698 size_t len = strlen(pStatsString);
13699 vma_delete_array(allocator, pStatsString, len + 1);
13703 #endif // #if VMA_STATS_STRING_ENABLED
13710 uint32_t memoryTypeBits,
13712 uint32_t* pMemoryTypeIndex)
13714 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13715 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13716 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13723 uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
13724 uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
13729 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13733 switch(pAllocationCreateInfo->usage)
13738 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13740 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13744 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13747 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13748 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13750 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13754 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13755 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
13761 *pMemoryTypeIndex = UINT32_MAX;
13762 uint32_t minCost = UINT32_MAX;
13763 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
13764 memTypeIndex < allocator->GetMemoryTypeCount();
13765 ++memTypeIndex, memTypeBit <<= 1)
13768 if((memTypeBit & memoryTypeBits) != 0)
13770 const VkMemoryPropertyFlags currFlags =
13771 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
13773 if((requiredFlags & ~currFlags) == 0)
13776 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
13778 if(currCost < minCost)
13780 *pMemoryTypeIndex = memTypeIndex;
13785 minCost = currCost;
13790 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
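// Usage sketch (illustrative, not part of the implementation above): vmaFindMemoryTypeIndex()
// converts VmaAllocationCreateInfo::usage into required/preferred property flags as shown, then
// among the types allowed by memoryTypeBits picks the one missing the fewest preferred flags.
// Assumes memoryTypeBits came from vkGetBufferMemoryRequirements(); the function name is hypothetical.
static uint32_t FindExampleStagingMemoryType(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY; // HOST_VISIBLE + HOST_COHERENT required.

    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(allocator, memoryTypeBits, &allocCreateInfo, &memTypeIndex);
    return (res == VK_SUCCESS) ? memTypeIndex : UINT32_MAX;
}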
13795 const VkBufferCreateInfo* pBufferCreateInfo,
13797 uint32_t* pMemoryTypeIndex)
13799 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13800 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
13801 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13802 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13804 const VkDevice hDev = allocator->m_hDevice;
13805 VkBuffer hBuffer = VK_NULL_HANDLE;
13806 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
13807 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
13808 if(res == VK_SUCCESS)
13810 VkMemoryRequirements memReq = {};
13811 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
13812 hDev, hBuffer, &memReq);
13816 memReq.memoryTypeBits,
13817 pAllocationCreateInfo,
13820 allocator->GetVulkanFunctions().vkDestroyBuffer(
13821 hDev, hBuffer, allocator->GetAllocationCallbacks());
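// Usage sketch (illustrative, not part of the implementation above): as the code above shows,
// vmaFindMemoryTypeIndexForBufferInfo() creates a temporary buffer only to query its memory
// requirements, then destroys it. A typical use is obtaining the memoryTypeIndex needed for
// VmaPoolCreateInfo. Buffer size and usage below are arbitrary example values.
static VkResult FindExampleUniformBufferMemoryType(VmaAllocator allocator, uint32_t* pMemTypeIndex)
{
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    return vmaFindMemoryTypeIndexForBufferInfo(allocator, &bufCreateInfo, &allocCreateInfo, pMemTypeIndex);
}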
13828 const VkImageCreateInfo* pImageCreateInfo,
13830 uint32_t* pMemoryTypeIndex)
13832 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13833 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
13834 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13835 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13837 const VkDevice hDev = allocator->m_hDevice;
13838 VkImage hImage = VK_NULL_HANDLE;
13839 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
13840 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
13841 if(res == VK_SUCCESS)
13843 VkMemoryRequirements memReq = {};
13844 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
13845 hDev, hImage, &memReq);
13849 memReq.memoryTypeBits,
13850 pAllocationCreateInfo,
13853 allocator->GetVulkanFunctions().vkDestroyImage(
13854 hDev, hImage, allocator->GetAllocationCallbacks());
13864 VMA_ASSERT(allocator && pCreateInfo && pPool);
13866 VMA_DEBUG_LOG("vmaCreatePool");
13868 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13870 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
13872 #if VMA_RECORDING_ENABLED
13873 if(allocator->GetRecorder() != VMA_NULL)
13875 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
13886 VMA_ASSERT(allocator);
13888 if(pool == VK_NULL_HANDLE)
13893 VMA_DEBUG_LOG("vmaDestroyPool");
13895 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13897 #if VMA_RECORDING_ENABLED
13898 if(allocator->GetRecorder() != VMA_NULL)
13900 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
13904 allocator->DestroyPool(pool);
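// Usage sketch (illustrative, not part of the implementation above): creating a custom pool from
// a previously found memoryTypeIndex, querying its statistics and destroying it. Block size and
// block counts are arbitrary example values; the function name is hypothetical.
static void ExamplePoolLifetime(VmaAllocator allocator, uint32_t memTypeIndex)
{
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 16ull * 1024 * 1024; // 16 MiB per VkDeviceMemory block.
    poolCreateInfo.minBlockCount = 1;               // Keep at least one block allocated.
    poolCreateInfo.maxBlockCount = 4;               // Never allocate more than four blocks.

    VmaPool pool = VK_NULL_HANDLE;
    if(vmaCreatePool(allocator, &poolCreateInfo, &pool) == VK_SUCCESS)
    {
        // ... create allocations with VmaAllocationCreateInfo::pool = pool ...
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(allocator, pool, &poolStats);
        // poolStats.size, poolStats.unusedSize, poolStats.allocationCount, ...
        vmaDestroyPool(allocator, pool); // Frees the pool's VkDeviceMemory blocks.
    }
}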
13912 VMA_ASSERT(allocator && pool && pPoolStats);
13914 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13916 allocator->GetPoolStats(pool, pPoolStats);
13922 size_t* pLostAllocationCount)
13924 VMA_ASSERT(allocator && pool);
13926 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13928 #if VMA_RECORDING_ENABLED
13929 if(allocator->GetRecorder() != VMA_NULL)
13931 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
13935 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
13940 VMA_ASSERT(allocator && pool);
13942 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13944 VMA_DEBUG_LOG("vmaCheckPoolCorruption");
13946 return allocator->CheckPoolCorruption(pool);
13951 const VkMemoryRequirements* pVkMemoryRequirements,
13956 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
13958 VMA_DEBUG_LOG("vmaAllocateMemory");
13960 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13962 VkResult result = allocator->AllocateMemory(
13963 *pVkMemoryRequirements,
13969 VMA_SUBALLOCATION_TYPE_UNKNOWN,
13972 #if VMA_RECORDING_ENABLED
13973 if(allocator->GetRecorder() != VMA_NULL)
13975 allocator->GetRecorder()->RecordAllocateMemory(
13976 allocator->GetCurrentFrameIndex(),
13977 *pVkMemoryRequirements,
13983 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
13985 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
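// Usage sketch (illustrative, not part of the implementation above): general-purpose allocation
// from explicit VkMemoryRequirements, e.g. obtained with vkGetBufferMemoryRequirements() for a
// buffer the application created itself. The function name is hypothetical.
static VmaAllocation ExampleAllocateForRequirements(VmaAllocator allocator, const VkMemoryRequirements& memReq)
{
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY; // Prefer DEVICE_LOCAL memory.

    VmaAllocation allocation = VK_NULL_HANDLE;
    VmaAllocationInfo allocInfo = {};
    if(vmaAllocateMemory(allocator, &memReq, &allocCreateInfo, &allocation, &allocInfo) != VK_SUCCESS)
    {
        return VK_NULL_HANDLE;
    }
    // allocInfo.deviceMemory and allocInfo.offset identify the backing memory block and range.
    // Release later with vmaFreeMemory(allocator, allocation).
    return allocation;
}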
13998 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
14000 VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
14002 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14004 VkMemoryRequirements vkMemReq = {};
14005 bool requiresDedicatedAllocation = false;
14006 bool prefersDedicatedAllocation = false;
14007 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
14008 requiresDedicatedAllocation,
14009 prefersDedicatedAllocation);
14011 VkResult result = allocator->AllocateMemory(
14013 requiresDedicatedAllocation,
14014 prefersDedicatedAllocation,
14018 VMA_SUBALLOCATION_TYPE_BUFFER,
14021 #if VMA_RECORDING_ENABLED
14022 if(allocator->GetRecorder() != VMA_NULL)
14024 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
14025 allocator->GetCurrentFrameIndex(),
14027 requiresDedicatedAllocation,
14028 prefersDedicatedAllocation,
14034 if(pAllocationInfo && result == VK_SUCCESS)
14036 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
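// Usage sketch (illustrative, not part of the implementation above): allocating memory for a
// buffer the application created with vkCreateBuffer() itself, then binding it with
// vmaBindBufferMemory(). vmaCreateBuffer() further below does all of this in a single call.
// The function name is hypothetical.
static VkResult ExampleAllocateAndBindBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation* pAllocation)
{
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkResult res = vmaAllocateMemoryForBuffer(allocator, buffer, &allocCreateInfo, pAllocation, nullptr);
    if(res == VK_SUCCESS)
    {
        res = vmaBindBufferMemory(allocator, *pAllocation, buffer);
        if(res != VK_SUCCESS)
        {
            vmaFreeMemory(allocator, *pAllocation); // Roll back on bind failure.
            *pAllocation = VK_NULL_HANDLE;
        }
    }
    return res;
}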
14049 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
14051 VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
14053 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14055 VkMemoryRequirements vkMemReq = {};
14056 bool requiresDedicatedAllocation = false;
14057 bool prefersDedicatedAllocation = false;
14058 allocator->GetImageMemoryRequirements(image, vkMemReq,
14059 requiresDedicatedAllocation, prefersDedicatedAllocation);
14061 VkResult result = allocator->AllocateMemory(
14063 requiresDedicatedAllocation,
14064 prefersDedicatedAllocation,
14068 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
14071 #if VMA_RECORDING_ENABLED
14072 if(allocator->GetRecorder() != VMA_NULL)
14074 allocator->GetRecorder()->RecordAllocateMemoryForImage(
14075 allocator->GetCurrentFrameIndex(),
14077 requiresDedicatedAllocation,
14078 prefersDedicatedAllocation,
14084 if(pAllocationInfo && result == VK_SUCCESS)
14086 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14096 VMA_ASSERT(allocator);
14098 if(allocation == VK_NULL_HANDLE)
14103 VMA_DEBUG_LOG("vmaFreeMemory");
14105 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14107 #if VMA_RECORDING_ENABLED
14108 if(allocator->GetRecorder() != VMA_NULL)
14110 allocator->GetRecorder()->RecordFreeMemory(
14111 allocator->GetCurrentFrameIndex(),
14116 allocator->FreeMemory(allocation);
14122 VkDeviceSize newSize)
14124 VMA_ASSERT(allocator && allocation);
14126 VMA_DEBUG_LOG("vmaResizeAllocation");
14128 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14130 #if VMA_RECORDING_ENABLED
14131 if(allocator->GetRecorder() != VMA_NULL)
14133 allocator->GetRecorder()->RecordResizeAllocation(
14134 allocator->GetCurrentFrameIndex(),
14140 return allocator->ResizeAllocation(allocation, newSize);
14148 VMA_ASSERT(allocator && allocation && pAllocationInfo);
14150 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14152 #if VMA_RECORDING_ENABLED
14153 if(allocator->GetRecorder() != VMA_NULL)
14155 allocator->GetRecorder()->RecordGetAllocationInfo(
14156 allocator->GetCurrentFrameIndex(),
14161 allocator->GetAllocationInfo(allocation, pAllocationInfo);
14168 VMA_ASSERT(allocator && allocation);
14170 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14172 #if VMA_RECORDING_ENABLED
14173 if(allocator->GetRecorder() != VMA_NULL)
14175 allocator->GetRecorder()->RecordTouchAllocation(
14176 allocator->GetCurrentFrameIndex(),
14181 return allocator->TouchAllocation(allocation);
14189 VMA_ASSERT(allocator && allocation);
14191 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14193 allocation->SetUserData(allocator, pUserData);
14195 #if VMA_RECORDING_ENABLED
14196 if(allocator->GetRecorder() != VMA_NULL)
14198 allocator->GetRecorder()->RecordSetAllocationUserData(
14199 allocator->GetCurrentFrameIndex(),
14210 VMA_ASSERT(allocator && pAllocation);
14212 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
14214 allocator->CreateLostAllocation(pAllocation);
14216 #if VMA_RECORDING_ENABLED
14217 if(allocator->GetRecorder() != VMA_NULL)
14219 allocator->GetRecorder()->RecordCreateLostAllocation(
14220 allocator->GetCurrentFrameIndex(),
14231 VMA_ASSERT(allocator && allocation && ppData);
14233 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14235 VkResult res = allocator->Map(allocation, ppData);
14237 #if VMA_RECORDING_ENABLED
14238 if(allocator->GetRecorder() != VMA_NULL)
14240 allocator->GetRecorder()->RecordMapMemory(
14241 allocator->GetCurrentFrameIndex(),
14253 VMA_ASSERT(allocator && allocation);
14255 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14257 #if VMA_RECORDING_ENABLED
14258 if(allocator->GetRecorder() != VMA_NULL)
14260 allocator->GetRecorder()->RecordUnmapMemory(
14261 allocator->GetCurrentFrameIndex(),
14266 allocator->Unmap(allocation);
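// Usage sketch (illustrative, not part of the implementation above): writing data into a
// host-visible allocation through vmaMapMemory()/vmaUnmapMemory(). Assumes the allocation was
// made from host-visible memory (e.g. VMA_MEMORY_USAGE_CPU_ONLY or VMA_MEMORY_USAGE_CPU_TO_GPU)
// and that <cstring> is available for memcpy. The function name is hypothetical.
static VkResult ExampleUpload(VmaAllocator allocator, VmaAllocation allocation, const void* srcData, size_t srcSize)
{
    void* pMapped = nullptr;
    VkResult res = vmaMapMemory(allocator, allocation, &pMapped);
    if(res == VK_SUCCESS)
    {
        memcpy(pMapped, srcData, srcSize);
        vmaUnmapMemory(allocator, allocation); // Mapping is reference-counted per allocation.
    }
    return res;
}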
14271 VMA_ASSERT(allocator && allocation);
14273 VMA_DEBUG_LOG("vmaFlushAllocation");
14275 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14277 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
14279 #if VMA_RECORDING_ENABLED
14280 if(allocator->GetRecorder() != VMA_NULL)
14282 allocator->GetRecorder()->RecordFlushAllocation(
14283 allocator->GetCurrentFrameIndex(),
14284 allocation, offset, size);
14291 VMA_ASSERT(allocator && allocation);
14293 VMA_DEBUG_LOG("vmaInvalidateAllocation");
14295 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14297 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
14299 #if VMA_RECORDING_ENABLED
14300 if(allocator->GetRecorder() != VMA_NULL)
14302 allocator->GetRecorder()->RecordInvalidateAllocation(
14303 allocator->GetCurrentFrameIndex(),
14304 allocation, offset, size);
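// Usage sketch (illustrative, not part of the implementation above): flushing a written range of
// a host-visible allocation after CPU writes, or invalidating it before CPU reads of GPU-written
// data. offset and size are relative to the beginning of the allocation; for HOST_COHERENT memory
// types these calls are not needed. The function name is hypothetical.
static void ExampleFlushWrittenRange(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize writtenBytes)
{
    // After writing the first writtenBytes bytes through a mapped pointer:
    vmaFlushAllocation(allocator, allocation, 0, writtenBytes);
    // Before reading back data the GPU wrote into the same range:
    // vmaInvalidateAllocation(allocator, allocation, 0, writtenBytes);
}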
14311 VMA_ASSERT(allocator);
14313 VMA_DEBUG_LOG("vmaCheckCorruption");
14315 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14317 return allocator->CheckCorruption(memoryTypeBits);
14323 size_t allocationCount,
14324 VkBool32* pAllocationsChanged,
14328 VMA_ASSERT(allocator && pAllocations);
14330 VMA_DEBUG_LOG("vmaDefragment");
14332 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14334 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
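// Usage sketch (illustrative, not part of the implementation above): compacting a set of existing
// allocations with vmaDefragment(). Per this version's documentation, only allocations in
// host-visible memory are actually moved, and the resources must not be in use by the GPU while
// this runs; pAllocationsChanged reports which allocations moved. Assumes <vector> is available;
// the function name is hypothetical.
static VkResult ExampleDefragment(VmaAllocator allocator, VmaAllocation* allocations, size_t allocationCount)
{
    std::vector<VkBool32> changed(allocationCount, VK_FALSE);
    VmaDefragmentationStats defragStats = {};
    VkResult res = vmaDefragment(allocator, allocations, allocationCount,
        changed.data(), nullptr /* default VmaDefragmentationInfo */, &defragStats);
    // For every allocations[i] with changed[i] == VK_TRUE, the buffer/image bound to it must be
    // recreated and rebound, because that allocation's deviceMemory/offset changed.
    return res;
}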
14342 VMA_ASSERT(allocator && allocation && buffer);
14344 VMA_DEBUG_LOG("vmaBindBufferMemory");
14346 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14348 return allocator->BindBufferMemory(allocation, buffer);
14356 VMA_ASSERT(allocator && allocation && image);
14358 VMA_DEBUG_LOG("vmaBindImageMemory");
14360 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14362 return allocator->BindImageMemory(allocation, image);
14367 const VkBufferCreateInfo* pBufferCreateInfo,
14373 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
14375 if(pBufferCreateInfo->size == 0)
14377 return VK_ERROR_VALIDATION_FAILED_EXT;
14380 VMA_DEBUG_LOG("vmaCreateBuffer");
14382 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14384 *pBuffer = VK_NULL_HANDLE;
14385 *pAllocation = VK_NULL_HANDLE;
14388 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
14389 allocator->m_hDevice,
14391 allocator->GetAllocationCallbacks(),
14396 VkMemoryRequirements vkMemReq = {};
14397 bool requiresDedicatedAllocation = false;
14398 bool prefersDedicatedAllocation = false;
14399 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
14400 requiresDedicatedAllocation, prefersDedicatedAllocation);
14404 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
14406 VMA_ASSERT(vkMemReq.alignment %
14407 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
14409 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
14411 VMA_ASSERT(vkMemReq.alignment %
14412 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
14414 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
14416 VMA_ASSERT(vkMemReq.alignment %
14417 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
14421 res = allocator->AllocateMemory(
14423 requiresDedicatedAllocation,
14424 prefersDedicatedAllocation,
14427 *pAllocationCreateInfo,
14428 VMA_SUBALLOCATION_TYPE_BUFFER,
14431 #if VMA_RECORDING_ENABLED
14432 if(allocator->GetRecorder() != VMA_NULL)
14434 allocator->GetRecorder()->RecordCreateBuffer(
14435 allocator->GetCurrentFrameIndex(),
14436 *pBufferCreateInfo,
14437 *pAllocationCreateInfo,
14445 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
14449 #if VMA_STATS_STRING_ENABLED
14450 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
14452 if(pAllocationInfo != VMA_NULL)
14454 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14459 allocator->FreeMemory(*pAllocation);
14460 *pAllocation = VK_NULL_HANDLE;
14461 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14462 *pBuffer = VK_NULL_HANDLE;
14465 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14466 *pBuffer = VK_NULL_HANDLE;
14477 VMA_ASSERT(allocator);
14479 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14484 VMA_DEBUG_LOG("vmaDestroyBuffer");
14486 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14488 #if VMA_RECORDING_ENABLED
14489 if(allocator->GetRecorder() != VMA_NULL)
14491 allocator->GetRecorder()->RecordDestroyBuffer(
14492 allocator->GetCurrentFrameIndex(),
14497 if(buffer != VK_NULL_HANDLE)
14499 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
14502 if(allocation != VK_NULL_HANDLE)
14504 allocator->FreeMemory(allocation);
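// Usage sketch (illustrative, not part of the implementation above): the most common path, where
// vmaCreateBuffer() creates the VkBuffer, allocates memory for it and binds them together, and
// vmaDestroyBuffer() undoes all of it. Size and usage are arbitrary example values and the
// persistently mapped flag is optional. The function name is hypothetical.
static VkResult ExampleCreateStagingBuffer(VmaAllocator allocator, VkDeviceSize size,
    VkBuffer* pBuffer, VmaAllocation* pAllocation, void** ppMappedData)
{
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = size;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT; // Keep it persistently mapped.

    VmaAllocationInfo allocInfo = {};
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        pBuffer, pAllocation, &allocInfo);
    if(res == VK_SUCCESS)
    {
        *ppMappedData = allocInfo.pMappedData; // Valid because of the MAPPED_BIT flag above.
        // Later: vmaDestroyBuffer(allocator, *pBuffer, *pAllocation);
    }
    return res;
}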
14510 const VkImageCreateInfo* pImageCreateInfo,
14516 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
14518 if(pImageCreateInfo->extent.width == 0 ||
14519 pImageCreateInfo->extent.height == 0 ||
14520 pImageCreateInfo->extent.depth == 0 ||
14521 pImageCreateInfo->mipLevels == 0 ||
14522 pImageCreateInfo->arrayLayers == 0)
14524 return VK_ERROR_VALIDATION_FAILED_EXT;
14527 VMA_DEBUG_LOG("vmaCreateImage");
14529 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14531 *pImage = VK_NULL_HANDLE;
14532 *pAllocation = VK_NULL_HANDLE;
14535 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
14536 allocator->m_hDevice,
14538 allocator->GetAllocationCallbacks(),
14542 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
14543 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
14544 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
14547 VkMemoryRequirements vkMemReq = {};
14548 bool requiresDedicatedAllocation = false;
14549 bool prefersDedicatedAllocation = false;
14550 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
14551 requiresDedicatedAllocation, prefersDedicatedAllocation);
14553 res = allocator->AllocateMemory(
14555 requiresDedicatedAllocation,
14556 prefersDedicatedAllocation,
14559 *pAllocationCreateInfo,
14563 #if VMA_RECORDING_ENABLED
14564 if(allocator->GetRecorder() != VMA_NULL)
14566 allocator->GetRecorder()->RecordCreateImage(
14567 allocator->GetCurrentFrameIndex(),
14569 *pAllocationCreateInfo,
14577 res = allocator->BindImageMemory(*pAllocation, *pImage);
14581 #if VMA_STATS_STRING_ENABLED
14582 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
14584 if(pAllocationInfo != VMA_NULL)
14586 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14591 allocator->FreeMemory(*pAllocation);
14592 *pAllocation = VK_NULL_HANDLE;
14593 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14594 *pImage = VK_NULL_HANDLE;
14597 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14598 *pImage = VK_NULL_HANDLE;
14609 VMA_ASSERT(allocator);
14611 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14616 VMA_DEBUG_LOG("vmaDestroyImage");
14618 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14620 #if VMA_RECORDING_ENABLED
14621 if(allocator->GetRecorder() != VMA_NULL)
14623 allocator->GetRecorder()->RecordDestroyImage(
14624 allocator->GetCurrentFrameIndex(),
14629 if(image != VK_NULL_HANDLE)
14631 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
14633 if(allocation != VK_NULL_HANDLE)
14635 allocator->FreeMemory(allocation);
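// Usage sketch (illustrative, not part of the implementation above): creating a GPU-only 2D image
// together with its memory via vmaCreateImage(), and releasing both with vmaDestroyImage().
// Format, extent and usage are arbitrary example values; the function name is hypothetical.
static VkResult ExampleCreateColorImage(VmaAllocator allocator, uint32_t width, uint32_t height,
    VkImage* pImage, VmaAllocation* pAllocation)
{
    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.extent = { width, height, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo,
        pImage, pAllocation, nullptr);
    // Later: vmaDestroyImage(allocator, *pImage, *pAllocation);
    return res;
}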
14639 #endif // #ifdef VMA_IMPLEMENTATION